file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
overloaded-index-autoderef.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test overloaded indexing combined with autoderef.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::ops::{Index, IndexMut};
struct Foo {
x: int,
y: int,
}
impl Index<int> for Foo {
type Output = int;
fn | (&self, z: &int) -> &int {
if *z == 0 {
&self.x
} else {
&self.y
}
}
}
impl IndexMut<int> for Foo {
fn index_mut(&mut self, z: &int) -> &mut int {
if *z == 0 {
&mut self.x
} else {
&mut self.y
}
}
}
trait Int {
fn get(self) -> int;
fn get_from_ref(&self) -> int;
fn inc(&mut self);
}
impl Int for int {
fn get(self) -> int { self }
fn get_from_ref(&self) -> int { *self }
fn inc(&mut self) { *self += 1; }
}
fn main() {
let mut f: Box<_> = box Foo {
x: 1,
y: 2,
};
assert_eq!(f[1], 2);
f[0] = 3;
assert_eq!(f[0], 3);
// Test explicit IndexMut where `f` must be autoderef:
{
let p = &mut f[1];
*p = 4;
}
// Test explicit Index where `f` must be autoderef:
{
let p = &f[1];
assert_eq!(*p, 4);
}
// Test calling methods with `&mut self`, `self, and `&self` receivers:
f[1].inc();
assert_eq!(f[1].get(), 5);
assert_eq!(f[1].get_from_ref(), 5);
}
| index | identifier_name |
overloaded-index-autoderef.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test overloaded indexing combined with autoderef.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::ops::{Index, IndexMut};
struct Foo {
x: int,
y: int,
}
impl Index<int> for Foo {
type Output = int;
fn index(&self, z: &int) -> &int |
}
impl IndexMut<int> for Foo {
fn index_mut(&mut self, z: &int) -> &mut int {
if *z == 0 {
&mut self.x
} else {
&mut self.y
}
}
}
trait Int {
fn get(self) -> int;
fn get_from_ref(&self) -> int;
fn inc(&mut self);
}
impl Int for int {
fn get(self) -> int { self }
fn get_from_ref(&self) -> int { *self }
fn inc(&mut self) { *self += 1; }
}
fn main() {
let mut f: Box<_> = box Foo {
x: 1,
y: 2,
};
assert_eq!(f[1], 2);
f[0] = 3;
assert_eq!(f[0], 3);
// Test explicit IndexMut where `f` must be autoderef:
{
let p = &mut f[1];
*p = 4;
}
// Test explicit Index where `f` must be autoderef:
{
let p = &f[1];
assert_eq!(*p, 4);
}
// Test calling methods with `&mut self`, `self, and `&self` receivers:
f[1].inc();
assert_eq!(f[1].get(), 5);
assert_eq!(f[1].get_from_ref(), 5);
}
| {
if *z == 0 {
&self.x
} else {
&self.y
}
} | identifier_body |
nested_macro_privacy.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(decl_macro)]
macro n($foo:ident, $S:ident, $i:ident, $m:ident) {
mod $foo {
#[derive(Default)]
pub struct $S { $i: u32 }
pub macro $m($e:expr) { $e.$i }
}
}
n!(foo, S, i, m);
fn | () {
use foo::{S, m};
S::default().i; //~ ERROR field `i` of struct `foo::S` is private
m!(S::default()); // ok
}
| main | identifier_name |
nested_macro_privacy.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(decl_macro)]
macro n($foo:ident, $S:ident, $i:ident, $m:ident) {
mod $foo {
#[derive(Default)]
pub struct $S { $i: u32 }
pub macro $m($e:expr) { $e.$i }
}
}
n!(foo, S, i, m);
fn main() | {
use foo::{S, m};
S::default().i; //~ ERROR field `i` of struct `foo::S` is private
m!(S::default()); // ok
} | identifier_body |
|
nested_macro_privacy.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(decl_macro)]
| pub macro $m($e:expr) { $e.$i }
}
}
n!(foo, S, i, m);
fn main() {
use foo::{S, m};
S::default().i; //~ ERROR field `i` of struct `foo::S` is private
m!(S::default()); // ok
} | macro n($foo:ident, $S:ident, $i:ident, $m:ident) {
mod $foo {
#[derive(Default)]
pub struct $S { $i: u32 } | random_line_split |
a6.rs | fn main() { // Ciclos while, for, Enumerate
let mut x = 5;
let mut completado = false; // La variable mutable completado es de tipo booleano, es decir que solo tiene dos valores: false o true.
println!("Ciclo while");
while!completado { // El ciclo while termina cuando la sentencia se cumpla en este caso cuando completado sea diferente a false, es decir verdadero.
x += x;
println!("{}", x);
if x == 160 {
completado = true;
};
};
| for i in 0..6 { // Este ciclo empiza por el primer número y termina uno antes del indicado, para este caso empieza en 0 y termina en 5
println!("{}", i);
};
println!("Ciclo for con enumerate()");
for (i,j) in (5..11).enumerate() { // enumerate cuenta las veces que se iterao se hace el ciclo, es importante respetar los parentensis.
println!("i = {} y j = {}", i, j); // i imprime el número de iteración y j el rango en el que se esta iterando.
};
} | // El ciclo for de rust luce mas parecido al de ruby.
println!("Ciclo for"); | random_line_split |
a6.rs | fn main() { // Ciclos while, for, Enumerate
let mut x = 5;
let mut completado = false; // La variable mutable completado es de tipo booleano, es decir que solo tiene dos valores: false o true.
println!("Ciclo while");
while!completado { // El ciclo while termina cuando la sentencia se cumpla en este caso cuando completado sea diferente a false, es decir verdadero.
x += x;
println!("{}", x);
if x == 160 | ;
};
// El ciclo for de rust luce mas parecido al de ruby.
println!("Ciclo for");
for i in 0..6 { // Este ciclo empiza por el primer número y termina uno antes del indicado, para este caso empieza en 0 y termina en 5
println!("{}", i);
};
println!("Ciclo for con enumerate()");
for (i,j) in (5..11).enumerate() { // enumerate cuenta las veces que se iterao se hace el ciclo, es importante respetar los parentensis.
println!("i = {} y j = {}", i, j); // i imprime el número de iteración y j el rango en el que se esta iterando.
};
} | {
completado = true;
} | conditional_block |
a6.rs | fn main() | println!("Ciclo for con enumerate()");
for (i,j) in (5..11).enumerate() { // enumerate cuenta las veces que se iterao se hace el ciclo, es importante respetar los parentensis.
println!("i = {} y j = {}", i, j); // i imprime el número de iteración y j el rango en el que se esta iterando.
};
} | { // Ciclos while, for, Enumerate
let mut x = 5;
let mut completado = false; // La variable mutable completado es de tipo booleano, es decir que solo tiene dos valores: false o true.
println!("Ciclo while");
while !completado { // El ciclo while termina cuando la sentencia se cumpla en este caso cuando completado sea diferente a false, es decir verdadero.
x += x;
println!("{}", x);
if x == 160 {
completado = true;
};
};
// El ciclo for de rust luce mas parecido al de ruby.
println!("Ciclo for");
for i in 0..6 { // Este ciclo empiza por el primer número y termina uno antes del indicado, para este caso empieza en 0 y termina en 5
println!("{}", i);
};
| identifier_body |
a6.rs | fn | () { // Ciclos while, for, Enumerate
let mut x = 5;
let mut completado = false; // La variable mutable completado es de tipo booleano, es decir que solo tiene dos valores: false o true.
println!("Ciclo while");
while!completado { // El ciclo while termina cuando la sentencia se cumpla en este caso cuando completado sea diferente a false, es decir verdadero.
x += x;
println!("{}", x);
if x == 160 {
completado = true;
};
};
// El ciclo for de rust luce mas parecido al de ruby.
println!("Ciclo for");
for i in 0..6 { // Este ciclo empiza por el primer número y termina uno antes del indicado, para este caso empieza en 0 y termina en 5
println!("{}", i);
};
println!("Ciclo for con enumerate()");
for (i,j) in (5..11).enumerate() { // enumerate cuenta las veces que se iterao se hace el ciclo, es importante respetar los parentensis.
println!("i = {} y j = {}", i, j); // i imprime el número de iteración y j el rango en el que se esta iterando.
};
} | main | identifier_name |
lib.rs | extern crate csv;
extern crate failure;
extern crate flate2;
extern crate tar;
extern crate unicode_casefold;
extern crate unicode_segmentation;
pub use failure::Error;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::io;
use std::io::prelude::*;
use std::result;
use unicode_casefold::{Locale, UnicodeCaseFold, Variant};
use unicode_segmentation::UnicodeSegmentation;
pub type Result<T> = result::Result<T, Error>;
const MODEL_SIZE_LIMIT: usize = 10000;
/// Used to build a "language model" describing a human language, where the
/// input data is assumed to come from subtitle files.
pub struct ModelBuilder {
grapheme_counts: HashMap<String, u64>,
pair_counts: HashMap<String, u64>,
word_counts: HashMap<String, u64>,
}
impl ModelBuilder {
/// Create a new `ModelBuilder`.
pub fn new() -> ModelBuilder {
ModelBuilder {
grapheme_counts: HashMap::new(),
pair_counts: HashMap::new(),
word_counts: HashMap::new(),
}
}
/// Add a subtitle line to the `ModelBuilder`.
pub fn add_line(&mut self, line: &str) {
let grapheme_buffer = line.graphemes(true).collect::<Vec<_>>();
for &grapheme in &grapheme_buffer {
if grapheme!= " " {
incr_map(&mut self.grapheme_counts, grapheme.to_owned());
}
}
if!grapheme_buffer.is_empty() {
incr_map(&mut self.pair_counts, format!("\n{}", grapheme_buffer[0]));
incr_map(&mut self.pair_counts,
format!("{}\n", grapheme_buffer[grapheme_buffer.len() - 1]));
}
for pair in grapheme_buffer.windows(2) {
incr_map(&mut self.pair_counts, format!("{}{}", pair[0], pair[1]));
}
for word in line.unicode_words() {
// TODO: Handle Turkic "i".
let word = word.case_fold_with(Variant::Full, Locale::NonTurkic).collect();
incr_map(&mut self.word_counts, word);
}
}
/// Write our current grapheme frequencies to `out`.
pub fn grapheme_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.grapheme_counts, out)
}
/// Write our current pair frequencies to `out`.
pub fn pair_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.pair_counts, out)
}
/// Write our current word frequencies to `out`.
pub fn word_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.word_counts, out)
}
/// Write the frequencies in `counts` to `out`, labelling them with
/// `label`.
fn frequencies<W: Write>(&self,
counts: &HashMap<String, u64>,
out: W)
-> Result<()> {
// Count the total number of graphemes we've seen.
let mut total: f64 = 0.0;
for &count in counts.values() {
total += count as f64;
}
// Sort our results into a stable order and replace counts with
// probabilities.
let mut rows = counts.iter().collect::<Vec<_>>();
rows.sort_by(|&(k1, c1), &(k2, c2)| match c1.cmp(c2).reverse() {
Ordering::Equal => k1.cmp(k2),
other => other,
});
// Write output to a CSV.
let mut wtr = csv::Writer::from_writer(out);
for (key, &count) in rows.into_iter().take(MODEL_SIZE_LIMIT) {
wtr.encode((key, count as f64 / total))?;
}
Ok(())
}
/// Write out our language model to `out`. This is actually a gzipped
/// tar file containing multiple CSV files:
///
/// - `graphemes.csv`: Frequencies of single grapheme clusters.
/// - `pairs.csv`: Frequencies of grapheme pairs.
/// - `words.csv`: Frequencies of case-folded words.
///
/// All models will be truncted if they exceed a certain limit.
pub fn write_model<W: Write>(&self, out: W) -> Result<()> {
let gzip = flate2::write::GzEncoder::new(out, flate2::Compression::best());
let mut tar = tar::Builder::new(gzip);
self.append_model_part(&mut tar, "graphemes.csv", &self.grapheme_counts)?;
self.append_model_part(&mut tar, "pairs.csv", &self.pair_counts)?;
self.append_model_part(&mut tar, "words.csv", &self.word_counts)?;
tar.into_inner()?.finish()?;
Ok(())
}
/// Append a file to our model.
fn append_model_part<W: Write>(&self,
builder: &mut tar::Builder<W>,
path: &str,
counts: &HashMap<String, u64>)
-> Result<()> {
let mut csv = vec![];
self.frequencies(counts, &mut csv)?;
let mut header = tar::Header::new_old();
header.set_path(path)?;
// TODO: Can this fail with a cast error?
header.set_size(csv.len() as u64);
header.set_mode(0o600);
header.set_cksum();
builder.append(&header, io::Cursor::new(&csv))?;
Ok(())
}
}
/// Increment a key in a map.
fn incr_map(map: &mut HashMap<String, u64>, key: String) {
match map.entry(key.to_owned()) {
Entry::Occupied(mut occupied) => |
Entry::Vacant(vacant) => {
vacant.insert(1);
}
}
}
#[test]
fn grapheme_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("Hello world");
let mut csv = vec![];
builder.grapheme_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
l,0.3
o,0.2
H,0.1
d,0.1
e,0.1
r,0.1
w,0.1
");
}
#[test]
fn pair_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("Help");
let mut csv = vec![];
builder.pair_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
\"\nH\",0.2
He,0.2
el,0.2
lp,0.2
\"p\n\",0.2
");
}
#[test]
fn word_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut csv = vec![];
builder.word_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
potato,0.5
one,0.25
two,0.25
");
}
#[test]
fn write_model() {
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut model = vec![];
builder.write_model(&mut model).unwrap();
assert!(model.len() > 0);
}
| {
*occupied.get_mut() += 1;
} | conditional_block |
lib.rs | extern crate csv;
extern crate failure;
extern crate flate2;
extern crate tar;
extern crate unicode_casefold;
extern crate unicode_segmentation;
pub use failure::Error;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::io;
use std::io::prelude::*;
use std::result;
use unicode_casefold::{Locale, UnicodeCaseFold, Variant};
use unicode_segmentation::UnicodeSegmentation;
pub type Result<T> = result::Result<T, Error>;
const MODEL_SIZE_LIMIT: usize = 10000;
/// Used to build a "language model" describing a human language, where the
/// input data is assumed to come from subtitle files.
pub struct ModelBuilder {
grapheme_counts: HashMap<String, u64>,
pair_counts: HashMap<String, u64>,
word_counts: HashMap<String, u64>,
}
impl ModelBuilder {
/// Create a new `ModelBuilder`.
pub fn new() -> ModelBuilder {
ModelBuilder {
grapheme_counts: HashMap::new(),
pair_counts: HashMap::new(),
word_counts: HashMap::new(),
}
}
/// Add a subtitle line to the `ModelBuilder`.
pub fn add_line(&mut self, line: &str) {
let grapheme_buffer = line.graphemes(true).collect::<Vec<_>>();
for &grapheme in &grapheme_buffer {
if grapheme!= " " {
incr_map(&mut self.grapheme_counts, grapheme.to_owned());
}
}
if!grapheme_buffer.is_empty() {
incr_map(&mut self.pair_counts, format!("\n{}", grapheme_buffer[0]));
incr_map(&mut self.pair_counts,
format!("{}\n", grapheme_buffer[grapheme_buffer.len() - 1]));
}
for pair in grapheme_buffer.windows(2) {
incr_map(&mut self.pair_counts, format!("{}{}", pair[0], pair[1]));
}
for word in line.unicode_words() {
// TODO: Handle Turkic "i".
let word = word.case_fold_with(Variant::Full, Locale::NonTurkic).collect();
incr_map(&mut self.word_counts, word);
}
}
/// Write our current grapheme frequencies to `out`.
pub fn grapheme_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.grapheme_counts, out)
}
/// Write our current pair frequencies to `out`.
pub fn pair_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.pair_counts, out)
}
/// Write our current word frequencies to `out`.
pub fn word_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.word_counts, out)
}
/// Write the frequencies in `counts` to `out`, labelling them with
/// `label`.
fn frequencies<W: Write>(&self,
counts: &HashMap<String, u64>,
out: W)
-> Result<()> {
// Count the total number of graphemes we've seen.
let mut total: f64 = 0.0;
for &count in counts.values() {
total += count as f64;
}
// Sort our results into a stable order and replace counts with
// probabilities.
let mut rows = counts.iter().collect::<Vec<_>>();
rows.sort_by(|&(k1, c1), &(k2, c2)| match c1.cmp(c2).reverse() {
Ordering::Equal => k1.cmp(k2),
other => other,
});
// Write output to a CSV.
let mut wtr = csv::Writer::from_writer(out);
for (key, &count) in rows.into_iter().take(MODEL_SIZE_LIMIT) {
wtr.encode((key, count as f64 / total))?;
}
Ok(())
}
/// Write out our language model to `out`. This is actually a gzipped
/// tar file containing multiple CSV files:
///
/// - `graphemes.csv`: Frequencies of single grapheme clusters.
/// - `pairs.csv`: Frequencies of grapheme pairs.
/// - `words.csv`: Frequencies of case-folded words.
///
/// All models will be truncted if they exceed a certain limit.
pub fn write_model<W: Write>(&self, out: W) -> Result<()> {
let gzip = flate2::write::GzEncoder::new(out, flate2::Compression::best());
let mut tar = tar::Builder::new(gzip);
self.append_model_part(&mut tar, "graphemes.csv", &self.grapheme_counts)?;
self.append_model_part(&mut tar, "pairs.csv", &self.pair_counts)?;
self.append_model_part(&mut tar, "words.csv", &self.word_counts)?;
tar.into_inner()?.finish()?;
Ok(())
}
/// Append a file to our model.
fn append_model_part<W: Write>(&self,
builder: &mut tar::Builder<W>,
path: &str,
counts: &HashMap<String, u64>)
-> Result<()> {
let mut csv = vec![];
self.frequencies(counts, &mut csv)?;
let mut header = tar::Header::new_old();
header.set_path(path)?;
// TODO: Can this fail with a cast error?
header.set_size(csv.len() as u64);
header.set_mode(0o600);
header.set_cksum();
builder.append(&header, io::Cursor::new(&csv))?;
Ok(())
}
}
/// Increment a key in a map.
fn incr_map(map: &mut HashMap<String, u64>, key: String) |
#[test]
fn grapheme_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("Hello world");
let mut csv = vec![];
builder.grapheme_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
l,0.3
o,0.2
H,0.1
d,0.1
e,0.1
r,0.1
w,0.1
");
}
#[test]
fn pair_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("Help");
let mut csv = vec![];
builder.pair_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
\"\nH\",0.2
He,0.2
el,0.2
lp,0.2
\"p\n\",0.2
");
}
#[test]
fn word_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut csv = vec![];
builder.word_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
potato,0.5
one,0.25
two,0.25
");
}
#[test]
fn write_model() {
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut model = vec![];
builder.write_model(&mut model).unwrap();
assert!(model.len() > 0);
}
| {
match map.entry(key.to_owned()) {
Entry::Occupied(mut occupied) => {
*occupied.get_mut() += 1;
}
Entry::Vacant(vacant) => {
vacant.insert(1);
}
}
} | identifier_body |
lib.rs | extern crate csv;
extern crate failure;
extern crate flate2;
extern crate tar;
extern crate unicode_casefold;
extern crate unicode_segmentation;
pub use failure::Error;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::io;
use std::io::prelude::*;
use std::result;
use unicode_casefold::{Locale, UnicodeCaseFold, Variant};
use unicode_segmentation::UnicodeSegmentation;
pub type Result<T> = result::Result<T, Error>;
const MODEL_SIZE_LIMIT: usize = 10000;
/// Used to build a "language model" describing a human language, where the
/// input data is assumed to come from subtitle files.
pub struct ModelBuilder {
grapheme_counts: HashMap<String, u64>,
pair_counts: HashMap<String, u64>,
word_counts: HashMap<String, u64>,
}
impl ModelBuilder {
/// Create a new `ModelBuilder`.
pub fn new() -> ModelBuilder {
ModelBuilder {
grapheme_counts: HashMap::new(),
pair_counts: HashMap::new(),
word_counts: HashMap::new(),
}
}
/// Add a subtitle line to the `ModelBuilder`.
pub fn add_line(&mut self, line: &str) {
let grapheme_buffer = line.graphemes(true).collect::<Vec<_>>();
for &grapheme in &grapheme_buffer {
if grapheme!= " " {
incr_map(&mut self.grapheme_counts, grapheme.to_owned());
}
}
if!grapheme_buffer.is_empty() {
incr_map(&mut self.pair_counts, format!("\n{}", grapheme_buffer[0]));
incr_map(&mut self.pair_counts,
format!("{}\n", grapheme_buffer[grapheme_buffer.len() - 1]));
}
for pair in grapheme_buffer.windows(2) {
incr_map(&mut self.pair_counts, format!("{}{}", pair[0], pair[1]));
}
for word in line.unicode_words() {
// TODO: Handle Turkic "i".
let word = word.case_fold_with(Variant::Full, Locale::NonTurkic).collect();
incr_map(&mut self.word_counts, word);
}
}
/// Write our current grapheme frequencies to `out`.
pub fn grapheme_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.grapheme_counts, out)
}
/// Write our current pair frequencies to `out`.
pub fn pair_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.pair_counts, out)
}
/// Write our current word frequencies to `out`.
pub fn word_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.word_counts, out)
}
/// Write the frequencies in `counts` to `out`, labelling them with
/// `label`.
fn frequencies<W: Write>(&self,
counts: &HashMap<String, u64>,
out: W)
-> Result<()> {
// Count the total number of graphemes we've seen.
let mut total: f64 = 0.0;
for &count in counts.values() {
total += count as f64;
}
// Sort our results into a stable order and replace counts with
// probabilities.
let mut rows = counts.iter().collect::<Vec<_>>();
rows.sort_by(|&(k1, c1), &(k2, c2)| match c1.cmp(c2).reverse() {
Ordering::Equal => k1.cmp(k2),
other => other,
});
// Write output to a CSV.
let mut wtr = csv::Writer::from_writer(out);
for (key, &count) in rows.into_iter().take(MODEL_SIZE_LIMIT) {
wtr.encode((key, count as f64 / total))?;
}
Ok(())
}
/// Write out our language model to `out`. This is actually a gzipped
/// tar file containing multiple CSV files:
///
/// - `graphemes.csv`: Frequencies of single grapheme clusters.
/// - `pairs.csv`: Frequencies of grapheme pairs.
/// - `words.csv`: Frequencies of case-folded words.
///
/// All models will be truncted if they exceed a certain limit.
pub fn write_model<W: Write>(&self, out: W) -> Result<()> {
let gzip = flate2::write::GzEncoder::new(out, flate2::Compression::best());
let mut tar = tar::Builder::new(gzip);
self.append_model_part(&mut tar, "graphemes.csv", &self.grapheme_counts)?;
self.append_model_part(&mut tar, "pairs.csv", &self.pair_counts)?;
self.append_model_part(&mut tar, "words.csv", &self.word_counts)?;
tar.into_inner()?.finish()?;
Ok(())
}
/// Append a file to our model.
fn append_model_part<W: Write>(&self,
builder: &mut tar::Builder<W>,
path: &str,
counts: &HashMap<String, u64>)
-> Result<()> {
let mut csv = vec![];
self.frequencies(counts, &mut csv)?;
let mut header = tar::Header::new_old();
header.set_path(path)?;
// TODO: Can this fail with a cast error?
header.set_size(csv.len() as u64);
header.set_mode(0o600);
header.set_cksum();
builder.append(&header, io::Cursor::new(&csv))?;
Ok(())
}
}
/// Increment a key in a map.
fn incr_map(map: &mut HashMap<String, u64>, key: String) {
match map.entry(key.to_owned()) {
Entry::Occupied(mut occupied) => {
*occupied.get_mut() += 1;
}
Entry::Vacant(vacant) => {
vacant.insert(1);
}
}
}
#[test]
fn grapheme_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("Hello world");
let mut csv = vec![];
builder.grapheme_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
l,0.3
o,0.2
H,0.1
d,0.1
e,0.1
r,0.1
w,0.1
"); |
let mut builder = ModelBuilder::new();
builder.add_line("Help");
let mut csv = vec![];
builder.pair_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
\"\nH\",0.2
He,0.2
el,0.2
lp,0.2
\"p\n\",0.2
");
}
#[test]
fn word_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut csv = vec![];
builder.word_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
potato,0.5
one,0.25
two,0.25
");
}
#[test]
fn write_model() {
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut model = vec![];
builder.write_model(&mut model).unwrap();
assert!(model.len() > 0);
} | }
#[test]
fn pair_frequency() {
use std::str; | random_line_split |
lib.rs | extern crate csv;
extern crate failure;
extern crate flate2;
extern crate tar;
extern crate unicode_casefold;
extern crate unicode_segmentation;
pub use failure::Error;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::io;
use std::io::prelude::*;
use std::result;
use unicode_casefold::{Locale, UnicodeCaseFold, Variant};
use unicode_segmentation::UnicodeSegmentation;
pub type Result<T> = result::Result<T, Error>;
const MODEL_SIZE_LIMIT: usize = 10000;
/// Used to build a "language model" describing a human language, where the
/// input data is assumed to come from subtitle files.
pub struct ModelBuilder {
grapheme_counts: HashMap<String, u64>,
pair_counts: HashMap<String, u64>,
word_counts: HashMap<String, u64>,
}
impl ModelBuilder {
/// Create a new `ModelBuilder`.
pub fn | () -> ModelBuilder {
ModelBuilder {
grapheme_counts: HashMap::new(),
pair_counts: HashMap::new(),
word_counts: HashMap::new(),
}
}
/// Add a subtitle line to the `ModelBuilder`.
pub fn add_line(&mut self, line: &str) {
let grapheme_buffer = line.graphemes(true).collect::<Vec<_>>();
for &grapheme in &grapheme_buffer {
if grapheme!= " " {
incr_map(&mut self.grapheme_counts, grapheme.to_owned());
}
}
if!grapheme_buffer.is_empty() {
incr_map(&mut self.pair_counts, format!("\n{}", grapheme_buffer[0]));
incr_map(&mut self.pair_counts,
format!("{}\n", grapheme_buffer[grapheme_buffer.len() - 1]));
}
for pair in grapheme_buffer.windows(2) {
incr_map(&mut self.pair_counts, format!("{}{}", pair[0], pair[1]));
}
for word in line.unicode_words() {
// TODO: Handle Turkic "i".
let word = word.case_fold_with(Variant::Full, Locale::NonTurkic).collect();
incr_map(&mut self.word_counts, word);
}
}
/// Write our current grapheme frequencies to `out`.
pub fn grapheme_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.grapheme_counts, out)
}
/// Write our current pair frequencies to `out`.
pub fn pair_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.pair_counts, out)
}
/// Write our current word frequencies to `out`.
pub fn word_frequencies<W: Write>(&self, out: W) -> Result<()> {
self.frequencies(&self.word_counts, out)
}
/// Write the frequencies in `counts` to `out`, labelling them with
/// `label`.
fn frequencies<W: Write>(&self,
counts: &HashMap<String, u64>,
out: W)
-> Result<()> {
// Count the total number of graphemes we've seen.
let mut total: f64 = 0.0;
for &count in counts.values() {
total += count as f64;
}
// Sort our results into a stable order and replace counts with
// probabilities.
let mut rows = counts.iter().collect::<Vec<_>>();
rows.sort_by(|&(k1, c1), &(k2, c2)| match c1.cmp(c2).reverse() {
Ordering::Equal => k1.cmp(k2),
other => other,
});
// Write output to a CSV.
let mut wtr = csv::Writer::from_writer(out);
for (key, &count) in rows.into_iter().take(MODEL_SIZE_LIMIT) {
wtr.encode((key, count as f64 / total))?;
}
Ok(())
}
/// Write out our language model to `out`. This is actually a gzipped
/// tar file containing multiple CSV files:
///
/// - `graphemes.csv`: Frequencies of single grapheme clusters.
/// - `pairs.csv`: Frequencies of grapheme pairs.
/// - `words.csv`: Frequencies of case-folded words.
///
/// All models will be truncted if they exceed a certain limit.
pub fn write_model<W: Write>(&self, out: W) -> Result<()> {
let gzip = flate2::write::GzEncoder::new(out, flate2::Compression::best());
let mut tar = tar::Builder::new(gzip);
self.append_model_part(&mut tar, "graphemes.csv", &self.grapheme_counts)?;
self.append_model_part(&mut tar, "pairs.csv", &self.pair_counts)?;
self.append_model_part(&mut tar, "words.csv", &self.word_counts)?;
tar.into_inner()?.finish()?;
Ok(())
}
/// Append a file to our model.
fn append_model_part<W: Write>(&self,
builder: &mut tar::Builder<W>,
path: &str,
counts: &HashMap<String, u64>)
-> Result<()> {
let mut csv = vec![];
self.frequencies(counts, &mut csv)?;
let mut header = tar::Header::new_old();
header.set_path(path)?;
// TODO: Can this fail with a cast error?
header.set_size(csv.len() as u64);
header.set_mode(0o600);
header.set_cksum();
builder.append(&header, io::Cursor::new(&csv))?;
Ok(())
}
}
/// Increment a key in a map.
fn incr_map(map: &mut HashMap<String, u64>, key: String) {
match map.entry(key.to_owned()) {
Entry::Occupied(mut occupied) => {
*occupied.get_mut() += 1;
}
Entry::Vacant(vacant) => {
vacant.insert(1);
}
}
}
#[test]
fn grapheme_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("Hello world");
let mut csv = vec![];
builder.grapheme_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
l,0.3
o,0.2
H,0.1
d,0.1
e,0.1
r,0.1
w,0.1
");
}
#[test]
fn pair_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("Help");
let mut csv = vec![];
builder.pair_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
\"\nH\",0.2
He,0.2
el,0.2
lp,0.2
\"p\n\",0.2
");
}
#[test]
fn word_frequency() {
use std::str;
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut csv = vec![];
builder.word_frequencies(&mut csv).unwrap();
assert_eq!(str::from_utf8(&csv).unwrap(),
"\
potato,0.5
one,0.25
two,0.25
");
}
#[test]
fn write_model() {
let mut builder = ModelBuilder::new();
builder.add_line("One potato, two potato!");
let mut model = vec![];
builder.write_model(&mut model).unwrap();
assert!(model.len() > 0);
}
| new | identifier_name |
binops.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Binop corner cases
fn test_nil() {
assert!((() == ()));
assert!((!(()!= ())));
assert!((!(() < ())));
assert!((() <= ()));
assert!((!(() > ())));
assert!((() >= ()));
}
fn test_bool() {
assert!((!(true < false)));
assert!((!(true <= false)));
assert!((true > false));
assert!((true >= false));
assert!((false < true));
assert!((false <= true));
assert!((!(false > true)));
assert!((!(false >= true)));
// Bools support bitwise binops
assert!((false & false == false));
assert!((true & false == false));
assert!((true & true == true));
assert!((false | false == false));
assert!((true | false == true));
assert!((true | true == true));
assert!((false ^ false == false));
assert!((true ^ false == true));
assert!((true ^ true == false));
}
fn test_char() {
let ch10 = 10 as char;
let ch4 = 4 as char;
let ch2 = 2 as char;
assert!((ch10 + ch4 == 14 as char));
assert!((ch10 - ch4 == 6 as char));
assert!((ch10 * ch4 == 40 as char));
assert!((ch10 / ch4 == ch2));
assert!((ch10 % ch4 == ch2));
assert!((ch10 >> ch2 == ch2));
assert!((ch10 << ch4 == 160 as char));
assert!((ch10 | ch4 == 14 as char));
assert!((ch10 & ch2 == ch2));
assert!((ch10 ^ ch2 == 8 as char));
}
fn test_box() {
assert!((@10 == @10));
}
fn test_ptr() {
unsafe {
let p1: *u8 = ::core::cast::reinterpret_cast(&0);
let p2: *u8 = ::core::cast::reinterpret_cast(&0);
let p3: *u8 = ::core::cast::reinterpret_cast(&1);
assert!(p1 == p2);
assert!(p1!= p3);
assert!(p1 < p3);
assert!(p1 <= p3);
assert!(p3 > p1);
assert!(p3 >= p3);
assert!(p1 <= p2);
assert!(p1 >= p2);
}
}
mod test {
#[abi = "cdecl"]
#[nolink]
pub extern {
pub fn rust_get_sched_id() -> libc::intptr_t;
pub fn get_task_id() -> libc::intptr_t;
}
}
#[deriving(Eq)]
struct p {
x: int,
y: int,
}
fn p(x: int, y: int) -> p |
fn test_class() {
let mut q = p(1, 2);
let mut r = p(1, 2);
unsafe {
error!("q = %x, r = %x",
(::core::cast::reinterpret_cast::<*p, uint>(&ptr::addr_of(&q))),
(::core::cast::reinterpret_cast::<*p, uint>(&ptr::addr_of(&r))));
}
assert!((q == r));
r.y = 17;
assert!((r.y!= q.y));
assert!((r.y == 17));
assert!((q!= r));
}
pub fn main() {
test_nil();
test_bool();
test_char();
test_box();
test_ptr();
test_class();
}
| {
p {
x: x,
y: y
}
} | identifier_body |
binops.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Binop corner cases
fn test_nil() {
assert!((() == ()));
assert!((!(()!= ())));
assert!((!(() < ())));
assert!((() <= ()));
assert!((!(() > ())));
assert!((() >= ()));
}
fn test_bool() {
assert!((!(true < false)));
assert!((!(true <= false)));
assert!((true > false));
assert!((true >= false));
assert!((false < true));
assert!((false <= true));
assert!((!(false > true)));
assert!((!(false >= true)));
// Bools support bitwise binops
assert!((false & false == false));
assert!((true & false == false));
assert!((true & true == true));
assert!((false | false == false));
assert!((true | false == true));
assert!((true | true == true));
assert!((false ^ false == false));
assert!((true ^ false == true));
assert!((true ^ true == false));
}
fn test_char() {
let ch10 = 10 as char;
let ch4 = 4 as char;
let ch2 = 2 as char;
assert!((ch10 + ch4 == 14 as char));
assert!((ch10 - ch4 == 6 as char));
assert!((ch10 * ch4 == 40 as char));
assert!((ch10 / ch4 == ch2));
assert!((ch10 % ch4 == ch2));
assert!((ch10 >> ch2 == ch2));
assert!((ch10 << ch4 == 160 as char));
assert!((ch10 | ch4 == 14 as char));
assert!((ch10 & ch2 == ch2));
assert!((ch10 ^ ch2 == 8 as char));
}
fn test_box() {
assert!((@10 == @10));
}
fn test_ptr() {
unsafe {
let p1: *u8 = ::core::cast::reinterpret_cast(&0);
let p2: *u8 = ::core::cast::reinterpret_cast(&0);
let p3: *u8 = ::core::cast::reinterpret_cast(&1);
assert!(p1 == p2);
assert!(p1!= p3);
assert!(p1 < p3);
assert!(p1 <= p3);
assert!(p3 > p1);
assert!(p3 >= p3);
assert!(p1 <= p2);
assert!(p1 >= p2);
}
}
mod test {
#[abi = "cdecl"]
#[nolink]
pub extern {
pub fn rust_get_sched_id() -> libc::intptr_t;
pub fn get_task_id() -> libc::intptr_t;
}
}
#[deriving(Eq)]
struct p {
x: int,
y: int,
}
fn p(x: int, y: int) -> p {
p {
x: x,
y: y
}
}
fn | () {
let mut q = p(1, 2);
let mut r = p(1, 2);
unsafe {
error!("q = %x, r = %x",
(::core::cast::reinterpret_cast::<*p, uint>(&ptr::addr_of(&q))),
(::core::cast::reinterpret_cast::<*p, uint>(&ptr::addr_of(&r))));
}
assert!((q == r));
r.y = 17;
assert!((r.y!= q.y));
assert!((r.y == 17));
assert!((q!= r));
}
pub fn main() {
test_nil();
test_bool();
test_char();
test_box();
test_ptr();
test_class();
}
| test_class | identifier_name |
binops.rs | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Binop corner cases
fn test_nil() {
assert!((() == ()));
assert!((!(()!= ())));
assert!((!(() < ())));
assert!((() <= ()));
assert!((!(() > ())));
assert!((() >= ()));
}
fn test_bool() {
assert!((!(true < false)));
assert!((!(true <= false)));
assert!((true > false));
assert!((true >= false));
assert!((false < true));
assert!((false <= true));
assert!((!(false > true)));
assert!((!(false >= true)));
// Bools support bitwise binops
assert!((false & false == false));
assert!((true & false == false));
assert!((true & true == true));
assert!((false | false == false));
assert!((true | false == true));
assert!((true | true == true));
assert!((false ^ false == false));
assert!((true ^ false == true));
assert!((true ^ true == false));
}
fn test_char() {
let ch10 = 10 as char;
let ch4 = 4 as char;
let ch2 = 2 as char;
assert!((ch10 + ch4 == 14 as char));
assert!((ch10 - ch4 == 6 as char));
assert!((ch10 * ch4 == 40 as char));
assert!((ch10 / ch4 == ch2));
assert!((ch10 % ch4 == ch2));
assert!((ch10 >> ch2 == ch2));
assert!((ch10 << ch4 == 160 as char));
assert!((ch10 | ch4 == 14 as char));
assert!((ch10 & ch2 == ch2));
assert!((ch10 ^ ch2 == 8 as char));
}
fn test_box() {
assert!((@10 == @10));
}
fn test_ptr() {
unsafe {
let p1: *u8 = ::core::cast::reinterpret_cast(&0);
let p2: *u8 = ::core::cast::reinterpret_cast(&0);
let p3: *u8 = ::core::cast::reinterpret_cast(&1);
assert!(p1 == p2);
assert!(p1!= p3);
assert!(p1 < p3);
assert!(p1 <= p3);
assert!(p3 > p1);
assert!(p3 >= p3);
assert!(p1 <= p2);
assert!(p1 >= p2);
}
}
mod test {
#[abi = "cdecl"]
#[nolink]
pub extern {
pub fn rust_get_sched_id() -> libc::intptr_t;
pub fn get_task_id() -> libc::intptr_t;
}
}
#[deriving(Eq)]
struct p {
x: int,
y: int,
}
fn p(x: int, y: int) -> p {
p {
x: x,
y: y
}
}
fn test_class() {
let mut q = p(1, 2);
let mut r = p(1, 2);
unsafe {
error!("q = %x, r = %x",
(::core::cast::reinterpret_cast::<*p, uint>(&ptr::addr_of(&q))),
(::core::cast::reinterpret_cast::<*p, uint>(&ptr::addr_of(&r))));
}
assert!((q == r));
r.y = 17;
assert!((r.y!= q.y));
assert!((r.y == 17));
assert!((q!= r));
}
pub fn main() {
test_nil();
test_bool();
test_char();
test_box();
test_ptr();
test_class();
} | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | random_line_split |
|
log.rs | // This Source Code Form is subject to the terms of
// the Mozilla Public License, v. 2.0. If a copy of
// the MPL was not distributed with this file, You
// can obtain one at http://mozilla.org/MPL/2.0/.
use std::error::Error;
pub fn error(e: &Error) {
target::error(e)
} | // ANDROID //////////////////////////////////////////////////////////////////
#[cfg(target_os = "android")]
mod target {
use libc::*;
use std::error::Error;
use std::ffi::*;
const TAG: &'static [u8] = b"CryptoBox";
const LEVEL_ERROR: c_int = 6;
pub fn error(e: &Error) {
log(&format!("{}", e), LEVEL_ERROR)
}
fn log(msg: &str, lvl: c_int) {
let tag = CString::new(TAG).unwrap();
let msg = CString::new(msg.as_bytes()).unwrap_or(CString::new("<malformed log message>").unwrap());
unsafe {
__android_log_write(lvl, tag.as_ptr(), msg.as_ptr())
};
}
#[link(name = "log")]
extern {
fn __android_log_write(prio: c_int, tag: *const c_char, text: *const c_char) -> c_int;
}
}
// FALLBACK /////////////////////////////////////////////////////////////////
#[cfg(not(target_os = "android"))]
mod target {
use std::error::Error;
use std::io::{Write, stderr};
pub fn error(e: &Error) {
writeln!(&mut stderr(), "ERROR: {}", e).unwrap();
}
} | random_line_split |
|
log.rs | // This Source Code Form is subject to the terms of
// the Mozilla Public License, v. 2.0. If a copy of
// the MPL was not distributed with this file, You
// can obtain one at http://mozilla.org/MPL/2.0/.
use std::error::Error;
pub fn error(e: &Error) {
target::error(e)
}
// ANDROID //////////////////////////////////////////////////////////////////
#[cfg(target_os = "android")]
mod target {
use libc::*;
use std::error::Error;
use std::ffi::*;
const TAG: &'static [u8] = b"CryptoBox";
const LEVEL_ERROR: c_int = 6;
pub fn | (e: &Error) {
log(&format!("{}", e), LEVEL_ERROR)
}
fn log(msg: &str, lvl: c_int) {
let tag = CString::new(TAG).unwrap();
let msg = CString::new(msg.as_bytes()).unwrap_or(CString::new("<malformed log message>").unwrap());
unsafe {
__android_log_write(lvl, tag.as_ptr(), msg.as_ptr())
};
}
#[link(name = "log")]
extern {
fn __android_log_write(prio: c_int, tag: *const c_char, text: *const c_char) -> c_int;
}
}
// FALLBACK /////////////////////////////////////////////////////////////////
#[cfg(not(target_os = "android"))]
mod target {
use std::error::Error;
use std::io::{Write, stderr};
pub fn error(e: &Error) {
writeln!(&mut stderr(), "ERROR: {}", e).unwrap();
}
}
| error | identifier_name |
log.rs | // This Source Code Form is subject to the terms of
// the Mozilla Public License, v. 2.0. If a copy of
// the MPL was not distributed with this file, You
// can obtain one at http://mozilla.org/MPL/2.0/.
use std::error::Error;
pub fn error(e: &Error) {
target::error(e)
}
// ANDROID //////////////////////////////////////////////////////////////////
#[cfg(target_os = "android")]
mod target {
use libc::*;
use std::error::Error;
use std::ffi::*;
const TAG: &'static [u8] = b"CryptoBox";
const LEVEL_ERROR: c_int = 6;
pub fn error(e: &Error) {
log(&format!("{}", e), LEVEL_ERROR)
}
fn log(msg: &str, lvl: c_int) {
let tag = CString::new(TAG).unwrap();
let msg = CString::new(msg.as_bytes()).unwrap_or(CString::new("<malformed log message>").unwrap());
unsafe {
__android_log_write(lvl, tag.as_ptr(), msg.as_ptr())
};
}
#[link(name = "log")]
extern {
fn __android_log_write(prio: c_int, tag: *const c_char, text: *const c_char) -> c_int;
}
}
// FALLBACK /////////////////////////////////////////////////////////////////
#[cfg(not(target_os = "android"))]
mod target {
use std::error::Error;
use std::io::{Write, stderr};
pub fn error(e: &Error) |
}
| {
writeln!(&mut stderr(), "ERROR: {}", e).unwrap();
} | identifier_body |
mod.rs | pub use self::mouse_joint::{MouseJointConfig, MouseJoint};
use std::rc::{Rc, Weak};
use std::cell::RefCell;
use std::mem;
use std::ptr;
use super::{Body, BodyHandleWeak};
use super::island::{Position, Velocity};
use ::dynamics::world::TimeStep;
mod mouse_joint;
pub type JointHandle<'a> = Rc<RefCell<Joint<'a>>>;
pub type JointHandleWeak<'a> = Weak<RefCell<Joint<'a>>>;
/// A joint edge is used to connect bodies and joints together in a joint graph where
/// each body is a node and each joint is an edge. Each joint has two joint nodes,
/// one for each attached body.
pub struct JointEdge<'a> {
pub body: BodyHandleWeak<'a>,
pub joint: JointHandleWeak<'a>,
}
pub enum JointType {
Mouse(MouseJointConfig),
}
/// `JointConfig`s are used to construct joints.
pub struct JointConfig<'a> {
pub joint_type: JointType,
/// The first attached body.
pub body_a: BodyHandleWeak<'a>,
/// The second attached body.
pub body_b: BodyHandleWeak<'a>,
/// Set this flag to true if the attached bodies should collide.
pub collide_connected: bool,
}
pub struct JointData<'a> {
body_a: BodyHandleWeak<'a>,
body_b: BodyHandleWeak<'a>,
is_island: bool,
is_collide_connected: bool,
}
pub enum Joint<'a> {
Mouse(MouseJoint<'a>),
}
impl<'a> Joint<'a> {
/*pub fn new(joint_config: &JointConfig<'a>) -> JointHandle<'a> {
let result: JointHandle<'a>;
unsafe {
result = Rc::new(RefCell::new(mem::uninitialized()));
let edge_to_a = JointEdge {
body: joint_config.body_a.clone(),
joint: Rc::downgrade(&result),
};
let edge_to_b = JointEdge {
body: joint_config.body_b.clone(),
joint: Rc::downgrade(&result),
};
let joint_data = JointData {
edge_to_a: edge_to_a,
edge_to_b: edge_to_b,
is_island: false,
is_collide_connected: joint_config.collide_connected,
};
match joint_config.joint_type {
JointType::Mouse(ref joint_config) => {
ptr::write(&mut *result.borrow_mut(), Joint::Mouse(MouseJoint::new(joint_config, joint_data)));
}
}
}
result
}*/
pub fn new(joint_config: &JointConfig<'a>) -> JointHandle<'a> {
let joint_data = JointData {
body_a: joint_config.body_a.clone(),
body_b: joint_config.body_b.clone(),
is_island: false,
is_collide_connected: joint_config.collide_connected,
};
let result;
result = match joint_config.joint_type {
JointType::Mouse(ref joint_config) => Rc::new(RefCell::new(Joint::Mouse(MouseJoint::new(joint_config, joint_data)))),
};
result
}
fn get_joint_data(&self) -> &JointData<'a> {
match self {
&Joint::Mouse(ref joint) => &joint.joint_data,
}
}
fn get_joint_data_mut(&mut self) -> &mut JointData<'a> {
match self {
&mut Joint::Mouse(ref mut joint) => &mut joint.joint_data,
}
}
pub fn get_other_body(&self, body: BodyHandleWeak<'a>) -> Option<BodyHandleWeak<'a>> {
let b = body.upgrade().unwrap();
let pb = &(*b) as *const RefCell<Body>;
let b_a = self.get_joint_data().body_a.upgrade().unwrap();
let pb_a = &(*b_a) as *const RefCell<Body>;
if pb == pb_a {
return Some(self.get_joint_data().body_b.clone());
}
let b_b = self.get_joint_data().body_b.upgrade().unwrap();
let pb_b = &(*b_b) as *const RefCell<Body>;
if pb == pb_b {
return Some(self.get_joint_data().body_a.clone());
}
None
} | self.get_joint_data_mut().is_island = is_island;
}
pub fn is_island(&self) -> bool {
self.get_joint_data().is_island
}
pub fn initialize_velocity_constraints(&mut self, step: TimeStep, positions: &Vec<Position>, velocities: &mut Vec<Velocity>) {
match self {
&mut Joint::Mouse(ref mut joint) => joint.initialize_velocity_constraints(step, positions, velocities),
}
}
pub fn solve_velocity_constraints(&mut self, step: TimeStep, velocities: &mut Vec<Velocity>) {
match self {
&mut Joint::Mouse(ref mut joint) => joint.solve_velocity_constraints(step, velocities),
}
}
/// This returns true if the position errors are within tolerance.
pub fn solve_position_constraints(&mut self, step: TimeStep, positions: &mut Vec<Position>) -> bool {
true
}
} |
pub fn set_island(&mut self, is_island: bool) { | random_line_split |
mod.rs | pub use self::mouse_joint::{MouseJointConfig, MouseJoint};
use std::rc::{Rc, Weak};
use std::cell::RefCell;
use std::mem;
use std::ptr;
use super::{Body, BodyHandleWeak};
use super::island::{Position, Velocity};
use ::dynamics::world::TimeStep;
mod mouse_joint;
pub type JointHandle<'a> = Rc<RefCell<Joint<'a>>>;
pub type JointHandleWeak<'a> = Weak<RefCell<Joint<'a>>>;
/// A joint edge is used to connect bodies and joints together in a joint graph where
/// each body is a node and each joint is an edge. Each joint has two joint nodes,
/// one for each attached body.
pub struct JointEdge<'a> {
pub body: BodyHandleWeak<'a>,
pub joint: JointHandleWeak<'a>,
}
pub enum JointType {
Mouse(MouseJointConfig),
}
/// `JointConfig`s are used to construct joints.
pub struct JointConfig<'a> {
pub joint_type: JointType,
/// The first attached body.
pub body_a: BodyHandleWeak<'a>,
/// The second attached body.
pub body_b: BodyHandleWeak<'a>,
/// Set this flag to true if the attached bodies should collide.
pub collide_connected: bool,
}
pub struct JointData<'a> {
body_a: BodyHandleWeak<'a>,
body_b: BodyHandleWeak<'a>,
is_island: bool,
is_collide_connected: bool,
}
pub enum Joint<'a> {
Mouse(MouseJoint<'a>),
}
impl<'a> Joint<'a> {
/*pub fn new(joint_config: &JointConfig<'a>) -> JointHandle<'a> {
let result: JointHandle<'a>;
unsafe {
result = Rc::new(RefCell::new(mem::uninitialized()));
let edge_to_a = JointEdge {
body: joint_config.body_a.clone(),
joint: Rc::downgrade(&result),
};
let edge_to_b = JointEdge {
body: joint_config.body_b.clone(),
joint: Rc::downgrade(&result),
};
let joint_data = JointData {
edge_to_a: edge_to_a,
edge_to_b: edge_to_b,
is_island: false,
is_collide_connected: joint_config.collide_connected,
};
match joint_config.joint_type {
JointType::Mouse(ref joint_config) => {
ptr::write(&mut *result.borrow_mut(), Joint::Mouse(MouseJoint::new(joint_config, joint_data)));
}
}
}
result
}*/
pub fn new(joint_config: &JointConfig<'a>) -> JointHandle<'a> {
let joint_data = JointData {
body_a: joint_config.body_a.clone(),
body_b: joint_config.body_b.clone(),
is_island: false,
is_collide_connected: joint_config.collide_connected,
};
let result;
result = match joint_config.joint_type {
JointType::Mouse(ref joint_config) => Rc::new(RefCell::new(Joint::Mouse(MouseJoint::new(joint_config, joint_data)))),
};
result
}
fn get_joint_data(&self) -> &JointData<'a> {
match self {
&Joint::Mouse(ref joint) => &joint.joint_data,
}
}
fn get_joint_data_mut(&mut self) -> &mut JointData<'a> {
match self {
&mut Joint::Mouse(ref mut joint) => &mut joint.joint_data,
}
}
pub fn get_other_body(&self, body: BodyHandleWeak<'a>) -> Option<BodyHandleWeak<'a>> {
let b = body.upgrade().unwrap();
let pb = &(*b) as *const RefCell<Body>;
let b_a = self.get_joint_data().body_a.upgrade().unwrap();
let pb_a = &(*b_a) as *const RefCell<Body>;
if pb == pb_a {
return Some(self.get_joint_data().body_b.clone());
}
let b_b = self.get_joint_data().body_b.upgrade().unwrap();
let pb_b = &(*b_b) as *const RefCell<Body>;
if pb == pb_b {
return Some(self.get_joint_data().body_a.clone());
}
None
}
pub fn set_island(&mut self, is_island: bool) {
self.get_joint_data_mut().is_island = is_island;
}
pub fn | (&self) -> bool {
self.get_joint_data().is_island
}
pub fn initialize_velocity_constraints(&mut self, step: TimeStep, positions: &Vec<Position>, velocities: &mut Vec<Velocity>) {
match self {
&mut Joint::Mouse(ref mut joint) => joint.initialize_velocity_constraints(step, positions, velocities),
}
}
pub fn solve_velocity_constraints(&mut self, step: TimeStep, velocities: &mut Vec<Velocity>) {
match self {
&mut Joint::Mouse(ref mut joint) => joint.solve_velocity_constraints(step, velocities),
}
}
/// This returns true if the position errors are within tolerance.
pub fn solve_position_constraints(&mut self, step: TimeStep, positions: &mut Vec<Position>) -> bool {
true
}
}
| is_island | identifier_name |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use channel::diem_channel::{self, Receiver};
use diem_config::{
config::{Peer, PeerRole},
network_id::NetworkContext,
};
use diem_crypto::x25519::PublicKey;
use diem_logger::prelude::*;
use diem_metrics::{
register_histogram, register_int_counter_vec, register_int_gauge_vec, DurationHistogram,
IntCounterVec, IntGaugeVec,
};
use diem_network_address_encryption::{Encryptor, Error as EncryptorError};
use diem_types::on_chain_config::{OnChainConfigPayload, ValidatorSet, ON_CHAIN_CONFIG_REGISTRY};
use futures::{sink::SinkExt, StreamExt};
use network::{
connectivity_manager::{ConnectivityRequest, DiscoverySource},
counters::inc_by_with_context,
logging::NetworkSchema,
};
use once_cell::sync::Lazy;
use short_hex_str::AsShortHexStr;
use std::{collections::HashSet, sync::Arc};
use subscription_service::ReconfigSubscription;
pub mod builder;
/// Histogram of idle time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_IDLE_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_idle_duration_s",
"Histogram of idle time of spent in event processing loop"
)
.unwrap(),
)
});
/// Histogram of busy time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_BUSY_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_busy_duration_s",
"Histogram of busy time of spent in event processing loop"
)
.unwrap(),
)
});
pub static DISCOVERY_COUNTS: Lazy<IntCounterVec> = Lazy::new(|| {
register_int_counter_vec!(
"diem_simple_onchain_discovery_counts",
"Histogram of busy time of spent in event processing loop",
&["role_type", "network_id", "peer_id", "metric"]
)
.unwrap()
});
pub static NETWORK_KEY_MISMATCH: Lazy<IntGaugeVec> = Lazy::new(|| {
register_int_gauge_vec!(
"diem_network_key_mismatch",
"Gauge of whether the network key mismatches onchain state",
&["role_type", "network_id", "peer_id"]
)
.unwrap()
});
/// Listener which converts published updates from the OnChainConfig to ConnectivityRequests
/// for the ConnectivityManager.
pub struct ConfigurationChangeListener {
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
}
pub fn gen_simple_discovery_reconfig_subscription(
) -> (ReconfigSubscription, Receiver<(), OnChainConfigPayload>) {
ReconfigSubscription::subscribe_all("network", ON_CHAIN_CONFIG_REGISTRY.to_vec(), vec![])
}
/// Extracts a set of ConnectivityRequests from a ValidatorSet which are appropriate for a network with type role.
fn extract_updates(
network_context: Arc<NetworkContext>,
encryptor: &Encryptor,
node_set: ValidatorSet,
) -> Vec<ConnectivityRequest> {
let is_validator = network_context.network_id().is_validator_network();
// Decode addresses while ignoring bad addresses
let discovered_peers = node_set
.into_iter()
.map(|info| {
let peer_id = *info.account_address();
let config = info.into_config();
let addrs = if is_validator {
let result = encryptor.decrypt(&config.validator_network_addresses, peer_id);
if let Err(EncryptorError::StorageError(_)) = result {
panic!(format!(
"Unable to initialize validator network addresses: {:?}",
result
));
}
result.map_err(anyhow::Error::from)
} else {
config
.fullnode_network_addresses()
.map_err(anyhow::Error::from)
}
.map_err(|err| {
inc_by_with_context(&DISCOVERY_COUNTS, &network_context, "read_failure", 1);
warn!(
NetworkSchema::new(&network_context),
"OnChainDiscovery: Failed to parse any network address: peer: {}, err: {}",
peer_id,
err
)
})
.unwrap_or_default();
let peer_role = if is_validator {
PeerRole::Validator
} else {
PeerRole::ValidatorFullNode
};
(peer_id, Peer::from_addrs(peer_role, addrs))
})
.collect();
vec![ConnectivityRequest::UpdateDiscoveredPeers(
DiscoverySource::OnChain,
discovered_peers,
)]
}
impl ConfigurationChangeListener {
/// Creates a new ConfigurationChangeListener
pub fn new(
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
) -> Self {
Self {
network_context,
expected_pubkey,
encryptor,
conn_mgr_reqs_tx,
reconfig_events,
}
}
async fn next_reconfig_event(&mut self) -> Option<OnChainConfigPayload> {
let _idle_timer = EVENT_PROCESSING_LOOP_IDLE_DURATION_S.start_timer();
self.reconfig_events.next().await
}
fn find_key_mismatches(&self, onchain_keys: Option<&HashSet<PublicKey>>) {
let mismatch = onchain_keys.map_or(0, |pubkeys| {
if!pubkeys.contains(&self.expected_pubkey) {
error!(
NetworkSchema::new(&self.network_context),
"Onchain pubkey {:?} differs from local pubkey {}",
pubkeys,
self.expected_pubkey
);
1
} else {
0
}
});
NETWORK_KEY_MISMATCH
.with_label_values(&[
self.network_context.role().as_str(),
self.network_context.network_id().as_str(),
self.network_context.peer_id().short_str().as_str(),
])
.set(mismatch);
}
/// Processes a received OnChainConfigPayload. Depending on role (Validator or FullNode), parses
/// the appropriate configuration changes and passes it to the ConnectionManager channel.
async fn process_payload(&mut self, payload: OnChainConfigPayload) {
let _process_timer = EVENT_PROCESSING_LOOP_BUSY_DURATION_S.start_timer();
let node_set: ValidatorSet = payload
.get()
.expect("failed to get ValidatorSet from payload");
let updates = extract_updates(self.network_context.clone(), &self.encryptor, node_set);
// Ensure that the public key matches what's onchain for this peer
for request in &updates {
if let ConnectivityRequest::UpdateDiscoveredPeers(_, peer_updates) = request {
self.find_key_mismatches(
peer_updates
.get(&self.network_context.peer_id())
.map(|peer| &peer.keys),
)
}
}
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"new_nodes",
updates.len() as u64,
);
info!(
NetworkSchema::new(&self.network_context),
"Update {} Network about new Node IDs",
self.network_context.network_id()
);
for update in updates {
match self.conn_mgr_reqs_tx.send(update).await {
Ok(()) => (),
Err(e) => {
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"send_failure",
1,
);
warn!(
NetworkSchema::new(&self.network_context),
"Failed to send update to ConnectivityManager {}", e
)
}
}
}
}
/// Starts the listener to wait on reconfiguration events.
pub async fn start(mut self) {
info!(
NetworkSchema::new(&self.network_context),
"{} Starting OnChain Discovery actor", self.network_context
); | self.process_payload(payload).await;
}
warn!(
NetworkSchema::new(&self.network_context),
"{} OnChain Discovery actor terminated", self.network_context,
);
}
}
#[cfg(test)]
mod tests {
use super::*;
use diem_config::config::HANDSHAKE_VERSION;
use diem_crypto::{
ed25519::{Ed25519PrivateKey, Ed25519PublicKey},
x25519::PrivateKey,
PrivateKey as PK, Uniform,
};
use diem_types::{
network_address::NetworkAddress, on_chain_config::OnChainConfig,
validator_config::ValidatorConfig, validator_info::ValidatorInfo, PeerId,
};
use futures::executor::block_on;
use rand::{rngs::StdRng, SeedableRng};
use std::{collections::HashMap, time::Instant};
use tokio::{
runtime::Runtime,
time::{timeout_at, Duration},
};
#[test]
fn metric_if_key_mismatch() {
diem_logger::DiemLogger::init_for_testing();
let runtime = Runtime::new().unwrap();
let consensus_private_key = Ed25519PrivateKey::generate_for_testing();
let consensus_pubkey = consensus_private_key.public_key();
let pubkey = test_pubkey([0u8; 32]);
let different_pubkey = test_pubkey([1u8; 32]);
let peer_id = diem_types::account_address::from_identity_public_key(pubkey);
// Build up the Reconfig Listener
let (conn_mgr_reqs_tx, _rx) = channel::new_test(1);
let (mut reconfig_tx, reconfig_rx) = gen_simple_discovery_reconfig_subscription();
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let listener = ConfigurationChangeListener::new(
network_context.clone(),
pubkey,
Encryptor::for_testing(),
conn_mgr_reqs_tx,
reconfig_rx,
);
// Build up and send an update with a different pubkey
send_pubkey_update(
peer_id,
consensus_pubkey,
different_pubkey,
&mut reconfig_tx,
);
let listener_future = async move {
// Run the test, ensuring we actually stop after a couple seconds in case it fails to fail
timeout_at(
tokio::time::Instant::from(Instant::now() + Duration::from_secs(1)),
listener.start(),
)
.await
.expect_err("Expect timeout");
};
// Ensure the metric is updated
check_network_key_mismatch_metric(0, &network_context);
block_on(runtime.spawn(listener_future)).unwrap();
check_network_key_mismatch_metric(1, &network_context);
}
fn check_network_key_mismatch_metric(expected: i64, network_context: &NetworkContext) {
assert_eq!(
expected,
NETWORK_KEY_MISMATCH
.get_metric_with_label_values(&[
network_context.role().as_str(),
network_context.network_id().as_str(),
network_context.peer_id().short_str().as_str()
])
.unwrap()
.get()
)
}
fn send_pubkey_update(
peer_id: PeerId,
consensus_pubkey: Ed25519PublicKey,
pubkey: PublicKey,
reconfig_tx: &mut ReconfigSubscription,
) {
let validator_address =
NetworkAddress::mock().append_prod_protos(pubkey, HANDSHAKE_VERSION);
let addresses = vec![validator_address];
let encryptor = Encryptor::for_testing();
let encrypted_addresses = encryptor.encrypt(&addresses, peer_id, 0).unwrap();
let encoded_addresses = bcs::to_bytes(&addresses).unwrap();
let validator = ValidatorInfo::new(
peer_id,
0,
ValidatorConfig::new(consensus_pubkey, encrypted_addresses, encoded_addresses),
);
let validator_set = ValidatorSet::new(vec![validator]);
let mut configs = HashMap::new();
configs.insert(
ValidatorSet::CONFIG_ID,
bcs::to_bytes(&validator_set).unwrap(),
);
let payload = OnChainConfigPayload::new(1, Arc::new(configs));
reconfig_tx.publish(payload).unwrap();
}
fn test_pubkey(seed: [u8; 32]) -> PublicKey {
let mut rng: StdRng = SeedableRng::from_seed(seed);
let private_key = PrivateKey::generate(&mut rng);
private_key.public_key()
}
} |
while let Some(payload) = self.next_reconfig_event().await { | random_line_split |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use channel::diem_channel::{self, Receiver};
use diem_config::{
config::{Peer, PeerRole},
network_id::NetworkContext,
};
use diem_crypto::x25519::PublicKey;
use diem_logger::prelude::*;
use diem_metrics::{
register_histogram, register_int_counter_vec, register_int_gauge_vec, DurationHistogram,
IntCounterVec, IntGaugeVec,
};
use diem_network_address_encryption::{Encryptor, Error as EncryptorError};
use diem_types::on_chain_config::{OnChainConfigPayload, ValidatorSet, ON_CHAIN_CONFIG_REGISTRY};
use futures::{sink::SinkExt, StreamExt};
use network::{
connectivity_manager::{ConnectivityRequest, DiscoverySource},
counters::inc_by_with_context,
logging::NetworkSchema,
};
use once_cell::sync::Lazy;
use short_hex_str::AsShortHexStr;
use std::{collections::HashSet, sync::Arc};
use subscription_service::ReconfigSubscription;
pub mod builder;
/// Histogram of idle time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_IDLE_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_idle_duration_s",
"Histogram of idle time of spent in event processing loop"
)
.unwrap(),
)
});
/// Histogram of busy time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_BUSY_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_busy_duration_s",
"Histogram of busy time of spent in event processing loop"
)
.unwrap(),
)
});
pub static DISCOVERY_COUNTS: Lazy<IntCounterVec> = Lazy::new(|| {
register_int_counter_vec!(
"diem_simple_onchain_discovery_counts",
"Histogram of busy time of spent in event processing loop",
&["role_type", "network_id", "peer_id", "metric"]
)
.unwrap()
});
pub static NETWORK_KEY_MISMATCH: Lazy<IntGaugeVec> = Lazy::new(|| {
register_int_gauge_vec!(
"diem_network_key_mismatch",
"Gauge of whether the network key mismatches onchain state",
&["role_type", "network_id", "peer_id"]
)
.unwrap()
});
/// Listener which converts published updates from the OnChainConfig to ConnectivityRequests
/// for the ConnectivityManager.
pub struct ConfigurationChangeListener {
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
}
pub fn gen_simple_discovery_reconfig_subscription(
) -> (ReconfigSubscription, Receiver<(), OnChainConfigPayload>) {
ReconfigSubscription::subscribe_all("network", ON_CHAIN_CONFIG_REGISTRY.to_vec(), vec![])
}
/// Extracts a set of ConnectivityRequests from a ValidatorSet which are appropriate for a network with type role.
fn extract_updates(
network_context: Arc<NetworkContext>,
encryptor: &Encryptor,
node_set: ValidatorSet,
) -> Vec<ConnectivityRequest> {
let is_validator = network_context.network_id().is_validator_network();
// Decode addresses while ignoring bad addresses
let discovered_peers = node_set
.into_iter()
.map(|info| {
let peer_id = *info.account_address();
let config = info.into_config();
let addrs = if is_validator {
let result = encryptor.decrypt(&config.validator_network_addresses, peer_id);
if let Err(EncryptorError::StorageError(_)) = result {
panic!(format!(
"Unable to initialize validator network addresses: {:?}",
result
));
}
result.map_err(anyhow::Error::from)
} else {
config
.fullnode_network_addresses()
.map_err(anyhow::Error::from)
}
.map_err(|err| {
inc_by_with_context(&DISCOVERY_COUNTS, &network_context, "read_failure", 1);
warn!(
NetworkSchema::new(&network_context),
"OnChainDiscovery: Failed to parse any network address: peer: {}, err: {}",
peer_id,
err
)
})
.unwrap_or_default();
let peer_role = if is_validator {
PeerRole::Validator
} else {
PeerRole::ValidatorFullNode
};
(peer_id, Peer::from_addrs(peer_role, addrs))
})
.collect();
vec![ConnectivityRequest::UpdateDiscoveredPeers(
DiscoverySource::OnChain,
discovered_peers,
)]
}
impl ConfigurationChangeListener {
/// Creates a new ConfigurationChangeListener
pub fn new(
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
) -> Self {
Self {
network_context,
expected_pubkey,
encryptor,
conn_mgr_reqs_tx,
reconfig_events,
}
}
async fn next_reconfig_event(&mut self) -> Option<OnChainConfigPayload> {
let _idle_timer = EVENT_PROCESSING_LOOP_IDLE_DURATION_S.start_timer();
self.reconfig_events.next().await
}
fn find_key_mismatches(&self, onchain_keys: Option<&HashSet<PublicKey>>) {
let mismatch = onchain_keys.map_or(0, |pubkeys| {
if!pubkeys.contains(&self.expected_pubkey) {
error!(
NetworkSchema::new(&self.network_context),
"Onchain pubkey {:?} differs from local pubkey {}",
pubkeys,
self.expected_pubkey
);
1
} else {
0
}
});
NETWORK_KEY_MISMATCH
.with_label_values(&[
self.network_context.role().as_str(),
self.network_context.network_id().as_str(),
self.network_context.peer_id().short_str().as_str(),
])
.set(mismatch);
}
/// Processes a received OnChainConfigPayload. Depending on role (Validator or FullNode), parses
/// the appropriate configuration changes and passes it to the ConnectionManager channel.
async fn process_payload(&mut self, payload: OnChainConfigPayload) {
let _process_timer = EVENT_PROCESSING_LOOP_BUSY_DURATION_S.start_timer();
let node_set: ValidatorSet = payload
.get()
.expect("failed to get ValidatorSet from payload");
let updates = extract_updates(self.network_context.clone(), &self.encryptor, node_set);
// Ensure that the public key matches what's onchain for this peer
for request in &updates {
if let ConnectivityRequest::UpdateDiscoveredPeers(_, peer_updates) = request {
self.find_key_mismatches(
peer_updates
.get(&self.network_context.peer_id())
.map(|peer| &peer.keys),
)
}
}
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"new_nodes",
updates.len() as u64,
);
info!(
NetworkSchema::new(&self.network_context),
"Update {} Network about new Node IDs",
self.network_context.network_id()
);
for update in updates {
match self.conn_mgr_reqs_tx.send(update).await {
Ok(()) => (),
Err(e) => {
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"send_failure",
1,
);
warn!(
NetworkSchema::new(&self.network_context),
"Failed to send update to ConnectivityManager {}", e
)
}
}
}
}
/// Starts the listener to wait on reconfiguration events.
pub async fn start(mut self) {
info!(
NetworkSchema::new(&self.network_context),
"{} Starting OnChain Discovery actor", self.network_context
);
while let Some(payload) = self.next_reconfig_event().await {
self.process_payload(payload).await;
}
warn!(
NetworkSchema::new(&self.network_context),
"{} OnChain Discovery actor terminated", self.network_context,
);
}
}
#[cfg(test)]
mod tests {
use super::*;
use diem_config::config::HANDSHAKE_VERSION;
use diem_crypto::{
ed25519::{Ed25519PrivateKey, Ed25519PublicKey},
x25519::PrivateKey,
PrivateKey as PK, Uniform,
};
use diem_types::{
network_address::NetworkAddress, on_chain_config::OnChainConfig,
validator_config::ValidatorConfig, validator_info::ValidatorInfo, PeerId,
};
use futures::executor::block_on;
use rand::{rngs::StdRng, SeedableRng};
use std::{collections::HashMap, time::Instant};
use tokio::{
runtime::Runtime,
time::{timeout_at, Duration},
};
#[test]
fn metric_if_key_mismatch() {
diem_logger::DiemLogger::init_for_testing();
let runtime = Runtime::new().unwrap();
let consensus_private_key = Ed25519PrivateKey::generate_for_testing();
let consensus_pubkey = consensus_private_key.public_key();
let pubkey = test_pubkey([0u8; 32]);
let different_pubkey = test_pubkey([1u8; 32]);
let peer_id = diem_types::account_address::from_identity_public_key(pubkey);
// Build up the Reconfig Listener
let (conn_mgr_reqs_tx, _rx) = channel::new_test(1);
let (mut reconfig_tx, reconfig_rx) = gen_simple_discovery_reconfig_subscription();
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let listener = ConfigurationChangeListener::new(
network_context.clone(),
pubkey,
Encryptor::for_testing(),
conn_mgr_reqs_tx,
reconfig_rx,
);
// Build up and send an update with a different pubkey
send_pubkey_update(
peer_id,
consensus_pubkey,
different_pubkey,
&mut reconfig_tx,
);
let listener_future = async move {
// Run the test, ensuring we actually stop after a couple seconds in case it fails to fail
timeout_at(
tokio::time::Instant::from(Instant::now() + Duration::from_secs(1)),
listener.start(),
)
.await
.expect_err("Expect timeout");
};
// Ensure the metric is updated
check_network_key_mismatch_metric(0, &network_context);
block_on(runtime.spawn(listener_future)).unwrap();
check_network_key_mismatch_metric(1, &network_context);
}
fn check_network_key_mismatch_metric(expected: i64, network_context: &NetworkContext) {
assert_eq!(
expected,
NETWORK_KEY_MISMATCH
.get_metric_with_label_values(&[
network_context.role().as_str(),
network_context.network_id().as_str(),
network_context.peer_id().short_str().as_str()
])
.unwrap()
.get()
)
}
fn send_pubkey_update(
peer_id: PeerId,
consensus_pubkey: Ed25519PublicKey,
pubkey: PublicKey,
reconfig_tx: &mut ReconfigSubscription,
) {
let validator_address =
NetworkAddress::mock().append_prod_protos(pubkey, HANDSHAKE_VERSION);
let addresses = vec![validator_address];
let encryptor = Encryptor::for_testing();
let encrypted_addresses = encryptor.encrypt(&addresses, peer_id, 0).unwrap();
let encoded_addresses = bcs::to_bytes(&addresses).unwrap();
let validator = ValidatorInfo::new(
peer_id,
0,
ValidatorConfig::new(consensus_pubkey, encrypted_addresses, encoded_addresses),
);
let validator_set = ValidatorSet::new(vec![validator]);
let mut configs = HashMap::new();
configs.insert(
ValidatorSet::CONFIG_ID,
bcs::to_bytes(&validator_set).unwrap(),
);
let payload = OnChainConfigPayload::new(1, Arc::new(configs));
reconfig_tx.publish(payload).unwrap();
}
fn | (seed: [u8; 32]) -> PublicKey {
let mut rng: StdRng = SeedableRng::from_seed(seed);
let private_key = PrivateKey::generate(&mut rng);
private_key.public_key()
}
}
| test_pubkey | identifier_name |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use channel::diem_channel::{self, Receiver};
use diem_config::{
config::{Peer, PeerRole},
network_id::NetworkContext,
};
use diem_crypto::x25519::PublicKey;
use diem_logger::prelude::*;
use diem_metrics::{
register_histogram, register_int_counter_vec, register_int_gauge_vec, DurationHistogram,
IntCounterVec, IntGaugeVec,
};
use diem_network_address_encryption::{Encryptor, Error as EncryptorError};
use diem_types::on_chain_config::{OnChainConfigPayload, ValidatorSet, ON_CHAIN_CONFIG_REGISTRY};
use futures::{sink::SinkExt, StreamExt};
use network::{
connectivity_manager::{ConnectivityRequest, DiscoverySource},
counters::inc_by_with_context,
logging::NetworkSchema,
};
use once_cell::sync::Lazy;
use short_hex_str::AsShortHexStr;
use std::{collections::HashSet, sync::Arc};
use subscription_service::ReconfigSubscription;
pub mod builder;
/// Histogram of idle time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_IDLE_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_idle_duration_s",
"Histogram of idle time of spent in event processing loop"
)
.unwrap(),
)
});
/// Histogram of busy time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_BUSY_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_busy_duration_s",
"Histogram of busy time of spent in event processing loop"
)
.unwrap(),
)
});
pub static DISCOVERY_COUNTS: Lazy<IntCounterVec> = Lazy::new(|| {
register_int_counter_vec!(
"diem_simple_onchain_discovery_counts",
"Histogram of busy time of spent in event processing loop",
&["role_type", "network_id", "peer_id", "metric"]
)
.unwrap()
});
pub static NETWORK_KEY_MISMATCH: Lazy<IntGaugeVec> = Lazy::new(|| {
register_int_gauge_vec!(
"diem_network_key_mismatch",
"Gauge of whether the network key mismatches onchain state",
&["role_type", "network_id", "peer_id"]
)
.unwrap()
});
/// Listener which converts published updates from the OnChainConfig to ConnectivityRequests
/// for the ConnectivityManager.
pub struct ConfigurationChangeListener {
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
}
pub fn gen_simple_discovery_reconfig_subscription(
) -> (ReconfigSubscription, Receiver<(), OnChainConfigPayload>) {
ReconfigSubscription::subscribe_all("network", ON_CHAIN_CONFIG_REGISTRY.to_vec(), vec![])
}
/// Extracts a set of ConnectivityRequests from a ValidatorSet which are appropriate for a network with type role.
fn extract_updates(
network_context: Arc<NetworkContext>,
encryptor: &Encryptor,
node_set: ValidatorSet,
) -> Vec<ConnectivityRequest> {
let is_validator = network_context.network_id().is_validator_network();
// Decode addresses while ignoring bad addresses
let discovered_peers = node_set
.into_iter()
.map(|info| {
let peer_id = *info.account_address();
let config = info.into_config();
let addrs = if is_validator {
let result = encryptor.decrypt(&config.validator_network_addresses, peer_id);
if let Err(EncryptorError::StorageError(_)) = result {
panic!(format!(
"Unable to initialize validator network addresses: {:?}",
result
));
}
result.map_err(anyhow::Error::from)
} else {
config
.fullnode_network_addresses()
.map_err(anyhow::Error::from)
}
.map_err(|err| {
inc_by_with_context(&DISCOVERY_COUNTS, &network_context, "read_failure", 1);
warn!(
NetworkSchema::new(&network_context),
"OnChainDiscovery: Failed to parse any network address: peer: {}, err: {}",
peer_id,
err
)
})
.unwrap_or_default();
let peer_role = if is_validator {
PeerRole::Validator
} else {
PeerRole::ValidatorFullNode
};
(peer_id, Peer::from_addrs(peer_role, addrs))
})
.collect();
vec![ConnectivityRequest::UpdateDiscoveredPeers(
DiscoverySource::OnChain,
discovered_peers,
)]
}
impl ConfigurationChangeListener {
/// Creates a new ConfigurationChangeListener
pub fn new(
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
) -> Self {
Self {
network_context,
expected_pubkey,
encryptor,
conn_mgr_reqs_tx,
reconfig_events,
}
}
async fn next_reconfig_event(&mut self) -> Option<OnChainConfigPayload> {
let _idle_timer = EVENT_PROCESSING_LOOP_IDLE_DURATION_S.start_timer();
self.reconfig_events.next().await
}
fn find_key_mismatches(&self, onchain_keys: Option<&HashSet<PublicKey>>) | ])
.set(mismatch);
}
/// Processes a received OnChainConfigPayload. Depending on role (Validator or FullNode), parses
/// the appropriate configuration changes and passes it to the ConnectionManager channel.
async fn process_payload(&mut self, payload: OnChainConfigPayload) {
let _process_timer = EVENT_PROCESSING_LOOP_BUSY_DURATION_S.start_timer();
let node_set: ValidatorSet = payload
.get()
.expect("failed to get ValidatorSet from payload");
let updates = extract_updates(self.network_context.clone(), &self.encryptor, node_set);
// Ensure that the public key matches what's onchain for this peer
for request in &updates {
if let ConnectivityRequest::UpdateDiscoveredPeers(_, peer_updates) = request {
self.find_key_mismatches(
peer_updates
.get(&self.network_context.peer_id())
.map(|peer| &peer.keys),
)
}
}
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"new_nodes",
updates.len() as u64,
);
info!(
NetworkSchema::new(&self.network_context),
"Update {} Network about new Node IDs",
self.network_context.network_id()
);
for update in updates {
match self.conn_mgr_reqs_tx.send(update).await {
Ok(()) => (),
Err(e) => {
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"send_failure",
1,
);
warn!(
NetworkSchema::new(&self.network_context),
"Failed to send update to ConnectivityManager {}", e
)
}
}
}
}
/// Starts the listener to wait on reconfiguration events.
pub async fn start(mut self) {
info!(
NetworkSchema::new(&self.network_context),
"{} Starting OnChain Discovery actor", self.network_context
);
while let Some(payload) = self.next_reconfig_event().await {
self.process_payload(payload).await;
}
warn!(
NetworkSchema::new(&self.network_context),
"{} OnChain Discovery actor terminated", self.network_context,
);
}
}
#[cfg(test)]
mod tests {
use super::*;
use diem_config::config::HANDSHAKE_VERSION;
use diem_crypto::{
ed25519::{Ed25519PrivateKey, Ed25519PublicKey},
x25519::PrivateKey,
PrivateKey as PK, Uniform,
};
use diem_types::{
network_address::NetworkAddress, on_chain_config::OnChainConfig,
validator_config::ValidatorConfig, validator_info::ValidatorInfo, PeerId,
};
use futures::executor::block_on;
use rand::{rngs::StdRng, SeedableRng};
use std::{collections::HashMap, time::Instant};
use tokio::{
runtime::Runtime,
time::{timeout_at, Duration},
};
#[test]
fn metric_if_key_mismatch() {
diem_logger::DiemLogger::init_for_testing();
let runtime = Runtime::new().unwrap();
let consensus_private_key = Ed25519PrivateKey::generate_for_testing();
let consensus_pubkey = consensus_private_key.public_key();
let pubkey = test_pubkey([0u8; 32]);
let different_pubkey = test_pubkey([1u8; 32]);
let peer_id = diem_types::account_address::from_identity_public_key(pubkey);
// Build up the Reconfig Listener
let (conn_mgr_reqs_tx, _rx) = channel::new_test(1);
let (mut reconfig_tx, reconfig_rx) = gen_simple_discovery_reconfig_subscription();
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let listener = ConfigurationChangeListener::new(
network_context.clone(),
pubkey,
Encryptor::for_testing(),
conn_mgr_reqs_tx,
reconfig_rx,
);
// Build up and send an update with a different pubkey
send_pubkey_update(
peer_id,
consensus_pubkey,
different_pubkey,
&mut reconfig_tx,
);
let listener_future = async move {
// Run the test, ensuring we actually stop after a couple seconds in case it fails to fail
timeout_at(
tokio::time::Instant::from(Instant::now() + Duration::from_secs(1)),
listener.start(),
)
.await
.expect_err("Expect timeout");
};
// Ensure the metric is updated
check_network_key_mismatch_metric(0, &network_context);
block_on(runtime.spawn(listener_future)).unwrap();
check_network_key_mismatch_metric(1, &network_context);
}
fn check_network_key_mismatch_metric(expected: i64, network_context: &NetworkContext) {
assert_eq!(
expected,
NETWORK_KEY_MISMATCH
.get_metric_with_label_values(&[
network_context.role().as_str(),
network_context.network_id().as_str(),
network_context.peer_id().short_str().as_str()
])
.unwrap()
.get()
)
}
fn send_pubkey_update(
peer_id: PeerId,
consensus_pubkey: Ed25519PublicKey,
pubkey: PublicKey,
reconfig_tx: &mut ReconfigSubscription,
) {
let validator_address =
NetworkAddress::mock().append_prod_protos(pubkey, HANDSHAKE_VERSION);
let addresses = vec![validator_address];
let encryptor = Encryptor::for_testing();
let encrypted_addresses = encryptor.encrypt(&addresses, peer_id, 0).unwrap();
let encoded_addresses = bcs::to_bytes(&addresses).unwrap();
let validator = ValidatorInfo::new(
peer_id,
0,
ValidatorConfig::new(consensus_pubkey, encrypted_addresses, encoded_addresses),
);
let validator_set = ValidatorSet::new(vec![validator]);
let mut configs = HashMap::new();
configs.insert(
ValidatorSet::CONFIG_ID,
bcs::to_bytes(&validator_set).unwrap(),
);
let payload = OnChainConfigPayload::new(1, Arc::new(configs));
reconfig_tx.publish(payload).unwrap();
}
fn test_pubkey(seed: [u8; 32]) -> PublicKey {
let mut rng: StdRng = SeedableRng::from_seed(seed);
let private_key = PrivateKey::generate(&mut rng);
private_key.public_key()
}
}
| {
let mismatch = onchain_keys.map_or(0, |pubkeys| {
if !pubkeys.contains(&self.expected_pubkey) {
error!(
NetworkSchema::new(&self.network_context),
"Onchain pubkey {:?} differs from local pubkey {}",
pubkeys,
self.expected_pubkey
);
1
} else {
0
}
});
NETWORK_KEY_MISMATCH
.with_label_values(&[
self.network_context.role().as_str(),
self.network_context.network_id().as_str(),
self.network_context.peer_id().short_str().as_str(), | identifier_body |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use channel::diem_channel::{self, Receiver};
use diem_config::{
config::{Peer, PeerRole},
network_id::NetworkContext,
};
use diem_crypto::x25519::PublicKey;
use diem_logger::prelude::*;
use diem_metrics::{
register_histogram, register_int_counter_vec, register_int_gauge_vec, DurationHistogram,
IntCounterVec, IntGaugeVec,
};
use diem_network_address_encryption::{Encryptor, Error as EncryptorError};
use diem_types::on_chain_config::{OnChainConfigPayload, ValidatorSet, ON_CHAIN_CONFIG_REGISTRY};
use futures::{sink::SinkExt, StreamExt};
use network::{
connectivity_manager::{ConnectivityRequest, DiscoverySource},
counters::inc_by_with_context,
logging::NetworkSchema,
};
use once_cell::sync::Lazy;
use short_hex_str::AsShortHexStr;
use std::{collections::HashSet, sync::Arc};
use subscription_service::ReconfigSubscription;
pub mod builder;
/// Histogram of idle time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_IDLE_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_idle_duration_s",
"Histogram of idle time of spent in event processing loop"
)
.unwrap(),
)
});
/// Histogram of busy time of spent in event processing loop
pub static EVENT_PROCESSING_LOOP_BUSY_DURATION_S: Lazy<DurationHistogram> = Lazy::new(|| {
DurationHistogram::new(
register_histogram!(
"simple_onchain_discovery_event_processing_loop_busy_duration_s",
"Histogram of busy time of spent in event processing loop"
)
.unwrap(),
)
});
pub static DISCOVERY_COUNTS: Lazy<IntCounterVec> = Lazy::new(|| {
register_int_counter_vec!(
"diem_simple_onchain_discovery_counts",
"Histogram of busy time of spent in event processing loop",
&["role_type", "network_id", "peer_id", "metric"]
)
.unwrap()
});
pub static NETWORK_KEY_MISMATCH: Lazy<IntGaugeVec> = Lazy::new(|| {
register_int_gauge_vec!(
"diem_network_key_mismatch",
"Gauge of whether the network key mismatches onchain state",
&["role_type", "network_id", "peer_id"]
)
.unwrap()
});
/// Listener which converts published updates from the OnChainConfig to ConnectivityRequests
/// for the ConnectivityManager.
pub struct ConfigurationChangeListener {
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
}
pub fn gen_simple_discovery_reconfig_subscription(
) -> (ReconfigSubscription, Receiver<(), OnChainConfigPayload>) {
ReconfigSubscription::subscribe_all("network", ON_CHAIN_CONFIG_REGISTRY.to_vec(), vec![])
}
/// Extracts a set of ConnectivityRequests from a ValidatorSet which are appropriate for a network with type role.
fn extract_updates(
network_context: Arc<NetworkContext>,
encryptor: &Encryptor,
node_set: ValidatorSet,
) -> Vec<ConnectivityRequest> {
let is_validator = network_context.network_id().is_validator_network();
// Decode addresses while ignoring bad addresses
let discovered_peers = node_set
.into_iter()
.map(|info| {
let peer_id = *info.account_address();
let config = info.into_config();
let addrs = if is_validator {
let result = encryptor.decrypt(&config.validator_network_addresses, peer_id);
if let Err(EncryptorError::StorageError(_)) = result {
panic!(format!(
"Unable to initialize validator network addresses: {:?}",
result
));
}
result.map_err(anyhow::Error::from)
} else {
config
.fullnode_network_addresses()
.map_err(anyhow::Error::from)
}
.map_err(|err| {
inc_by_with_context(&DISCOVERY_COUNTS, &network_context, "read_failure", 1);
warn!(
NetworkSchema::new(&network_context),
"OnChainDiscovery: Failed to parse any network address: peer: {}, err: {}",
peer_id,
err
)
})
.unwrap_or_default();
let peer_role = if is_validator | else {
PeerRole::ValidatorFullNode
};
(peer_id, Peer::from_addrs(peer_role, addrs))
})
.collect();
vec![ConnectivityRequest::UpdateDiscoveredPeers(
DiscoverySource::OnChain,
discovered_peers,
)]
}
impl ConfigurationChangeListener {
/// Creates a new ConfigurationChangeListener
pub fn new(
network_context: Arc<NetworkContext>,
expected_pubkey: PublicKey,
encryptor: Encryptor,
conn_mgr_reqs_tx: channel::Sender<ConnectivityRequest>,
reconfig_events: diem_channel::Receiver<(), OnChainConfigPayload>,
) -> Self {
Self {
network_context,
expected_pubkey,
encryptor,
conn_mgr_reqs_tx,
reconfig_events,
}
}
async fn next_reconfig_event(&mut self) -> Option<OnChainConfigPayload> {
let _idle_timer = EVENT_PROCESSING_LOOP_IDLE_DURATION_S.start_timer();
self.reconfig_events.next().await
}
fn find_key_mismatches(&self, onchain_keys: Option<&HashSet<PublicKey>>) {
let mismatch = onchain_keys.map_or(0, |pubkeys| {
if!pubkeys.contains(&self.expected_pubkey) {
error!(
NetworkSchema::new(&self.network_context),
"Onchain pubkey {:?} differs from local pubkey {}",
pubkeys,
self.expected_pubkey
);
1
} else {
0
}
});
NETWORK_KEY_MISMATCH
.with_label_values(&[
self.network_context.role().as_str(),
self.network_context.network_id().as_str(),
self.network_context.peer_id().short_str().as_str(),
])
.set(mismatch);
}
/// Processes a received OnChainConfigPayload. Depending on role (Validator or FullNode), parses
/// the appropriate configuration changes and passes it to the ConnectionManager channel.
async fn process_payload(&mut self, payload: OnChainConfigPayload) {
let _process_timer = EVENT_PROCESSING_LOOP_BUSY_DURATION_S.start_timer();
let node_set: ValidatorSet = payload
.get()
.expect("failed to get ValidatorSet from payload");
let updates = extract_updates(self.network_context.clone(), &self.encryptor, node_set);
// Ensure that the public key matches what's onchain for this peer
for request in &updates {
if let ConnectivityRequest::UpdateDiscoveredPeers(_, peer_updates) = request {
self.find_key_mismatches(
peer_updates
.get(&self.network_context.peer_id())
.map(|peer| &peer.keys),
)
}
}
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"new_nodes",
updates.len() as u64,
);
info!(
NetworkSchema::new(&self.network_context),
"Update {} Network about new Node IDs",
self.network_context.network_id()
);
for update in updates {
match self.conn_mgr_reqs_tx.send(update).await {
Ok(()) => (),
Err(e) => {
inc_by_with_context(
&DISCOVERY_COUNTS,
&self.network_context,
"send_failure",
1,
);
warn!(
NetworkSchema::new(&self.network_context),
"Failed to send update to ConnectivityManager {}", e
)
}
}
}
}
/// Starts the listener to wait on reconfiguration events.
pub async fn start(mut self) {
info!(
NetworkSchema::new(&self.network_context),
"{} Starting OnChain Discovery actor", self.network_context
);
while let Some(payload) = self.next_reconfig_event().await {
self.process_payload(payload).await;
}
warn!(
NetworkSchema::new(&self.network_context),
"{} OnChain Discovery actor terminated", self.network_context,
);
}
}
#[cfg(test)]
mod tests {
use super::*;
use diem_config::config::HANDSHAKE_VERSION;
use diem_crypto::{
ed25519::{Ed25519PrivateKey, Ed25519PublicKey},
x25519::PrivateKey,
PrivateKey as PK, Uniform,
};
use diem_types::{
network_address::NetworkAddress, on_chain_config::OnChainConfig,
validator_config::ValidatorConfig, validator_info::ValidatorInfo, PeerId,
};
use futures::executor::block_on;
use rand::{rngs::StdRng, SeedableRng};
use std::{collections::HashMap, time::Instant};
use tokio::{
runtime::Runtime,
time::{timeout_at, Duration},
};
#[test]
fn metric_if_key_mismatch() {
diem_logger::DiemLogger::init_for_testing();
let runtime = Runtime::new().unwrap();
let consensus_private_key = Ed25519PrivateKey::generate_for_testing();
let consensus_pubkey = consensus_private_key.public_key();
let pubkey = test_pubkey([0u8; 32]);
let different_pubkey = test_pubkey([1u8; 32]);
let peer_id = diem_types::account_address::from_identity_public_key(pubkey);
// Build up the Reconfig Listener
let (conn_mgr_reqs_tx, _rx) = channel::new_test(1);
let (mut reconfig_tx, reconfig_rx) = gen_simple_discovery_reconfig_subscription();
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let listener = ConfigurationChangeListener::new(
network_context.clone(),
pubkey,
Encryptor::for_testing(),
conn_mgr_reqs_tx,
reconfig_rx,
);
// Build up and send an update with a different pubkey
send_pubkey_update(
peer_id,
consensus_pubkey,
different_pubkey,
&mut reconfig_tx,
);
let listener_future = async move {
// Run the test, ensuring we actually stop after a couple seconds in case it fails to fail
timeout_at(
tokio::time::Instant::from(Instant::now() + Duration::from_secs(1)),
listener.start(),
)
.await
.expect_err("Expect timeout");
};
// Ensure the metric is updated
check_network_key_mismatch_metric(0, &network_context);
block_on(runtime.spawn(listener_future)).unwrap();
check_network_key_mismatch_metric(1, &network_context);
}
fn check_network_key_mismatch_metric(expected: i64, network_context: &NetworkContext) {
assert_eq!(
expected,
NETWORK_KEY_MISMATCH
.get_metric_with_label_values(&[
network_context.role().as_str(),
network_context.network_id().as_str(),
network_context.peer_id().short_str().as_str()
])
.unwrap()
.get()
)
}
fn send_pubkey_update(
peer_id: PeerId,
consensus_pubkey: Ed25519PublicKey,
pubkey: PublicKey,
reconfig_tx: &mut ReconfigSubscription,
) {
let validator_address =
NetworkAddress::mock().append_prod_protos(pubkey, HANDSHAKE_VERSION);
let addresses = vec![validator_address];
let encryptor = Encryptor::for_testing();
let encrypted_addresses = encryptor.encrypt(&addresses, peer_id, 0).unwrap();
let encoded_addresses = bcs::to_bytes(&addresses).unwrap();
let validator = ValidatorInfo::new(
peer_id,
0,
ValidatorConfig::new(consensus_pubkey, encrypted_addresses, encoded_addresses),
);
let validator_set = ValidatorSet::new(vec![validator]);
let mut configs = HashMap::new();
configs.insert(
ValidatorSet::CONFIG_ID,
bcs::to_bytes(&validator_set).unwrap(),
);
let payload = OnChainConfigPayload::new(1, Arc::new(configs));
reconfig_tx.publish(payload).unwrap();
}
fn test_pubkey(seed: [u8; 32]) -> PublicKey {
let mut rng: StdRng = SeedableRng::from_seed(seed);
let private_key = PrivateKey::generate(&mut rng);
private_key.public_key()
}
}
| {
PeerRole::Validator
} | conditional_block |
main.rs | use std::io::BufReader;
use std::fs::File;
use std::str::FromStr;
use std::io;
use std::io::prelude::*;
use std::collections::HashSet;
use std::cmp::Reverse;
fn has_distinct_pair(set:&HashSet<i64>, sum: i64) -> bool {
for k in set.iter() {
let v = sum - k;
if *k!= v && set.contains(&v) {
return true;
}
}
false
}
fn print_medians(file_name: &str) -> io::Result<u64> {
let f = File::open(file_name)?;
let reader = BufReader::new(f);
let mut set:HashSet<i64> = HashSet::new();
for line in reader.lines() {
let line = line.unwrap();
let value = i64::from_str(&line).expect("error parsing value");
set.insert(value);
}
let mut result = 0;
for s in -10000..10001 {
if has_distinct_pair(&set, s) {
result = result + 1;
}
}
Ok(result)
}
fn main() | {
for arg in std::env::args().skip(1) {
let value = print_medians(&arg).expect("failed to read");
println!("answer = {}", value);
}
} | identifier_body |
|
main.rs | use std::io::BufReader;
use std::fs::File;
use std::str::FromStr;
use std::io;
use std::io::prelude::*;
use std::collections::HashSet;
use std::cmp::Reverse;
fn has_distinct_pair(set:&HashSet<i64>, sum: i64) -> bool {
for k in set.iter() {
let v = sum - k;
if *k!= v && set.contains(&v) {
return true;
}
}
false
}
fn print_medians(file_name: &str) -> io::Result<u64> {
let f = File::open(file_name)?;
let reader = BufReader::new(f);
let mut set:HashSet<i64> = HashSet::new();
for line in reader.lines() {
let line = line.unwrap();
let value = i64::from_str(&line).expect("error parsing value");
set.insert(value);
}
let mut result = 0;
for s in -10000..10001 {
if has_distinct_pair(&set, s) |
}
Ok(result)
}
fn main() {
for arg in std::env::args().skip(1) {
let value = print_medians(&arg).expect("failed to read");
println!("answer = {}", value);
}
}
| {
result = result + 1;
} | conditional_block |
main.rs | use std::io::BufReader;
use std::fs::File;
use std::str::FromStr;
use std::io;
use std::io::prelude::*;
use std::collections::HashSet;
use std::cmp::Reverse;
fn has_distinct_pair(set:&HashSet<i64>, sum: i64) -> bool {
for k in set.iter() {
let v = sum - k;
if *k!= v && set.contains(&v) {
return true;
}
}
false
}
fn print_medians(file_name: &str) -> io::Result<u64> {
let f = File::open(file_name)?;
let reader = BufReader::new(f);
let mut set:HashSet<i64> = HashSet::new();
for line in reader.lines() {
let line = line.unwrap();
let value = i64::from_str(&line).expect("error parsing value");
set.insert(value);
}
| let mut result = 0;
for s in -10000..10001 {
if has_distinct_pair(&set, s) {
result = result + 1;
}
}
Ok(result)
}
fn main() {
for arg in std::env::args().skip(1) {
let value = print_medians(&arg).expect("failed to read");
println!("answer = {}", value);
}
} | random_line_split |
|
main.rs | use std::io::BufReader;
use std::fs::File;
use std::str::FromStr;
use std::io;
use std::io::prelude::*;
use std::collections::HashSet;
use std::cmp::Reverse;
fn | (set:&HashSet<i64>, sum: i64) -> bool {
for k in set.iter() {
let v = sum - k;
if *k!= v && set.contains(&v) {
return true;
}
}
false
}
fn print_medians(file_name: &str) -> io::Result<u64> {
let f = File::open(file_name)?;
let reader = BufReader::new(f);
let mut set:HashSet<i64> = HashSet::new();
for line in reader.lines() {
let line = line.unwrap();
let value = i64::from_str(&line).expect("error parsing value");
set.insert(value);
}
let mut result = 0;
for s in -10000..10001 {
if has_distinct_pair(&set, s) {
result = result + 1;
}
}
Ok(result)
}
fn main() {
for arg in std::env::args().skip(1) {
let value = print_medians(&arg).expect("failed to read");
println!("answer = {}", value);
}
}
| has_distinct_pair | identifier_name |
get_event_authorization.rs | //! `GET /_matrix/federation/*/event_auth/{roomId}/{eventId}`
//!
//! Endpoint to retrieve the complete auth chain for a given event.
pub mod v1 {
//! `/v1/` ([spec])
//!
//! [spec]: https://spec.matrix.org/v1.2/server-server-api/#get_matrixfederationv1event_authroomideventid
use ruma_common::{api::ruma_api, EventId, RoomId};
use serde_json::value::RawValue as RawJsonValue;
ruma_api! {
metadata: {
description: "Retrieves the complete auth chain for a given event.",
name: "get_event_authorization",
method: GET,
stable_path: "/_matrix/federation/v1/event_auth/:room_id/:event_id",
rate_limited: false,
authentication: ServerSignatures,
added: 1.0,
}
request: {
/// The room ID to get the auth chain for.
#[ruma_api(path)]
pub room_id: &'a RoomId,
/// The event ID to get the auth chain for.
#[ruma_api(path)]
pub event_id: &'a EventId,
}
response: {
/// The full set of authorization events that make up the state of the room,
/// and their authorization events, recursively.
pub auth_chain: Vec<Box<RawJsonValue>>,
}
}
impl<'a> Request<'a> {
/// Creates a new `Request` with the given room id and event id.
pub fn new(room_id: &'a RoomId, event_id: &'a EventId) -> Self {
Self { room_id, event_id }
}
}
impl Response {
/// Creates a new `Response` with the given auth chain.
pub fn new(auth_chain: Vec<Box<RawJsonValue>>) -> Self |
}
}
| {
Self { auth_chain }
} | identifier_body |
get_event_authorization.rs | //! `GET /_matrix/federation/*/event_auth/{roomId}/{eventId}`
//!
//! Endpoint to retrieve the complete auth chain for a given event.
pub mod v1 {
//! `/v1/` ([spec])
//!
//! [spec]: https://spec.matrix.org/v1.2/server-server-api/#get_matrixfederationv1event_authroomideventid
use ruma_common::{api::ruma_api, EventId, RoomId};
use serde_json::value::RawValue as RawJsonValue;
ruma_api! {
metadata: {
description: "Retrieves the complete auth chain for a given event.",
name: "get_event_authorization",
method: GET,
stable_path: "/_matrix/federation/v1/event_auth/:room_id/:event_id",
rate_limited: false,
authentication: ServerSignatures,
added: 1.0,
}
request: {
/// The room ID to get the auth chain for.
#[ruma_api(path)]
pub room_id: &'a RoomId,
/// The event ID to get the auth chain for.
#[ruma_api(path)]
pub event_id: &'a EventId,
}
response: {
/// The full set of authorization events that make up the state of the room,
/// and their authorization events, recursively.
pub auth_chain: Vec<Box<RawJsonValue>>,
}
}
impl<'a> Request<'a> {
/// Creates a new `Request` with the given room id and event id.
pub fn new(room_id: &'a RoomId, event_id: &'a EventId) -> Self {
Self { room_id, event_id }
}
}
impl Response {
/// Creates a new `Response` with the given auth chain.
pub fn | (auth_chain: Vec<Box<RawJsonValue>>) -> Self {
Self { auth_chain }
}
}
}
| new | identifier_name |
mod.rs | // Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
mod callback;
mod executor;
mod lock;
mod promise;
use std::fmt::{self, Debug, Formatter};
use std::sync::Arc;
use futures::task::{self, Task};
use futures::{Async, Future, Poll};
use self::callback::{Abort, Request as RequestCallback, UnaryRequest as UnaryRequestCallback};
use self::executor::SpawnNotify;
use self::promise::{Batch as BatchPromise, Shutdown as ShutdownPromise};
use crate::call::server::RequestContext;
use crate::call::{BatchContext, Call, MessageReader};
use crate::cq::CompletionQueue;
use crate::error::{Error, Result};
use crate::server::RequestCallContext;
pub(crate) use self::executor::{Executor, Kicker};
pub use self::lock::SpinLock;
pub use self::promise::BatchType;
/// A handle that is used to notify future that the task finishes.
pub struct NotifyHandle<T> {
result: Option<Result<T>>,
task: Option<Task>,
stale: bool,
}
impl<T> NotifyHandle<T> {
fn new() -> NotifyHandle<T> {
NotifyHandle {
result: None,
task: None,
stale: false,
}
}
/// Set the result and notify future if necessary.
fn set_result(&mut self, res: Result<T>) -> Option<Task> {
self.result = Some(res);
self.task.take()
}
}
type Inner<T> = SpinLock<NotifyHandle<T>>;
fn new_inner<T>() -> Arc<Inner<T>> {
Arc::new(SpinLock::new(NotifyHandle::new()))
}
/// Get the future status without the need to poll.
///
/// If the future is polled successfully, this function will return None.
/// Not implemented as method as it's only for internal usage.
pub fn check_alive<T>(f: &CqFuture<T>) -> Result<()> {
let guard = f.inner.lock();
match guard.result {
None => Ok(()),
Some(Err(Error::RpcFailure(ref status))) => {
Err(Error::RpcFinished(Some(status.to_owned())))
}
Some(Ok(_)) | Some(Err(_)) => Err(Error::RpcFinished(None)),
}
}
/// A future object for task that is scheduled to `CompletionQueue`.
pub struct CqFuture<T> {
inner: Arc<Inner<T>>,
}
impl<T> CqFuture<T> {
fn new(inner: Arc<Inner<T>>) -> CqFuture<T> {
CqFuture { inner }
}
}
impl<T> Future for CqFuture<T> {
type Item = T;
type Error = Error;
fn poll(&mut self) -> Poll<T, Error> {
let mut guard = self.inner.lock();
if guard.stale {
panic!("Resolved future is not supposed to be polled again.");
}
if let Some(res) = guard.result.take() {
guard.stale = true;
return Ok(Async::Ready(res?));
}
// So the task has not been finished yet, add notification hook.
if guard.task.is_none() ||!guard.task.as_ref().unwrap().will_notify_current() {
guard.task = Some(task::current());
}
Ok(Async::NotReady)
}
}
/// Future object for batch jobs.
pub type BatchFuture = CqFuture<Option<MessageReader>>;
/// A result holder for asynchronous execution.
// This enum is going to be passed to FFI, so don't use trait or generic here.
pub enum CallTag {
Batch(BatchPromise),
Request(RequestCallback),
UnaryRequest(UnaryRequestCallback),
Abort(Abort),
Shutdown(ShutdownPromise),
Spawn(SpawnNotify),
}
impl CallTag {
/// Generate a Future/CallTag pair for batch jobs.
pub fn batch_pair(ty: BatchType) -> (BatchFuture, CallTag) {
let inner = new_inner();
let batch = BatchPromise::new(ty, inner.clone());
(CqFuture::new(inner), CallTag::Batch(batch))
}
/// Generate a CallTag for request job. We don't have an eventloop
/// to pull the future, so just the tag is enough.
pub fn request(ctx: RequestCallContext) -> CallTag {
CallTag::Request(RequestCallback::new(ctx))
}
/// Generate a Future/CallTag pair for shutdown call.
pub fn shutdown_pair() -> (CqFuture<()>, CallTag) {
let inner = new_inner();
let shutdown = ShutdownPromise::new(inner.clone());
(CqFuture::new(inner), CallTag::Shutdown(shutdown))
}
/// Generate a CallTag for abort call before handler is called.
pub fn abort(call: Call) -> CallTag {
CallTag::Abort(Abort::new(call))
}
/// Generate a CallTag for unary request job.
pub fn unary_request(ctx: RequestContext, rc: RequestCallContext) -> CallTag {
let cb = UnaryRequestCallback::new(ctx, rc);
CallTag::UnaryRequest(cb)
}
/// Get the batch context from result holder.
pub fn batch_ctx(&self) -> Option<&BatchContext> {
match *self {
CallTag::Batch(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.batch_ctx()),
CallTag::Abort(ref cb) => Some(cb.batch_ctx()),
_ => None,
}
}
/// Get the request context from the result holder.
pub fn request_ctx(&self) -> Option<&RequestContext> {
match *self {
CallTag::Request(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.request_ctx()),
_ => None,
} | CallTag::Batch(prom) => prom.resolve(success),
CallTag::Request(cb) => cb.resolve(cq, success),
CallTag::UnaryRequest(cb) => cb.resolve(cq, success),
CallTag::Abort(_) => {}
CallTag::Shutdown(prom) => prom.resolve(success),
CallTag::Spawn(notify) => notify.resolve(success),
}
}
}
impl Debug for CallTag {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
CallTag::Batch(ref ctx) => write!(f, "CallTag::Batch({:?})", ctx),
CallTag::Request(_) => write!(f, "CallTag::Request(..)"),
CallTag::UnaryRequest(_) => write!(f, "CallTag::UnaryRequest(..)"),
CallTag::Abort(_) => write!(f, "CallTag::Abort(..)"),
CallTag::Shutdown(_) => write!(f, "CallTag::Shutdown"),
CallTag::Spawn(_) => write!(f, "CallTag::Spawn"),
}
}
}
#[cfg(test)]
mod tests {
use std::sync::mpsc::*;
use std::sync::*;
use std::thread;
use super::*;
use crate::env::Environment;
#[test]
fn test_resolve() {
let env = Environment::new(1);
let (cq_f1, tag1) = CallTag::shutdown_pair();
let (cq_f2, tag2) = CallTag::shutdown_pair();
let (tx, rx) = mpsc::channel();
let handler = thread::spawn(move || {
tx.send(cq_f1.wait()).unwrap();
tx.send(cq_f2.wait()).unwrap();
});
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag1.resolve(&env.pick_cq(), true);
assert!(rx.recv().unwrap().is_ok());
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag2.resolve(&env.pick_cq(), false);
match rx.recv() {
Ok(Err(Error::ShutdownFailed)) => {}
res => panic!("expect shutdown failed, but got {:?}", res),
}
handler.join().unwrap();
}
} | }
/// Resolve the CallTag with given status.
pub fn resolve(self, cq: &CompletionQueue, success: bool) {
match self { | random_line_split |
mod.rs | // Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
mod callback;
mod executor;
mod lock;
mod promise;
use std::fmt::{self, Debug, Formatter};
use std::sync::Arc;
use futures::task::{self, Task};
use futures::{Async, Future, Poll};
use self::callback::{Abort, Request as RequestCallback, UnaryRequest as UnaryRequestCallback};
use self::executor::SpawnNotify;
use self::promise::{Batch as BatchPromise, Shutdown as ShutdownPromise};
use crate::call::server::RequestContext;
use crate::call::{BatchContext, Call, MessageReader};
use crate::cq::CompletionQueue;
use crate::error::{Error, Result};
use crate::server::RequestCallContext;
pub(crate) use self::executor::{Executor, Kicker};
pub use self::lock::SpinLock;
pub use self::promise::BatchType;
/// A handle that is used to notify future that the task finishes.
pub struct NotifyHandle<T> {
result: Option<Result<T>>,
task: Option<Task>,
stale: bool,
}
impl<T> NotifyHandle<T> {
fn new() -> NotifyHandle<T> {
NotifyHandle {
result: None,
task: None,
stale: false,
}
}
/// Set the result and notify future if necessary.
fn set_result(&mut self, res: Result<T>) -> Option<Task> {
self.result = Some(res);
self.task.take()
}
}
type Inner<T> = SpinLock<NotifyHandle<T>>;
fn new_inner<T>() -> Arc<Inner<T>> {
Arc::new(SpinLock::new(NotifyHandle::new()))
}
/// Get the future status without the need to poll.
///
/// If the future is polled successfully, this function will return None.
/// Not implemented as method as it's only for internal usage.
pub fn check_alive<T>(f: &CqFuture<T>) -> Result<()> {
let guard = f.inner.lock();
match guard.result {
None => Ok(()),
Some(Err(Error::RpcFailure(ref status))) => {
Err(Error::RpcFinished(Some(status.to_owned())))
}
Some(Ok(_)) | Some(Err(_)) => Err(Error::RpcFinished(None)),
}
}
/// A future object for task that is scheduled to `CompletionQueue`.
pub struct CqFuture<T> {
inner: Arc<Inner<T>>,
}
impl<T> CqFuture<T> {
fn new(inner: Arc<Inner<T>>) -> CqFuture<T> {
CqFuture { inner }
}
}
impl<T> Future for CqFuture<T> {
type Item = T;
type Error = Error;
fn | (&mut self) -> Poll<T, Error> {
let mut guard = self.inner.lock();
if guard.stale {
panic!("Resolved future is not supposed to be polled again.");
}
if let Some(res) = guard.result.take() {
guard.stale = true;
return Ok(Async::Ready(res?));
}
// So the task has not been finished yet, add notification hook.
if guard.task.is_none() ||!guard.task.as_ref().unwrap().will_notify_current() {
guard.task = Some(task::current());
}
Ok(Async::NotReady)
}
}
/// Future object for batch jobs.
pub type BatchFuture = CqFuture<Option<MessageReader>>;
/// A result holder for asynchronous execution.
// This enum is going to be passed to FFI, so don't use trait or generic here.
pub enum CallTag {
Batch(BatchPromise),
Request(RequestCallback),
UnaryRequest(UnaryRequestCallback),
Abort(Abort),
Shutdown(ShutdownPromise),
Spawn(SpawnNotify),
}
impl CallTag {
/// Generate a Future/CallTag pair for batch jobs.
pub fn batch_pair(ty: BatchType) -> (BatchFuture, CallTag) {
let inner = new_inner();
let batch = BatchPromise::new(ty, inner.clone());
(CqFuture::new(inner), CallTag::Batch(batch))
}
/// Generate a CallTag for request job. We don't have an eventloop
/// to pull the future, so just the tag is enough.
pub fn request(ctx: RequestCallContext) -> CallTag {
CallTag::Request(RequestCallback::new(ctx))
}
/// Generate a Future/CallTag pair for shutdown call.
pub fn shutdown_pair() -> (CqFuture<()>, CallTag) {
let inner = new_inner();
let shutdown = ShutdownPromise::new(inner.clone());
(CqFuture::new(inner), CallTag::Shutdown(shutdown))
}
/// Generate a CallTag for abort call before handler is called.
pub fn abort(call: Call) -> CallTag {
CallTag::Abort(Abort::new(call))
}
/// Generate a CallTag for unary request job.
pub fn unary_request(ctx: RequestContext, rc: RequestCallContext) -> CallTag {
let cb = UnaryRequestCallback::new(ctx, rc);
CallTag::UnaryRequest(cb)
}
/// Get the batch context from result holder.
pub fn batch_ctx(&self) -> Option<&BatchContext> {
match *self {
CallTag::Batch(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.batch_ctx()),
CallTag::Abort(ref cb) => Some(cb.batch_ctx()),
_ => None,
}
}
/// Get the request context from the result holder.
pub fn request_ctx(&self) -> Option<&RequestContext> {
match *self {
CallTag::Request(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.request_ctx()),
_ => None,
}
}
/// Resolve the CallTag with given status.
pub fn resolve(self, cq: &CompletionQueue, success: bool) {
match self {
CallTag::Batch(prom) => prom.resolve(success),
CallTag::Request(cb) => cb.resolve(cq, success),
CallTag::UnaryRequest(cb) => cb.resolve(cq, success),
CallTag::Abort(_) => {}
CallTag::Shutdown(prom) => prom.resolve(success),
CallTag::Spawn(notify) => notify.resolve(success),
}
}
}
impl Debug for CallTag {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
CallTag::Batch(ref ctx) => write!(f, "CallTag::Batch({:?})", ctx),
CallTag::Request(_) => write!(f, "CallTag::Request(..)"),
CallTag::UnaryRequest(_) => write!(f, "CallTag::UnaryRequest(..)"),
CallTag::Abort(_) => write!(f, "CallTag::Abort(..)"),
CallTag::Shutdown(_) => write!(f, "CallTag::Shutdown"),
CallTag::Spawn(_) => write!(f, "CallTag::Spawn"),
}
}
}
#[cfg(test)]
mod tests {
use std::sync::mpsc::*;
use std::sync::*;
use std::thread;
use super::*;
use crate::env::Environment;
#[test]
fn test_resolve() {
let env = Environment::new(1);
let (cq_f1, tag1) = CallTag::shutdown_pair();
let (cq_f2, tag2) = CallTag::shutdown_pair();
let (tx, rx) = mpsc::channel();
let handler = thread::spawn(move || {
tx.send(cq_f1.wait()).unwrap();
tx.send(cq_f2.wait()).unwrap();
});
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag1.resolve(&env.pick_cq(), true);
assert!(rx.recv().unwrap().is_ok());
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag2.resolve(&env.pick_cq(), false);
match rx.recv() {
Ok(Err(Error::ShutdownFailed)) => {}
res => panic!("expect shutdown failed, but got {:?}", res),
}
handler.join().unwrap();
}
}
| poll | identifier_name |
mod.rs | // Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
mod callback;
mod executor;
mod lock;
mod promise;
use std::fmt::{self, Debug, Formatter};
use std::sync::Arc;
use futures::task::{self, Task};
use futures::{Async, Future, Poll};
use self::callback::{Abort, Request as RequestCallback, UnaryRequest as UnaryRequestCallback};
use self::executor::SpawnNotify;
use self::promise::{Batch as BatchPromise, Shutdown as ShutdownPromise};
use crate::call::server::RequestContext;
use crate::call::{BatchContext, Call, MessageReader};
use crate::cq::CompletionQueue;
use crate::error::{Error, Result};
use crate::server::RequestCallContext;
pub(crate) use self::executor::{Executor, Kicker};
pub use self::lock::SpinLock;
pub use self::promise::BatchType;
/// A handle that is used to notify future that the task finishes.
pub struct NotifyHandle<T> {
result: Option<Result<T>>,
task: Option<Task>,
stale: bool,
}
impl<T> NotifyHandle<T> {
fn new() -> NotifyHandle<T> {
NotifyHandle {
result: None,
task: None,
stale: false,
}
}
/// Set the result and notify future if necessary.
fn set_result(&mut self, res: Result<T>) -> Option<Task> {
self.result = Some(res);
self.task.take()
}
}
type Inner<T> = SpinLock<NotifyHandle<T>>;
fn new_inner<T>() -> Arc<Inner<T>> {
Arc::new(SpinLock::new(NotifyHandle::new()))
}
/// Get the future status without the need to poll.
///
/// If the future is polled successfully, this function will return None.
/// Not implemented as method as it's only for internal usage.
pub fn check_alive<T>(f: &CqFuture<T>) -> Result<()> {
let guard = f.inner.lock();
match guard.result {
None => Ok(()),
Some(Err(Error::RpcFailure(ref status))) => {
Err(Error::RpcFinished(Some(status.to_owned())))
}
Some(Ok(_)) | Some(Err(_)) => Err(Error::RpcFinished(None)),
}
}
/// A future object for task that is scheduled to `CompletionQueue`.
pub struct CqFuture<T> {
inner: Arc<Inner<T>>,
}
impl<T> CqFuture<T> {
fn new(inner: Arc<Inner<T>>) -> CqFuture<T> {
CqFuture { inner }
}
}
impl<T> Future for CqFuture<T> {
type Item = T;
type Error = Error;
fn poll(&mut self) -> Poll<T, Error> {
let mut guard = self.inner.lock();
if guard.stale {
panic!("Resolved future is not supposed to be polled again.");
}
if let Some(res) = guard.result.take() {
guard.stale = true;
return Ok(Async::Ready(res?));
}
// So the task has not been finished yet, add notification hook.
if guard.task.is_none() ||!guard.task.as_ref().unwrap().will_notify_current() {
guard.task = Some(task::current());
}
Ok(Async::NotReady)
}
}
/// Future object for batch jobs.
pub type BatchFuture = CqFuture<Option<MessageReader>>;
/// A result holder for asynchronous execution.
// This enum is going to be passed to FFI, so don't use trait or generic here.
pub enum CallTag {
Batch(BatchPromise),
Request(RequestCallback),
UnaryRequest(UnaryRequestCallback),
Abort(Abort),
Shutdown(ShutdownPromise),
Spawn(SpawnNotify),
}
impl CallTag {
/// Generate a Future/CallTag pair for batch jobs.
pub fn batch_pair(ty: BatchType) -> (BatchFuture, CallTag) {
let inner = new_inner();
let batch = BatchPromise::new(ty, inner.clone());
(CqFuture::new(inner), CallTag::Batch(batch))
}
/// Generate a CallTag for request job. We don't have an eventloop
/// to pull the future, so just the tag is enough.
pub fn request(ctx: RequestCallContext) -> CallTag {
CallTag::Request(RequestCallback::new(ctx))
}
/// Generate a Future/CallTag pair for shutdown call.
pub fn shutdown_pair() -> (CqFuture<()>, CallTag) {
let inner = new_inner();
let shutdown = ShutdownPromise::new(inner.clone());
(CqFuture::new(inner), CallTag::Shutdown(shutdown))
}
/// Generate a CallTag for abort call before handler is called.
pub fn abort(call: Call) -> CallTag {
CallTag::Abort(Abort::new(call))
}
/// Generate a CallTag for unary request job.
pub fn unary_request(ctx: RequestContext, rc: RequestCallContext) -> CallTag {
let cb = UnaryRequestCallback::new(ctx, rc);
CallTag::UnaryRequest(cb)
}
/// Get the batch context from result holder.
pub fn batch_ctx(&self) -> Option<&BatchContext> {
match *self {
CallTag::Batch(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.batch_ctx()),
CallTag::Abort(ref cb) => Some(cb.batch_ctx()),
_ => None,
}
}
/// Get the request context from the result holder.
pub fn request_ctx(&self) -> Option<&RequestContext> {
match *self {
CallTag::Request(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.request_ctx()),
_ => None,
}
}
/// Resolve the CallTag with given status.
pub fn resolve(self, cq: &CompletionQueue, success: bool) {
match self {
CallTag::Batch(prom) => prom.resolve(success),
CallTag::Request(cb) => cb.resolve(cq, success),
CallTag::UnaryRequest(cb) => cb.resolve(cq, success),
CallTag::Abort(_) => {}
CallTag::Shutdown(prom) => prom.resolve(success),
CallTag::Spawn(notify) => notify.resolve(success),
}
}
}
impl Debug for CallTag {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result |
}
#[cfg(test)]
mod tests {
use std::sync::mpsc::*;
use std::sync::*;
use std::thread;
use super::*;
use crate::env::Environment;
#[test]
fn test_resolve() {
let env = Environment::new(1);
let (cq_f1, tag1) = CallTag::shutdown_pair();
let (cq_f2, tag2) = CallTag::shutdown_pair();
let (tx, rx) = mpsc::channel();
let handler = thread::spawn(move || {
tx.send(cq_f1.wait()).unwrap();
tx.send(cq_f2.wait()).unwrap();
});
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag1.resolve(&env.pick_cq(), true);
assert!(rx.recv().unwrap().is_ok());
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag2.resolve(&env.pick_cq(), false);
match rx.recv() {
Ok(Err(Error::ShutdownFailed)) => {}
res => panic!("expect shutdown failed, but got {:?}", res),
}
handler.join().unwrap();
}
}
| {
match *self {
CallTag::Batch(ref ctx) => write!(f, "CallTag::Batch({:?})", ctx),
CallTag::Request(_) => write!(f, "CallTag::Request(..)"),
CallTag::UnaryRequest(_) => write!(f, "CallTag::UnaryRequest(..)"),
CallTag::Abort(_) => write!(f, "CallTag::Abort(..)"),
CallTag::Shutdown(_) => write!(f, "CallTag::Shutdown"),
CallTag::Spawn(_) => write!(f, "CallTag::Spawn"),
}
} | identifier_body |
wall.rs | use glium;
use types;
use camera;
use shaders;
use texture;
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 3],
normal: [f32; 3],
tex_coords: [f32; 2]
}
implement_vertex!(Vertex, position, normal, tex_coords);
pub struct Wall {
matrix: types::Mat,
buffer: glium::VertexBuffer<Vertex>,
diffuse_texture: glium::texture::SrgbTexture2d,
normal_map: glium::texture::texture2d::Texture2d
}
impl Wall {
pub fn new(window: &types::Display) -> Self {
let material = "wall".to_string();
let diffuse_texture = texture::load(window, &material);
let normal_map = texture::load_normal(window, &material); | normal: [0.0, 0.0, -1.0],
tex_coords: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [0.0, 0.0] },
Vertex { position: [ 1.0, -1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [1.0, 0.0] },
]).unwrap();
Wall {
buffer: buffer,
diffuse_texture: diffuse_texture,
normal_map: normal_map,
matrix: [
[10.01, 0.0, 0.0, 0.0],
[0.0, 10.01, 0.0, 0.0],
[0.0, 0.0, 10.01, 0.0],
[0.0, 10.0, 10.0, 1.0f32]
]
}
}
pub fn render(self: &Self, frame_buffer: &mut glium::Frame,
camera: &camera::Camera, library: &shaders::ShaderLibrary) {
use glium::Surface;
use glium::DrawParameters;
let strip = glium::index::PrimitiveType::TriangleStrip;
let params = glium::DrawParameters {
depth: glium::Depth {
test: glium::draw_parameters::DepthTest::IfLess,
write: true,
.. Default::default()
},
.. Default::default()
};
let light = [-1.0, 0.4, 0.9f32];
let uniforms = uniform! {
model: self.matrix,
projection: camera.projection,
view: camera.get_view(),
u_light: light,
diffuse_tex: &self.diffuse_texture,
normal_tex: &self.normal_map
};
frame_buffer.draw(&self.buffer, &glium::index::NoIndices(strip),
&library.lit_texture,
&uniforms, ¶ms).unwrap();
}
} | let buffer = glium::vertex::VertexBuffer::new(window, &[
Vertex { position: [-1.0, 1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [0.0, 1.0] },
Vertex { position: [ 1.0, 1.0, 0.0], | random_line_split |
wall.rs | use glium;
use types;
use camera;
use shaders;
use texture;
#[derive(Copy, Clone)]
struct | {
position: [f32; 3],
normal: [f32; 3],
tex_coords: [f32; 2]
}
implement_vertex!(Vertex, position, normal, tex_coords);
pub struct Wall {
matrix: types::Mat,
buffer: glium::VertexBuffer<Vertex>,
diffuse_texture: glium::texture::SrgbTexture2d,
normal_map: glium::texture::texture2d::Texture2d
}
impl Wall {
pub fn new(window: &types::Display) -> Self {
let material = "wall".to_string();
let diffuse_texture = texture::load(window, &material);
let normal_map = texture::load_normal(window, &material);
let buffer = glium::vertex::VertexBuffer::new(window, &[
Vertex { position: [-1.0, 1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [0.0, 1.0] },
Vertex { position: [ 1.0, 1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [0.0, 0.0] },
Vertex { position: [ 1.0, -1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [1.0, 0.0] },
]).unwrap();
Wall {
buffer: buffer,
diffuse_texture: diffuse_texture,
normal_map: normal_map,
matrix: [
[10.01, 0.0, 0.0, 0.0],
[0.0, 10.01, 0.0, 0.0],
[0.0, 0.0, 10.01, 0.0],
[0.0, 10.0, 10.0, 1.0f32]
]
}
}
pub fn render(self: &Self, frame_buffer: &mut glium::Frame,
camera: &camera::Camera, library: &shaders::ShaderLibrary) {
use glium::Surface;
use glium::DrawParameters;
let strip = glium::index::PrimitiveType::TriangleStrip;
let params = glium::DrawParameters {
depth: glium::Depth {
test: glium::draw_parameters::DepthTest::IfLess,
write: true,
.. Default::default()
},
.. Default::default()
};
let light = [-1.0, 0.4, 0.9f32];
let uniforms = uniform! {
model: self.matrix,
projection: camera.projection,
view: camera.get_view(),
u_light: light,
diffuse_tex: &self.diffuse_texture,
normal_tex: &self.normal_map
};
frame_buffer.draw(&self.buffer, &glium::index::NoIndices(strip),
&library.lit_texture,
&uniforms, ¶ms).unwrap();
}
}
| Vertex | identifier_name |
wall.rs | use glium;
use types;
use camera;
use shaders;
use texture;
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 3],
normal: [f32; 3],
tex_coords: [f32; 2]
}
implement_vertex!(Vertex, position, normal, tex_coords);
pub struct Wall {
matrix: types::Mat,
buffer: glium::VertexBuffer<Vertex>,
diffuse_texture: glium::texture::SrgbTexture2d,
normal_map: glium::texture::texture2d::Texture2d
}
impl Wall {
pub fn new(window: &types::Display) -> Self {
let material = "wall".to_string();
let diffuse_texture = texture::load(window, &material);
let normal_map = texture::load_normal(window, &material);
let buffer = glium::vertex::VertexBuffer::new(window, &[
Vertex { position: [-1.0, 1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [0.0, 1.0] },
Vertex { position: [ 1.0, 1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [0.0, 0.0] },
Vertex { position: [ 1.0, -1.0, 0.0],
normal: [0.0, 0.0, -1.0],
tex_coords: [1.0, 0.0] },
]).unwrap();
Wall {
buffer: buffer,
diffuse_texture: diffuse_texture,
normal_map: normal_map,
matrix: [
[10.01, 0.0, 0.0, 0.0],
[0.0, 10.01, 0.0, 0.0],
[0.0, 0.0, 10.01, 0.0],
[0.0, 10.0, 10.0, 1.0f32]
]
}
}
pub fn render(self: &Self, frame_buffer: &mut glium::Frame,
camera: &camera::Camera, library: &shaders::ShaderLibrary) | u_light: light,
diffuse_tex: &self.diffuse_texture,
normal_tex: &self.normal_map
};
frame_buffer.draw(&self.buffer, &glium::index::NoIndices(strip),
&library.lit_texture,
&uniforms, ¶ms).unwrap();
}
}
| {
use glium::Surface;
use glium::DrawParameters;
let strip = glium::index::PrimitiveType::TriangleStrip;
let params = glium::DrawParameters {
depth: glium::Depth {
test: glium::draw_parameters::DepthTest::IfLess,
write: true,
.. Default::default()
},
.. Default::default()
};
let light = [-1.0, 0.4, 0.9f32];
let uniforms = uniform! {
model: self.matrix,
projection: camera.projection,
view: camera.get_view(), | identifier_body |
htmlsourceelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLSourceElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLSourceElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLSourceElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLSourceElement {
pub htmlelement: HTMLElement
}
impl HTMLSourceElementDerived for EventTarget {
fn is_htmlsourceelement(&self) -> bool |
}
impl HTMLSourceElement {
pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLSourceElement {
HTMLSourceElement {
htmlelement: HTMLElement::new_inherited(HTMLSourceElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLSourceElement> {
let element = HTMLSourceElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLSourceElementBinding::Wrap)
}
}
pub trait HTMLSourceElementMethods {
}
impl Reflectable for HTMLSourceElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}
| {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLSourceElementTypeId))
} | identifier_body |
htmlsourceelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLSourceElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLSourceElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLSourceElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLSourceElement {
pub htmlelement: HTMLElement
}
impl HTMLSourceElementDerived for EventTarget {
fn is_htmlsourceelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLSourceElementTypeId))
}
}
impl HTMLSourceElement {
pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLSourceElement {
HTMLSourceElement {
htmlelement: HTMLElement::new_inherited(HTMLSourceElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLSourceElement> {
let element = HTMLSourceElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLSourceElementBinding::Wrap)
}
}
pub trait HTMLSourceElementMethods {
}
impl Reflectable for HTMLSourceElement {
fn | <'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}
| reflector | identifier_name |
htmlsourceelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLSourceElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLSourceElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLSourceElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId}; | }
impl HTMLSourceElementDerived for EventTarget {
fn is_htmlsourceelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLSourceElementTypeId))
}
}
impl HTMLSourceElement {
pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLSourceElement {
HTMLSourceElement {
htmlelement: HTMLElement::new_inherited(HTMLSourceElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLSourceElement> {
let element = HTMLSourceElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLSourceElementBinding::Wrap)
}
}
pub trait HTMLSourceElementMethods {
}
impl Reflectable for HTMLSourceElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
} | use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLSourceElement {
pub htmlelement: HTMLElement | random_line_split |
wrapper.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A safe wrapper for DOM nodes that prevents layout from mutating the DOM, from letting DOM nodes
//! escape, and from generally doing anything that it isn't supposed to. This is accomplished via
//! a simple whitelist of allowed operations, along with some lifetime magic to prevent nodes from
//! escaping.
//!
//! As a security wrapper is only as good as its whitelist, be careful when adding operations to
//! this list. The cardinal rules are:
//!
//! 1. Layout is not allowed to mutate the DOM.
//!
//! 2. Layout is not allowed to see anything with `LayoutDom` in the name, because it could hang
//! onto these objects and cause use-after-free.
//!
//! When implementing wrapper functions, be careful that you do not touch the borrow flags, or you
//! will race and cause spurious thread failure. (Note that I do not believe these races are
//! exploitable, but they'll result in brokenness nonetheless.)
//!
//! Rules of the road for this file:
//!
//! * Do not call any methods on DOM nodes without checking to see whether they use borrow flags.
//!
//! o Instead of `get_attr()`, use `.get_attr_val_for_layout()`.
//!
//! o Instead of `html_element_in_html_document()`, use
//! `html_element_in_html_document_for_layout()`.
#![allow(unsafe_code)]
use atomic_refcell::{AtomicRef, AtomicRefMut};
use crate::data::{LayoutData, LayoutDataFlags, StyleAndLayoutData};
use script_layout_interface::wrapper_traits::GetLayoutData;
use script_layout_interface::wrapper_traits::{ThreadSafeLayoutElement, ThreadSafeLayoutNode};
use style::dom::{NodeInfo, TNode};
use style::selector_parser::RestyleDamage;
use style::values::computed::counters::ContentItem;
use style::values::generics::counters::Content;
pub trait LayoutNodeLayoutData {
/// Similar to borrow_data*, but returns the full PersistentLayoutData rather
/// than only the style::data::ElementData.
fn borrow_layout_data(&self) -> Option<AtomicRef<LayoutData>>;
fn mutate_layout_data(&self) -> Option<AtomicRefMut<LayoutData>>;
fn flow_debug_id(self) -> usize;
}
impl<T: GetLayoutData> LayoutNodeLayoutData for T {
fn borrow_layout_data(&self) -> Option<AtomicRef<LayoutData>> {
self.get_raw_data().map(|d| d.layout_data.borrow())
}
fn mutate_layout_data(&self) -> Option<AtomicRefMut<LayoutData>> {
self.get_raw_data().map(|d| d.layout_data.borrow_mut())
}
fn flow_debug_id(self) -> usize {
self.borrow_layout_data()
.map_or(0, |d| d.flow_construction_result.debug_id())
}
}
pub trait GetRawData {
fn get_raw_data(&self) -> Option<&StyleAndLayoutData>;
}
impl<T: GetLayoutData> GetRawData for T {
fn get_raw_data(&self) -> Option<&StyleAndLayoutData> {
self.get_style_and_layout_data().map(|opaque| {
let container = opaque.ptr.as_ptr() as *mut StyleAndLayoutData;
unsafe { &*container }
})
}
}
pub trait ThreadSafeLayoutNodeHelpers {
/// Returns the layout data flags for this node.
fn flags(self) -> LayoutDataFlags;
/// Adds the given flags to this node.
fn insert_flags(self, new_flags: LayoutDataFlags);
/// Removes the given flags from this node.
fn remove_flags(self, flags: LayoutDataFlags);
/// If this is a text node, generated content, or a form element, copies out
/// its content. Otherwise, panics.
///
/// FIXME(pcwalton): This might have too much copying and/or allocation. Profile this.
fn text_content(&self) -> TextContent;
/// The RestyleDamage from any restyling, or RestyleDamage::rebuild_and_reflow() if this
/// is the first time layout is visiting this node. We implement this here, rather than
/// with the rest of the wrapper layer, because we need layout code to determine whether
/// layout has visited the node.
fn restyle_damage(self) -> RestyleDamage;
}
impl<T: ThreadSafeLayoutNode> ThreadSafeLayoutNodeHelpers for T {
fn flags(self) -> LayoutDataFlags {
self.borrow_layout_data().as_ref().unwrap().flags
}
fn insert_flags(self, new_flags: LayoutDataFlags) {
self.mutate_layout_data().unwrap().flags.insert(new_flags);
}
fn remove_flags(self, flags: LayoutDataFlags) {
self.mutate_layout_data().unwrap().flags.remove(flags);
}
fn text_content(&self) -> TextContent {
if self.get_pseudo_element_type().is_replaced_content() {
let style = self.as_element().unwrap().resolved_style();
return TextContent::GeneratedContent(match style.as_ref().get_counters().content {
Content::Items(ref value) => value.to_vec(),
_ => vec![],
});
}
TextContent::Text(self.node_text_content().into_boxed_str())
}
fn restyle_damage(self) -> RestyleDamage { |
// If this is a text node, use the parent element, since that's what
// controls our style.
if node.is_text_node() {
node = node.parent_node().unwrap();
debug_assert!(node.is_element());
}
let damage = {
let data = node.get_raw_data().unwrap();
if!data
.layout_data
.borrow()
.flags
.contains(crate::data::LayoutDataFlags::HAS_BEEN_TRAVERSED)
{
// We're reflowing a node that was styled for the first time and
// has never been visited by layout. Return rebuild_and_reflow,
// because that's what the code expects.
RestyleDamage::rebuild_and_reflow()
} else {
data.style_data.element_data.borrow().damage
}
};
damage
}
}
pub enum TextContent {
Text(Box<str>),
GeneratedContent(Vec<ContentItem>),
}
impl TextContent {
pub fn is_empty(&self) -> bool {
match *self {
TextContent::Text(_) => false,
TextContent::GeneratedContent(ref content) => content.is_empty(),
}
}
} | // We need the underlying node to potentially access the parent in the
// case of text nodes. This is safe as long as we don't let the parent
// escape and never access its descendants.
let mut node = unsafe { self.unsafe_get() }; | random_line_split |
wrapper.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A safe wrapper for DOM nodes that prevents layout from mutating the DOM, from letting DOM nodes
//! escape, and from generally doing anything that it isn't supposed to. This is accomplished via
//! a simple whitelist of allowed operations, along with some lifetime magic to prevent nodes from
//! escaping.
//!
//! As a security wrapper is only as good as its whitelist, be careful when adding operations to
//! this list. The cardinal rules are:
//!
//! 1. Layout is not allowed to mutate the DOM.
//!
//! 2. Layout is not allowed to see anything with `LayoutDom` in the name, because it could hang
//! onto these objects and cause use-after-free.
//!
//! When implementing wrapper functions, be careful that you do not touch the borrow flags, or you
//! will race and cause spurious thread failure. (Note that I do not believe these races are
//! exploitable, but they'll result in brokenness nonetheless.)
//!
//! Rules of the road for this file:
//!
//! * Do not call any methods on DOM nodes without checking to see whether they use borrow flags.
//!
//! o Instead of `get_attr()`, use `.get_attr_val_for_layout()`.
//!
//! o Instead of `html_element_in_html_document()`, use
//! `html_element_in_html_document_for_layout()`.
#![allow(unsafe_code)]
use atomic_refcell::{AtomicRef, AtomicRefMut};
use crate::data::{LayoutData, LayoutDataFlags, StyleAndLayoutData};
use script_layout_interface::wrapper_traits::GetLayoutData;
use script_layout_interface::wrapper_traits::{ThreadSafeLayoutElement, ThreadSafeLayoutNode};
use style::dom::{NodeInfo, TNode};
use style::selector_parser::RestyleDamage;
use style::values::computed::counters::ContentItem;
use style::values::generics::counters::Content;
pub trait LayoutNodeLayoutData {
/// Similar to borrow_data*, but returns the full PersistentLayoutData rather
/// than only the style::data::ElementData.
fn borrow_layout_data(&self) -> Option<AtomicRef<LayoutData>>;
fn mutate_layout_data(&self) -> Option<AtomicRefMut<LayoutData>>;
fn flow_debug_id(self) -> usize;
}
impl<T: GetLayoutData> LayoutNodeLayoutData for T {
fn borrow_layout_data(&self) -> Option<AtomicRef<LayoutData>> {
self.get_raw_data().map(|d| d.layout_data.borrow())
}
fn | (&self) -> Option<AtomicRefMut<LayoutData>> {
self.get_raw_data().map(|d| d.layout_data.borrow_mut())
}
fn flow_debug_id(self) -> usize {
self.borrow_layout_data()
.map_or(0, |d| d.flow_construction_result.debug_id())
}
}
pub trait GetRawData {
fn get_raw_data(&self) -> Option<&StyleAndLayoutData>;
}
impl<T: GetLayoutData> GetRawData for T {
fn get_raw_data(&self) -> Option<&StyleAndLayoutData> {
self.get_style_and_layout_data().map(|opaque| {
let container = opaque.ptr.as_ptr() as *mut StyleAndLayoutData;
unsafe { &*container }
})
}
}
pub trait ThreadSafeLayoutNodeHelpers {
/// Returns the layout data flags for this node.
fn flags(self) -> LayoutDataFlags;
/// Adds the given flags to this node.
fn insert_flags(self, new_flags: LayoutDataFlags);
/// Removes the given flags from this node.
fn remove_flags(self, flags: LayoutDataFlags);
/// If this is a text node, generated content, or a form element, copies out
/// its content. Otherwise, panics.
///
/// FIXME(pcwalton): This might have too much copying and/or allocation. Profile this.
fn text_content(&self) -> TextContent;
/// The RestyleDamage from any restyling, or RestyleDamage::rebuild_and_reflow() if this
/// is the first time layout is visiting this node. We implement this here, rather than
/// with the rest of the wrapper layer, because we need layout code to determine whether
/// layout has visited the node.
fn restyle_damage(self) -> RestyleDamage;
}
impl<T: ThreadSafeLayoutNode> ThreadSafeLayoutNodeHelpers for T {
fn flags(self) -> LayoutDataFlags {
self.borrow_layout_data().as_ref().unwrap().flags
}
fn insert_flags(self, new_flags: LayoutDataFlags) {
self.mutate_layout_data().unwrap().flags.insert(new_flags);
}
fn remove_flags(self, flags: LayoutDataFlags) {
self.mutate_layout_data().unwrap().flags.remove(flags);
}
fn text_content(&self) -> TextContent {
if self.get_pseudo_element_type().is_replaced_content() {
let style = self.as_element().unwrap().resolved_style();
return TextContent::GeneratedContent(match style.as_ref().get_counters().content {
Content::Items(ref value) => value.to_vec(),
_ => vec![],
});
}
TextContent::Text(self.node_text_content().into_boxed_str())
}
fn restyle_damage(self) -> RestyleDamage {
// We need the underlying node to potentially access the parent in the
// case of text nodes. This is safe as long as we don't let the parent
// escape and never access its descendants.
let mut node = unsafe { self.unsafe_get() };
// If this is a text node, use the parent element, since that's what
// controls our style.
if node.is_text_node() {
node = node.parent_node().unwrap();
debug_assert!(node.is_element());
}
let damage = {
let data = node.get_raw_data().unwrap();
if!data
.layout_data
.borrow()
.flags
.contains(crate::data::LayoutDataFlags::HAS_BEEN_TRAVERSED)
{
// We're reflowing a node that was styled for the first time and
// has never been visited by layout. Return rebuild_and_reflow,
// because that's what the code expects.
RestyleDamage::rebuild_and_reflow()
} else {
data.style_data.element_data.borrow().damage
}
};
damage
}
}
pub enum TextContent {
Text(Box<str>),
GeneratedContent(Vec<ContentItem>),
}
impl TextContent {
pub fn is_empty(&self) -> bool {
match *self {
TextContent::Text(_) => false,
TextContent::GeneratedContent(ref content) => content.is_empty(),
}
}
}
| mutate_layout_data | identifier_name |
wrapper.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A safe wrapper for DOM nodes that prevents layout from mutating the DOM, from letting DOM nodes
//! escape, and from generally doing anything that it isn't supposed to. This is accomplished via
//! a simple whitelist of allowed operations, along with some lifetime magic to prevent nodes from
//! escaping.
//!
//! As a security wrapper is only as good as its whitelist, be careful when adding operations to
//! this list. The cardinal rules are:
//!
//! 1. Layout is not allowed to mutate the DOM.
//!
//! 2. Layout is not allowed to see anything with `LayoutDom` in the name, because it could hang
//! onto these objects and cause use-after-free.
//!
//! When implementing wrapper functions, be careful that you do not touch the borrow flags, or you
//! will race and cause spurious thread failure. (Note that I do not believe these races are
//! exploitable, but they'll result in brokenness nonetheless.)
//!
//! Rules of the road for this file:
//!
//! * Do not call any methods on DOM nodes without checking to see whether they use borrow flags.
//!
//! o Instead of `get_attr()`, use `.get_attr_val_for_layout()`.
//!
//! o Instead of `html_element_in_html_document()`, use
//! `html_element_in_html_document_for_layout()`.
#![allow(unsafe_code)]
use atomic_refcell::{AtomicRef, AtomicRefMut};
use crate::data::{LayoutData, LayoutDataFlags, StyleAndLayoutData};
use script_layout_interface::wrapper_traits::GetLayoutData;
use script_layout_interface::wrapper_traits::{ThreadSafeLayoutElement, ThreadSafeLayoutNode};
use style::dom::{NodeInfo, TNode};
use style::selector_parser::RestyleDamage;
use style::values::computed::counters::ContentItem;
use style::values::generics::counters::Content;
pub trait LayoutNodeLayoutData {
/// Similar to borrow_data*, but returns the full PersistentLayoutData rather
/// than only the style::data::ElementData.
fn borrow_layout_data(&self) -> Option<AtomicRef<LayoutData>>;
fn mutate_layout_data(&self) -> Option<AtomicRefMut<LayoutData>>;
fn flow_debug_id(self) -> usize;
}
impl<T: GetLayoutData> LayoutNodeLayoutData for T {
fn borrow_layout_data(&self) -> Option<AtomicRef<LayoutData>> {
self.get_raw_data().map(|d| d.layout_data.borrow())
}
fn mutate_layout_data(&self) -> Option<AtomicRefMut<LayoutData>> {
self.get_raw_data().map(|d| d.layout_data.borrow_mut())
}
fn flow_debug_id(self) -> usize {
self.borrow_layout_data()
.map_or(0, |d| d.flow_construction_result.debug_id())
}
}
pub trait GetRawData {
fn get_raw_data(&self) -> Option<&StyleAndLayoutData>;
}
impl<T: GetLayoutData> GetRawData for T {
fn get_raw_data(&self) -> Option<&StyleAndLayoutData> {
self.get_style_and_layout_data().map(|opaque| {
let container = opaque.ptr.as_ptr() as *mut StyleAndLayoutData;
unsafe { &*container }
})
}
}
pub trait ThreadSafeLayoutNodeHelpers {
/// Returns the layout data flags for this node.
fn flags(self) -> LayoutDataFlags;
/// Adds the given flags to this node.
fn insert_flags(self, new_flags: LayoutDataFlags);
/// Removes the given flags from this node.
fn remove_flags(self, flags: LayoutDataFlags);
/// If this is a text node, generated content, or a form element, copies out
/// its content. Otherwise, panics.
///
/// FIXME(pcwalton): This might have too much copying and/or allocation. Profile this.
fn text_content(&self) -> TextContent;
/// The RestyleDamage from any restyling, or RestyleDamage::rebuild_and_reflow() if this
/// is the first time layout is visiting this node. We implement this here, rather than
/// with the rest of the wrapper layer, because we need layout code to determine whether
/// layout has visited the node.
fn restyle_damage(self) -> RestyleDamage;
}
impl<T: ThreadSafeLayoutNode> ThreadSafeLayoutNodeHelpers for T {
fn flags(self) -> LayoutDataFlags {
self.borrow_layout_data().as_ref().unwrap().flags
}
fn insert_flags(self, new_flags: LayoutDataFlags) {
self.mutate_layout_data().unwrap().flags.insert(new_flags);
}
fn remove_flags(self, flags: LayoutDataFlags) |
fn text_content(&self) -> TextContent {
if self.get_pseudo_element_type().is_replaced_content() {
let style = self.as_element().unwrap().resolved_style();
return TextContent::GeneratedContent(match style.as_ref().get_counters().content {
Content::Items(ref value) => value.to_vec(),
_ => vec![],
});
}
TextContent::Text(self.node_text_content().into_boxed_str())
}
fn restyle_damage(self) -> RestyleDamage {
// We need the underlying node to potentially access the parent in the
// case of text nodes. This is safe as long as we don't let the parent
// escape and never access its descendants.
let mut node = unsafe { self.unsafe_get() };
// If this is a text node, use the parent element, since that's what
// controls our style.
if node.is_text_node() {
node = node.parent_node().unwrap();
debug_assert!(node.is_element());
}
let damage = {
let data = node.get_raw_data().unwrap();
if!data
.layout_data
.borrow()
.flags
.contains(crate::data::LayoutDataFlags::HAS_BEEN_TRAVERSED)
{
// We're reflowing a node that was styled for the first time and
// has never been visited by layout. Return rebuild_and_reflow,
// because that's what the code expects.
RestyleDamage::rebuild_and_reflow()
} else {
data.style_data.element_data.borrow().damage
}
};
damage
}
}
pub enum TextContent {
Text(Box<str>),
GeneratedContent(Vec<ContentItem>),
}
impl TextContent {
pub fn is_empty(&self) -> bool {
match *self {
TextContent::Text(_) => false,
TextContent::GeneratedContent(ref content) => content.is_empty(),
}
}
}
| {
self.mutate_layout_data().unwrap().flags.remove(flags);
} | identifier_body |
c_type.rs | pub fn rustify_pointers(c_type: &str) -> (String, String) {
let mut input = c_type.trim();
let leading_const = input.starts_with("const ");
if leading_const {
input = &input[6..];
}
let end = [
input.find(" const"),
input.find("*const"),
input.find("*"),
Some(input.len()),
].iter().filter_map(|&x| x).min().unwrap();
let inner = input[..end].trim().into();
let mut ptrs: Vec<_> = input[end..].rsplit('*').skip(1)
.map(|s| if s.contains("const") { "*const" } else { "*mut" }).collect();
if let (true, Some(p)) = (leading_const, ptrs.last_mut()) {
*p = "*const";
}
let res = (ptrs.join(" "), inner);
trace!("rustify `{}` -> `{}` `{}`", c_type, res.0, res.1);
res
}
#[cfg(test)]
mod tests {
use super::rustify_pointers as rustify_ptr;
fn s(x: &str, y: &str) -> (String, String) {
(x.into(), y.into())
}
#[test]
fn | () {
assert_eq!(rustify_ptr("char"), s("", "char"));
assert_eq!(rustify_ptr("char*"), s("*mut", "char"));
assert_eq!(rustify_ptr("const char*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const *"), s("*const", "char"));
assert_eq!(rustify_ptr(" char * * "), s("*mut *mut", "char"));
assert_eq!(rustify_ptr("const char**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("char const**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("const char* const*"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char const * const *"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char* const*"), s("*const *mut", "char"));
assert_eq!(rustify_ptr("GtkWidget*"), s("*mut", "GtkWidget"));
}
}
| rustify_pointers | identifier_name |
c_type.rs | pub fn rustify_pointers(c_type: &str) -> (String, String) | let res = (ptrs.join(" "), inner);
trace!("rustify `{}` -> `{}` `{}`", c_type, res.0, res.1);
res
}
#[cfg(test)]
mod tests {
use super::rustify_pointers as rustify_ptr;
fn s(x: &str, y: &str) -> (String, String) {
(x.into(), y.into())
}
#[test]
fn rustify_pointers() {
assert_eq!(rustify_ptr("char"), s("", "char"));
assert_eq!(rustify_ptr("char*"), s("*mut", "char"));
assert_eq!(rustify_ptr("const char*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const *"), s("*const", "char"));
assert_eq!(rustify_ptr(" char * * "), s("*mut *mut", "char"));
assert_eq!(rustify_ptr("const char**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("char const**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("const char* const*"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char const * const *"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char* const*"), s("*const *mut", "char"));
assert_eq!(rustify_ptr("GtkWidget*"), s("*mut", "GtkWidget"));
}
}
| {
let mut input = c_type.trim();
let leading_const = input.starts_with("const ");
if leading_const {
input = &input[6..];
}
let end = [
input.find(" const"),
input.find("*const"),
input.find("*"),
Some(input.len()),
].iter().filter_map(|&x| x).min().unwrap();
let inner = input[..end].trim().into();
let mut ptrs: Vec<_> = input[end..].rsplit('*').skip(1)
.map(|s| if s.contains("const") { "*const" } else { "*mut" }).collect();
if let (true, Some(p)) = (leading_const, ptrs.last_mut()) {
*p = "*const";
}
| identifier_body |
c_type.rs | pub fn rustify_pointers(c_type: &str) -> (String, String) {
let mut input = c_type.trim();
let leading_const = input.starts_with("const ");
if leading_const {
input = &input[6..];
}
let end = [
input.find(" const"),
input.find("*const"),
input.find("*"),
Some(input.len()),
].iter().filter_map(|&x| x).min().unwrap();
let inner = input[..end].trim().into();
let mut ptrs: Vec<_> = input[end..].rsplit('*').skip(1)
.map(|s| if s.contains("const") | else { "*mut" }).collect();
if let (true, Some(p)) = (leading_const, ptrs.last_mut()) {
*p = "*const";
}
let res = (ptrs.join(" "), inner);
trace!("rustify `{}` -> `{}` `{}`", c_type, res.0, res.1);
res
}
#[cfg(test)]
mod tests {
use super::rustify_pointers as rustify_ptr;
fn s(x: &str, y: &str) -> (String, String) {
(x.into(), y.into())
}
#[test]
fn rustify_pointers() {
assert_eq!(rustify_ptr("char"), s("", "char"));
assert_eq!(rustify_ptr("char*"), s("*mut", "char"));
assert_eq!(rustify_ptr("const char*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const *"), s("*const", "char"));
assert_eq!(rustify_ptr(" char * * "), s("*mut *mut", "char"));
assert_eq!(rustify_ptr("const char**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("char const**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("const char* const*"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char const * const *"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char* const*"), s("*const *mut", "char"));
assert_eq!(rustify_ptr("GtkWidget*"), s("*mut", "GtkWidget"));
}
}
| { "*const" } | conditional_block |
c_type.rs | pub fn rustify_pointers(c_type: &str) -> (String, String) {
let mut input = c_type.trim();
let leading_const = input.starts_with("const ");
if leading_const {
input = &input[6..];
}
let end = [
input.find(" const"),
input.find("*const"),
input.find("*"),
Some(input.len()),
].iter().filter_map(|&x| x).min().unwrap();
let inner = input[..end].trim().into();
let mut ptrs: Vec<_> = input[end..].rsplit('*').skip(1)
.map(|s| if s.contains("const") { "*const" } else { "*mut" }).collect();
if let (true, Some(p)) = (leading_const, ptrs.last_mut()) {
*p = "*const";
}
let res = (ptrs.join(" "), inner);
trace!("rustify `{}` -> `{}` `{}`", c_type, res.0, res.1);
res
}
#[cfg(test)]
mod tests {
use super::rustify_pointers as rustify_ptr;
fn s(x: &str, y: &str) -> (String, String) {
(x.into(), y.into()) | }
#[test]
fn rustify_pointers() {
assert_eq!(rustify_ptr("char"), s("", "char"));
assert_eq!(rustify_ptr("char*"), s("*mut", "char"));
assert_eq!(rustify_ptr("const char*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const*"), s("*const", "char"));
assert_eq!(rustify_ptr("char const *"), s("*const", "char"));
assert_eq!(rustify_ptr(" char * * "), s("*mut *mut", "char"));
assert_eq!(rustify_ptr("const char**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("char const**"), s("*mut *const", "char"));
assert_eq!(rustify_ptr("const char* const*"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char const * const *"), s("*const *const", "char"));
assert_eq!(rustify_ptr("char* const*"), s("*const *mut", "char"));
assert_eq!(rustify_ptr("GtkWidget*"), s("*mut", "GtkWidget"));
}
} | random_line_split |
|
job.rs | //
// Copyright:: Copyright (c) 2016 Chef Software, Inc.
// License:: Apache License, Version 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use clap::{App, Arg, ArgMatches, SubCommand};
use cli::arguments::{
a2_mode_arg, local_arg, patchset_arg, pipeline_arg, project_arg, project_specific_args,
u_e_s_o_args, value_of,
};
use cli::Options;
use config::Config;
use fips;
use project;
use types::DeliveryResult;
pub const SUBCOMMAND_NAME: &'static str = "job";
#[derive(Debug)]
pub struct JobClapOptions<'n> {
pub stage: &'n str,
pub phases: &'n str,
pub change: &'n str,
pub pipeline: &'n str,
pub job_root: &'n str,
pub project: &'n str,
pub user: &'n str,
pub server: &'n str,
pub ent: &'n str,
pub org: &'n str,
pub patchset: &'n str,
pub change_id: &'n str,
pub git_url: &'n str,
pub shasum: &'n str,
pub branch: &'n str,
pub skip_default: bool,
pub local: bool,
pub docker_image: &'n str,
pub fips: bool,
pub fips_git_port: &'n str,
pub fips_custom_cert_filename: &'n str,
pub a2_mode: Option<bool>,
}
impl<'n> Default for JobClapOptions<'n> {
fn default() -> Self {
JobClapOptions {
stage: "",
phases: "",
change: "",
pipeline: "master",
job_root: "",
project: "",
user: "",
server: "",
ent: "",
org: "",
patchset: "",
change_id: "",
git_url: "",
shasum: "",
branch: "",
skip_default: false,
local: false,
docker_image: "",
fips: false,
fips_git_port: "",
fips_custom_cert_filename: "",
a2_mode: None,
}
}
}
impl<'n> JobClapOptions<'n> {
pub fn new(matches: &'n ArgMatches<'n>) -> Self {
JobClapOptions {
stage: value_of(&matches, "stage"),
phases: value_of(matches, "phases"),
change: value_of(&matches, "change"),
pipeline: value_of(&matches, "pipeline"),
job_root: value_of(&matches, "job-root"),
project: value_of(&matches, "project"),
user: value_of(&matches, "user"),
server: value_of(&matches, "server"),
ent: value_of(&matches, "ent"),
org: value_of(&matches, "org"),
patchset: value_of(&matches, "patchset"),
change_id: value_of(&matches, "change-id"),
git_url: value_of(&matches, "git-url"),
shasum: value_of(&matches, "shasum"),
branch: value_of(&matches, "branch"),
skip_default: matches.is_present("skip-default"),
local: matches.is_present("local"),
docker_image: value_of(&matches, "docker"),
fips: matches.is_present("fips"),
fips_git_port: value_of(&matches, "fips-git-port"),
fips_custom_cert_filename: value_of(&matches, "fips-custom-cert-filename"),
a2_mode: if matches.is_present("a2-mode") {
Some(true)
} else {
None
},
}
}
}
impl<'n> Options for JobClapOptions<'n> {
fn merge_options_and_config(&self, config: Config) -> DeliveryResult<Config> | self.fips_custom_cert_filename,
new_config,
)
}
}
fn with_default<'a>(val: &'a str, default: &'a str, local: &bool) -> &'a str {
if!local ||!val.is_empty() {
val
} else {
default
}
}
pub fn clap_subcommand<'c>() -> App<'c, 'c> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Run one or more phase jobs")
.args(&vec![patchset_arg(), project_arg(), local_arg()])
.args(&make_arg_vec![
"-j --job-root=[root] 'Path to the job root'",
"-g --git-url=[url] 'Git URL (-u -s -e -o ignored if used)'",
"-C --change=[change] 'Feature branch name'",
"-b --branch=[branch] 'Branch to merge'",
"-S --shasum=[gitsha] 'Git SHA of change'",
"--change-id=[id] 'The change ID'",
"--skip-default'skip default'",
"--docker=[image] 'Docker image'"
])
.args_from_usage(
"<stage> 'Stage for the run'
<phases> 'One or more phases'",
)
.args(&u_e_s_o_args())
.args(&pipeline_arg())
.args(&project_specific_args())
.args(&vec![a2_mode_arg()])
}
| {
let project = try!(project::project_or_from_cwd(&self.project));
let mut new_config = config
.set_pipeline(&self.pipeline)
.set_user(with_default(&self.user, "you", &&self.local))
.set_server(with_default(&self.server, "localhost", &&self.local))
.set_enterprise(with_default(&self.ent, "local", &&self.local))
.set_organization(with_default(&self.org, "workstation", &&self.local))
.set_project(&project)
.set_a2_mode_if_def(self.a2_mode);
// A2 mode requires SAML right now
if new_config.a2_mode.unwrap_or(false) {
new_config.saml = Some(true)
}
fips::merge_fips_options_and_config(
self.fips,
self.fips_git_port, | identifier_body |
job.rs | //
// Copyright:: Copyright (c) 2016 Chef Software, Inc.
// License:: Apache License, Version 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use clap::{App, Arg, ArgMatches, SubCommand};
use cli::arguments::{
a2_mode_arg, local_arg, patchset_arg, pipeline_arg, project_arg, project_specific_args,
u_e_s_o_args, value_of,
};
use cli::Options;
use config::Config;
use fips;
use project;
use types::DeliveryResult;
pub const SUBCOMMAND_NAME: &'static str = "job";
#[derive(Debug)]
pub struct JobClapOptions<'n> {
pub stage: &'n str,
pub phases: &'n str,
pub change: &'n str,
pub pipeline: &'n str,
pub job_root: &'n str,
pub project: &'n str,
pub user: &'n str,
pub server: &'n str,
pub ent: &'n str,
pub org: &'n str,
pub patchset: &'n str,
pub change_id: &'n str,
pub git_url: &'n str,
pub shasum: &'n str,
pub branch: &'n str,
pub skip_default: bool,
pub local: bool,
pub docker_image: &'n str,
pub fips: bool,
pub fips_git_port: &'n str,
pub fips_custom_cert_filename: &'n str,
pub a2_mode: Option<bool>,
}
impl<'n> Default for JobClapOptions<'n> {
fn | () -> Self {
JobClapOptions {
stage: "",
phases: "",
change: "",
pipeline: "master",
job_root: "",
project: "",
user: "",
server: "",
ent: "",
org: "",
patchset: "",
change_id: "",
git_url: "",
shasum: "",
branch: "",
skip_default: false,
local: false,
docker_image: "",
fips: false,
fips_git_port: "",
fips_custom_cert_filename: "",
a2_mode: None,
}
}
}
impl<'n> JobClapOptions<'n> {
pub fn new(matches: &'n ArgMatches<'n>) -> Self {
JobClapOptions {
stage: value_of(&matches, "stage"),
phases: value_of(matches, "phases"),
change: value_of(&matches, "change"),
pipeline: value_of(&matches, "pipeline"),
job_root: value_of(&matches, "job-root"),
project: value_of(&matches, "project"),
user: value_of(&matches, "user"),
server: value_of(&matches, "server"),
ent: value_of(&matches, "ent"),
org: value_of(&matches, "org"),
patchset: value_of(&matches, "patchset"),
change_id: value_of(&matches, "change-id"),
git_url: value_of(&matches, "git-url"),
shasum: value_of(&matches, "shasum"),
branch: value_of(&matches, "branch"),
skip_default: matches.is_present("skip-default"),
local: matches.is_present("local"),
docker_image: value_of(&matches, "docker"),
fips: matches.is_present("fips"),
fips_git_port: value_of(&matches, "fips-git-port"),
fips_custom_cert_filename: value_of(&matches, "fips-custom-cert-filename"),
a2_mode: if matches.is_present("a2-mode") {
Some(true)
} else {
None
},
}
}
}
impl<'n> Options for JobClapOptions<'n> {
fn merge_options_and_config(&self, config: Config) -> DeliveryResult<Config> {
let project = try!(project::project_or_from_cwd(&self.project));
let mut new_config = config
.set_pipeline(&self.pipeline)
.set_user(with_default(&self.user, "you", &&self.local))
.set_server(with_default(&self.server, "localhost", &&self.local))
.set_enterprise(with_default(&self.ent, "local", &&self.local))
.set_organization(with_default(&self.org, "workstation", &&self.local))
.set_project(&project)
.set_a2_mode_if_def(self.a2_mode);
// A2 mode requires SAML right now
if new_config.a2_mode.unwrap_or(false) {
new_config.saml = Some(true)
}
fips::merge_fips_options_and_config(
self.fips,
self.fips_git_port,
self.fips_custom_cert_filename,
new_config,
)
}
}
fn with_default<'a>(val: &'a str, default: &'a str, local: &bool) -> &'a str {
if!local ||!val.is_empty() {
val
} else {
default
}
}
pub fn clap_subcommand<'c>() -> App<'c, 'c> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Run one or more phase jobs")
.args(&vec![patchset_arg(), project_arg(), local_arg()])
.args(&make_arg_vec![
"-j --job-root=[root] 'Path to the job root'",
"-g --git-url=[url] 'Git URL (-u -s -e -o ignored if used)'",
"-C --change=[change] 'Feature branch name'",
"-b --branch=[branch] 'Branch to merge'",
"-S --shasum=[gitsha] 'Git SHA of change'",
"--change-id=[id] 'The change ID'",
"--skip-default'skip default'",
"--docker=[image] 'Docker image'"
])
.args_from_usage(
"<stage> 'Stage for the run'
<phases> 'One or more phases'",
)
.args(&u_e_s_o_args())
.args(&pipeline_arg())
.args(&project_specific_args())
.args(&vec![a2_mode_arg()])
}
| default | identifier_name |
job.rs | //
// Copyright:: Copyright (c) 2016 Chef Software, Inc.
// License:: Apache License, Version 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use clap::{App, Arg, ArgMatches, SubCommand};
use cli::arguments::{
a2_mode_arg, local_arg, patchset_arg, pipeline_arg, project_arg, project_specific_args,
u_e_s_o_args, value_of,
};
use cli::Options;
use config::Config;
use fips;
use project;
use types::DeliveryResult;
pub const SUBCOMMAND_NAME: &'static str = "job";
#[derive(Debug)]
pub struct JobClapOptions<'n> {
pub stage: &'n str,
pub phases: &'n str,
pub change: &'n str,
pub pipeline: &'n str,
pub job_root: &'n str,
pub project: &'n str,
pub user: &'n str,
pub server: &'n str,
pub ent: &'n str,
pub org: &'n str,
pub patchset: &'n str,
pub change_id: &'n str,
pub git_url: &'n str,
pub shasum: &'n str,
pub branch: &'n str,
pub skip_default: bool,
pub local: bool,
pub docker_image: &'n str,
pub fips: bool,
pub fips_git_port: &'n str,
pub fips_custom_cert_filename: &'n str,
pub a2_mode: Option<bool>,
}
impl<'n> Default for JobClapOptions<'n> {
fn default() -> Self {
JobClapOptions {
stage: "",
phases: "",
change: "",
pipeline: "master",
job_root: "",
project: "",
user: "",
server: "",
ent: "",
org: "",
patchset: "",
change_id: "",
git_url: "",
shasum: "",
branch: "",
skip_default: false,
local: false,
docker_image: "",
fips: false,
fips_git_port: "",
fips_custom_cert_filename: "",
a2_mode: None,
}
}
}
impl<'n> JobClapOptions<'n> {
pub fn new(matches: &'n ArgMatches<'n>) -> Self {
JobClapOptions {
stage: value_of(&matches, "stage"),
phases: value_of(matches, "phases"),
change: value_of(&matches, "change"),
pipeline: value_of(&matches, "pipeline"),
job_root: value_of(&matches, "job-root"),
project: value_of(&matches, "project"),
user: value_of(&matches, "user"),
server: value_of(&matches, "server"),
ent: value_of(&matches, "ent"),
org: value_of(&matches, "org"),
patchset: value_of(&matches, "patchset"),
change_id: value_of(&matches, "change-id"),
git_url: value_of(&matches, "git-url"),
shasum: value_of(&matches, "shasum"),
branch: value_of(&matches, "branch"),
skip_default: matches.is_present("skip-default"),
local: matches.is_present("local"),
docker_image: value_of(&matches, "docker"),
fips: matches.is_present("fips"),
fips_git_port: value_of(&matches, "fips-git-port"),
fips_custom_cert_filename: value_of(&matches, "fips-custom-cert-filename"),
a2_mode: if matches.is_present("a2-mode") {
Some(true)
} else {
None
},
}
}
}
impl<'n> Options for JobClapOptions<'n> {
fn merge_options_and_config(&self, config: Config) -> DeliveryResult<Config> {
let project = try!(project::project_or_from_cwd(&self.project));
let mut new_config = config
.set_pipeline(&self.pipeline)
.set_user(with_default(&self.user, "you", &&self.local))
.set_server(with_default(&self.server, "localhost", &&self.local))
.set_enterprise(with_default(&self.ent, "local", &&self.local))
.set_organization(with_default(&self.org, "workstation", &&self.local))
.set_project(&project)
.set_a2_mode_if_def(self.a2_mode);
// A2 mode requires SAML right now
if new_config.a2_mode.unwrap_or(false) |
fips::merge_fips_options_and_config(
self.fips,
self.fips_git_port,
self.fips_custom_cert_filename,
new_config,
)
}
}
fn with_default<'a>(val: &'a str, default: &'a str, local: &bool) -> &'a str {
if!local ||!val.is_empty() {
val
} else {
default
}
}
pub fn clap_subcommand<'c>() -> App<'c, 'c> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Run one or more phase jobs")
.args(&vec![patchset_arg(), project_arg(), local_arg()])
.args(&make_arg_vec![
"-j --job-root=[root] 'Path to the job root'",
"-g --git-url=[url] 'Git URL (-u -s -e -o ignored if used)'",
"-C --change=[change] 'Feature branch name'",
"-b --branch=[branch] 'Branch to merge'",
"-S --shasum=[gitsha] 'Git SHA of change'",
"--change-id=[id] 'The change ID'",
"--skip-default'skip default'",
"--docker=[image] 'Docker image'"
])
.args_from_usage(
"<stage> 'Stage for the run'
<phases> 'One or more phases'",
)
.args(&u_e_s_o_args())
.args(&pipeline_arg())
.args(&project_specific_args())
.args(&vec![a2_mode_arg()])
}
| {
new_config.saml = Some(true)
} | conditional_block |
job.rs | //
// Copyright:: Copyright (c) 2016 Chef Software, Inc.
// License:: Apache License, Version 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use clap::{App, Arg, ArgMatches, SubCommand};
use cli::arguments::{
a2_mode_arg, local_arg, patchset_arg, pipeline_arg, project_arg, project_specific_args,
u_e_s_o_args, value_of,
};
use cli::Options;
use config::Config;
use fips;
use project;
use types::DeliveryResult;
pub const SUBCOMMAND_NAME: &'static str = "job";
#[derive(Debug)]
pub struct JobClapOptions<'n> {
pub stage: &'n str,
pub phases: &'n str,
pub change: &'n str,
pub pipeline: &'n str,
pub job_root: &'n str,
pub project: &'n str,
pub user: &'n str,
pub server: &'n str,
pub ent: &'n str,
pub org: &'n str,
pub patchset: &'n str,
pub change_id: &'n str,
pub git_url: &'n str,
pub shasum: &'n str,
pub branch: &'n str,
pub skip_default: bool,
pub local: bool,
pub docker_image: &'n str,
pub fips: bool,
pub fips_git_port: &'n str,
pub fips_custom_cert_filename: &'n str,
pub a2_mode: Option<bool>,
}
impl<'n> Default for JobClapOptions<'n> {
fn default() -> Self {
JobClapOptions {
stage: "",
phases: "",
change: "",
pipeline: "master",
job_root: "",
project: "",
user: "",
server: "",
ent: "",
org: "",
patchset: "",
change_id: "",
git_url: "",
shasum: "",
branch: "",
skip_default: false,
local: false,
docker_image: "",
fips: false,
fips_git_port: "",
fips_custom_cert_filename: "",
a2_mode: None,
}
}
}
impl<'n> JobClapOptions<'n> {
pub fn new(matches: &'n ArgMatches<'n>) -> Self {
JobClapOptions {
stage: value_of(&matches, "stage"),
phases: value_of(matches, "phases"),
change: value_of(&matches, "change"),
pipeline: value_of(&matches, "pipeline"),
job_root: value_of(&matches, "job-root"),
project: value_of(&matches, "project"),
user: value_of(&matches, "user"),
server: value_of(&matches, "server"),
ent: value_of(&matches, "ent"),
org: value_of(&matches, "org"),
patchset: value_of(&matches, "patchset"),
change_id: value_of(&matches, "change-id"),
git_url: value_of(&matches, "git-url"),
shasum: value_of(&matches, "shasum"),
branch: value_of(&matches, "branch"),
skip_default: matches.is_present("skip-default"),
local: matches.is_present("local"),
docker_image: value_of(&matches, "docker"),
fips: matches.is_present("fips"),
fips_git_port: value_of(&matches, "fips-git-port"),
fips_custom_cert_filename: value_of(&matches, "fips-custom-cert-filename"), | None
},
}
}
}
impl<'n> Options for JobClapOptions<'n> {
fn merge_options_and_config(&self, config: Config) -> DeliveryResult<Config> {
let project = try!(project::project_or_from_cwd(&self.project));
let mut new_config = config
.set_pipeline(&self.pipeline)
.set_user(with_default(&self.user, "you", &&self.local))
.set_server(with_default(&self.server, "localhost", &&self.local))
.set_enterprise(with_default(&self.ent, "local", &&self.local))
.set_organization(with_default(&self.org, "workstation", &&self.local))
.set_project(&project)
.set_a2_mode_if_def(self.a2_mode);
// A2 mode requires SAML right now
if new_config.a2_mode.unwrap_or(false) {
new_config.saml = Some(true)
}
fips::merge_fips_options_and_config(
self.fips,
self.fips_git_port,
self.fips_custom_cert_filename,
new_config,
)
}
}
fn with_default<'a>(val: &'a str, default: &'a str, local: &bool) -> &'a str {
if!local ||!val.is_empty() {
val
} else {
default
}
}
pub fn clap_subcommand<'c>() -> App<'c, 'c> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Run one or more phase jobs")
.args(&vec![patchset_arg(), project_arg(), local_arg()])
.args(&make_arg_vec![
"-j --job-root=[root] 'Path to the job root'",
"-g --git-url=[url] 'Git URL (-u -s -e -o ignored if used)'",
"-C --change=[change] 'Feature branch name'",
"-b --branch=[branch] 'Branch to merge'",
"-S --shasum=[gitsha] 'Git SHA of change'",
"--change-id=[id] 'The change ID'",
"--skip-default'skip default'",
"--docker=[image] 'Docker image'"
])
.args_from_usage(
"<stage> 'Stage for the run'
<phases> 'One or more phases'",
)
.args(&u_e_s_o_args())
.args(&pipeline_arg())
.args(&project_specific_args())
.args(&vec![a2_mode_arg()])
} | a2_mode: if matches.is_present("a2-mode") {
Some(true)
} else { | random_line_split |
sort.rs | use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::process::Command;
static PROGNAME: &'static str = "./sort";
#[test]
fn numeric1() {
numeric_helper(1);
}
#[test]
fn numeric2() {
numeric_helper(2);
}
#[test]
fn numeric3() {
numeric_helper(3);
}
#[test]
fn numeric4() {
numeric_helper(4);
}
#[test]
fn numeric5() {
numeric_helper(5);
}
fn numeric_helper(test_num: isize) {
let mut cmd = Command::new(PROGNAME);
cmd.arg("-n");
let po = match cmd.arg(format!("{}{}{}", "numeric", test_num, ".txt")).output() {
Ok(p) => p,
Err(err) => panic!("{}", err)
};
let filename = format!("{}{}{}", "numeric", test_num, ".ans");
let mut f = File::open(Path::new(&filename)).unwrap_or_else(|err| {
panic!("{}", err) | Ok(_) => {},
Err(err) => panic!("{}", err)
}
assert_eq!(String::from_utf8(po.stdout).unwrap(), String::from_utf8(answer).unwrap());
} | });
let mut answer = vec!();
match f.read_to_end(&mut answer) { | random_line_split |
sort.rs | use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::process::Command;
static PROGNAME: &'static str = "./sort";
#[test]
fn numeric1() {
numeric_helper(1);
}
#[test]
fn numeric2() {
numeric_helper(2);
}
#[test]
fn numeric3() {
numeric_helper(3);
}
#[test]
fn numeric4() {
numeric_helper(4);
}
#[test]
fn numeric5() {
numeric_helper(5);
}
fn numeric_helper(test_num: isize) {
let mut cmd = Command::new(PROGNAME);
cmd.arg("-n");
let po = match cmd.arg(format!("{}{}{}", "numeric", test_num, ".txt")).output() {
Ok(p) => p,
Err(err) => panic!("{}", err)
};
let filename = format!("{}{}{}", "numeric", test_num, ".ans");
let mut f = File::open(Path::new(&filename)).unwrap_or_else(|err| {
panic!("{}", err)
});
let mut answer = vec!();
match f.read_to_end(&mut answer) {
Ok(_) => | ,
Err(err) => panic!("{}", err)
}
assert_eq!(String::from_utf8(po.stdout).unwrap(), String::from_utf8(answer).unwrap());
}
| {} | conditional_block |
sort.rs | use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::process::Command;
static PROGNAME: &'static str = "./sort";
#[test]
fn numeric1() {
numeric_helper(1);
}
#[test]
fn numeric2() |
#[test]
fn numeric3() {
numeric_helper(3);
}
#[test]
fn numeric4() {
numeric_helper(4);
}
#[test]
fn numeric5() {
numeric_helper(5);
}
fn numeric_helper(test_num: isize) {
let mut cmd = Command::new(PROGNAME);
cmd.arg("-n");
let po = match cmd.arg(format!("{}{}{}", "numeric", test_num, ".txt")).output() {
Ok(p) => p,
Err(err) => panic!("{}", err)
};
let filename = format!("{}{}{}", "numeric", test_num, ".ans");
let mut f = File::open(Path::new(&filename)).unwrap_or_else(|err| {
panic!("{}", err)
});
let mut answer = vec!();
match f.read_to_end(&mut answer) {
Ok(_) => {},
Err(err) => panic!("{}", err)
}
assert_eq!(String::from_utf8(po.stdout).unwrap(), String::from_utf8(answer).unwrap());
}
| {
numeric_helper(2);
} | identifier_body |
sort.rs | use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::process::Command;
static PROGNAME: &'static str = "./sort";
#[test]
fn numeric1() {
numeric_helper(1);
}
#[test]
fn numeric2() {
numeric_helper(2);
}
#[test]
fn numeric3() {
numeric_helper(3);
}
#[test]
fn numeric4() {
numeric_helper(4);
}
#[test]
fn numeric5() {
numeric_helper(5);
}
fn | (test_num: isize) {
let mut cmd = Command::new(PROGNAME);
cmd.arg("-n");
let po = match cmd.arg(format!("{}{}{}", "numeric", test_num, ".txt")).output() {
Ok(p) => p,
Err(err) => panic!("{}", err)
};
let filename = format!("{}{}{}", "numeric", test_num, ".ans");
let mut f = File::open(Path::new(&filename)).unwrap_or_else(|err| {
panic!("{}", err)
});
let mut answer = vec!();
match f.read_to_end(&mut answer) {
Ok(_) => {},
Err(err) => panic!("{}", err)
}
assert_eq!(String::from_utf8(po.stdout).unwrap(), String::from_utf8(answer).unwrap());
}
| numeric_helper | identifier_name |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
mod handlers;
use crate::handlers::get_routes;
use diem_logger::prelude::*;
use diemdb::DiemDB;
use std::{net::SocketAddr, sync::Arc};
use tokio::runtime::{Builder, Runtime};
pub fn start_backup_service(address: SocketAddr, db: Arc<DiemDB>) -> Runtime {
let backup_handler = db.get_backup_handler();
let routes = get_routes(backup_handler);
let runtime = Builder::new_multi_thread()
.thread_name("backup")
.enable_all()
.build()
.expect("[backup] failed to create runtime");
// Ensure that we actually bind to the socket first before spawning the
// server tasks. This helps in tests to prevent races where a client attempts
// to make a request before the server task is actually listening on the
// socket.
//
// Note: we need to enter the runtime context first to actually bind, since
// tokio TcpListener can only be bound inside a tokio context.
let _guard = runtime.enter();
let server = warp::serve(routes).bind(address);
runtime.handle().spawn(server);
info!("Backup service spawned.");
runtime
}
#[cfg(test)]
mod tests {
use super::*;
use diem_config::utils::get_available_port;
use diem_crypto::hash::HashValue;
use diem_temppath::TempPath;
use reqwest::blocking::get;
use std::net::{IpAddr, Ipv4Addr};
/// 404 - endpoint not found
/// 400 - params not provided or failed parsing
/// 500 - endpoint handler raised error
///
/// And failure on one endpoint doesn't result in warp::Rejection which makes it fallback to other matches.
#[test]
fn | () {
let tmpdir = TempPath::new();
let db = Arc::new(DiemDB::new_for_test(&tmpdir));
let port = get_available_port();
let _rt = start_backup_service(SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port), db);
// Endpoint doesn't exist.
let resp = get(&format!("http://127.0.0.1:{}/", port)).unwrap();
assert_eq!(resp.status(), 404);
let resp = get(&format!("http://127.0.0.1:{}/x", port)).unwrap();
assert_eq!(resp.status(), 404);
// Params not provided.
let resp = get(&format!("http://127.0.0.1:{}/state_range_proof", port)).unwrap();
assert_eq!(resp.status(), 400);
let resp = get(&format!(
"http://127.0.0.1:{}/state_range_proof/{}",
port, 123
))
.unwrap();
assert_eq!(resp.status(), 400);
let resp = get(&format!("http://127.0.0.1:{}/state_snapshot", port)).unwrap();
assert_eq!(resp.status(), 400);
// Params fail to parse (HashValue)
let resp = get(&format!("http://127.0.0.1:{}/state_range_proof/1/ff", port)).unwrap();
assert_eq!(resp.status(), 400);
// Request handler raised Error (non-bootstrapped DB)
let resp = get(&format!(
"http://127.0.0.1:{}/state_range_proof/1/{}",
port,
HashValue::zero().to_hex()
))
.unwrap();
assert_eq!(resp.status(), 500);
let resp = get(&format!("http://127.0.0.1:{}/state_root_proof/0", port,)).unwrap();
assert_eq!(resp.status(), 500);
// an endpoint handled by `reply_with_async_channel_writer' always returns 200,
// connection terminates prematurely when the channel writer errors.
let resp = get(&format!("http://127.0.0.1:{}/state_snapshot/1", port,)).unwrap();
assert_eq!(resp.status(), 200);
assert_eq!(resp.content_length(), None);
assert!(resp.bytes().is_err());
}
}
| routing_and_error_codes | identifier_name |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
mod handlers;
use crate::handlers::get_routes;
use diem_logger::prelude::*;
use diemdb::DiemDB;
use std::{net::SocketAddr, sync::Arc};
use tokio::runtime::{Builder, Runtime};
pub fn start_backup_service(address: SocketAddr, db: Arc<DiemDB>) -> Runtime {
let backup_handler = db.get_backup_handler();
let routes = get_routes(backup_handler);
let runtime = Builder::new_multi_thread()
.thread_name("backup")
.enable_all()
.build()
.expect("[backup] failed to create runtime");
// Ensure that we actually bind to the socket first before spawning the
// server tasks. This helps in tests to prevent races where a client attempts
// to make a request before the server task is actually listening on the
// socket.
//
// Note: we need to enter the runtime context first to actually bind, since
// tokio TcpListener can only be bound inside a tokio context.
let _guard = runtime.enter();
let server = warp::serve(routes).bind(address);
runtime.handle().spawn(server);
info!("Backup service spawned.");
runtime
}
#[cfg(test)]
mod tests {
use super::*;
use diem_config::utils::get_available_port;
use diem_crypto::hash::HashValue;
use diem_temppath::TempPath;
use reqwest::blocking::get;
use std::net::{IpAddr, Ipv4Addr};
/// 404 - endpoint not found
/// 400 - params not provided or failed parsing
/// 500 - endpoint handler raised error
///
/// And failure on one endpoint doesn't result in warp::Rejection which makes it fallback to other matches.
#[test]
fn routing_and_error_codes() {
let tmpdir = TempPath::new();
let db = Arc::new(DiemDB::new_for_test(&tmpdir));
let port = get_available_port();
let _rt = start_backup_service(SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port), db);
// Endpoint doesn't exist.
let resp = get(&format!("http://127.0.0.1:{}/", port)).unwrap();
assert_eq!(resp.status(), 404);
let resp = get(&format!("http://127.0.0.1:{}/x", port)).unwrap();
assert_eq!(resp.status(), 404);
// Params not provided.
let resp = get(&format!("http://127.0.0.1:{}/state_range_proof", port)).unwrap();
assert_eq!(resp.status(), 400); | "http://127.0.0.1:{}/state_range_proof/{}",
port, 123
))
.unwrap();
assert_eq!(resp.status(), 400);
let resp = get(&format!("http://127.0.0.1:{}/state_snapshot", port)).unwrap();
assert_eq!(resp.status(), 400);
// Params fail to parse (HashValue)
let resp = get(&format!("http://127.0.0.1:{}/state_range_proof/1/ff", port)).unwrap();
assert_eq!(resp.status(), 400);
// Request handler raised Error (non-bootstrapped DB)
let resp = get(&format!(
"http://127.0.0.1:{}/state_range_proof/1/{}",
port,
HashValue::zero().to_hex()
))
.unwrap();
assert_eq!(resp.status(), 500);
let resp = get(&format!("http://127.0.0.1:{}/state_root_proof/0", port,)).unwrap();
assert_eq!(resp.status(), 500);
// an endpoint handled by `reply_with_async_channel_writer' always returns 200,
// connection terminates prematurely when the channel writer errors.
let resp = get(&format!("http://127.0.0.1:{}/state_snapshot/1", port,)).unwrap();
assert_eq!(resp.status(), 200);
assert_eq!(resp.content_length(), None);
assert!(resp.bytes().is_err());
}
} | let resp = get(&format!( | random_line_split |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
mod handlers;
use crate::handlers::get_routes;
use diem_logger::prelude::*;
use diemdb::DiemDB;
use std::{net::SocketAddr, sync::Arc};
use tokio::runtime::{Builder, Runtime};
pub fn start_backup_service(address: SocketAddr, db: Arc<DiemDB>) -> Runtime {
let backup_handler = db.get_backup_handler();
let routes = get_routes(backup_handler);
let runtime = Builder::new_multi_thread()
.thread_name("backup")
.enable_all()
.build()
.expect("[backup] failed to create runtime");
// Ensure that we actually bind to the socket first before spawning the
// server tasks. This helps in tests to prevent races where a client attempts
// to make a request before the server task is actually listening on the
// socket.
//
// Note: we need to enter the runtime context first to actually bind, since
// tokio TcpListener can only be bound inside a tokio context.
let _guard = runtime.enter();
let server = warp::serve(routes).bind(address);
runtime.handle().spawn(server);
info!("Backup service spawned.");
runtime
}
#[cfg(test)]
mod tests {
use super::*;
use diem_config::utils::get_available_port;
use diem_crypto::hash::HashValue;
use diem_temppath::TempPath;
use reqwest::blocking::get;
use std::net::{IpAddr, Ipv4Addr};
/// 404 - endpoint not found
/// 400 - params not provided or failed parsing
/// 500 - endpoint handler raised error
///
/// And failure on one endpoint doesn't result in warp::Rejection which makes it fallback to other matches.
#[test]
fn routing_and_error_codes() | assert_eq!(resp.status(), 400);
let resp = get(&format!("http://127.0.0.1:{}/state_snapshot", port)).unwrap();
assert_eq!(resp.status(), 400);
// Params fail to parse (HashValue)
let resp = get(&format!("http://127.0.0.1:{}/state_range_proof/1/ff", port)).unwrap();
assert_eq!(resp.status(), 400);
// Request handler raised Error (non-bootstrapped DB)
let resp = get(&format!(
"http://127.0.0.1:{}/state_range_proof/1/{}",
port,
HashValue::zero().to_hex()
))
.unwrap();
assert_eq!(resp.status(), 500);
let resp = get(&format!("http://127.0.0.1:{}/state_root_proof/0", port,)).unwrap();
assert_eq!(resp.status(), 500);
// an endpoint handled by `reply_with_async_channel_writer' always returns 200,
// connection terminates prematurely when the channel writer errors.
let resp = get(&format!("http://127.0.0.1:{}/state_snapshot/1", port,)).unwrap();
assert_eq!(resp.status(), 200);
assert_eq!(resp.content_length(), None);
assert!(resp.bytes().is_err());
}
}
| {
let tmpdir = TempPath::new();
let db = Arc::new(DiemDB::new_for_test(&tmpdir));
let port = get_available_port();
let _rt = start_backup_service(SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port), db);
// Endpoint doesn't exist.
let resp = get(&format!("http://127.0.0.1:{}/", port)).unwrap();
assert_eq!(resp.status(), 404);
let resp = get(&format!("http://127.0.0.1:{}/x", port)).unwrap();
assert_eq!(resp.status(), 404);
// Params not provided.
let resp = get(&format!("http://127.0.0.1:{}/state_range_proof", port)).unwrap();
assert_eq!(resp.status(), 400);
let resp = get(&format!(
"http://127.0.0.1:{}/state_range_proof/{}",
port, 123
))
.unwrap(); | identifier_body |
scope.rs | // https://rustbyexample.com/variable_bindings/scope.html
// http://rust-lang-ja.org/rust-by-example/variable_bindings/scope.html
fn | () {
// This binding lives in the main function
let long_lived_binding = 1;
// This is a block, and has a smaller scope than the main function
{
// This binding only exists in this block
let short_lived_binding = 2;
println!("inner short: {}", short_lived_binding);
// This binding *shadows* the outer one
let long_lived_binding = 5_f32;
println!("inner long: {}", long_lived_binding);
}
// End of the block
// Error! `short_lived_binding` doesn't exist in this scope
// println!("outer short: {}", short_lived_binding); // error[E0425]: cannot find value `short_lived_binding` in this scope
// FIXME ^ Comment out this line
println!("outer long: {}", long_lived_binding);
// This binding also *shadows* the previous binding
let long_lived_binding = 'a';
println!("outer long: {}", long_lived_binding);
}
| main | identifier_name |
scope.rs | // https://rustbyexample.com/variable_bindings/scope.html
// http://rust-lang-ja.org/rust-by-example/variable_bindings/scope.html
fn main() { |
// This is a block, and has a smaller scope than the main function
{
// This binding only exists in this block
let short_lived_binding = 2;
println!("inner short: {}", short_lived_binding);
// This binding *shadows* the outer one
let long_lived_binding = 5_f32;
println!("inner long: {}", long_lived_binding);
}
// End of the block
// Error! `short_lived_binding` doesn't exist in this scope
// println!("outer short: {}", short_lived_binding); // error[E0425]: cannot find value `short_lived_binding` in this scope
// FIXME ^ Comment out this line
println!("outer long: {}", long_lived_binding);
// This binding also *shadows* the previous binding
let long_lived_binding = 'a';
println!("outer long: {}", long_lived_binding);
} | // This binding lives in the main function
let long_lived_binding = 1; | random_line_split |
scope.rs | // https://rustbyexample.com/variable_bindings/scope.html
// http://rust-lang-ja.org/rust-by-example/variable_bindings/scope.html
fn main() | // FIXME ^ Comment out this line
println!("outer long: {}", long_lived_binding);
// This binding also *shadows* the previous binding
let long_lived_binding = 'a';
println!("outer long: {}", long_lived_binding);
}
| {
// This binding lives in the main function
let long_lived_binding = 1;
// This is a block, and has a smaller scope than the main function
{
// This binding only exists in this block
let short_lived_binding = 2;
println!("inner short: {}", short_lived_binding);
// This binding *shadows* the outer one
let long_lived_binding = 5_f32;
println!("inner long: {}", long_lived_binding);
}
// End of the block
// Error! `short_lived_binding` doesn't exist in this scope
// println!("outer short: {}", short_lived_binding); // error[E0425]: cannot find value `short_lived_binding` in this scope | identifier_body |
argument-passing.rs | // run-pass
struct X {
x: isize
}
fn f1(a: &mut X, b: &mut isize, c: isize) -> isize {
let r = a.x + *b + c;
a.x = 0;
*b = 10;
return r;
}
fn | <F>(a: isize, f: F) -> isize where F: FnOnce(isize) { f(1); return a; }
pub fn main() {
let mut a = X {x: 1};
let mut b = 2;
let c = 3;
assert_eq!(f1(&mut a, &mut b, c), 6);
assert_eq!(a.x, 0);
assert_eq!(b, 10);
assert_eq!(f2(a.x, |_| a.x = 50), 0);
assert_eq!(a.x, 50);
}
| f2 | identifier_name |
argument-passing.rs | // run-pass
struct X {
x: isize
}
fn f1(a: &mut X, b: &mut isize, c: isize) -> isize { | }
fn f2<F>(a: isize, f: F) -> isize where F: FnOnce(isize) { f(1); return a; }
pub fn main() {
let mut a = X {x: 1};
let mut b = 2;
let c = 3;
assert_eq!(f1(&mut a, &mut b, c), 6);
assert_eq!(a.x, 0);
assert_eq!(b, 10);
assert_eq!(f2(a.x, |_| a.x = 50), 0);
assert_eq!(a.x, 50);
} | let r = a.x + *b + c;
a.x = 0;
*b = 10;
return r; | random_line_split |
argument-passing.rs | // run-pass
struct X {
x: isize
}
fn f1(a: &mut X, b: &mut isize, c: isize) -> isize {
let r = a.x + *b + c;
a.x = 0;
*b = 10;
return r;
}
fn f2<F>(a: isize, f: F) -> isize where F: FnOnce(isize) { f(1); return a; }
pub fn main() | {
let mut a = X {x: 1};
let mut b = 2;
let c = 3;
assert_eq!(f1(&mut a, &mut b, c), 6);
assert_eq!(a.x, 0);
assert_eq!(b, 10);
assert_eq!(f2(a.x, |_| a.x = 50), 0);
assert_eq!(a.x, 50);
} | identifier_body |
|
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
| #[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize.. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask =!0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer!= pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0.. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindShaderResources(stage, views));
}
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count!= 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d!= s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0.. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0!= rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn clear_color(&mut self, target: native::Rtv, value: command::ClearColor) {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
}
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
} | /// The place of some data in the data buffer. | random_line_split |
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
/// The place of some data in the data buffer.
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize.. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask =!0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer!= pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0.. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindShaderResources(stage, views));
}
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count!= 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d!= s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0.. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0!= rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn clear_color(&mut self, target: native::Rtv, value: command::ClearColor) |
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
}
| {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
} | identifier_body |
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
/// The place of some data in the data buffer.
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize.. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask =!0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer!= pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0.. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindShaderResources(stage, views));
}
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count!= 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d!= s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0.. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0!= rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn | (&mut self, target: native::Rtv, value: command::ClearColor) {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
}
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
}
| clear_color | identifier_name |
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
/// The place of some data in the data buffer.
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize.. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask =!0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer!= pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0.. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count!= 0 |
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count!= 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count!= 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d!= s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0.. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0!= rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn clear_color(&mut self, target: native::Rtv, value: command::ClearColor) {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
}
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
}
| {
self.parser.parse(Command::BindShaderResources(stage, views));
} | conditional_block |
node.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::fmt::{Debug, Display};
use std::hash::Hash;
use boxfuture::BoxFuture;
use hashing::Digest;
use futures01::future::Future;
use petgraph::stable_graph;
use crate::entry::Entry;
use crate::Graph;
// 2^32 Nodes ought to be more than enough for anyone!
pub type EntryId = stable_graph::NodeIndex<u32>;
///
/// Defines executing a cacheable/memoizable step within the given NodeContext.
///
/// Note that it is assumed that Nodes are very cheap to clone.
///
pub trait Node: Clone + Debug + Display + Eq + Hash + Send +'static {
type Context: NodeContext<Node = Self>;
type Item: Clone + Debug + Eq + Send +'static;
type Error: NodeError;
fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;
///
/// If the given Node output represents an FS operation, returns its Digest.
///
fn digest(result: Self::Item) -> Option<Digest>;
///
/// If the node result is cacheable, return true.
///
fn cacheable(&self, context: &Self::Context) -> bool;
/// Nodes optionally have a user-facing name (distinct from their Debug and Display
/// implementations). This user-facing name is intended to provide high-level information
/// to end users of pants about what computation pants is currently doing. Not all
/// `Node`s need a user-facing name. For `Node`s derived from Python `@rule`s, the
/// user-facing name should be the same as the `name` annotation on the rule decorator.
fn user_facing_name(&self) -> Option<String> {
None
}
}
pub trait NodeError: Clone + Debug + Eq + Send {
///
/// Creates an instance that represents that a Node was invalidated out of the | /// Creates an instance that represents that a Node dependency was cyclic along the given path.
///
fn cyclic(path: Vec<String>) -> Self;
}
///
/// A trait used to visualize Nodes in either DOT/GraphViz format.
///
pub trait NodeVisualizer<N: Node> {
///
/// Returns a GraphViz color scheme name for this visualizer.
///
fn color_scheme(&self) -> &str;
///
/// Returns a GraphViz color name/id within Self::color_scheme for the given Entry.
///
fn color(&mut self, entry: &Entry<N>, context: &N::Context) -> String;
}
///
/// A trait used to visualize Nodes for the purposes of CLI-output tracing.
///
pub trait NodeTracer<N: Node> {
///
/// Returns true if the given Node Result represents the "bottom" of a trace.
///
/// A trace represents a sub-dag of the entire Graph, and a "bottom" Node result represents
/// a boundary that the trace stops before (ie, a bottom Node will not be rendered in the trace,
/// but anything that depends on a bottom Node will be).
///
fn is_bottom(result: Option<Result<N::Item, N::Error>>) -> bool;
///
/// Renders the given result for a trace. The trace will already be indented by `indent`, but
/// an implementer creating a multi-line output would need to indent them as well.
///
fn state_str(indent: &str, result: Option<Result<N::Item, N::Error>>) -> String;
}
///
/// A context passed between Nodes that also stores an EntryId to uniquely identify them.
///
pub trait NodeContext: Clone + Send +'static {
///
/// The type generated when this Context is cloned for another Node.
///
type Node: Node;
///
/// The Session ID type for this Context. Some Node behaviours (in particular: Node::cacheable)
/// have Session-specific semantics. More than one context object might be associated with a
/// single caller "session".
///
type SessionId: Clone + Debug + Eq;
///
/// Creates a clone of this NodeContext to be used for a different Node.
///
/// To clone a Context for use for the same Node, `Clone` is used directly.
///
fn clone_for(&self, entry_id: EntryId) -> <Self::Node as Node>::Context;
///
/// Returns the SessionId for this Context, which should uniquely identify a caller's run for the
/// purposes of "once per Session" behaviour.
///
fn session_id(&self) -> &Self::SessionId;
///
/// Returns a reference to the Graph for this Context.
///
fn graph(&self) -> &Graph<Self::Node>;
///
/// Spawns a Future on an Executor provided by the context.
///
/// NB: Unlike the futures `Executor` trait itself, this implementation _must_ spawn the work
/// on another thread, as it is called from within the Graph lock.
///
fn spawn<F>(&self, future: F)
where
F: Future<Item = (), Error = ()> + Send +'static;
} | /// Graph (generally while running).
///
fn invalidated() -> Self;
/// | random_line_split |
node.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::fmt::{Debug, Display};
use std::hash::Hash;
use boxfuture::BoxFuture;
use hashing::Digest;
use futures01::future::Future;
use petgraph::stable_graph;
use crate::entry::Entry;
use crate::Graph;
// 2^32 Nodes ought to be more than enough for anyone!
pub type EntryId = stable_graph::NodeIndex<u32>;
///
/// Defines executing a cacheable/memoizable step within the given NodeContext.
///
/// Note that it is assumed that Nodes are very cheap to clone.
///
pub trait Node: Clone + Debug + Display + Eq + Hash + Send +'static {
type Context: NodeContext<Node = Self>;
type Item: Clone + Debug + Eq + Send +'static;
type Error: NodeError;
fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;
///
/// If the given Node output represents an FS operation, returns its Digest.
///
fn digest(result: Self::Item) -> Option<Digest>;
///
/// If the node result is cacheable, return true.
///
fn cacheable(&self, context: &Self::Context) -> bool;
/// Nodes optionally have a user-facing name (distinct from their Debug and Display
/// implementations). This user-facing name is intended to provide high-level information
/// to end users of pants about what computation pants is currently doing. Not all
/// `Node`s need a user-facing name. For `Node`s derived from Python `@rule`s, the
/// user-facing name should be the same as the `name` annotation on the rule decorator.
fn | (&self) -> Option<String> {
None
}
}
pub trait NodeError: Clone + Debug + Eq + Send {
///
/// Creates an instance that represents that a Node was invalidated out of the
/// Graph (generally while running).
///
fn invalidated() -> Self;
///
/// Creates an instance that represents that a Node dependency was cyclic along the given path.
///
fn cyclic(path: Vec<String>) -> Self;
}
///
/// A trait used to visualize Nodes in either DOT/GraphViz format.
///
pub trait NodeVisualizer<N: Node> {
///
/// Returns a GraphViz color scheme name for this visualizer.
///
fn color_scheme(&self) -> &str;
///
/// Returns a GraphViz color name/id within Self::color_scheme for the given Entry.
///
fn color(&mut self, entry: &Entry<N>, context: &N::Context) -> String;
}
///
/// A trait used to visualize Nodes for the purposes of CLI-output tracing.
///
pub trait NodeTracer<N: Node> {
///
/// Returns true if the given Node Result represents the "bottom" of a trace.
///
/// A trace represents a sub-dag of the entire Graph, and a "bottom" Node result represents
/// a boundary that the trace stops before (ie, a bottom Node will not be rendered in the trace,
/// but anything that depends on a bottom Node will be).
///
fn is_bottom(result: Option<Result<N::Item, N::Error>>) -> bool;
///
/// Renders the given result for a trace. The trace will already be indented by `indent`, but
/// an implementer creating a multi-line output would need to indent them as well.
///
fn state_str(indent: &str, result: Option<Result<N::Item, N::Error>>) -> String;
}
///
/// A context passed between Nodes that also stores an EntryId to uniquely identify them.
///
pub trait NodeContext: Clone + Send +'static {
///
/// The type generated when this Context is cloned for another Node.
///
type Node: Node;
///
/// The Session ID type for this Context. Some Node behaviours (in particular: Node::cacheable)
/// have Session-specific semantics. More than one context object might be associated with a
/// single caller "session".
///
type SessionId: Clone + Debug + Eq;
///
/// Creates a clone of this NodeContext to be used for a different Node.
///
/// To clone a Context for use for the same Node, `Clone` is used directly.
///
fn clone_for(&self, entry_id: EntryId) -> <Self::Node as Node>::Context;
///
/// Returns the SessionId for this Context, which should uniquely identify a caller's run for the
/// purposes of "once per Session" behaviour.
///
fn session_id(&self) -> &Self::SessionId;
///
/// Returns a reference to the Graph for this Context.
///
fn graph(&self) -> &Graph<Self::Node>;
///
/// Spawns a Future on an Executor provided by the context.
///
/// NB: Unlike the futures `Executor` trait itself, this implementation _must_ spawn the work
/// on another thread, as it is called from within the Graph lock.
///
fn spawn<F>(&self, future: F)
where
F: Future<Item = (), Error = ()> + Send +'static;
}
| user_facing_name | identifier_name |
node.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::fmt::{Debug, Display};
use std::hash::Hash;
use boxfuture::BoxFuture;
use hashing::Digest;
use futures01::future::Future;
use petgraph::stable_graph;
use crate::entry::Entry;
use crate::Graph;
// 2^32 Nodes ought to be more than enough for anyone!
pub type EntryId = stable_graph::NodeIndex<u32>;
///
/// Defines executing a cacheable/memoizable step within the given NodeContext.
///
/// Note that it is assumed that Nodes are very cheap to clone.
///
pub trait Node: Clone + Debug + Display + Eq + Hash + Send +'static {
type Context: NodeContext<Node = Self>;
type Item: Clone + Debug + Eq + Send +'static;
type Error: NodeError;
fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;
///
/// If the given Node output represents an FS operation, returns its Digest.
///
fn digest(result: Self::Item) -> Option<Digest>;
///
/// If the node result is cacheable, return true.
///
fn cacheable(&self, context: &Self::Context) -> bool;
/// Nodes optionally have a user-facing name (distinct from their Debug and Display
/// implementations). This user-facing name is intended to provide high-level information
/// to end users of pants about what computation pants is currently doing. Not all
/// `Node`s need a user-facing name. For `Node`s derived from Python `@rule`s, the
/// user-facing name should be the same as the `name` annotation on the rule decorator.
fn user_facing_name(&self) -> Option<String> |
}
pub trait NodeError: Clone + Debug + Eq + Send {
///
/// Creates an instance that represents that a Node was invalidated out of the
/// Graph (generally while running).
///
fn invalidated() -> Self;
///
/// Creates an instance that represents that a Node dependency was cyclic along the given path.
///
fn cyclic(path: Vec<String>) -> Self;
}
///
/// A trait used to visualize Nodes in either DOT/GraphViz format.
///
pub trait NodeVisualizer<N: Node> {
///
/// Returns a GraphViz color scheme name for this visualizer.
///
fn color_scheme(&self) -> &str;
///
/// Returns a GraphViz color name/id within Self::color_scheme for the given Entry.
///
fn color(&mut self, entry: &Entry<N>, context: &N::Context) -> String;
}
///
/// A trait used to visualize Nodes for the purposes of CLI-output tracing.
///
pub trait NodeTracer<N: Node> {
///
/// Returns true if the given Node Result represents the "bottom" of a trace.
///
/// A trace represents a sub-dag of the entire Graph, and a "bottom" Node result represents
/// a boundary that the trace stops before (ie, a bottom Node will not be rendered in the trace,
/// but anything that depends on a bottom Node will be).
///
fn is_bottom(result: Option<Result<N::Item, N::Error>>) -> bool;
///
/// Renders the given result for a trace. The trace will already be indented by `indent`, but
/// an implementer creating a multi-line output would need to indent them as well.
///
fn state_str(indent: &str, result: Option<Result<N::Item, N::Error>>) -> String;
}
///
/// A context passed between Nodes that also stores an EntryId to uniquely identify them.
///
pub trait NodeContext: Clone + Send +'static {
///
/// The type generated when this Context is cloned for another Node.
///
type Node: Node;
///
/// The Session ID type for this Context. Some Node behaviours (in particular: Node::cacheable)
/// have Session-specific semantics. More than one context object might be associated with a
/// single caller "session".
///
type SessionId: Clone + Debug + Eq;
///
/// Creates a clone of this NodeContext to be used for a different Node.
///
/// To clone a Context for use for the same Node, `Clone` is used directly.
///
fn clone_for(&self, entry_id: EntryId) -> <Self::Node as Node>::Context;
///
/// Returns the SessionId for this Context, which should uniquely identify a caller's run for the
/// purposes of "once per Session" behaviour.
///
fn session_id(&self) -> &Self::SessionId;
///
/// Returns a reference to the Graph for this Context.
///
fn graph(&self) -> &Graph<Self::Node>;
///
/// Spawns a Future on an Executor provided by the context.
///
/// NB: Unlike the futures `Executor` trait itself, this implementation _must_ spawn the work
/// on another thread, as it is called from within the Graph lock.
///
fn spawn<F>(&self, future: F)
where
F: Future<Item = (), Error = ()> + Send +'static;
}
| {
None
} | identifier_body |
cssparse.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// Some little helpers for hooking up the HTML parser with the CSS parser.
use std::cell::Cell;
use std::comm;
use std::comm::Port;
use std::task;
use style::Stylesheet;
use servo_net::resource_task::{Load, ProgressMsg, Payload, Done, ResourceTask};
use extra::url::Url;
/// Where a style sheet comes from.
pub enum StylesheetProvenance {
UrlProvenance(Url),
InlineProvenance(Url, ~str),
}
pub fn spawn_css_parser(provenance: StylesheetProvenance,
resource_task: ResourceTask)
-> Port<Stylesheet> {
let (result_port, result_chan) = comm::stream();
let provenance_cell = Cell::new(provenance); | do task::spawn {
// TODO: CSS parsing should take a base URL.
let _url = do provenance_cell.with_ref |p| {
match *p {
UrlProvenance(ref the_url) => (*the_url).clone(),
InlineProvenance(ref the_url, _) => (*the_url).clone()
}
};
let sheet = match provenance_cell.take() {
UrlProvenance(url) => {
debug!("cssparse: loading style sheet at {:s}", url.to_str());
let (input_port, input_chan) = comm::stream();
resource_task.send(Load(url, input_chan));
Stylesheet::from_iter(ProgressMsgPortIterator {
progress_port: input_port.recv().progress_port
})
}
InlineProvenance(_, data) => {
Stylesheet::from_str(data)
}
};
result_chan.send(sheet);
}
return result_port;
}
struct ProgressMsgPortIterator {
progress_port: Port<ProgressMsg>
}
impl Iterator<~[u8]> for ProgressMsgPortIterator {
fn next(&mut self) -> Option<~[u8]> {
match self.progress_port.recv() {
Payload(data) => Some(data),
Done(*) => None
}
}
} | random_line_split |
|
cssparse.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// Some little helpers for hooking up the HTML parser with the CSS parser.
use std::cell::Cell;
use std::comm;
use std::comm::Port;
use std::task;
use style::Stylesheet;
use servo_net::resource_task::{Load, ProgressMsg, Payload, Done, ResourceTask};
use extra::url::Url;
/// Where a style sheet comes from.
pub enum StylesheetProvenance {
UrlProvenance(Url),
InlineProvenance(Url, ~str),
}
pub fn | (provenance: StylesheetProvenance,
resource_task: ResourceTask)
-> Port<Stylesheet> {
let (result_port, result_chan) = comm::stream();
let provenance_cell = Cell::new(provenance);
do task::spawn {
// TODO: CSS parsing should take a base URL.
let _url = do provenance_cell.with_ref |p| {
match *p {
UrlProvenance(ref the_url) => (*the_url).clone(),
InlineProvenance(ref the_url, _) => (*the_url).clone()
}
};
let sheet = match provenance_cell.take() {
UrlProvenance(url) => {
debug!("cssparse: loading style sheet at {:s}", url.to_str());
let (input_port, input_chan) = comm::stream();
resource_task.send(Load(url, input_chan));
Stylesheet::from_iter(ProgressMsgPortIterator {
progress_port: input_port.recv().progress_port
})
}
InlineProvenance(_, data) => {
Stylesheet::from_str(data)
}
};
result_chan.send(sheet);
}
return result_port;
}
struct ProgressMsgPortIterator {
progress_port: Port<ProgressMsg>
}
impl Iterator<~[u8]> for ProgressMsgPortIterator {
fn next(&mut self) -> Option<~[u8]> {
match self.progress_port.recv() {
Payload(data) => Some(data),
Done(*) => None
}
}
}
| spawn_css_parser | identifier_name |
char.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Character manipulation (`char` type, Unicode Scalar Value)
//!
//! This module provides the `CharExt` trait, as well as its
//! implementation for the primitive `char` type, in order to allow
//! basic character manipulation.
//!
//! A `char` actually represents a
//! *[Unicode Scalar
//! Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, as it can
//! contain any Unicode code point except high-surrogate and low-surrogate code
//! points.
//!
//! As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\]
//! (inclusive) are allowed. A `char` can always be safely cast to a `u32`;
//! however the converse is not always true due to the above range limits
//! and, as such, should be performed via the `from_u32` function.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(primitive = "char")]
use core::char::CharExt as C;
use core::option::Option::{self, Some, None};
use core::iter::Iterator;
use tables::{derived_property, property, general_category, conversions, charwidth};
// stable reexports
pub use core::char::{MAX, from_u32, from_digit, EscapeUnicode, EscapeDefault};
// unstable reexports
#[allow(deprecated)]
pub use normalize::{decompose_canonical, decompose_compatible, compose};
#[allow(deprecated)]
pub use tables::normalization::canonical_combining_class;
pub use tables::UNICODE_VERSION;
/// An iterator over the lowercase mapping of a given character, returned from
/// the [`to_lowercase` method](../primitive.char.html#method.to_lowercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToLowercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToLowercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
| #[stable(feature = "rust1", since = "1.0.0")]
pub struct ToUppercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToUppercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the titlecase mapping of a given character, returned from
/// the [`to_titlecase` method](../primitive.char.html#method.to_titlecase) on
/// characters.
#[unstable(feature = "unicode", reason = "recently added")]
pub struct ToTitlecase(CaseMappingIter);
#[stable(feature = "unicode_case_mapping", since = "1.2.0")]
impl Iterator for ToTitlecase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
enum CaseMappingIter {
Three(char, char, char),
Two(char, char),
One(char),
Zero
}
impl CaseMappingIter {
fn new(chars: [char; 3]) -> CaseMappingIter {
if chars[2] == '\0' {
if chars[1] == '\0' {
CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0'
} else {
CaseMappingIter::Two(chars[0], chars[1])
}
} else {
CaseMappingIter::Three(chars[0], chars[1], chars[2])
}
}
}
impl Iterator for CaseMappingIter {
type Item = char;
fn next(&mut self) -> Option<char> {
match *self {
CaseMappingIter::Three(a, b, c) => {
*self = CaseMappingIter::Two(b, c);
Some(a)
}
CaseMappingIter::Two(b, c) => {
*self = CaseMappingIter::One(c);
Some(b)
}
CaseMappingIter::One(c) => {
*self = CaseMappingIter::Zero;
Some(c)
}
CaseMappingIter::Zero => None,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "char"]
impl char {
/// Checks if a `char` parses as a numeric digit in the given radix.
///
/// Compared to `is_numeric()`, this function only recognizes the characters
/// `0-9`, `a-z` and `A-Z`.
///
/// # Return value
///
/// Returns `true` if `c` is a valid digit under `radix`, and `false`
/// otherwise.
///
/// # Panics
///
/// Panics if given a radix > 36.
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert!(c.is_digit(10));
///
/// assert!('f'.is_digit(16));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_digit(self, radix: u32) -> bool { C::is_digit(self, radix) }
/// Converts a character to the corresponding digit.
///
/// # Return value
///
/// If `c` is between '0' and '9', the corresponding value between 0 and
/// 9. If `c` is 'a' or 'A', 10. If `c` is 'b' or 'B', 11, etc. Returns
/// none if the character does not refer to a digit in the given radix.
///
/// # Panics
///
/// Panics if given a radix outside the range [0..36].
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert_eq!(c.to_digit(10), Some(1));
///
/// assert_eq!('f'.to_digit(16), Some(15));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_digit(self, radix: u32) -> Option<u32> { C::to_digit(self, radix) }
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
/// All characters are escaped with Rust syntax of the form `\\u{NNNN}`
/// where `NNNN` is the shortest hexadecimal representation of the code
/// point.
///
/// # Examples
///
/// ```
/// for i in '❤'.escape_unicode() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// u
/// {
/// 2
/// 7
/// 6
/// 4
/// }
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let heart: String = '❤'.escape_unicode().collect();
///
/// assert_eq!(heart, r"\u{2764}");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_unicode(self) -> EscapeUnicode { C::escape_unicode(self) }
/// Returns an iterator that yields the 'default' ASCII and
/// C++11-like literal escape of a character, as `char`s.
///
/// The default is chosen with a bias toward producing literals that are
/// legal in a variety of languages, including C++11 and similar C-family
/// languages. The exact rules are:
///
/// * Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively.
/// * Single-quote, double-quote and backslash chars are backslash-
/// escaped.
/// * Any other chars in the range [0x20,0x7e] are not escaped.
/// * Any other chars are given hex Unicode escapes; see `escape_unicode`.
///
/// # Examples
///
/// ```
/// for i in '"'.escape_default() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// "
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let quote: String = '"'.escape_default().collect();
///
/// assert_eq!(quote, "\\\"");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_default(self) -> EscapeDefault { C::escape_default(self) }
/// Returns the number of bytes this character would need if encoded in
/// UTF-8.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf8();
///
/// assert_eq!(n, 2);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf8(self) -> usize { C::len_utf8(self) }
/// Returns the number of 16-bit code units this character would need if
/// encoded in UTF-16.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf16();
///
/// assert_eq!(n, 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf16(self) -> usize { C::len_utf16(self) }
/// Encodes this character as UTF-8 into the provided byte buffer, and then
/// returns the number of bytes written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length four is large enough to
/// encode any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes two bytes to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 2];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, Some(2));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> { C::encode_utf8(self, dst) }
/// Encodes this character as UTF-16 into the provided `u16` buffer, and
/// then returns the number of `u16`s written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length 2 is large enough to encode
/// any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes one `u16` to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf16(&mut b);
///
/// assert_eq!(result, Some(1));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 0];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> { C::encode_utf16(self, dst) }
/// Returns whether the specified character is considered a Unicode
/// alphabetic code point.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphabetic(self) -> bool {
match self {
'a'... 'z' | 'A'... 'Z' => true,
c if c > '\x7f' => derived_property::Alphabetic(c),
_ => false
}
}
/// Returns whether the specified character satisfies the 'XID_Start'
/// Unicode property.
///
/// 'XID_Start' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to ID_Start but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) }
/// Returns whether the specified `char` satisfies the 'XID_Continue'
/// Unicode property.
///
/// 'XID_Continue' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to 'ID_Continue' but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) }
/// Indicates whether a character is in lowercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Lowercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_lowercase(self) -> bool {
match self {
'a'... 'z' => true,
c if c > '\x7f' => derived_property::Lowercase(c),
_ => false
}
}
/// Indicates whether a character is in uppercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Uppercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_uppercase(self) -> bool {
match self {
'A'... 'Z' => true,
c if c > '\x7f' => derived_property::Uppercase(c),
_ => false
}
}
/// Indicates whether a character is whitespace.
///
/// Whitespace is defined in terms of the Unicode Property `White_Space`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_whitespace(self) -> bool {
match self {
'' | '\x09'... '\x0d' => true,
c if c > '\x7f' => property::White_Space(c),
_ => false
}
}
/// Indicates whether a character is alphanumeric.
///
/// Alphanumericness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphanumeric(self) -> bool {
self.is_alphabetic() || self.is_numeric()
}
/// Indicates whether a character is a control code point.
///
/// Control code points are defined in terms of the Unicode General
/// Category `Cc`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_control(self) -> bool { general_category::Cc(self) }
/// Indicates whether the character is numeric (Nd, Nl, or No).
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_numeric(self) -> bool {
match self {
'0'... '9' => true,
c if c > '\x7f' => general_category::N(c),
_ => false
}
}
/// Converts a character to its lowercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_lowercase(self) -> ToLowercase {
ToLowercase(CaseMappingIter::new(conversions::to_lower(self)))
}
/// Converts a character to its titlecase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// This differs from `to_uppercase()` since Unicode contains
/// digraphs and ligature characters.
/// For example, U+01F3 “dz” and U+FB01 “fi”
/// map to U+01F1 “DZ” and U+0046 U+0069 “Fi”, respectively.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[unstable(feature = "unicode", reason = "recently added")]
#[inline]
pub fn to_titlecase(self) -> ToTitlecase {
ToTitlecase(CaseMappingIter::new(conversions::to_title(self)))
}
/// Converts a character to its uppercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring:
/// it maps one Unicode character to its uppercase equivalent
/// according to the Unicode database [1]
/// and the additional complex mappings [`SpecialCasing.txt`].
/// Conditional mappings (based on context or language) are not considerd here.
///
/// A full reference can be found here [2].
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// uppercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
///
/// [1]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
///
/// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
///
/// [2]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_uppercase(self) -> ToUppercase {
ToUppercase(CaseMappingIter::new(conversions::to_upper(self)))
}
/// Returns this character's displayed width in columns, or `None` if it is a
/// control character other than `'\x00'`.
///
/// `is_cjk` determines behavior for characters in the Ambiguous category:
/// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1.
/// In CJK contexts, `is_cjk` should be `true`, else it should be `false`.
/// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
/// recommends that these characters be treated as 1 column (i.e.,
/// `is_cjk` = `false`) if the context cannot be reliably determined.
#[deprecated(reason = "use the crates.io `unicode-width` library instead",
since = "1.0.0")]
#[unstable(feature = "unicode",
reason = "needs expert opinion. is_cjk flag stands out as ugly")]
pub fn width(self, is_cjk: bool) -> Option<usize> { charwidth::width(self, is_cjk) }
} | /// An iterator over the uppercase mapping of a given character, returned from
/// the [`to_uppercase` method](../primitive.char.html#method.to_uppercase) on
/// characters. | random_line_split |
char.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Character manipulation (`char` type, Unicode Scalar Value)
//!
//! This module provides the `CharExt` trait, as well as its
//! implementation for the primitive `char` type, in order to allow
//! basic character manipulation.
//!
//! A `char` actually represents a
//! *[Unicode Scalar
//! Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, as it can
//! contain any Unicode code point except high-surrogate and low-surrogate code
//! points.
//!
//! As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\]
//! (inclusive) are allowed. A `char` can always be safely cast to a `u32`;
//! however the converse is not always true due to the above range limits
//! and, as such, should be performed via the `from_u32` function.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(primitive = "char")]
use core::char::CharExt as C;
use core::option::Option::{self, Some, None};
use core::iter::Iterator;
use tables::{derived_property, property, general_category, conversions, charwidth};
// stable reexports
pub use core::char::{MAX, from_u32, from_digit, EscapeUnicode, EscapeDefault};
// unstable reexports
#[allow(deprecated)]
pub use normalize::{decompose_canonical, decompose_compatible, compose};
#[allow(deprecated)]
pub use tables::normalization::canonical_combining_class;
pub use tables::UNICODE_VERSION;
/// An iterator over the lowercase mapping of a given character, returned from
/// the [`to_lowercase` method](../primitive.char.html#method.to_lowercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToLowercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToLowercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the uppercase mapping of a given character, returned from
/// the [`to_uppercase` method](../primitive.char.html#method.to_uppercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToUppercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToUppercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the titlecase mapping of a given character, returned from
/// the [`to_titlecase` method](../primitive.char.html#method.to_titlecase) on
/// characters.
#[unstable(feature = "unicode", reason = "recently added")]
pub struct ToTitlecase(CaseMappingIter);
#[stable(feature = "unicode_case_mapping", since = "1.2.0")]
impl Iterator for ToTitlecase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
enum CaseMappingIter {
Three(char, char, char),
Two(char, char),
One(char),
Zero
}
impl CaseMappingIter {
fn new(chars: [char; 3]) -> CaseMappingIter {
if chars[2] == '\0' {
if chars[1] == '\0' {
CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0'
} else {
CaseMappingIter::Two(chars[0], chars[1])
}
} else {
CaseMappingIter::Three(chars[0], chars[1], chars[2])
}
}
}
impl Iterator for CaseMappingIter {
type Item = char;
fn next(&mut self) -> Option<char> {
match *self {
CaseMappingIter::Three(a, b, c) => {
*self = CaseMappingIter::Two(b, c);
Some(a)
}
CaseMappingIter::Two(b, c) => {
*self = CaseMappingIter::One(c);
Some(b)
}
CaseMappingIter::One(c) => {
*self = CaseMappingIter::Zero;
Some(c)
}
CaseMappingIter::Zero => None,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "char"]
impl char {
/// Checks if a `char` parses as a numeric digit in the given radix.
///
/// Compared to `is_numeric()`, this function only recognizes the characters
/// `0-9`, `a-z` and `A-Z`.
///
/// # Return value
///
/// Returns `true` if `c` is a valid digit under `radix`, and `false`
/// otherwise.
///
/// # Panics
///
/// Panics if given a radix > 36.
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert!(c.is_digit(10));
///
/// assert!('f'.is_digit(16));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_digit(self, radix: u32) -> bool { C::is_digit(self, radix) }
/// Converts a character to the corresponding digit.
///
/// # Return value
///
/// If `c` is between '0' and '9', the corresponding value between 0 and
/// 9. If `c` is 'a' or 'A', 10. If `c` is 'b' or 'B', 11, etc. Returns
/// none if the character does not refer to a digit in the given radix.
///
/// # Panics
///
/// Panics if given a radix outside the range [0..36].
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert_eq!(c.to_digit(10), Some(1));
///
/// assert_eq!('f'.to_digit(16), Some(15));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_digit(self, radix: u32) -> Option<u32> { C::to_digit(self, radix) }
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
/// All characters are escaped with Rust syntax of the form `\\u{NNNN}`
/// where `NNNN` is the shortest hexadecimal representation of the code
/// point.
///
/// # Examples
///
/// ```
/// for i in '❤'.escape_unicode() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// u
/// {
/// 2
/// 7
/// 6
/// 4
/// }
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let heart: String = '❤'.escape_unicode().collect();
///
/// assert_eq!(heart, r"\u{2764}");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_unicode(self) -> EscapeUnicode { C::escape_unicode(self) }
/// Returns an iterator that yields the 'default' ASCII and
/// C++11-like literal escape of a character, as `char`s.
///
/// The default is chosen with a bias toward producing literals that are
/// legal in a variety of languages, including C++11 and similar C-family
/// languages. The exact rules are:
///
/// * Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively.
/// * Single-quote, double-quote and backslash chars are backslash-
/// escaped.
/// * Any other chars in the range [0x20,0x7e] are not escaped.
/// * Any other chars are given hex Unicode escapes; see `escape_unicode`.
///
/// # Examples
///
/// ```
/// for i in '"'.escape_default() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// "
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let quote: String = '"'.escape_default().collect();
///
/// assert_eq!(quote, "\\\"");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_default(self) -> EscapeDefault { C::escape_default(self) }
/// Returns the number of bytes this character would need if encoded in
/// UTF-8.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf8();
///
/// assert_eq!(n, 2);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf8(self) -> usize { C::len_utf8(self) }
/// Returns the number of 16-bit code units this character would need if
/// encoded in UTF-16.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf16();
///
/// assert_eq!(n, 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf16(self) -> usize { C::len_utf16(self) }
/// Encodes this character as UTF-8 into the provided byte buffer, and then
/// returns the number of bytes written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length four is large enough to
/// encode any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes two bytes to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 2];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, Some(2));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> { C::encode_utf8(self, dst) }
/// Encodes this character as UTF-16 into the provided `u16` buffer, and
/// then returns the number of `u16`s written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length 2 is large enough to encode
/// any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes one `u16` to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf16(&mut b);
///
/// assert_eq!(result, Some(1));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 0];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> { C::encode_utf16(self, dst) }
/// Returns whether the specified character is considered a Unicode
/// alphabetic code point.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphabetic(self) -> bool {
match self {
'a'... 'z' | 'A'... 'Z' => true,
c if c > '\x7f' => derived_property::Alphabetic(c),
_ => false
}
}
/// Returns whether the specified character satisfies the 'XID_Start'
/// Unicode property.
///
/// 'XID_Start' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to ID_Start but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) }
/// Returns whether the specified `char` satisfies the 'XID_Continue'
/// Unicode property.
///
/// 'XID_Continue' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to 'ID_Continue' but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) }
/// Indicates whether a character is in lowercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Lowercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_lowercase(self) -> bool {
match self {
'a'... 'z' => true,
c if c > '\x7f' => derived_property::Lowercase(c),
_ => false
}
}
/// Indicates whether a character is in uppercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Uppercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_uppercase(self) -> bool {
match self {
'A'... 'Z' => true,
c if c > '\x7f' => derived_property::Uppercase(c),
_ => false
}
}
/// Indicates whether a character is whitespace.
///
/// Whitespace is defined in terms of the Unicode Property `White_Space`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_whitespace(self) -> bool {
match self {
'' | '\x09'... '\x0d' => true,
c if c > '\x7f' => property::White_Space(c),
_ => false
}
}
/// Indicates whether a character is alphanumeric.
///
/// Alphanumericness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphanumeric(self) -> bool {
self.is_alphabetic() || self.is_numeric()
}
/// Indicates whether a character is a control code point.
///
/// Control code points are defined in terms of the Unicode General
/// Category `Cc`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_control(self) -> bool { general_category::Cc(self) }
/// Indicates whether the character is numeric (Nd, Nl, or No).
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_numeric(self) -> bool {
match self {
'0'... '9' => true,
c if c > '\x7f' => general_category::N(c),
_ => false
}
}
/// Converts a character to its lowercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_lowercase(self) -> ToLowercase {
ToLowercase(CaseMappingIter::new(conversions::to_lower(self)))
}
/// Converts a character to its titlecase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// This differs from `to_uppercase()` since Unicode contains
/// digraphs and ligature characters.
/// For example, U+01F3 “dz” and U+FB01 “fi”
/// map to U+01F1 “DZ” and U+0046 U+0069 “Fi”, respectively.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[unstable(feature = "unicode", reason = "recently added")]
#[inline]
pub fn to_titlecase(self) -> ToTitlecase {
ToTitlecase(CaseMappingIter::new(conversions::to_title(self)))
}
/// Converts a character to its uppercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring:
/// it maps one Unicode character to its uppercase equivalent
/// according to the Unicode database [1]
/// and the additional complex mappings [`SpecialCasing.txt`].
/// Conditional mappings (based on context or language) are not considerd here.
///
/// A full reference can be found here [2].
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// uppercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
///
/// [1]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
///
/// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
///
/// [2]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_uppercase(self) -> ToUppercase {
ToUppercase(CaseMappin | 's displayed width in columns, or `None` if it is a
/// control character other than `'\x00'`.
///
/// `is_cjk` determines behavior for characters in the Ambiguous category:
/// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1.
/// In CJK contexts, `is_cjk` should be `true`, else it should be `false`.
/// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
/// recommends that these characters be treated as 1 column (i.e.,
/// `is_cjk` = `false`) if the context cannot be reliably determined.
#[deprecated(reason = "use the crates.io `unicode-width` library instead",
since = "1.0.0")]
#[unstable(feature = "unicode",
reason = "needs expert opinion. is_cjk flag stands out as ugly")]
pub fn width(self, is_cjk: bool) -> Option<usize> { charwidth::width(self, is_cjk) }
}
| gIter::new(conversions::to_upper(self)))
}
/// Returns this character | identifier_body |
char.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Character manipulation (`char` type, Unicode Scalar Value)
//!
//! This module provides the `CharExt` trait, as well as its
//! implementation for the primitive `char` type, in order to allow
//! basic character manipulation.
//!
//! A `char` actually represents a
//! *[Unicode Scalar
//! Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, as it can
//! contain any Unicode code point except high-surrogate and low-surrogate code
//! points.
//!
//! As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\]
//! (inclusive) are allowed. A `char` can always be safely cast to a `u32`;
//! however the converse is not always true due to the above range limits
//! and, as such, should be performed via the `from_u32` function.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(primitive = "char")]
use core::char::CharExt as C;
use core::option::Option::{self, Some, None};
use core::iter::Iterator;
use tables::{derived_property, property, general_category, conversions, charwidth};
// stable reexports
pub use core::char::{MAX, from_u32, from_digit, EscapeUnicode, EscapeDefault};
// unstable reexports
#[allow(deprecated)]
pub use normalize::{decompose_canonical, decompose_compatible, compose};
#[allow(deprecated)]
pub use tables::normalization::canonical_combining_class;
pub use tables::UNICODE_VERSION;
/// An iterator over the lowercase mapping of a given character, returned from
/// the [`to_lowercase` method](../primitive.char.html#method.to_lowercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToLowercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToLowercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the uppercase mapping of a given character, returned from
/// the [`to_uppercase` method](../primitive.char.html#method.to_uppercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToUppercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToUppercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the titlecase mapping of a given character, returned from
/// the [`to_titlecase` method](../primitive.char.html#method.to_titlecase) on
/// characters.
#[unstable(feature = "unicode", reason = "recently added")]
pub struct ToTitlecase(CaseMappingIter);
#[stable(feature = "unicode_case_mapping", since = "1.2.0")]
impl Iterator for ToTitlecase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
enum CaseMappingIter {
Three(char, char, char),
Two(char, char),
One(char),
Zero
}
impl CaseMappingIter {
fn new(chars: [char; 3]) -> CaseMappingIter {
if chars[2] == '\0' {
if chars[1] == '\0' {
CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0'
} else {
CaseMappingIter::Two(chars[0], chars[1])
}
} else {
CaseMappingIter::Three(chars[0], chars[1], chars[2])
}
}
}
impl Iterator for CaseMappingIter {
type Item = char;
fn next(&mut self) -> Option<char> {
match *self {
CaseMappingIter::Three(a, b, c) => {
*self = CaseMappingIter::Two(b, c);
Some(a)
}
CaseMappingIter::Two(b, c) => {
*self = CaseMappingIter::One(c);
Some(b)
}
CaseMappingIter::One(c) => {
*self = CaseMappingIter::Zero;
Some(c)
}
CaseMappingIter::Zero => None,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "char"]
impl char {
/// Checks if a `char` parses as a numeric digit in the given radix.
///
/// Compared to `is_numeric()`, this function only recognizes the characters
/// `0-9`, `a-z` and `A-Z`.
///
/// # Return value
///
/// Returns `true` if `c` is a valid digit under `radix`, and `false`
/// otherwise.
///
/// # Panics
///
/// Panics if given a radix > 36.
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert!(c.is_digit(10));
///
/// assert!('f'.is_digit(16));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_digit(self, radix: u32) -> bool { C::is_digit(self, radix) }
/// Converts a character to the corresponding digit.
///
/// # Return value
///
/// If `c` is between '0' and '9', the corresponding value between 0 and
/// 9. If `c` is 'a' or 'A', 10. If `c` is 'b' or 'B', 11, etc. Returns
/// none if the character does not refer to a digit in the given radix.
///
/// # Panics
///
/// Panics if given a radix outside the range [0..36].
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert_eq!(c.to_digit(10), Some(1));
///
/// assert_eq!('f'.to_digit(16), Some(15));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_digit(self, radix: u32) -> Option<u32> { C::to_digit(self, radix) }
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
/// All characters are escaped with Rust syntax of the form `\\u{NNNN}`
/// where `NNNN` is the shortest hexadecimal representation of the code
/// point.
///
/// # Examples
///
/// ```
/// for i in '❤'.escape_unicode() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// u
/// {
/// 2
/// 7
/// 6
/// 4
/// }
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let heart: String = '❤'.escape_unicode().collect();
///
/// assert_eq!(heart, r"\u{2764}");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_unicode(self) -> EscapeUnicode { C::escape_unicode(self) }
/// Returns an iterator that yields the 'default' ASCII and
/// C++11-like literal escape of a character, as `char`s.
///
/// The default is chosen with a bias toward producing literals that are
/// legal in a variety of languages, including C++11 and similar C-family
/// languages. The exact rules are:
///
/// * Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively.
/// * Single-quote, double-quote and backslash chars are backslash-
/// escaped.
/// * Any other chars in the range [0x20,0x7e] are not escaped.
/// * Any other chars are given hex Unicode escapes; see `escape_unicode`.
///
/// # Examples
///
/// ```
/// for i in '"'.escape_default() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// "
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let quote: String = '"'.escape_default().collect();
///
/// assert_eq!(quote, "\\\"");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn esca | f) -> EscapeDefault { C::escape_default(self) }
/// Returns the number of bytes this character would need if encoded in
/// UTF-8.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf8();
///
/// assert_eq!(n, 2);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf8(self) -> usize { C::len_utf8(self) }
/// Returns the number of 16-bit code units this character would need if
/// encoded in UTF-16.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf16();
///
/// assert_eq!(n, 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf16(self) -> usize { C::len_utf16(self) }
/// Encodes this character as UTF-8 into the provided byte buffer, and then
/// returns the number of bytes written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length four is large enough to
/// encode any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes two bytes to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 2];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, Some(2));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> { C::encode_utf8(self, dst) }
/// Encodes this character as UTF-16 into the provided `u16` buffer, and
/// then returns the number of `u16`s written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length 2 is large enough to encode
/// any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes one `u16` to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf16(&mut b);
///
/// assert_eq!(result, Some(1));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 0];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> { C::encode_utf16(self, dst) }
/// Returns whether the specified character is considered a Unicode
/// alphabetic code point.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphabetic(self) -> bool {
match self {
'a'... 'z' | 'A'... 'Z' => true,
c if c > '\x7f' => derived_property::Alphabetic(c),
_ => false
}
}
/// Returns whether the specified character satisfies the 'XID_Start'
/// Unicode property.
///
/// 'XID_Start' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to ID_Start but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) }
/// Returns whether the specified `char` satisfies the 'XID_Continue'
/// Unicode property.
///
/// 'XID_Continue' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to 'ID_Continue' but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) }
/// Indicates whether a character is in lowercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Lowercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_lowercase(self) -> bool {
match self {
'a'... 'z' => true,
c if c > '\x7f' => derived_property::Lowercase(c),
_ => false
}
}
/// Indicates whether a character is in uppercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Uppercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_uppercase(self) -> bool {
match self {
'A'... 'Z' => true,
c if c > '\x7f' => derived_property::Uppercase(c),
_ => false
}
}
/// Indicates whether a character is whitespace.
///
/// Whitespace is defined in terms of the Unicode Property `White_Space`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_whitespace(self) -> bool {
match self {
'' | '\x09'... '\x0d' => true,
c if c > '\x7f' => property::White_Space(c),
_ => false
}
}
/// Indicates whether a character is alphanumeric.
///
/// Alphanumericness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphanumeric(self) -> bool {
self.is_alphabetic() || self.is_numeric()
}
/// Indicates whether a character is a control code point.
///
/// Control code points are defined in terms of the Unicode General
/// Category `Cc`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_control(self) -> bool { general_category::Cc(self) }
/// Indicates whether the character is numeric (Nd, Nl, or No).
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_numeric(self) -> bool {
match self {
'0'... '9' => true,
c if c > '\x7f' => general_category::N(c),
_ => false
}
}
/// Converts a character to its lowercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_lowercase(self) -> ToLowercase {
ToLowercase(CaseMappingIter::new(conversions::to_lower(self)))
}
/// Converts a character to its titlecase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// This differs from `to_uppercase()` since Unicode contains
/// digraphs and ligature characters.
/// For example, U+01F3 “dz” and U+FB01 “fi”
/// map to U+01F1 “DZ” and U+0046 U+0069 “Fi”, respectively.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[unstable(feature = "unicode", reason = "recently added")]
#[inline]
pub fn to_titlecase(self) -> ToTitlecase {
ToTitlecase(CaseMappingIter::new(conversions::to_title(self)))
}
/// Converts a character to its uppercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring:
/// it maps one Unicode character to its uppercase equivalent
/// according to the Unicode database [1]
/// and the additional complex mappings [`SpecialCasing.txt`].
/// Conditional mappings (based on context or language) are not considerd here.
///
/// A full reference can be found here [2].
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// uppercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
///
/// [1]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
///
/// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
///
/// [2]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_uppercase(self) -> ToUppercase {
ToUppercase(CaseMappingIter::new(conversions::to_upper(self)))
}
/// Returns this character's displayed width in columns, or `None` if it is a
/// control character other than `'\x00'`.
///
/// `is_cjk` determines behavior for characters in the Ambiguous category:
/// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1.
/// In CJK contexts, `is_cjk` should be `true`, else it should be `false`.
/// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
/// recommends that these characters be treated as 1 column (i.e.,
/// `is_cjk` = `false`) if the context cannot be reliably determined.
#[deprecated(reason = "use the crates.io `unicode-width` library instead",
since = "1.0.0")]
#[unstable(feature = "unicode",
reason = "needs expert opinion. is_cjk flag stands out as ugly")]
pub fn width(self, is_cjk: bool) -> Option<usize> { charwidth::width(self, is_cjk) }
}
| pe_default(sel | identifier_name |
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn uumain(args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if!multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if!opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else { "\n" };
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
| if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
} | fn strip_suffix(name: &str, suffix: &str) -> String {
if name == suffix {
return name.to_owned();
}
| random_line_split |
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn uumain(args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if!multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if!opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else { "\n" };
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
fn strip_suffix(name: &str, suffix: &str) -> String | {
if name == suffix {
return name.to_owned();
}
if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
} | identifier_body |
|
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn | (args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if!multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if!opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else { "\n" };
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
fn strip_suffix(name: &str, suffix: &str) -> String {
if name == suffix {
return name.to_owned();
}
if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
}
| uumain | identifier_name |
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn uumain(args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if!multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if!opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else | ;
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
fn strip_suffix(name: &str, suffix: &str) -> String {
if name == suffix {
return name.to_owned();
}
if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
}
| { "\n" } | conditional_block |
lib.rs | //! # The Rust Core Library
//!
//! The Rust Core Library is the dependency-free[^free] foundation of [The
//! Rust Standard Library](../std/index.html). It is the portable glue
//! between the language and its libraries, defining the intrinsic and
//! primitive building blocks of all Rust code. It links to no
//! upstream libraries, no system libraries, and no libc.
//!
//! [^free]: Strictly speaking, there are some symbols which are needed but
//! they aren't always necessary.
//!
//! The core library is *minimal*: it isn't even aware of heap allocation,
//! nor does it provide concurrency or I/O. These things require
//! platform integration, and this library is platform-agnostic.
//!
//! # How to use the core library
//!
//! Please note that all of these details are currently not considered stable.
//!
// FIXME: Fill me in with more detail when the interface settles
//! This library is built on the assumption of a few existing symbols:
//!
//! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are
//! often generated by LLVM. Additionally, this library can make explicit
//! calls to these functions. Their signatures are the same as found in C.
//! These functions are often provided by the system libc, but can also be
//! provided by the [compiler-builtins crate](https://crates.io/crates/compiler_builtins).
//!
//! * `rust_begin_panic` - This function takes four arguments, a
//! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments
//! dictate the panic message, the file at which panic was invoked, and the
//! line and column inside the file. It is up to consumers of this core
//! library to define this panic function; it is only required to never
//! return. This requires a `lang` attribute named `panic_impl`.
//!
//! * `rust_eh_personality` - is used by the failure mechanisms of the
//! compiler. This is often mapped to GCC's personality function, but crates
//! which do not trigger a panic can be assured that this function is never
//! called. The `lang` attribute is called `eh_personality`.
// Since libcore defines many fundamental lang items, all tests live in a
// separate crate, libcoretest, to avoid bizarre issues.
//
// Here we explicitly #[cfg]-out this whole crate when testing. If we don't do
// this, both the generated test artifact and the linked libtest (which
// transitively includes libcore) will both define the same set of lang items,
// and this will cause the E0152 "found duplicate lang item" error. See
// discussion in #50466 for details.
//
// This cfg won't affect doc tests.
#![cfg(not(test))]
// To run libcore tests without x.py without ending up with two copies of libcore, Miri needs to be
// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
// rustc itself never sets the feature, so this line has no affect there.
#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
#![stable(feature = "core", since = "1.6.0")]
#![doc(
html_playground_url = "https://play.rust-lang.org/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
test(no_crate_inject, attr(deny(warnings))),
test(attr(allow(dead_code, deprecated, unused_variables, unused_mut)))
)]
#![no_core]
// | #![deny(rust_2021_incompatible_or_patterns)]
#![deny(unsafe_op_in_unsafe_fn)]
#![warn(deprecated_in_future)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![allow(explicit_outlives_requirements)]
#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard
//
// Library features for const fns:
#![feature(const_align_of_val)]
#![feature(const_alloc_layout)]
#![feature(const_arguments_as_str)]
#![feature(const_assert_type)]
#![feature(const_bigint_helper_methods)]
#![feature(const_caller_location)]
#![feature(const_cell_into_inner)]
#![feature(const_discriminant)]
#![feature(const_float_bits_conv)]
#![feature(const_float_classify)]
#![feature(const_heap)]
#![feature(const_inherent_unchecked_arith)]
#![feature(const_int_unchecked_arith)]
#![feature(const_intrinsic_copy)]
#![feature(const_intrinsic_forget)]
#![feature(const_likely)]
#![feature(const_maybe_uninit_as_ptr)]
#![feature(const_maybe_uninit_assume_init)]
#![feature(const_option)]
#![feature(const_pin)]
#![feature(const_ptr_offset)]
#![feature(const_ptr_offset_from)]
#![feature(const_ptr_read)]
#![feature(const_ptr_write)]
#![feature(const_raw_ptr_comparison)]
#![feature(const_size_of_val)]
#![feature(const_slice_from_raw_parts)]
#![feature(const_slice_ptr_len)]
#![feature(const_swap)]
#![feature(const_trait_impl)]
#![feature(const_type_id)]
#![feature(const_type_name)]
#![feature(const_unreachable_unchecked)]
#![feature(const_default_impls)]
#![feature(duration_consts_2)]
#![feature(ptr_metadata)]
#![feature(slice_ptr_get)]
#![feature(variant_count)]
//
// Language features:
#![feature(abi_unadjusted)]
#![feature(allow_internal_unsafe)]
#![feature(allow_internal_unstable)]
#![feature(asm)]
#![feature(associated_type_bounds)]
#![feature(auto_traits)]
#![feature(cfg_target_has_atomic)]
#![feature(const_fn_floating_point_arithmetic)]
#![feature(const_fn_fn_ptr_basics)]
#![feature(const_fn_trait_bound)]
#![cfg_attr(bootstrap, feature(const_fn_transmute))]
#![cfg_attr(bootstrap, feature(const_fn_union))]
#![feature(const_impl_trait)]
#![feature(const_mut_refs)]
#![feature(const_panic)]
#![feature(const_precise_live_drops)]
#![feature(const_raw_ptr_deref)]
#![feature(const_refs_to_cell)]
#![feature(decl_macro)]
#![feature(doc_cfg)]
#![feature(doc_notable_trait)]
#![feature(exhaustive_patterns)]
#![feature(extern_types)]
#![feature(fundamental)]
#![feature(if_let_guard)]
#![feature(intra_doc_pointers)]
#![feature(intrinsics)]
#![feature(lang_items)]
#![feature(link_llvm_intrinsics)]
#![feature(llvm_asm)]
#![feature(min_specialization)]
#![feature(negative_impls)]
#![feature(never_type)]
#![feature(no_core)]
#![feature(no_coverage)] // rust-lang/rust#84605
#![feature(no_niche)] // rust-lang/rust#68303
#![feature(platform_intrinsics)]
#![feature(prelude_import)]
#![feature(repr_simd)]
#![feature(rustc_allow_const_fn_unstable)]
#![feature(rustc_attrs)]
#![feature(simd_ffi)]
#![feature(staged_api)]
#![feature(stmt_expr_attributes)]
#![feature(trait_alias)]
#![feature(transparent_unions)]
#![feature(try_blocks)]
#![feature(unboxed_closures)]
#![feature(unsized_fn_params)]
//
// Target features:
#![feature(aarch64_target_feature)]
#![feature(adx_target_feature)]
#![feature(arm_target_feature)]
#![feature(avx512_target_feature)]
#![feature(cmpxchg16b_target_feature)]
#![feature(f16c_target_feature)]
#![feature(hexagon_target_feature)]
#![feature(mips_target_feature)]
#![feature(powerpc_target_feature)]
#![feature(rtm_target_feature)]
#![feature(sse4a_target_feature)]
#![feature(tbm_target_feature)]
#![feature(wasm_target_feature)]
// allow using `core::` in intra-doc links
#[allow(unused_extern_crates)]
extern crate self as core;
#[prelude_import]
#[allow(unused)]
use prelude::v1::*;
#[cfg(not(test))] // See #65860
#[macro_use]
mod macros;
// We don't export this through #[macro_export] for now, to avoid breakage.
// See https://github.com/rust-lang/rust/issues/82913
#[cfg(not(test))]
#[unstable(feature = "assert_matches", issue = "82775")]
/// Unstable module containing the unstable `assert_matches` macro.
pub mod assert_matches {
#[unstable(feature = "assert_matches", issue = "82775")]
pub use crate::macros::{assert_matches, debug_assert_matches};
}
#[macro_use]
mod internal_macros;
#[path = "num/shells/int_macros.rs"]
#[macro_use]
mod int_macros;
#[path = "num/shells/i128.rs"]
pub mod i128;
#[path = "num/shells/i16.rs"]
pub mod i16;
#[path = "num/shells/i32.rs"]
pub mod i32;
#[path = "num/shells/i64.rs"]
pub mod i64;
#[path = "num/shells/i8.rs"]
pub mod i8;
#[path = "num/shells/isize.rs"]
pub mod isize;
#[path = "num/shells/u128.rs"]
pub mod u128;
#[path = "num/shells/u16.rs"]
pub mod u16;
#[path = "num/shells/u32.rs"]
pub mod u32;
#[path = "num/shells/u64.rs"]
pub mod u64;
#[path = "num/shells/u8.rs"]
pub mod u8;
#[path = "num/shells/usize.rs"]
pub mod usize;
#[path = "num/f32.rs"]
pub mod f32;
#[path = "num/f64.rs"]
pub mod f64;
#[macro_use]
pub mod num;
/* The libcore prelude, not as all-encompassing as the libstd prelude */
pub mod prelude;
/* Core modules for ownership management */
pub mod hint;
pub mod intrinsics;
pub mod mem;
pub mod ptr;
/* Core language traits */
pub mod borrow;
pub mod clone;
pub mod cmp;
pub mod convert;
pub mod default;
pub mod marker;
pub mod ops;
/* Core types and methods on primitives */
pub mod any;
pub mod array;
pub mod ascii;
pub mod cell;
pub mod char;
pub mod ffi;
pub mod iter;
#[unstable(feature = "once_cell", issue = "74465")]
pub mod lazy;
pub mod option;
pub mod panic;
pub mod panicking;
pub mod pin;
pub mod result;
#[unstable(feature = "async_stream", issue = "79024")]
pub mod stream;
pub mod sync;
pub mod fmt;
pub mod hash;
pub mod slice;
pub mod str;
pub mod time;
pub mod unicode;
/* Async */
pub mod future;
pub mod task;
/* Heap memory allocator trait */
#[allow(missing_docs)]
pub mod alloc;
// note: does not need to be public
mod bool;
mod tuple;
mod unit;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub mod primitive;
// Pull in the `core_arch` crate directly into libcore. The contents of
// `core_arch` are in a different repository: rust-lang/stdarch.
//
// `core_arch` depends on libcore, but the contents of this module are
// set up in such a way that directly pulling it here works such that the
// crate uses the this crate as its libcore.
#[path = "../../stdarch/crates/core_arch/src/mod.rs"]
#[allow(
missing_docs,
missing_debug_implementations,
dead_code,
unused_imports,
unsafe_op_in_unsafe_fn
)]
#[allow(rustdoc::bare_urls)]
// FIXME: This annotation should be moved into rust-lang/stdarch after clashing_extern_declarations is
// merged. It currently cannot because bootstrap fails as the lint hasn't been defined yet.
#[allow(clashing_extern_declarations)]
#[unstable(feature = "stdsimd", issue = "48556")]
mod core_arch;
#[doc = include_str!("../../stdarch/crates/core_arch/src/core_arch_docs.md")]
#[stable(feature = "simd_arch", since = "1.27.0")]
pub mod arch {
#[stable(feature = "simd_arch", since = "1.27.0")]
pub use crate::core_arch::arch::*;
/// Inline assembly.
///
/// Read the [unstable book] for the usage.
///
/// [unstable book]:../../unstable-book/library-features/asm.html
#[unstable(
feature = "asm",
issue = "72016",
reason = "inline assembly is not stable enough for use and is subject to change"
)]
#[rustc_builtin_macro]
pub macro asm("assembly template", $(operands,)* $(options($(option),*))?) {
/* compiler built-in */
}
/// Module-level inline assembly.
#[unstable(
feature = "global_asm",
issue = "35119",
reason = "`global_asm!` is not stable enough for use and is subject to change"
)]
#[rustc_builtin_macro]
pub macro global_asm("assembly template", $(operands,)* $(options($(option),*))?) {
/* compiler built-in */
}
} | // Lints: | random_line_split |
tests.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use super::*;
use once_cell::sync::Lazy;
use parking_lot::Mutex;
use regex::Regex;
use std::fmt;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering::AcqRel;
use std::sync::Arc;
use tracing::span::Attributes;
use tracing::span::Record;
use tracing::Callsite;
use tracing::Event;
use tracing::Id;
use tracing::Level;
use tracing::Metadata;
use tracing::Subscriber;
#[test]
fn test_callsite_span() {
let callsite = create_callsite::<SpanKindType, _>((11, 0), || CallsiteInfo {
name: "foo".to_string(),
target: "bar".to_string(),
level: Level::ERROR,
file: Some("a.rs".to_string()),
line: Some(10),
module_path: Some("z::a".to_string()),
field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()],
});
assert_eq!(
d(callsite.metadata()),
"Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }"
);
assert_eq!(callsite.identifier(), callsite.metadata().callsite());
let log = capture(|| {
let span = callsite.create_span(&[None, None, None]);
span.record("y", &"yyy2");
span.in_scope(|| {});
let span = callsite.create_span(&[Some(Box::new("foo")), None, Some(Box::new(123))]);
span.record("x", &123);
span.in_scope(|| {});
});
assert_eq!(
log,
[
"new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }, values: ValueSet { callsite: _ }, parent: Current } = 1",
"record(Id(1), Record { values: ValueSet { y: yyy2, callsite: _ } })",
"enter(Id(1))",
"exit(Id(1))",
"new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }, values: ValueSet { x: foo, z: 123, callsite: _ }, parent: Current } = 2",
"record(Id(2), Record { values: ValueSet { x: 123, callsite: _ } })",
"enter(Id(2))",
"exit(Id(2))"
]
);
}
#[test]
fn test_callsite_event() {
let callsite = create_callsite::<EventKindType, _>((22, 0), || CallsiteInfo {
name: "foo".to_string(),
level: Level::ERROR,
field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()],
..Default::default()
});
assert_eq!(
d(callsite.metadata()),
"Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }"
);
assert_eq!(callsite.identifier(), callsite.metadata().callsite());
let log = capture(|| {
callsite.create_event(&[None, None, None]);
callsite.create_event(&[Some(Box::new(12)), None, Some(Box::new("zz"))]);
callsite.create_event(&[Some(Box::new("15"))]);
});
assert_eq!(
log,
[
"event(Event { fields: ValueSet { callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })",
"event(Event { fields: ValueSet { x: 12, z: zz, callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })",
"event(Event { fields: ValueSet { x: 15, callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })"
]
);
}
#[test]
fn test_callsite_reuse() {
let callsite1 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default);
let callsite2 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default);
assert_eq!(callsite1 as *const _, callsite2 as *const _);
}
#[test]
fn test_intern() {
use crate::Intern;
let s1 = "abc".intern();
let s2 = "abc".to_string().intern();
assert_eq!(s1.as_ptr(), s2.as_ptr());
}
/// Capture logs about tracing.
fn capture(f: impl FnOnce()) -> Vec<String> {
// Prevent races since tests run in multiple threads.
let _locked = THREAD_LOCK.lock();
let sub = TestSubscriber::default();
let out = sub.out.clone();
tracing::subscriber::with_default(sub, f);
let out = out.lock();
out.clone()
}
/// Subscriber that captures calls to a string.
#[derive(Default)]
struct | {
id: AtomicU64,
out: Arc<Mutex<Vec<String>>>,
}
impl TestSubscriber {
fn log(&self, s: String) {
self.out.lock().push(normalize(&s));
}
}
impl Subscriber for TestSubscriber {
fn enabled(&self, _metadata: &Metadata) -> bool {
true
}
fn new_span(&self, span: &Attributes) -> Id {
let id = self.id.fetch_add(1, AcqRel) + 1;
self.log(format!("new_span({:?} = {}", span, id));
Id::from_u64(id)
}
fn record(&self, span: &Id, values: &Record) {
self.log(format!("record({:?}, {:?})", span, values));
}
fn event(&self, event: &Event) {
self.log(format!("event({:?})", event));
}
fn enter(&self, span: &Id) {
self.log(format!("enter({:?})", span));
}
fn exit(&self, span: &Id) {
self.log(format!("exit({:?})", span));
}
fn record_follows_from(&self, span: &Id, follows: &Id) {
self.log(format!("record_follows_from({:?}, {:?})", span, follows));
}
}
/// Debug format with some normalization.
fn d<T: fmt::Debug>(t: T) -> String {
let s = format!("{:?}", t);
normalize(&s)
}
fn normalize(s: &str) -> String {
// Change "Identifier(...)" to "_". It has dynamic pointer.
IDENTIFIER_RE.replace_all(&s, "_").replace('"', "")
}
static THREAD_LOCK: Lazy<Mutex<()>> = Lazy::new(Default::default);
static IDENTIFIER_RE: Lazy<Regex> = Lazy::new(|| Regex::new("Identifier\\([^)]*\\)").unwrap());
| TestSubscriber | identifier_name |
tests.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use super::*;
use once_cell::sync::Lazy;
use parking_lot::Mutex;
use regex::Regex;
use std::fmt;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering::AcqRel;
use std::sync::Arc;
use tracing::span::Attributes;
use tracing::span::Record;
use tracing::Callsite;
use tracing::Event;
use tracing::Id;
use tracing::Level;
use tracing::Metadata;
use tracing::Subscriber;
#[test]
fn test_callsite_span() {
let callsite = create_callsite::<SpanKindType, _>((11, 0), || CallsiteInfo {
name: "foo".to_string(),
target: "bar".to_string(),
level: Level::ERROR,
file: Some("a.rs".to_string()),
line: Some(10),
module_path: Some("z::a".to_string()),
field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()],
});
assert_eq!(
d(callsite.metadata()),
"Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }"
);
assert_eq!(callsite.identifier(), callsite.metadata().callsite());
let log = capture(|| {
let span = callsite.create_span(&[None, None, None]);
span.record("y", &"yyy2");
span.in_scope(|| {});
let span = callsite.create_span(&[Some(Box::new("foo")), None, Some(Box::new(123))]);
span.record("x", &123);
span.in_scope(|| {});
});
assert_eq!(
log,
[
"new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }, values: ValueSet { callsite: _ }, parent: Current } = 1",
"record(Id(1), Record { values: ValueSet { y: yyy2, callsite: _ } })",
"enter(Id(1))",
"exit(Id(1))",
"new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }, values: ValueSet { x: foo, z: 123, callsite: _ }, parent: Current } = 2",
"record(Id(2), Record { values: ValueSet { x: 123, callsite: _ } })",
"enter(Id(2))",
"exit(Id(2))"
]
);
}
#[test]
fn test_callsite_event() {
let callsite = create_callsite::<EventKindType, _>((22, 0), || CallsiteInfo {
name: "foo".to_string(),
level: Level::ERROR,
field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()],
..Default::default()
});
assert_eq!(
d(callsite.metadata()),
"Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }"
);
assert_eq!(callsite.identifier(), callsite.metadata().callsite());
let log = capture(|| {
callsite.create_event(&[None, None, None]);
callsite.create_event(&[Some(Box::new(12)), None, Some(Box::new("zz"))]);
callsite.create_event(&[Some(Box::new("15"))]);
});
assert_eq!(
log,
[
"event(Event { fields: ValueSet { callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })",
"event(Event { fields: ValueSet { x: 12, z: zz, callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })",
"event(Event { fields: ValueSet { x: 15, callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })"
]
);
}
#[test]
fn test_callsite_reuse() {
let callsite1 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default);
let callsite2 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default);
assert_eq!(callsite1 as *const _, callsite2 as *const _);
}
#[test]
fn test_intern() {
use crate::Intern;
let s1 = "abc".intern(); | /// Capture logs about tracing.
fn capture(f: impl FnOnce()) -> Vec<String> {
// Prevent races since tests run in multiple threads.
let _locked = THREAD_LOCK.lock();
let sub = TestSubscriber::default();
let out = sub.out.clone();
tracing::subscriber::with_default(sub, f);
let out = out.lock();
out.clone()
}
/// Subscriber that captures calls to a string.
#[derive(Default)]
struct TestSubscriber {
id: AtomicU64,
out: Arc<Mutex<Vec<String>>>,
}
impl TestSubscriber {
fn log(&self, s: String) {
self.out.lock().push(normalize(&s));
}
}
impl Subscriber for TestSubscriber {
fn enabled(&self, _metadata: &Metadata) -> bool {
true
}
fn new_span(&self, span: &Attributes) -> Id {
let id = self.id.fetch_add(1, AcqRel) + 1;
self.log(format!("new_span({:?} = {}", span, id));
Id::from_u64(id)
}
fn record(&self, span: &Id, values: &Record) {
self.log(format!("record({:?}, {:?})", span, values));
}
fn event(&self, event: &Event) {
self.log(format!("event({:?})", event));
}
fn enter(&self, span: &Id) {
self.log(format!("enter({:?})", span));
}
fn exit(&self, span: &Id) {
self.log(format!("exit({:?})", span));
}
fn record_follows_from(&self, span: &Id, follows: &Id) {
self.log(format!("record_follows_from({:?}, {:?})", span, follows));
}
}
/// Debug format with some normalization.
fn d<T: fmt::Debug>(t: T) -> String {
let s = format!("{:?}", t);
normalize(&s)
}
fn normalize(s: &str) -> String {
// Change "Identifier(...)" to "_". It has dynamic pointer.
IDENTIFIER_RE.replace_all(&s, "_").replace('"', "")
}
static THREAD_LOCK: Lazy<Mutex<()>> = Lazy::new(Default::default);
static IDENTIFIER_RE: Lazy<Regex> = Lazy::new(|| Regex::new("Identifier\\([^)]*\\)").unwrap()); | let s2 = "abc".to_string().intern();
assert_eq!(s1.as_ptr(), s2.as_ptr());
}
| random_line_split |
tests.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use super::*;
use once_cell::sync::Lazy;
use parking_lot::Mutex;
use regex::Regex;
use std::fmt;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering::AcqRel;
use std::sync::Arc;
use tracing::span::Attributes;
use tracing::span::Record;
use tracing::Callsite;
use tracing::Event;
use tracing::Id;
use tracing::Level;
use tracing::Metadata;
use tracing::Subscriber;
#[test]
fn test_callsite_span() {
let callsite = create_callsite::<SpanKindType, _>((11, 0), || CallsiteInfo {
name: "foo".to_string(),
target: "bar".to_string(),
level: Level::ERROR,
file: Some("a.rs".to_string()),
line: Some(10),
module_path: Some("z::a".to_string()),
field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()],
});
assert_eq!(
d(callsite.metadata()),
"Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }"
);
assert_eq!(callsite.identifier(), callsite.metadata().callsite());
let log = capture(|| {
let span = callsite.create_span(&[None, None, None]);
span.record("y", &"yyy2");
span.in_scope(|| {});
let span = callsite.create_span(&[Some(Box::new("foo")), None, Some(Box::new(123))]);
span.record("x", &123);
span.in_scope(|| {});
});
assert_eq!(
log,
[
"new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }, values: ValueSet { callsite: _ }, parent: Current } = 1",
"record(Id(1), Record { values: ValueSet { y: yyy2, callsite: _ } })",
"enter(Id(1))",
"exit(Id(1))",
"new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }, values: ValueSet { x: foo, z: 123, callsite: _ }, parent: Current } = 2",
"record(Id(2), Record { values: ValueSet { x: 123, callsite: _ } })",
"enter(Id(2))",
"exit(Id(2))"
]
);
}
#[test]
fn test_callsite_event() {
let callsite = create_callsite::<EventKindType, _>((22, 0), || CallsiteInfo {
name: "foo".to_string(),
level: Level::ERROR,
field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()],
..Default::default()
});
assert_eq!(
d(callsite.metadata()),
"Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }"
);
assert_eq!(callsite.identifier(), callsite.metadata().callsite());
let log = capture(|| {
callsite.create_event(&[None, None, None]);
callsite.create_event(&[Some(Box::new(12)), None, Some(Box::new("zz"))]);
callsite.create_event(&[Some(Box::new("15"))]);
});
assert_eq!(
log,
[
"event(Event { fields: ValueSet { callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })",
"event(Event { fields: ValueSet { x: 12, z: zz, callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })",
"event(Event { fields: ValueSet { x: 15, callsite: _ }, metadata: Metadata { name: foo, target:, level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })"
]
);
}
#[test]
fn test_callsite_reuse() |
#[test]
fn test_intern() {
use crate::Intern;
let s1 = "abc".intern();
let s2 = "abc".to_string().intern();
assert_eq!(s1.as_ptr(), s2.as_ptr());
}
/// Capture logs about tracing.
fn capture(f: impl FnOnce()) -> Vec<String> {
// Prevent races since tests run in multiple threads.
let _locked = THREAD_LOCK.lock();
let sub = TestSubscriber::default();
let out = sub.out.clone();
tracing::subscriber::with_default(sub, f);
let out = out.lock();
out.clone()
}
/// Subscriber that captures calls to a string.
#[derive(Default)]
struct TestSubscriber {
id: AtomicU64,
out: Arc<Mutex<Vec<String>>>,
}
impl TestSubscriber {
fn log(&self, s: String) {
self.out.lock().push(normalize(&s));
}
}
impl Subscriber for TestSubscriber {
fn enabled(&self, _metadata: &Metadata) -> bool {
true
}
fn new_span(&self, span: &Attributes) -> Id {
let id = self.id.fetch_add(1, AcqRel) + 1;
self.log(format!("new_span({:?} = {}", span, id));
Id::from_u64(id)
}
fn record(&self, span: &Id, values: &Record) {
self.log(format!("record({:?}, {:?})", span, values));
}
fn event(&self, event: &Event) {
self.log(format!("event({:?})", event));
}
fn enter(&self, span: &Id) {
self.log(format!("enter({:?})", span));
}
fn exit(&self, span: &Id) {
self.log(format!("exit({:?})", span));
}
fn record_follows_from(&self, span: &Id, follows: &Id) {
self.log(format!("record_follows_from({:?}, {:?})", span, follows));
}
}
/// Debug format with some normalization.
fn d<T: fmt::Debug>(t: T) -> String {
let s = format!("{:?}", t);
normalize(&s)
}
fn normalize(s: &str) -> String {
// Change "Identifier(...)" to "_". It has dynamic pointer.
IDENTIFIER_RE.replace_all(&s, "_").replace('"', "")
}
static THREAD_LOCK: Lazy<Mutex<()>> = Lazy::new(Default::default);
static IDENTIFIER_RE: Lazy<Regex> = Lazy::new(|| Regex::new("Identifier\\([^)]*\\)").unwrap());
| {
let callsite1 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default);
let callsite2 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default);
assert_eq!(callsite1 as *const _, callsite2 as *const _);
} | identifier_body |
method_self_arg2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_type = "lib"]
#![allow(unknown_features)]
#![feature(box_syntax)]
static mut COUNT: u64 = 1;
pub fn | () -> u64 { unsafe { COUNT } }
#[derive(Copy, Clone)]
pub struct Foo;
impl Foo {
pub fn run_trait(self) {
unsafe { COUNT *= 17; }
// Test internal call.
Bar::foo1(&self);
Bar::foo2(self);
Bar::foo3(box self);
Bar::bar1(&self);
Bar::bar2(self);
Bar::bar3(box self);
}
}
pub trait Bar : Sized {
fn foo1(&self);
fn foo2(self);
fn foo3(self: Box<Self>);
fn bar1(&self) {
unsafe { COUNT *= 7; }
}
fn bar2(self) {
unsafe { COUNT *= 11; }
}
fn bar3(self: Box<Self>) {
unsafe { COUNT *= 13; }
}
}
impl Bar for Foo {
fn foo1(&self) {
unsafe { COUNT *= 2; }
}
fn foo2(self) {
unsafe { COUNT *= 3; }
}
fn foo3(self: Box<Foo>) {
unsafe { COUNT *= 5; }
}
}
| get_count | identifier_name |
method_self_arg2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_type = "lib"]
#![allow(unknown_features)]
#![feature(box_syntax)]
static mut COUNT: u64 = 1;
pub fn get_count() -> u64 { unsafe { COUNT } }
#[derive(Copy, Clone)]
pub struct Foo;
impl Foo {
pub fn run_trait(self) {
unsafe { COUNT *= 17; }
// Test internal call.
Bar::foo1(&self);
Bar::foo2(self);
Bar::foo3(box self);
Bar::bar1(&self);
Bar::bar2(self);
Bar::bar3(box self);
}
}
pub trait Bar : Sized {
fn foo1(&self);
fn foo2(self);
fn foo3(self: Box<Self>);
fn bar1(&self) {
unsafe { COUNT *= 7; }
}
fn bar2(self) {
unsafe { COUNT *= 11; }
}
fn bar3(self: Box<Self>) {
unsafe { COUNT *= 13; }
}
}
impl Bar for Foo {
fn foo1(&self) {
unsafe { COUNT *= 2; }
}
fn foo2(self) {
unsafe { COUNT *= 3; }
}
fn foo3(self: Box<Foo>) {
unsafe { COUNT *= 5; }
} | } | random_line_split |
|
method_self_arg2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_type = "lib"]
#![allow(unknown_features)]
#![feature(box_syntax)]
static mut COUNT: u64 = 1;
pub fn get_count() -> u64 { unsafe { COUNT } }
#[derive(Copy, Clone)]
pub struct Foo;
impl Foo {
pub fn run_trait(self) {
unsafe { COUNT *= 17; }
// Test internal call.
Bar::foo1(&self);
Bar::foo2(self);
Bar::foo3(box self);
Bar::bar1(&self);
Bar::bar2(self);
Bar::bar3(box self);
}
}
pub trait Bar : Sized {
fn foo1(&self);
fn foo2(self);
fn foo3(self: Box<Self>);
fn bar1(&self) {
unsafe { COUNT *= 7; }
}
fn bar2(self) {
unsafe { COUNT *= 11; }
}
fn bar3(self: Box<Self>) {
unsafe { COUNT *= 13; }
}
}
impl Bar for Foo {
fn foo1(&self) |
fn foo2(self) {
unsafe { COUNT *= 3; }
}
fn foo3(self: Box<Foo>) {
unsafe { COUNT *= 5; }
}
}
| {
unsafe { COUNT *= 2; }
} | identifier_body |
evec-slice.rs | // run-pass
#![allow(unused_assignments)]
pub fn main() {
let x : &[isize] = &[1,2,3,4,5];
let mut z : &[isize] = &[1,2,3,4,5];
z = x;
assert_eq!(z[0], 1); | assert_eq!(z[4], 5);
let a : &[isize] = &[1,1,1,1,1];
let b : &[isize] = &[2,2,2,2,2];
let c : &[isize] = &[2,2,2,2,3];
let cc : &[isize] = &[2,2,2,2,2,2];
println!("{:?}", a);
assert!(a < b);
assert!(a <= b);
assert!(a!= b);
assert!(b >= a);
assert!(b > a);
println!("{:?}", b);
assert!(b < c);
assert!(b <= c);
assert!(b!= c);
assert!(c >= b);
assert!(c > b);
assert!(a < c);
assert!(a <= c);
assert!(a!= c);
assert!(c >= a);
assert!(c > a);
println!("{:?}", c);
assert!(a < cc);
assert!(a <= cc);
assert!(a!= cc);
assert!(cc >= a);
assert!(cc > a);
println!("{:?}", cc);
} | random_line_split |
|
evec-slice.rs | // run-pass
#![allow(unused_assignments)]
pub fn main() | println!("{:?}", b);
assert!(b < c);
assert!(b <= c);
assert!(b!= c);
assert!(c >= b);
assert!(c > b);
assert!(a < c);
assert!(a <= c);
assert!(a!= c);
assert!(c >= a);
assert!(c > a);
println!("{:?}", c);
assert!(a < cc);
assert!(a <= cc);
assert!(a!= cc);
assert!(cc >= a);
assert!(cc > a);
println!("{:?}", cc);
}
| {
let x : &[isize] = &[1,2,3,4,5];
let mut z : &[isize] = &[1,2,3,4,5];
z = x;
assert_eq!(z[0], 1);
assert_eq!(z[4], 5);
let a : &[isize] = &[1,1,1,1,1];
let b : &[isize] = &[2,2,2,2,2];
let c : &[isize] = &[2,2,2,2,3];
let cc : &[isize] = &[2,2,2,2,2,2];
println!("{:?}", a);
assert!(a < b);
assert!(a <= b);
assert!(a != b);
assert!(b >= a);
assert!(b > a);
| identifier_body |
evec-slice.rs | // run-pass
#![allow(unused_assignments)]
pub fn | () {
let x : &[isize] = &[1,2,3,4,5];
let mut z : &[isize] = &[1,2,3,4,5];
z = x;
assert_eq!(z[0], 1);
assert_eq!(z[4], 5);
let a : &[isize] = &[1,1,1,1,1];
let b : &[isize] = &[2,2,2,2,2];
let c : &[isize] = &[2,2,2,2,3];
let cc : &[isize] = &[2,2,2,2,2,2];
println!("{:?}", a);
assert!(a < b);
assert!(a <= b);
assert!(a!= b);
assert!(b >= a);
assert!(b > a);
println!("{:?}", b);
assert!(b < c);
assert!(b <= c);
assert!(b!= c);
assert!(c >= b);
assert!(c > b);
assert!(a < c);
assert!(a <= c);
assert!(a!= c);
assert!(c >= a);
assert!(c > a);
println!("{:?}", c);
assert!(a < cc);
assert!(a <= cc);
assert!(a!= cc);
assert!(cc >= a);
assert!(cc > a);
println!("{:?}", cc);
}
| main | identifier_name |
driver.rs | use util::errors::{
Result,
Error,
};
use term::terminfo::TermInfo;
use term::terminfo::parm;
use term::terminfo::parm::{
Param,
Variables,
};
// String constants correspond to terminfo capnames and are used internally for name resolution.
const ENTER_CA: &'static str = "smcup";
const EXIT_CA: &'static str = "rmcup";
const SHOW_CURSOR: &'static str = "cnorm";
const HIDE_CURSOR: &'static str = "civis";
const SET_CURSOR: &'static str = "cup";
const CLEAR: &'static str = "clear";
const RESET: &'static str = "sgr0";
const UNDERLINE: &'static str = "smul";
const BOLD: &'static str = "bold";
const BLINK: &'static str = "blink";
const REVERSE: &'static str = "rev";
const SETFG: &'static str = "setaf";
const SETBG: &'static str = "setab";
// Array of required capabilities, used as an iterator on startup to ensure all required
// functionality is present.
static CAP_TABLE: &'static [&'static str] = &[
ENTER_CA,
EXIT_CA,
SHOW_CURSOR,
HIDE_CURSOR,
SET_CURSOR,
CLEAR,
RESET,
UNDERLINE,
BOLD,
REVERSE,
SETFG,
SETBG,
];
// Driver capabilities are an enum instead of string constants (there are string constants private
// to the module however, those are only used for naming convenience and disambiguation)
// to take advantage of compile-time type-checking instead of hoping capability names are correct.
// This allows us to guarantee that driver accesses will succeed. In addition, using an enum means
// Driver doesn't need hard-coded methods for each capability we want to use.
pub enum Cap {
EnterCa,
ExitCa,
ShowCursor,
HideCursor,
SetCursor(usize, usize),
Clear,
Reset,
Underline,
Bold,
Blink,
Reverse,
SetFg(u8),
SetBg(u8),
}
impl Cap {
fn resolve(&self) -> &'static str {
match *self {
Cap::EnterCa => ENTER_CA,
Cap::ExitCa => EXIT_CA,
Cap::ShowCursor => SHOW_CURSOR,
Cap::HideCursor => HIDE_CURSOR,
Cap::SetCursor(..) => SET_CURSOR,
Cap::Clear => CLEAR,
Cap::Reset => RESET,
Cap::Underline => UNDERLINE,
Cap::Bold => BOLD,
Cap::Blink => BLINK,
Cap::Reverse => REVERSE,
Cap::SetFg(..) => SETFG,
Cap::SetBg(..) => SETBG,
}
}
}
pub struct Driver {
tinfo: TermInfo,
}
impl Driver {
pub fn new() -> Result<Driver> {
let tinfo = try!(TermInfo::from_env());
for capname in CAP_TABLE {
if!tinfo.strings.contains_key(*capname) {
return Err(Error::new(&format!("terminal missing capability: '{}'", capname)));
}
}
Ok(Driver {
tinfo: tinfo,
})
}
// Processes a capability and returns the device specific escape sequence.
//
// process() will not return an error, and theoretically should never panic.
//
// The pre-flight checks on initialization of `Driver` ensure that every capability is present,
// thus the `HashMap` retrieval should never fail.
// Furthermore the `expand()` routine, given the input we pass it, should never fail either.
// This can be verified in the source of the `term` crate.
pub fn process(&self, cap_request: Cap) -> Vec<u8> {
let capname = cap_request.resolve();
let cap = self.tinfo.strings.get(capname).unwrap();
match cap_request {
Cap::SetFg(attr) | Cap::SetBg(attr) => { | Cap::SetCursor(x, y) => {
let params = &[Param::Number(y as i16), Param::Number(x as i16)];
let mut vars = Variables::new();
parm::expand(cap, params, &mut vars).unwrap()
},
_ => {
cap.clone()
},
}
}
} | let params = &[Param::Number(attr as i16)];
let mut vars = Variables::new();
parm::expand(cap, params, &mut vars).unwrap()
}, | random_line_split |
driver.rs | use util::errors::{
Result,
Error,
};
use term::terminfo::TermInfo;
use term::terminfo::parm;
use term::terminfo::parm::{
Param,
Variables,
};
// String constants correspond to terminfo capnames and are used internally for name resolution.
const ENTER_CA: &'static str = "smcup";
const EXIT_CA: &'static str = "rmcup";
const SHOW_CURSOR: &'static str = "cnorm";
const HIDE_CURSOR: &'static str = "civis";
const SET_CURSOR: &'static str = "cup";
const CLEAR: &'static str = "clear";
const RESET: &'static str = "sgr0";
const UNDERLINE: &'static str = "smul";
const BOLD: &'static str = "bold";
const BLINK: &'static str = "blink";
const REVERSE: &'static str = "rev";
const SETFG: &'static str = "setaf";
const SETBG: &'static str = "setab";
// Array of required capabilities, used as an iterator on startup to ensure all required
// functionality is present.
static CAP_TABLE: &'static [&'static str] = &[
ENTER_CA,
EXIT_CA,
SHOW_CURSOR,
HIDE_CURSOR,
SET_CURSOR,
CLEAR,
RESET,
UNDERLINE,
BOLD,
REVERSE,
SETFG,
SETBG,
];
// Driver capabilities are an enum instead of string constants (there are string constants private
// to the module however, those are only used for naming convenience and disambiguation)
// to take advantage of compile-time type-checking instead of hoping capability names are correct.
// This allows us to guarantee that driver accesses will succeed. In addition, using an enum means
// Driver doesn't need hard-coded methods for each capability we want to use.
pub enum Cap {
EnterCa,
ExitCa,
ShowCursor,
HideCursor,
SetCursor(usize, usize),
Clear,
Reset,
Underline,
Bold,
Blink,
Reverse,
SetFg(u8),
SetBg(u8),
}
impl Cap {
fn | (&self) -> &'static str {
match *self {
Cap::EnterCa => ENTER_CA,
Cap::ExitCa => EXIT_CA,
Cap::ShowCursor => SHOW_CURSOR,
Cap::HideCursor => HIDE_CURSOR,
Cap::SetCursor(..) => SET_CURSOR,
Cap::Clear => CLEAR,
Cap::Reset => RESET,
Cap::Underline => UNDERLINE,
Cap::Bold => BOLD,
Cap::Blink => BLINK,
Cap::Reverse => REVERSE,
Cap::SetFg(..) => SETFG,
Cap::SetBg(..) => SETBG,
}
}
}
pub struct Driver {
tinfo: TermInfo,
}
impl Driver {
pub fn new() -> Result<Driver> {
let tinfo = try!(TermInfo::from_env());
for capname in CAP_TABLE {
if!tinfo.strings.contains_key(*capname) {
return Err(Error::new(&format!("terminal missing capability: '{}'", capname)));
}
}
Ok(Driver {
tinfo: tinfo,
})
}
// Processes a capability and returns the device specific escape sequence.
//
// process() will not return an error, and theoretically should never panic.
//
// The pre-flight checks on initialization of `Driver` ensure that every capability is present,
// thus the `HashMap` retrieval should never fail.
// Furthermore the `expand()` routine, given the input we pass it, should never fail either.
// This can be verified in the source of the `term` crate.
pub fn process(&self, cap_request: Cap) -> Vec<u8> {
let capname = cap_request.resolve();
let cap = self.tinfo.strings.get(capname).unwrap();
match cap_request {
Cap::SetFg(attr) | Cap::SetBg(attr) => {
let params = &[Param::Number(attr as i16)];
let mut vars = Variables::new();
parm::expand(cap, params, &mut vars).unwrap()
},
Cap::SetCursor(x, y) => {
let params = &[Param::Number(y as i16), Param::Number(x as i16)];
let mut vars = Variables::new();
parm::expand(cap, params, &mut vars).unwrap()
},
_ => {
cap.clone()
},
}
}
}
| resolve | identifier_name |
font.rs | * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use euclid::{Point2D, Rect, Size2D};
use font_context::{FontContext, FontSource};
use font_template::FontTemplateDescriptor;
use ordered_float::NotNan;
use platform::font::{FontHandle, FontTable};
use platform::font_context::FontContextHandle;
pub use platform::font_list::fallback_font_families;
use platform::font_template::FontTemplateData;
use servo_atoms::Atom;
use smallvec::SmallVec;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::collections::HashMap;
use std::iter;
use std::rc::Rc;
use std::str;
use std::sync::Arc;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use style::computed_values::{font_stretch, font_style, font_variant_caps, font_weight};
use style::properties::style_structs::Font as FontStyleStruct;
use style::values::computed::font::SingleFontFamily;
use text::Shaper;
use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore};
use text::shaping::ShaperMethods;
use time;
use unicode_script::Script;
use webrender_api;
macro_rules! ot_tag {
($t1:expr, $t2:expr, $t3:expr, $t4:expr) => (
(($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32)
);
}
pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S');
pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B');
pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n');
static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
pub trait FontHandleMethods: Sized {
fn new_from_template(
fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<Self, ()>;
fn template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> Option<String>;
fn face_name(&self) -> Option<String>;
fn style(&self) -> font_style::T;
fn boldness(&self) -> font_weight::T;
fn stretchiness(&self) -> font_stretch::T;
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel;
/// Can this font do basic horizontal LTR shaping without Harfbuzz?
fn can_do_fast_shaping(&self) -> bool;
fn metrics(&self) -> FontMetrics;
fn table_for_tag(&self, FontTableTag) -> Option<FontTable>;
/// A unique identifier for the font, allowing comparison.
fn identifier(&self) -> Atom;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;
pub type FontTableTag = u32;
trait FontTableTagConversions {
fn tag_to_str(&self) -> String;
}
impl FontTableTagConversions for FontTableTag {
fn tag_to_str(&self) -> String {
let bytes = [(self >> 24) as u8,
(self >> 16) as u8,
(self >> 8) as u8,
(self >> 0) as u8];
str::from_utf8(&bytes).unwrap().to_owned()
}
}
pub trait FontTableMethods {
fn buffer(&self) -> &[u8];
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub average_advance: Au,
pub line_gap: Au,
}
/// `FontDescriptor` describes the parameters of a `Font`. It represents rendering a given font
/// template at a particular size, with a particular font-variant-caps applied, etc. This contrasts
/// with `FontTemplateDescriptor` in that the latter represents only the parameters inherent in the
/// font data (weight, stretch, etc.).
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct FontDescriptor {
pub template_descriptor: FontTemplateDescriptor,
pub variant: font_variant_caps::T,
pub pt_size: Au,
}
impl<'a> From<&'a FontStyleStruct> for FontDescriptor {
fn from(style: &'a FontStyleStruct) -> Self {
FontDescriptor {
template_descriptor: FontTemplateDescriptor::from(style),
variant: style.font_variant_caps,
pt_size: style.font_size.size(),
}
}
}
#[derive(Debug)]
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontDescriptor,
pub actual_pt_size: Au,
shaper: Option<Shaper>,
shape_cache: RefCell<HashMap<ShapeCacheEntry, Arc<GlyphStore>>>,
glyph_advance_cache: RefCell<HashMap<u32, FractionalPixel>>,
pub font_key: webrender_api::FontInstanceKey,
}
impl Font {
pub fn new(handle: FontHandle,
descriptor: FontDescriptor,
actual_pt_size: Au,
font_key: webrender_api::FontInstanceKey) -> Font {
let metrics = handle.metrics();
Font {
handle: handle,
shaper: None,
descriptor,
actual_pt_size,
metrics,
shape_cache: RefCell::new(HashMap::new()),
glyph_advance_cache: RefCell::new(HashMap::new()),
font_key,
}
}
/// A unique identifier for the font, allowing comparison.
pub fn identifier(&self) -> Atom {
self.handle.identifier()
}
}
bitflags! {
pub struct ShapingFlags: u8 {
#[doc = "Set if the text is entirely whitespace."]
const IS_WHITESPACE_SHAPING_FLAG = 0x01;
#[doc = "Set if we are to ignore ligatures."]
const IGNORE_LIGATURES_SHAPING_FLAG = 0x02;
#[doc = "Set if we are to disable kerning."]
const DISABLE_KERNING_SHAPING_FLAG = 0x04;
#[doc = "Text direction is right-to-left."]
const RTL_FLAG = 0x08;
#[doc = "Set if word-break is set to keep-all."]
const KEEP_ALL_FLAG = 0x10;
}
}
/// Various options that control text shaping.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct ShapingOptions {
/// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property.
/// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null.
pub letter_spacing: Option<Au>,
/// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property.
pub word_spacing: (Au, NotNan<f32>),
/// The Unicode script property of the characters in this run.
pub script: Script,
/// Various flags.
pub flags: ShapingFlags,
}
/// An entry in the shape cache.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct ShapeCacheEntry {
text: String,
options: ShapingOptions,
}
impl Font {
    /// Shapes `text` with `options`, returning a shared glyph store.
    ///
    /// Results are memoized in `shape_cache`, so repeated runs of the same
    /// text/options pair only pay for shaping once. ASCII Latin text takes a
    /// kerning-only fast path; everything else goes through Harfbuzz.
    pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> {
        let this = self as *const Font;
        // Move the shaper out so the cache closure below can borrow `self`
        // immutably while still lazily constructing the shaper on first use.
        let mut shaper = self.shaper.take();
        let lookup_key = ShapeCacheEntry {
            text: text.to_owned(),
            options: *options,
        };
        let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| {
            let start_time = time::precise_time_ns();
            let mut glyphs = GlyphStore::new(text.len(),
                                             options.flags.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG),
                                             options.flags.contains(ShapingFlags::RTL_FLAG));
            if self.can_do_fast_shaping(text, options) {
                debug!("shape_text: Using ASCII fast path.");
                self.shape_text_fast(text, options, &mut glyphs);
            } else {
                debug!("shape_text: Using Harfbuzz.");
                if shaper.is_none() {
                    shaper = Some(Shaper::new(this));
                }
                shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs);
            }
            let end_time = time::precise_time_ns();
            TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize,
                                                       Ordering::Relaxed);
            Arc::new(glyphs)
        }).clone();
        self.shaper = shaper;
        result
    }
    /// Whether `text` can be shaped without Harfbuzz: LTR ASCII Latin on a
    /// handle that supports simple horizontal shaping.
    fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool {
        options.script == Script::Latin &&
            !options.flags.contains(ShapingFlags::RTL_FLAG) &&
            self.handle.can_do_fast_shaping() &&
            text.is_ascii()
    }
    /// Fast path for ASCII text that only needs simple horizontal LTR kerning.
    fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
        let mut prev_glyph_id = None;
        for (i, byte) in text.bytes().enumerate() {
            // Byte-as-char is valid here: `can_do_fast_shaping` guarantees ASCII.
            let character = byte as char;
            let glyph_id = match self.glyph_index(character) {
                Some(id) => id,
                None => continue,
            };
            let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id));
            if character == ' ' {
                // https://drafts.csswg.org/css-text-3/#word-spacing-property
                let (length, percent) = options.word_spacing;
                advance = (advance + length) + Au((advance.0 as f32 * percent.into_inner()) as i32);
            }
            if let Some(letter_spacing) = options.letter_spacing {
                advance += letter_spacing;
            }
            let offset = prev_glyph_id.map(|prev| {
                let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id));
                advance += h_kerning;
                Point2D::new(h_kerning, Au(0))
            });
            let glyph = GlyphData::new(glyph_id, advance, offset, true, true);
            glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph);
            prev_glyph_id = Some(glyph_id);
        }
        glyphs.finalize_changes();
    }
    /// Looks up a raw font table (e.g. `kern`) from the platform handle.
    pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
        let result = self.handle.table_for_tag(tag);
        let status = if result.is_some() { "Found" } else { "Didn't find" };
        debug!("{} font table[{}] with family={}, face={}",
               status, tag.tag_to_str(),
               self.handle.family_name().unwrap_or("unavailable".to_owned()),
               self.handle.face_name().unwrap_or("unavailable".to_owned()));
        result
    }
    #[inline]
    pub fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
        // Small caps are shaped with the uppercase glyph of each code point.
        let codepoint = match self.descriptor.variant {
            font_variant_caps::T::SmallCaps => codepoint.to_uppercase().next().unwrap(), //FIXME: #5938
            font_variant_caps::T::Normal => codepoint,
        };
        self.handle.glyph_index(codepoint)
    }
    pub fn has_glyph_for(&self, codepoint: char) -> bool {
        self.glyph_index(codepoint).is_some()
    }
    pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
                           -> FractionalPixel {
        self.handle.glyph_h_kerning(first_glyph, second_glyph)
    }
    /// Horizontal advance for `glyph`, memoized per glyph id.
    pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel {
        *self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| {
            match self.handle.glyph_h_advance(glyph) {
                Some(adv) => adv,
                None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
            }
        })
    }
}
pub type FontRef = Rc<RefCell<Font>>;
/// A `FontGroup` is a prioritised list of fonts for a given set of font styles. It is used by
/// `TextRun` to decide which font to render a character with. If none of the fonts listed in the
/// styles are suitable, a fallback font may be used.
#[derive(Debug)]
pub struct FontGroup {
descriptor: FontDescriptor,
families: SmallVec<[FontGroupFamily; 8]>,
last_matching_fallback: Option<FontRef>,
}
impl FontGroup {
pub fn new(style: &FontStyleStruct) -> FontGroup {
let descriptor = FontDescriptor::from(style);
let families =
style.font_family.0.iter()
.map(|family| FontGroupFamily::new(descriptor.clone(), &family))
.collect();
FontGroup {
descriptor,
families,
last_matching_fallback: None,
}
}
/// Finds the first font, or else the first fallback font, which contains a glyph for
/// `codepoint`. If no such font is found, returns the first available font or fallback font
/// (which will cause a "glyph not found" character to be rendered). If no font at all can be
/// found, returns None.
pub fn find_by_codepoint<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>,
codepoint: char
) -> Option<FontRef> {
let has_glyph = |font: &FontRef| font.borrow().has_glyph_for(codepoint);
let font = self.find(&mut font_context, |font| has_glyph(font));
if font.is_some() {
return font
}
if let Some(ref fallback) = self.last_matching_fallback {
if has_glyph(&fallback) {
return self.last_matching_fallback.clone()
}
}
let font = self.find_fallback(&mut font_context, Some(codepoint), has_glyph);
if font.is_some() {
self.last_matching_fallback = font.clone();
return font
}
self.first(&mut font_context)
}
/// Find the first available font in the group, or the first available fallback font.
pub fn first<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>
) -> Option<FontRef> {
self.find(&mut font_context, |_| true)
.or_else(|| self.find_fallback(&mut font_context, None, |_| true))
}
/// Find a font which returns true for `predicate`. This method mutates because we may need to
/// load new font data in the process of finding a suitable font.
fn find<S, P>(
&mut self,
mut font_context: &mut FontContext<S>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
self.families.iter_mut()
.filter_map(|family| family.font(&mut font_context))
.find(predicate)
}
/// Attempts to find a suitable fallback font which matches the `predicate`. The default
/// family (i.e. "serif") will be tried first, followed by platform-specific family names.
/// If a `codepoint` is provided, then its Unicode block may be used to refine the list of
/// family names which will be tried.
fn find_fallback<S, P>(
&mut self,
font_context: &mut FontContext<S>,
codepoint: Option<char>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
iter::once(FontFamilyDescriptor::default())
.chain(
fallback_font_families(codepoint).into_iter().map(|family| {
FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Local,
)
})
)
.filter_map(|family| font_context.font(&self.descriptor, &family))
.find(predicate)
}
}
/// A `FontGroupFamily` is a single font family in a `FontGroup`. It corresponds to one of the
/// families listed in the `font-family` CSS property. The corresponding font data is lazy-loaded,
/// only if actually needed.
#[derive(Debug)]
struct FontGroupFamily {
font_descriptor: FontDescriptor,
family_descriptor: FontFamilyDescriptor,
loaded: bool,
font: Option<FontRef>,
}
impl FontGroupFamily {
fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily {
let family_descriptor = FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Any
);
FontGroupFamily {
font_descriptor,
family_descriptor,
loaded: false,
font: None,
}
}
/// Returns the font within this family which matches the style. We'll fetch the data from the
/// `FontContext` the first time this method is called, and return a cached reference on
/// subsequent calls.
fn font<S: FontSource>(&mut self, font_context: &mut FontContext<S>) -> Option<FontRef> {
if!self.loaded {
self.font = font_context.font(&self.font_descriptor, &self.family_descriptor);
self.loaded = true;
}
self.font.clone()
}
}
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect::new(Point2D::new(Au(0), -ascent),
Size2D::new(advance, ascent + descent));
// TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and
// looking at actual glyph extents can yield a tighter box.
RunMetrics {
advance_width: advance,
bounding_box: bounds,
ascent: ascent,
descent: descent,
}
}
}
pub fn get_and_reset_text_shaping_performance_counter() -> usize {
let value = TEXT_SHAPING_PERFORMANCE_COUNTER.load(Ordering::SeqCst);
TEXT_SHAPING_PERFORMANCE_COUNTER.store(0, Ordering::SeqCst);
value
}
/// The scope within which we will look for a font.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontSearchScope {
/// All fonts will be searched, including those specified via `@font-face` rules.
Any,
/// Only local system fonts will be searched.
Local,
}
/// A font family name used in font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontFamilyName {
/// A specific name such as `"Arial"`
Specific(Atom),
/// A generic name such as `sans-serif`
Generic(Atom),
}
impl FontFamilyName {
pub fn name(&self) -> &str {
match *self {
FontFamilyName::Specific(ref name) => name,
FontFamilyName::Generic(ref name) => name,
}
}
}
impl<'a> From<&'a SingleFontFamily> for FontFamilyName {
fn from(other: &'a SingleFontFamily) -> FontFamilyName {
match *other {
SingleFontFamily::FamilyName(ref family_name) =>
FontFamilyName::Specific(family_name.name.clone()),
SingleFontFamily::Generic(ref generic_name) =>
FontFamilyName::Generic(generic_name.clone()),
}
}
}
impl<'a> From<&'a str> for FontFamilyName {
fn from(other: &'a str) -> FontFamilyName {
FontFamilyName::Specific(Atom::from(other))
}
}
/// The font family parameters for font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct FontFamilyDescriptor {
pub name: FontFamilyName,
pub scope: FontSearchScope,
}
impl FontFamilyDescriptor {
pub fn new(name: FontFamilyName, scope: FontSearchScope) -> FontFamilyDescriptor {
FontFamilyDescriptor { name, scope }
}
fn default() -> FontFamilyDescriptor {
FontFamilyDescriptor {
name: FontFamilyName::Generic(atom!("serif")),
scope: FontSearchScope::Local,
}
} | let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id)); | random_line_split |
font.rs | License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use euclid::{Point2D, Rect, Size2D};
use font_context::{FontContext, FontSource};
use font_template::FontTemplateDescriptor;
use ordered_float::NotNan;
use platform::font::{FontHandle, FontTable};
use platform::font_context::FontContextHandle;
pub use platform::font_list::fallback_font_families;
use platform::font_template::FontTemplateData;
use servo_atoms::Atom;
use smallvec::SmallVec;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::collections::HashMap;
use std::iter;
use std::rc::Rc;
use std::str;
use std::sync::Arc;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use style::computed_values::{font_stretch, font_style, font_variant_caps, font_weight};
use style::properties::style_structs::Font as FontStyleStruct;
use style::values::computed::font::SingleFontFamily;
use text::Shaper;
use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore};
use text::shaping::ShaperMethods;
use time;
use unicode_script::Script;
use webrender_api;
/// Packs four ASCII characters into a big-endian `u32` OpenType table tag,
/// e.g. `ot_tag!('G', 'P', 'O', 'S')` == `0x4750_4F53`.
macro_rules! ot_tag {
($t1:expr, $t2:expr, $t3:expr, $t4:expr) => (
(($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32)
);
}
/// OpenType glyph-positioning table tag.
pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S');
/// OpenType glyph-substitution table tag.
pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B');
/// Legacy kerning table tag.
pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n');
// Accumulated shaping time in nanoseconds; incremented in `Font::shape_text`
// and drained by `get_and_reset_text_shaping_performance_counter`.
static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
/// Abstraction over the platform font API (e.g. Quartz, FreeType): exposes the
/// metrics and raw tables the text shaper needs, plus glyph lookup for the
/// graphics layer.
pub trait FontHandleMethods: Sized {
/// Instantiates a handle from raw template data, optionally at a fixed size.
fn new_from_template(
fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<Self, ()>;
fn template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> Option<String>;
fn face_name(&self) -> Option<String>;
fn style(&self) -> font_style::T;
fn boldness(&self) -> font_weight::T;
fn stretchiness(&self) -> font_stretch::T;
/// Maps a code point to this face's glyph id, if the face covers it.
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
/// Horizontal advance for a glyph; `None` if the backend has no value.
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel;
/// Can this font do basic horizontal LTR shaping without Harfbuzz?
fn can_do_fast_shaping(&self) -> bool;
fn metrics(&self) -> FontMetrics;
/// Raw bytes of the named OpenType table, if present.
fn table_for_tag(&self, FontTableTag) -> Option<FontTable>;
/// A unique identifier for the font, allowing comparison.
fn identifier(&self) -> Atom;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;

/// A four-byte OpenType table tag packed big-endian (see `ot_tag!`).
pub type FontTableTag = u32;

trait FontTableTagConversions {
    fn tag_to_str(&self) -> String;
}

impl FontTableTagConversions for FontTableTag {
    /// Renders the tag as its four-character ASCII name, e.g. `"GPOS"`.
    fn tag_to_str(&self) -> String {
        // Big-endian byte order mirrors how `ot_tag!` packs the characters.
        let bytes = self.to_be_bytes();
        str::from_utf8(&bytes).unwrap().to_owned()
    }
}
/// Read access to a raw font table's bytes (see `FontHandleMethods::table_for_tag`).
pub trait FontTableMethods {
fn buffer(&self) -> &[u8];
}
/// Vertical and horizontal metrics for a font face at a given size.
/// All fields are in app units (`Au`).
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub average_advance: Au,
pub line_gap: Au,
}
/// `FontDescriptor` describes the parameters of a `Font`. It represents rendering a given font
/// template at a particular size, with a particular font-variant-caps applied, etc. This contrasts
/// with `FontTemplateDescriptor` in that the latter represents only the parameters inherent in the
/// font data (weight, stretch, etc.).
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct FontDescriptor {
/// Parameters inherent in the font data (weight, stretch, ...).
pub template_descriptor: FontTemplateDescriptor,
/// The `font-variant-caps` value to render with (normal or small caps).
pub variant: font_variant_caps::T,
/// The size to render at, in app units.
pub pt_size: Au,
}
impl<'a> From<&'a FontStyleStruct> for FontDescriptor {
    /// Builds the rendering-time descriptor from a computed style: the inherent
    /// template parameters plus the size and caps variant applied at render time.
    fn from(style: &'a FontStyleStruct) -> Self {
        let template_descriptor = FontTemplateDescriptor::from(style);
        FontDescriptor {
            template_descriptor,
            pt_size: style.font_size.size(),
            variant: style.font_variant_caps,
        }
    }
}
/// A loaded font: the platform handle plus cached metrics and per-font
/// shaping caches (see the shaping `impl Font` below).
#[derive(Debug)]
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontDescriptor,
pub actual_pt_size: Au,
// Lazily created on the first Harfbuzz shaping request (see `shape_text`).
shaper: Option<Shaper>,
// Memoized shaping results, keyed by (text, options).
shape_cache: RefCell<HashMap<ShapeCacheEntry, Arc<GlyphStore>>>,
// Memoized horizontal advances, keyed by glyph id.
glyph_advance_cache: RefCell<HashMap<u32, FractionalPixel>>,
pub font_key: webrender_api::FontInstanceKey,
}
impl Font {
    /// Constructs a `Font`, eagerly reading metrics from the platform handle
    /// and starting with empty shaping caches and no shaper.
    pub fn new(handle: FontHandle,
               descriptor: FontDescriptor,
               actual_pt_size: Au,
               font_key: webrender_api::FontInstanceKey) -> Font {
        // Read the metrics before the handle is moved into the struct.
        let metrics = handle.metrics();
        Font {
            metrics,
            handle,
            descriptor,
            actual_pt_size,
            font_key,
            shaper: None,
            shape_cache: RefCell::new(HashMap::new()),
            glyph_advance_cache: RefCell::new(HashMap::new()),
        }
    }
    /// A unique identifier for the font, allowing comparison.
    pub fn identifier(&self) -> Atom {
        self.handle.identifier()
    }
}
// Bit flags consumed by the shaping paths; carried in `ShapingOptions::flags`.
bitflags! {
pub struct ShapingFlags: u8 {
#[doc = "Set if the text is entirely whitespace."]
const IS_WHITESPACE_SHAPING_FLAG = 0x01;
#[doc = "Set if we are to ignore ligatures."]
const IGNORE_LIGATURES_SHAPING_FLAG = 0x02;
#[doc = "Set if we are to disable kerning."]
const DISABLE_KERNING_SHAPING_FLAG = 0x04;
#[doc = "Text direction is right-to-left."]
const RTL_FLAG = 0x08;
#[doc = "Set if word-break is set to keep-all."]
const KEEP_ALL_FLAG = 0x10;
}
}
/// Various options that control text shaping.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct ShapingOptions {
/// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property.
/// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null.
pub letter_spacing: Option<Au>,
/// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property.
/// The tuple is (fixed length, fraction of the space glyph's advance); see how
/// `Font::shape_text_fast` applies it per the css-text-3 word-spacing rules.
pub word_spacing: (Au, NotNan<f32>),
/// The Unicode script property of the characters in this run.
pub script: Script,
/// Various flags.
pub flags: ShapingFlags,
}
/// An entry in the shape cache.
/// Key for `Font::shape_cache`: shaping output is fully determined by the
/// text plus the options it was shaped with, so both participate in the key.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct ShapeCacheEntry {
text: String,
options: ShapingOptions,
}
impl Font {
    /// Shapes `text` with `options`, returning a shared glyph store.
    ///
    /// Results are memoized in `shape_cache`, so repeated runs of the same
    /// text/options pair only pay for shaping once. ASCII Latin text takes a
    /// kerning-only fast path; everything else goes through Harfbuzz.
    pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> {
        let this = self as *const Font;
        // Move the shaper out so the cache closure below can borrow `self`
        // immutably while still lazily constructing the shaper on first use.
        let mut shaper = self.shaper.take();
        let lookup_key = ShapeCacheEntry {
            text: text.to_owned(),
            options: *options,
        };
        let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| {
            let start_time = time::precise_time_ns();
            let mut glyphs = GlyphStore::new(text.len(),
                                             options.flags.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG),
                                             options.flags.contains(ShapingFlags::RTL_FLAG));
            if self.can_do_fast_shaping(text, options) {
                debug!("shape_text: Using ASCII fast path.");
                self.shape_text_fast(text, options, &mut glyphs);
            } else {
                debug!("shape_text: Using Harfbuzz.");
                if shaper.is_none() {
                    shaper = Some(Shaper::new(this));
                }
                shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs);
            }
            let end_time = time::precise_time_ns();
            TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize,
                                                       Ordering::Relaxed);
            Arc::new(glyphs)
        }).clone();
        self.shaper = shaper;
        result
    }
    /// Whether `text` can be shaped without Harfbuzz: LTR ASCII Latin on a
    /// handle that supports simple horizontal shaping.
    fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool {
        options.script == Script::Latin &&
            !options.flags.contains(ShapingFlags::RTL_FLAG) &&
            self.handle.can_do_fast_shaping() &&
            text.is_ascii()
    }
    /// Fast path for ASCII text that only needs simple horizontal LTR kerning.
    fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
        let mut prev_glyph_id = None;
        for (i, byte) in text.bytes().enumerate() {
            // Byte-as-char is valid here: `can_do_fast_shaping` guarantees ASCII.
            let character = byte as char;
            let glyph_id = match self.glyph_index(character) {
                Some(id) => id,
                None => continue,
            };
            let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id));
            if character == ' ' {
                // https://drafts.csswg.org/css-text-3/#word-spacing-property
                let (length, percent) = options.word_spacing;
                advance = (advance + length) + Au((advance.0 as f32 * percent.into_inner()) as i32);
            }
            if let Some(letter_spacing) = options.letter_spacing {
                advance += letter_spacing;
            }
            let offset = prev_glyph_id.map(|prev| {
                let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id));
                advance += h_kerning;
                Point2D::new(h_kerning, Au(0))
            });
            let glyph = GlyphData::new(glyph_id, advance, offset, true, true);
            glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph);
            prev_glyph_id = Some(glyph_id);
        }
        glyphs.finalize_changes();
    }
    /// Looks up a raw font table (e.g. `kern`) from the platform handle.
    pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
        let result = self.handle.table_for_tag(tag);
        let status = if result.is_some() { "Found" } else { "Didn't find" };
        debug!("{} font table[{}] with family={}, face={}",
               status, tag.tag_to_str(),
               self.handle.family_name().unwrap_or("unavailable".to_owned()),
               self.handle.face_name().unwrap_or("unavailable".to_owned()));
        result
    }
    #[inline]
    pub fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
        // Small caps are shaped with the uppercase glyph of each code point.
        let codepoint = match self.descriptor.variant {
            font_variant_caps::T::SmallCaps => codepoint.to_uppercase().next().unwrap(), //FIXME: #5938
            font_variant_caps::T::Normal => codepoint,
        };
        self.handle.glyph_index(codepoint)
    }
    pub fn has_glyph_for(&self, codepoint: char) -> bool {
        self.glyph_index(codepoint).is_some()
    }
    pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
                           -> FractionalPixel {
        self.handle.glyph_h_kerning(first_glyph, second_glyph)
    }
    /// Horizontal advance for `glyph`, memoized per glyph id.
    pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel {
        *self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| {
            match self.handle.glyph_h_advance(glyph) {
                Some(adv) => adv,
                None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
            }
        })
    }
}
/// Shared, mutable handle to a `Font`; `FontGroup` code clones these freely.
pub type FontRef = Rc<RefCell<Font>>;
/// A `FontGroup` is a prioritised list of fonts for a given set of font styles. It is used by
/// `TextRun` to decide which font to render a character with. If none of the fonts listed in the
/// styles are suitable, a fallback font may be used.
#[derive(Debug)]
pub struct | {
descriptor: FontDescriptor,
families: SmallVec<[FontGroupFamily; 8]>,
last_matching_fallback: Option<FontRef>,
}
impl FontGroup {
/// Builds the group from a computed style: one lazily-loaded entry per
/// `font-family` name, all sharing the style-derived `FontDescriptor`.
pub fn new(style: &FontStyleStruct) -> FontGroup {
let descriptor = FontDescriptor::from(style);
let families =
style.font_family.0.iter()
.map(|family| FontGroupFamily::new(descriptor.clone(), &family))
.collect();
FontGroup {
descriptor,
families,
last_matching_fallback: None,
}
}
/// Finds the first font, or else the first fallback font, which contains a glyph for
/// `codepoint`. If no such font is found, returns the first available font or fallback font
/// (which will cause a "glyph not found" character to be rendered). If no font at all can be
/// found, returns None.
pub fn find_by_codepoint<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>,
codepoint: char
) -> Option<FontRef> {
let has_glyph = |font: &FontRef| font.borrow().has_glyph_for(codepoint);
// 1) A font explicitly listed in the styles.
let font = self.find(&mut font_context, |font| has_glyph(font));
if font.is_some() {
return font
}
// 2) The fallback that worked last time — cheap re-check before searching.
if let Some(ref fallback) = self.last_matching_fallback {
if has_glyph(&fallback) {
return self.last_matching_fallback.clone()
}
}
// 3) A fresh fallback search, remembered for the next call.
let font = self.find_fallback(&mut font_context, Some(codepoint), has_glyph);
if font.is_some() {
self.last_matching_fallback = font.clone();
return font
}
// 4) No coverage anywhere: any available font ("glyph not found" rendering).
self.first(&mut font_context)
}
/// Find the first available font in the group, or the first available fallback font.
pub fn first<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>
) -> Option<FontRef> {
self.find(&mut font_context, |_| true)
.or_else(|| self.find_fallback(&mut font_context, None, |_| true))
}
/// Find a font which returns true for `predicate`. This method mutates because we may need to
/// load new font data in the process of finding a suitable font.
fn find<S, P>(
&mut self,
mut font_context: &mut FontContext<S>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
self.families.iter_mut()
.filter_map(|family| family.font(&mut font_context))
.find(predicate)
}
/// Attempts to find a suitable fallback font which matches the `predicate`. The default
/// family (i.e. "serif") will be tried first, followed by platform-specific family names.
/// If a `codepoint` is provided, then its Unicode block may be used to refine the list of
/// family names which will be tried.
fn find_fallback<S, P>(
&mut self,
font_context: &mut FontContext<S>,
codepoint: Option<char>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
iter::once(FontFamilyDescriptor::default())
.chain(
fallback_font_families(codepoint).into_iter().map(|family| {
FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Local,
)
})
)
.filter_map(|family| font_context.font(&self.descriptor, &family))
.find(predicate)
}
}
/// A `FontGroupFamily` is a single font family in a `FontGroup`. It corresponds to one of the
/// families listed in the `font-family` CSS property. The corresponding font data is lazy-loaded,
/// only if actually needed.
#[derive(Debug)]
struct FontGroupFamily {
font_descriptor: FontDescriptor,
family_descriptor: FontFamilyDescriptor,
// True once a lookup has been attempted, even if it produced no font.
loaded: bool,
// Result of that lookup (`None` if the family was unavailable).
font: Option<FontRef>,
}
impl FontGroupFamily {
    /// Creates an as-yet-unloaded member for one `font-family` entry.
    fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily {
        FontGroupFamily {
            font_descriptor,
            family_descriptor: FontFamilyDescriptor::new(
                FontFamilyName::from(family),
                FontSearchScope::Any,
            ),
            loaded: false,
            font: None,
        }
    }
    /// Returns the font within this family which matches the style, fetching it
    /// from the `FontContext` on first call and serving the cached reference
    /// (possibly `None`) on every call after that.
    fn font<S: FontSource>(&mut self, font_context: &mut FontContext<S>) -> Option<FontRef> {
        if self.loaded {
            return self.font.clone();
        }
        let font = font_context.font(&self.font_descriptor, &self.family_descriptor);
        self.loaded = true;
        self.font = font.clone();
        font
    }
}
/// Aggregate metrics for a shaped text run.
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
    /// Builds run metrics whose bounding box spans from `ascent` above the
    /// baseline to `descent` below it, `advance` wide, with the origin at the
    /// left baseline (so `bounding_box.origin.y == -ascent`).
    pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
        let origin = Point2D::new(Au(0), -ascent);
        let size = Size2D::new(advance, ascent + descent);
        // TODO(Issue #125): support loose and tight bounding boxes; using the
        // ascent+descent and advance is sometimes too generous and
        // looking at actual glyph extents can yield a tighter box.
        RunMetrics {
            advance_width: advance,
            ascent,
            descent,
            bounding_box: Rect::new(origin, size),
        }
    }
}
pub fn get_and_reset_text_shaping_performance_counter() -> usize {
let value = TEXT_SHAPING_PERFORMANCE_COUNTER.load(Ordering::SeqCst);
TEXT_SHAPING_PERFORMANCE_COUNTER.store(0, Ordering::SeqCst);
value
}
/// The scope within which we will look for a font.
/// Carried in `FontFamilyDescriptor`; the fallback search in
/// `FontGroup::find_fallback` restricts itself to `Local`.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontSearchScope {
/// All fonts will be searched, including those specified via `@font-face` rules.
Any,
/// Only local system fonts will be searched.
Local,
}
/// A font family name used in font selection.
/// Use `FontFamilyName::name` to borrow the underlying string either way.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontFamilyName {
/// A specific name such as `"Arial"`
Specific(Atom),
/// A generic name such as `sans-serif`
Generic(Atom),
}
impl FontFamilyName {
    /// Borrows the underlying family name regardless of variant.
    pub fn name(&self) -> &str {
        match *self {
            FontFamilyName::Specific(ref name) |
            FontFamilyName::Generic(ref name) => name,
        }
    }
}
impl<'a> From<&'a SingleFontFamily> for FontFamilyName {
/// Maps the style-system family representation onto ours, preserving the
/// specific-vs-generic distinction.
fn from(other: &'a SingleFontFamily) -> FontFamilyName {
match *other {
SingleFontFamily::FamilyName(ref family_name) =>
FontFamilyName::Specific(family_name.name.clone()),
SingleFontFamily::Generic(ref generic_name) =>
FontFamilyName::Generic(generic_name.clone()),
}
}
}
impl<'a> From<&'a str> for FontFamilyName {
    /// A bare string is always interpreted as a specific (non-generic) family.
    fn from(name: &'a str) -> FontFamilyName {
        FontFamilyName::Specific(Atom::from(name))
    }
}
/// The font family parameters for font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct FontFamilyDescriptor {
/// The family to look for.
pub name: FontFamilyName,
/// Whether `@font-face` fonts may satisfy the lookup, or only local ones.
pub scope: FontSearchScope,
}
impl FontFamilyDescriptor {
pub fn new(name: FontFamilyName, scope: FontSearchScope) -> FontFamilyDescriptor {
FontFamilyDescriptor { name, scope }
}
fn default() -> FontFamilyDescriptor {
FontFamilyDescriptor {
name: FontFamilyName::Generic(atom!("serif")),
scope: FontSearchScope::Local,
}
| FontGroup | identifier_name |
chain.rs | // Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use client::{BlockChainClient, Client, ClientConfig};
use block::Block;
use ethereum;
use tests::helpers::*;
use devtools::*;
use spec::Genesis;
use ethjson;
use miner::Miner;
use io::IoChannel;
pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
init_log();
let tests = ethjson::blockchain::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, blockchain) in tests.into_iter() {
let mut fail = false; | {
let mut fail_unless = |cond: bool| if!cond &&!fail {
failed.push(name.clone());
flushln!("FAIL");
fail = true;
true
} else {false};
flush!(" - {}...", name);
let spec = {
let genesis = Genesis::from(blockchain.genesis());
let state = From::from(blockchain.pre_state.clone());
let mut spec = match era {
ChainEra::Frontier => ethereum::new_frontier_test(),
ChainEra::Homestead => ethereum::new_homestead_test(),
ChainEra::Eip150 => ethereum::new_eip150_test(),
ChainEra::Eip161 => ethereum::new_eip161_test(),
ChainEra::TransitionTest => ethereum::new_transition_test(),
};
spec.set_genesis_state(state);
spec.overwrite_genesis_params(genesis);
assert!(spec.is_state_root_valid());
spec
};
let temp = RandomTempPath::new();
{
let db_config = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
let client = Client::new(
ClientConfig::default(),
&spec,
temp.as_path(),
Arc::new(Miner::with_spec(&spec)),
IoChannel::disconnected(),
&db_config,
).unwrap();
for b in &blockchain.blocks_rlp() {
if Block::is_good(&b) {
let _ = client.import_block(b.clone());
client.flush_queue();
client.import_verified_blocks();
}
}
fail_unless(client.chain_info().best_block_hash == blockchain.best_block.into());
}
}
if!fail {
flushln!("ok");
}
}
println!("!!! {:?} tests from failed.", failed.len());
failed
}
// Blockchain JSON suites run under Frontier-era consensus rules.
mod frontier_era_tests {
use tests::helpers::*;
use super::json_chain_test;
// Adapter invoked by the `declare_test!`-generated harness functions below.
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Frontier)
}
declare_test!{BlockchainTests_bcBlockGasLimitTest, "BlockchainTests/bcBlockGasLimitTest"}
declare_test!{BlockchainTests_bcForkBlockTest, "BlockchainTests/bcForkBlockTest"}
declare_test!{BlockchainTests_bcForkStressTest, "BlockchainTests/bcForkStressTest"}
declare_test!{BlockchainTests_bcForkUncle, "BlockchainTests/bcForkUncle"}
declare_test!{BlockchainTests_bcGasPricerTest, "BlockchainTests/bcGasPricerTest"}
declare_test!{BlockchainTests_bcInvalidHeaderTest, "BlockchainTests/bcInvalidHeaderTest"}
// TODO [ToDr] Ignored because of incorrect JSON (https://github.com/ethereum/tests/pull/113)
declare_test!{ignore => BlockchainTests_bcInvalidRLPTest, "BlockchainTests/bcInvalidRLPTest"}
declare_test!{BlockchainTests_bcMultiChainTest, "BlockchainTests/bcMultiChainTest"}
declare_test!{BlockchainTests_bcRPC_API_Test, "BlockchainTests/bcRPC_API_Test"}
declare_test!{BlockchainTests_bcStateTest, "BlockchainTests/bcStateTest"}
declare_test!{BlockchainTests_bcTotalDifficultyTest, "BlockchainTests/bcTotalDifficultyTest"}
declare_test!{BlockchainTests_bcUncleHeaderValiditiy, "BlockchainTests/bcUncleHeaderValiditiy"}
declare_test!{BlockchainTests_bcUncleTest, "BlockchainTests/bcUncleTest"}
declare_test!{BlockchainTests_bcValidBlockTest, "BlockchainTests/bcValidBlockTest"}
declare_test!{BlockchainTests_bcWalletTest, "BlockchainTests/bcWalletTest"}
declare_test!{BlockchainTests_RandomTests_bl10251623GO, "BlockchainTests/RandomTests/bl10251623GO"}
declare_test!{BlockchainTests_RandomTests_bl201507071825GO, "BlockchainTests/RandomTests/bl201507071825GO"}
}
mod transition_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::TransitionTest)
}
declare_test!{BlockchainTests_TestNetwork_bcSimpleTransitionTest, "BlockchainTests/TestNetwork/bcSimpleTransitionTest"}
declare_test!{BlockchainTests_TestNetwork_bcTheDaoTest, "BlockchainTests/TestNetwork/bcTheDaoTest"}
declare_test!{BlockchainTests_TestNetwork_bcEIP150Test, "BlockchainTests/TestNetwork/bcEIP150Test"}
}
mod eip150_blockchain_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Eip150)
}
declare_test!{BlockchainTests_EIP150_bcBlockGasLimitTest, "BlockchainTests/EIP150/bcBlockGasLimitTest"}
declare_test!{BlockchainTests_EIP150_bcForkStressTest, "BlockchainTests/EIP150/bcForkStressTest"}
declare_test!{BlockchainTests_EIP150_bcGasPricerTest, "BlockchainTests/EIP150/bcGasPricerTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidHeaderTest, "BlockchainTests/EIP150/bcInvalidHeaderTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidRLPTest, "BlockchainTests/EIP150/bcInvalidRLPTest"}
declare_test!{BlockchainTests_EIP150_bcMultiChainTest, "BlockchainTests/EIP150/bcMultiChainTest"}
declare_test!{BlockchainTests_EIP150_bcRPC_API_Test, "BlockchainTests/EIP150/bcRPC_API_Test"}
declare_test!{BlockchainTests_EIP150_bcStateTest, "BlockchainTests/EIP150/bcStateTest"}
declare_test!{BlockchainTests_EIP150_bcTotalDifficultyTest, "BlockchainTests/EIP150/bcTotalDifficultyTest"}
declare_test!{BlockchainTests_EIP150_bcUncleHeaderValiditiy, "BlockchainTests/EIP150/bcUncleHeaderValiditiy"}
declare_test!{BlockchainTests_EIP150_bcUncleTest, "BlockchainTests/EIP150/bcUncleTest"}
declare_test!{BlockchainTests_EIP150_bcValidBlockTest, "BlockchainTests/EIP150/bcValidBlockTest"}
declare_test!{BlockchainTests_EIP150_bcWalletTest, "BlockchainTests/EIP150/bcWalletTest"}
} | random_line_split |
|
chain.rs | // Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use client::{BlockChainClient, Client, ClientConfig};
use block::Block;
use ethereum;
use tests::helpers::*;
use devtools::*;
use spec::Genesis;
use ethjson;
use miner::Miner;
use io::IoChannel;
pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
init_log();
let tests = ethjson::blockchain::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, blockchain) in tests.into_iter() {
let mut fail = false;
{
let mut fail_unless = |cond: bool| if!cond &&!fail {
failed.push(name.clone());
flushln!("FAIL");
fail = true;
true
} else {false};
flush!(" - {}...", name);
let spec = {
let genesis = Genesis::from(blockchain.genesis());
let state = From::from(blockchain.pre_state.clone());
let mut spec = match era {
ChainEra::Frontier => ethereum::new_frontier_test(),
ChainEra::Homestead => ethereum::new_homestead_test(),
ChainEra::Eip150 => ethereum::new_eip150_test(),
ChainEra::Eip161 => ethereum::new_eip161_test(),
ChainEra::TransitionTest => ethereum::new_transition_test(),
};
spec.set_genesis_state(state);
spec.overwrite_genesis_params(genesis);
assert!(spec.is_state_root_valid());
spec
};
let temp = RandomTempPath::new();
{
let db_config = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
let client = Client::new(
ClientConfig::default(),
&spec,
temp.as_path(),
Arc::new(Miner::with_spec(&spec)),
IoChannel::disconnected(),
&db_config,
).unwrap();
for b in &blockchain.blocks_rlp() {
if Block::is_good(&b) {
let _ = client.import_block(b.clone());
client.flush_queue();
client.import_verified_blocks();
}
}
fail_unless(client.chain_info().best_block_hash == blockchain.best_block.into());
}
}
if!fail {
flushln!("ok");
}
}
println!("!!! {:?} tests from failed.", failed.len());
failed
}
mod frontier_era_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Frontier)
}
declare_test!{BlockchainTests_bcBlockGasLimitTest, "BlockchainTests/bcBlockGasLimitTest"}
declare_test!{BlockchainTests_bcForkBlockTest, "BlockchainTests/bcForkBlockTest"}
declare_test!{BlockchainTests_bcForkStressTest, "BlockchainTests/bcForkStressTest"}
declare_test!{BlockchainTests_bcForkUncle, "BlockchainTests/bcForkUncle"}
declare_test!{BlockchainTests_bcGasPricerTest, "BlockchainTests/bcGasPricerTest"}
declare_test!{BlockchainTests_bcInvalidHeaderTest, "BlockchainTests/bcInvalidHeaderTest"}
// TODO [ToDr] Ignored because of incorrect JSON (https://github.com/ethereum/tests/pull/113)
declare_test!{ignore => BlockchainTests_bcInvalidRLPTest, "BlockchainTests/bcInvalidRLPTest"}
declare_test!{BlockchainTests_bcMultiChainTest, "BlockchainTests/bcMultiChainTest"}
declare_test!{BlockchainTests_bcRPC_API_Test, "BlockchainTests/bcRPC_API_Test"}
declare_test!{BlockchainTests_bcStateTest, "BlockchainTests/bcStateTest"}
declare_test!{BlockchainTests_bcTotalDifficultyTest, "BlockchainTests/bcTotalDifficultyTest"}
declare_test!{BlockchainTests_bcUncleHeaderValiditiy, "BlockchainTests/bcUncleHeaderValiditiy"}
declare_test!{BlockchainTests_bcUncleTest, "BlockchainTests/bcUncleTest"}
declare_test!{BlockchainTests_bcValidBlockTest, "BlockchainTests/bcValidBlockTest"}
declare_test!{BlockchainTests_bcWalletTest, "BlockchainTests/bcWalletTest"}
declare_test!{BlockchainTests_RandomTests_bl10251623GO, "BlockchainTests/RandomTests/bl10251623GO"}
declare_test!{BlockchainTests_RandomTests_bl201507071825GO, "BlockchainTests/RandomTests/bl201507071825GO"}
}
mod transition_tests {
use tests::helpers::*;
use super::json_chain_test;
fn | (json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::TransitionTest)
}
declare_test!{BlockchainTests_TestNetwork_bcSimpleTransitionTest, "BlockchainTests/TestNetwork/bcSimpleTransitionTest"}
declare_test!{BlockchainTests_TestNetwork_bcTheDaoTest, "BlockchainTests/TestNetwork/bcTheDaoTest"}
declare_test!{BlockchainTests_TestNetwork_bcEIP150Test, "BlockchainTests/TestNetwork/bcEIP150Test"}
}
mod eip150_blockchain_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Eip150)
}
declare_test!{BlockchainTests_EIP150_bcBlockGasLimitTest, "BlockchainTests/EIP150/bcBlockGasLimitTest"}
declare_test!{BlockchainTests_EIP150_bcForkStressTest, "BlockchainTests/EIP150/bcForkStressTest"}
declare_test!{BlockchainTests_EIP150_bcGasPricerTest, "BlockchainTests/EIP150/bcGasPricerTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidHeaderTest, "BlockchainTests/EIP150/bcInvalidHeaderTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidRLPTest, "BlockchainTests/EIP150/bcInvalidRLPTest"}
declare_test!{BlockchainTests_EIP150_bcMultiChainTest, "BlockchainTests/EIP150/bcMultiChainTest"}
declare_test!{BlockchainTests_EIP150_bcRPC_API_Test, "BlockchainTests/EIP150/bcRPC_API_Test"}
declare_test!{BlockchainTests_EIP150_bcStateTest, "BlockchainTests/EIP150/bcStateTest"}
declare_test!{BlockchainTests_EIP150_bcTotalDifficultyTest, "BlockchainTests/EIP150/bcTotalDifficultyTest"}
declare_test!{BlockchainTests_EIP150_bcUncleHeaderValiditiy, "BlockchainTests/EIP150/bcUncleHeaderValiditiy"}
declare_test!{BlockchainTests_EIP150_bcUncleTest, "BlockchainTests/EIP150/bcUncleTest"}
declare_test!{BlockchainTests_EIP150_bcValidBlockTest, "BlockchainTests/EIP150/bcValidBlockTest"}
declare_test!{BlockchainTests_EIP150_bcWalletTest, "BlockchainTests/EIP150/bcWalletTest"}
}
| do_json_test | identifier_name |
chain.rs | // Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use client::{BlockChainClient, Client, ClientConfig};
use block::Block;
use ethereum;
use tests::helpers::*;
use devtools::*;
use spec::Genesis;
use ethjson;
use miner::Miner;
use io::IoChannel;
pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
init_log();
let tests = ethjson::blockchain::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, blockchain) in tests.into_iter() {
let mut fail = false;
{
let mut fail_unless = |cond: bool| if!cond &&!fail {
failed.push(name.clone());
flushln!("FAIL");
fail = true;
true
} else {false};
flush!(" - {}...", name);
let spec = {
let genesis = Genesis::from(blockchain.genesis());
let state = From::from(blockchain.pre_state.clone());
let mut spec = match era {
ChainEra::Frontier => ethereum::new_frontier_test(),
ChainEra::Homestead => ethereum::new_homestead_test(),
ChainEra::Eip150 => ethereum::new_eip150_test(),
ChainEra::Eip161 => ethereum::new_eip161_test(),
ChainEra::TransitionTest => ethereum::new_transition_test(),
};
spec.set_genesis_state(state);
spec.overwrite_genesis_params(genesis);
assert!(spec.is_state_root_valid());
spec
};
let temp = RandomTempPath::new();
{
let db_config = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
let client = Client::new(
ClientConfig::default(),
&spec,
temp.as_path(),
Arc::new(Miner::with_spec(&spec)),
IoChannel::disconnected(),
&db_config,
).unwrap();
for b in &blockchain.blocks_rlp() {
if Block::is_good(&b) {
let _ = client.import_block(b.clone());
client.flush_queue();
client.import_verified_blocks();
}
}
fail_unless(client.chain_info().best_block_hash == blockchain.best_block.into());
}
}
if!fail {
flushln!("ok");
}
}
println!("!!! {:?} tests from failed.", failed.len());
failed
}
mod frontier_era_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Frontier)
}
declare_test!{BlockchainTests_bcBlockGasLimitTest, "BlockchainTests/bcBlockGasLimitTest"}
declare_test!{BlockchainTests_bcForkBlockTest, "BlockchainTests/bcForkBlockTest"}
declare_test!{BlockchainTests_bcForkStressTest, "BlockchainTests/bcForkStressTest"}
declare_test!{BlockchainTests_bcForkUncle, "BlockchainTests/bcForkUncle"}
declare_test!{BlockchainTests_bcGasPricerTest, "BlockchainTests/bcGasPricerTest"}
declare_test!{BlockchainTests_bcInvalidHeaderTest, "BlockchainTests/bcInvalidHeaderTest"}
// TODO [ToDr] Ignored because of incorrect JSON (https://github.com/ethereum/tests/pull/113)
declare_test!{ignore => BlockchainTests_bcInvalidRLPTest, "BlockchainTests/bcInvalidRLPTest"}
declare_test!{BlockchainTests_bcMultiChainTest, "BlockchainTests/bcMultiChainTest"}
declare_test!{BlockchainTests_bcRPC_API_Test, "BlockchainTests/bcRPC_API_Test"}
declare_test!{BlockchainTests_bcStateTest, "BlockchainTests/bcStateTest"}
declare_test!{BlockchainTests_bcTotalDifficultyTest, "BlockchainTests/bcTotalDifficultyTest"}
declare_test!{BlockchainTests_bcUncleHeaderValiditiy, "BlockchainTests/bcUncleHeaderValiditiy"}
declare_test!{BlockchainTests_bcUncleTest, "BlockchainTests/bcUncleTest"}
declare_test!{BlockchainTests_bcValidBlockTest, "BlockchainTests/bcValidBlockTest"}
declare_test!{BlockchainTests_bcWalletTest, "BlockchainTests/bcWalletTest"}
declare_test!{BlockchainTests_RandomTests_bl10251623GO, "BlockchainTests/RandomTests/bl10251623GO"}
declare_test!{BlockchainTests_RandomTests_bl201507071825GO, "BlockchainTests/RandomTests/bl201507071825GO"}
}
mod transition_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> |
declare_test!{BlockchainTests_TestNetwork_bcSimpleTransitionTest, "BlockchainTests/TestNetwork/bcSimpleTransitionTest"}
declare_test!{BlockchainTests_TestNetwork_bcTheDaoTest, "BlockchainTests/TestNetwork/bcTheDaoTest"}
declare_test!{BlockchainTests_TestNetwork_bcEIP150Test, "BlockchainTests/TestNetwork/bcEIP150Test"}
}
mod eip150_blockchain_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Eip150)
}
declare_test!{BlockchainTests_EIP150_bcBlockGasLimitTest, "BlockchainTests/EIP150/bcBlockGasLimitTest"}
declare_test!{BlockchainTests_EIP150_bcForkStressTest, "BlockchainTests/EIP150/bcForkStressTest"}
declare_test!{BlockchainTests_EIP150_bcGasPricerTest, "BlockchainTests/EIP150/bcGasPricerTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidHeaderTest, "BlockchainTests/EIP150/bcInvalidHeaderTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidRLPTest, "BlockchainTests/EIP150/bcInvalidRLPTest"}
declare_test!{BlockchainTests_EIP150_bcMultiChainTest, "BlockchainTests/EIP150/bcMultiChainTest"}
declare_test!{BlockchainTests_EIP150_bcRPC_API_Test, "BlockchainTests/EIP150/bcRPC_API_Test"}
declare_test!{BlockchainTests_EIP150_bcStateTest, "BlockchainTests/EIP150/bcStateTest"}
declare_test!{BlockchainTests_EIP150_bcTotalDifficultyTest, "BlockchainTests/EIP150/bcTotalDifficultyTest"}
declare_test!{BlockchainTests_EIP150_bcUncleHeaderValiditiy, "BlockchainTests/EIP150/bcUncleHeaderValiditiy"}
declare_test!{BlockchainTests_EIP150_bcUncleTest, "BlockchainTests/EIP150/bcUncleTest"}
declare_test!{BlockchainTests_EIP150_bcValidBlockTest, "BlockchainTests/EIP150/bcValidBlockTest"}
declare_test!{BlockchainTests_EIP150_bcWalletTest, "BlockchainTests/EIP150/bcWalletTest"}
}
| {
json_chain_test(json_data, ChainEra::TransitionTest)
} | identifier_body |
main.rs | extern crate yars_raytracer; | use std::path::Path;
use yars_raytracer::vector3d::Vec3;
use yars_raytracer::algebra::InnerProductSpace;
use yars_raytracer::space_algebra::SO3;
use yars_raytracer::ray::Orientable;
use yars_raytracer::camera::CameraBuilder;
use yars_raytracer::scene::{Scene, Light, AmbientLight};
use yars_raytracer::shade::{Shader, PhongShader};
use yars_raytracer::materials::Material;
use yars_raytracer::raytrace::Raytracer;
use image::{ImageBuffer, ImageRgb8, Rgb, PNG};
use yars_raytracer::shapes::{Sphere, Plane};
use yars_raytracer::ray::{Ray, Shadable, ShadeCell};
fn main() {
let WIDTH = 800;
let HEIGHT = 600;
let OUTPUT = "output.png";
let camera = (SO3::rotation_x(0.47) * CameraBuilder::new(WIDTH, HEIGHT, 45.0) +
Vec3(0.0, -2.0, 0.0))
.build();
let mut img = ImageBuffer::new(WIDTH, HEIGHT);
// Some test paramters
let a_colour = Rgb([255 as u8; 3]);
let light = Light {
position: Vec3(4.0, -4.0, 0.0),
colour: Rgb([255 as u8, 255 as u8, 200 as u8]),
};
let slate = Material::new([0.01; 3], [0.03; 3], [0.1; 3], [0.0; 3], 2.0);
let mat1 = Material::new([0.5, 0.5, 0.5],
[0.5, 0.3, 0.01],
[0.5, 0.3, 0.01],
[0.1, 0.1, 0.05],
7.0);
let mat2 = Material::new([0.3, 0.2, 0.5],
[0.3, 0.1, 0.5],
[0.3, 0.1, 0.5],
[0.1, 0.1, 0.1],
4.0);
let plane =
Box::new(Plane::new(Vec3(1.0, 0.0, 0.0), Vec3(0.0, 0.0, 1.0), slate)) as Box<Shadable>;
let sphere = Box::new(Sphere::new(Vec3(0.0, -1.0, 5.0), 1.0, mat1)) as Box<Shadable>;
let obst = Box::new(Sphere::new(Vec3(-2.0, -0.5, 4.0), 0.5, mat2)) as Box<Shadable>;
let scene_objects = vec![plane, sphere, obst];
let ambient = AmbientLight { colour: Rgb([70; 3]) };
let scene = Scene {
ambient_light: ambient,
objects: scene_objects,
lights: vec![light],
};
let tracer = Raytracer::<PhongShader>::from_shader(PhongShader::instance());
// now do some tracing
for (x, y, pixel) in img.enumerate_pixels_mut() {
let ray = camera.get_ray_through_pixel(x, y);
match tracer.trace_to_depth(2, &ray, &scene) {
Some(col) => *pixel = col,
None => *pixel = Rgb([0 as u8, 0 as u8, 0 as u8]),
}
}
let ref mut fout = File::create(&Path::new(OUTPUT)).unwrap();
let _ = ImageRgb8(img).save(fout, PNG);
} | extern crate image;
use std::fs::File; | random_line_split |
main.rs | extern crate yars_raytracer;
extern crate image;
use std::fs::File;
use std::path::Path;
use yars_raytracer::vector3d::Vec3;
use yars_raytracer::algebra::InnerProductSpace;
use yars_raytracer::space_algebra::SO3;
use yars_raytracer::ray::Orientable;
use yars_raytracer::camera::CameraBuilder;
use yars_raytracer::scene::{Scene, Light, AmbientLight};
use yars_raytracer::shade::{Shader, PhongShader};
use yars_raytracer::materials::Material;
use yars_raytracer::raytrace::Raytracer;
use image::{ImageBuffer, ImageRgb8, Rgb, PNG};
use yars_raytracer::shapes::{Sphere, Plane};
use yars_raytracer::ray::{Ray, Shadable, ShadeCell};
fn | () {
let WIDTH = 800;
let HEIGHT = 600;
let OUTPUT = "output.png";
let camera = (SO3::rotation_x(0.47) * CameraBuilder::new(WIDTH, HEIGHT, 45.0) +
Vec3(0.0, -2.0, 0.0))
.build();
let mut img = ImageBuffer::new(WIDTH, HEIGHT);
// Some test paramters
let a_colour = Rgb([255 as u8; 3]);
let light = Light {
position: Vec3(4.0, -4.0, 0.0),
colour: Rgb([255 as u8, 255 as u8, 200 as u8]),
};
let slate = Material::new([0.01; 3], [0.03; 3], [0.1; 3], [0.0; 3], 2.0);
let mat1 = Material::new([0.5, 0.5, 0.5],
[0.5, 0.3, 0.01],
[0.5, 0.3, 0.01],
[0.1, 0.1, 0.05],
7.0);
let mat2 = Material::new([0.3, 0.2, 0.5],
[0.3, 0.1, 0.5],
[0.3, 0.1, 0.5],
[0.1, 0.1, 0.1],
4.0);
let plane =
Box::new(Plane::new(Vec3(1.0, 0.0, 0.0), Vec3(0.0, 0.0, 1.0), slate)) as Box<Shadable>;
let sphere = Box::new(Sphere::new(Vec3(0.0, -1.0, 5.0), 1.0, mat1)) as Box<Shadable>;
let obst = Box::new(Sphere::new(Vec3(-2.0, -0.5, 4.0), 0.5, mat2)) as Box<Shadable>;
let scene_objects = vec![plane, sphere, obst];
let ambient = AmbientLight { colour: Rgb([70; 3]) };
let scene = Scene {
ambient_light: ambient,
objects: scene_objects,
lights: vec![light],
};
let tracer = Raytracer::<PhongShader>::from_shader(PhongShader::instance());
// now do some tracing
for (x, y, pixel) in img.enumerate_pixels_mut() {
let ray = camera.get_ray_through_pixel(x, y);
match tracer.trace_to_depth(2, &ray, &scene) {
Some(col) => *pixel = col,
None => *pixel = Rgb([0 as u8, 0 as u8, 0 as u8]),
}
}
let ref mut fout = File::create(&Path::new(OUTPUT)).unwrap();
let _ = ImageRgb8(img).save(fout, PNG);
}
| main | identifier_name |
main.rs | extern crate yars_raytracer;
extern crate image;
use std::fs::File;
use std::path::Path;
use yars_raytracer::vector3d::Vec3;
use yars_raytracer::algebra::InnerProductSpace;
use yars_raytracer::space_algebra::SO3;
use yars_raytracer::ray::Orientable;
use yars_raytracer::camera::CameraBuilder;
use yars_raytracer::scene::{Scene, Light, AmbientLight};
use yars_raytracer::shade::{Shader, PhongShader};
use yars_raytracer::materials::Material;
use yars_raytracer::raytrace::Raytracer;
use image::{ImageBuffer, ImageRgb8, Rgb, PNG};
use yars_raytracer::shapes::{Sphere, Plane};
use yars_raytracer::ray::{Ray, Shadable, ShadeCell};
fn main() | let mat1 = Material::new([0.5, 0.5, 0.5],
[0.5, 0.3, 0.01],
[0.5, 0.3, 0.01],
[0.1, 0.1, 0.05],
7.0);
let mat2 = Material::new([0.3, 0.2, 0.5],
[0.3, 0.1, 0.5],
[0.3, 0.1, 0.5],
[0.1, 0.1, 0.1],
4.0);
let plane =
Box::new(Plane::new(Vec3(1.0, 0.0, 0.0), Vec3(0.0, 0.0, 1.0), slate)) as Box<Shadable>;
let sphere = Box::new(Sphere::new(Vec3(0.0, -1.0, 5.0), 1.0, mat1)) as Box<Shadable>;
let obst = Box::new(Sphere::new(Vec3(-2.0, -0.5, 4.0), 0.5, mat2)) as Box<Shadable>;
let scene_objects = vec![plane, sphere, obst];
let ambient = AmbientLight { colour: Rgb([70; 3]) };
let scene = Scene {
ambient_light: ambient,
objects: scene_objects,
lights: vec![light],
};
let tracer = Raytracer::<PhongShader>::from_shader(PhongShader::instance());
// now do some tracing
for (x, y, pixel) in img.enumerate_pixels_mut() {
let ray = camera.get_ray_through_pixel(x, y);
match tracer.trace_to_depth(2, &ray, &scene) {
Some(col) => *pixel = col,
None => *pixel = Rgb([0 as u8, 0 as u8, 0 as u8]),
}
}
let ref mut fout = File::create(&Path::new(OUTPUT)).unwrap();
let _ = ImageRgb8(img).save(fout, PNG);
}
| {
let WIDTH = 800;
let HEIGHT = 600;
let OUTPUT = "output.png";
let camera = (SO3::rotation_x(0.47) * CameraBuilder::new(WIDTH, HEIGHT, 45.0) +
Vec3(0.0, -2.0, 0.0))
.build();
let mut img = ImageBuffer::new(WIDTH, HEIGHT);
// Some test paramters
let a_colour = Rgb([255 as u8; 3]);
let light = Light {
position: Vec3(4.0, -4.0, 0.0),
colour: Rgb([255 as u8, 255 as u8, 200 as u8]),
};
let slate = Material::new([0.01; 3], [0.03; 3], [0.1; 3], [0.0; 3], 2.0);
| identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.