Column      Dtype          Values
file_name   large_string   lengths 4 to 69
prefix      large_string   lengths 0 to 26.7k
suffix      large_string   lengths 0 to 24.8k
middle      large_string   lengths 0 to 2.12k
fim_type    large_string   4 classes
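Each record that follows lists its five fields in the same order as the schema above: file_name, then the prefix, suffix, and middle text spans, then the fim_type label. Reassembling the original source file is a matter of concatenating prefix + middle + suffix. A minimal sketch of that row layout, using the column names from the schema and a hypothetical abbreviated row (the struct and the field contents are illustrative, not taken from the dataset's own tooling):

```rust
/// One row of the dump, named after the columns in the schema above.
struct FimExample {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String, // one of the 4 classes seen in the records below
}

impl FimExample {
    /// The original source is the three spans glued back together.
    fn reassemble(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}

fn main() {
    // Abbreviated version of the first persistable.rs record below.
    let row = FimExample {
        file_name: "persistable.rs".to_string(),
        prefix: "fn names(&self) -> String ".to_string(),
        suffix: " }".to_string(),
        middle: "{ Self::name().to_string() }".to_string(),
        fim_type: "identifier_body".to_string(),
    };
    assert_eq!(
        row.reassemble(),
        "fn names(&self) -> String { Self::name().to_string() } }"
    );
}
```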
persistable.rs
use std::marker::PhantomData; use expression::Expression; use query_builder::{QueryBuilder, BuildQueryResult}; use query_source::{Table, Column}; use types::NativeSqlType; /// Represents that a structure can be used to to insert a new row into the database. /// Implementations can be automatically generated by /// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name). /// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than /// one record. pub trait Insertable<T: Table> { type Columns: InsertableColumns<T>; type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>; fn columns() -> Self::Columns; fn values(self) -> Self::Values; } pub trait InsertableColumns<T: Table> { type SqlType: NativeSqlType; fn names(&self) -> String; } impl<'a, T, U> Insertable<T> for &'a [U] where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: self, _marker: PhantomData, } } } impl<'a, T, U> Insertable<T> for &'a Vec<U> where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: &*self, _marker: PhantomData, } } } pub struct InsertValues<'a, T, U: 'a> { values: &'a [U], _marker: PhantomData<T>, } impl<'a, T, U> Expression for InsertValues<'a, T, U> where T: Table, &'a U: Insertable<T>, { type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { self.to_insert_sql(out) } fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { for (i, record) in self.values.into_iter().enumerate() { if i!= 0 { out.push_sql(", "); } try!(record.values().to_insert_sql(out)); } Ok(()) } } impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C { type SqlType = <Self as Expression>::SqlType; fn names(&self) -> String
}
{ Self::name().to_string() }
identifier_body
persistable.rs
use std::marker::PhantomData; use expression::Expression; use query_builder::{QueryBuilder, BuildQueryResult}; use query_source::{Table, Column}; use types::NativeSqlType; /// Represents that a structure can be used to to insert a new row into the database. /// Implementations can be automatically generated by /// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name). /// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than /// one record. pub trait Insertable<T: Table> { type Columns: InsertableColumns<T>; type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>; fn columns() -> Self::Columns; fn values(self) -> Self::Values; } pub trait InsertableColumns<T: Table> { type SqlType: NativeSqlType; fn names(&self) -> String; } impl<'a, T, U> Insertable<T> for &'a [U] where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: self, _marker: PhantomData, } } } impl<'a, T, U> Insertable<T> for &'a Vec<U> where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: &*self, _marker: PhantomData, } } } pub struct InsertValues<'a, T, U: 'a> { values: &'a [U],
T: Table, &'a U: Insertable<T>, { type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { self.to_insert_sql(out) } fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { for (i, record) in self.values.into_iter().enumerate() { if i!= 0 { out.push_sql(", "); } try!(record.values().to_insert_sql(out)); } Ok(()) } } impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C { type SqlType = <Self as Expression>::SqlType; fn names(&self) -> String { Self::name().to_string() } }
_marker: PhantomData<T>, } impl<'a, T, U> Expression for InsertValues<'a, T, U> where
random_line_split
persistable.rs
use std::marker::PhantomData; use expression::Expression; use query_builder::{QueryBuilder, BuildQueryResult}; use query_source::{Table, Column}; use types::NativeSqlType; /// Represents that a structure can be used to to insert a new row into the database. /// Implementations can be automatically generated by /// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name). /// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than /// one record. pub trait Insertable<T: Table> { type Columns: InsertableColumns<T>; type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>; fn columns() -> Self::Columns; fn values(self) -> Self::Values; } pub trait InsertableColumns<T: Table> { type SqlType: NativeSqlType; fn names(&self) -> String; } impl<'a, T, U> Insertable<T> for &'a [U] where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: self, _marker: PhantomData, } } } impl<'a, T, U> Insertable<T> for &'a Vec<U> where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: &*self, _marker: PhantomData, } } } pub struct InsertValues<'a, T, U: 'a> { values: &'a [U], _marker: PhantomData<T>, } impl<'a, T, U> Expression for InsertValues<'a, T, U> where T: Table, &'a U: Insertable<T>, { type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { self.to_insert_sql(out) } fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { for (i, record) in self.values.into_iter().enumerate() { if i!= 0
try!(record.values().to_insert_sql(out)); } Ok(()) } } impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C { type SqlType = <Self as Expression>::SqlType; fn names(&self) -> String { Self::name().to_string() } }
{ out.push_sql(", "); }
conditional_block
promoted_errors.rs
// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![warn(const_err)] // compile-pass // compile-flags: -O fn main() { println!("{}", 0u32 - 1); let _x = 0u32 - 1; //~^ WARN const_err println!("{}", 1/(1-1)); //~^ WARN const_err let _x = 1/(1-1); //~^ WARN const_err //~| WARN const_err println!("{}", 1/(false as u32)); //~^ WARN const_err let _x = 1/(false as u32); //~^ WARN const_err //~| WARN const_err }
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at
random_line_split
promoted_errors.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![warn(const_err)] // compile-pass // compile-flags: -O fn main()
{ println!("{}", 0u32 - 1); let _x = 0u32 - 1; //~^ WARN const_err println!("{}", 1/(1-1)); //~^ WARN const_err let _x = 1/(1-1); //~^ WARN const_err //~| WARN const_err println!("{}", 1/(false as u32)); //~^ WARN const_err let _x = 1/(false as u32); //~^ WARN const_err //~| WARN const_err }
identifier_body
promoted_errors.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![warn(const_err)] // compile-pass // compile-flags: -O fn
() { println!("{}", 0u32 - 1); let _x = 0u32 - 1; //~^ WARN const_err println!("{}", 1/(1-1)); //~^ WARN const_err let _x = 1/(1-1); //~^ WARN const_err //~| WARN const_err println!("{}", 1/(false as u32)); //~^ WARN const_err let _x = 1/(false as u32); //~^ WARN const_err //~| WARN const_err }
main
identifier_name
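At this point all four fim_type classes counted in the schema have appeared in the records: identifier_body, random_line_split, conditional_block, and identifier_name. A small sketch of what the masked middle span covers in each case; the descriptions paraphrase the rows above rather than any documentation shipped with the dataset:

```rust
/// What the masked `middle` span covers for each fim_type class,
/// paraphrased from the records above. Illustrative only.
fn describe_fim_type(fim_type: &str) -> &'static str {
    match fim_type {
        "identifier_body" => "the body of a function (e.g. `{ Self::name().to_string() }`)",
        "random_line_split" => "an arbitrary run of source lines",
        "conditional_block" => "the block attached to a conditional (e.g. `{ out.push_sql(\", \"); }`)",
        "identifier_name" => "a single identifier, such as the function name `main`",
        _ => "unknown (the schema reports exactly 4 classes)",
    }
}

fn main() {
    for t in [
        "identifier_body",
        "random_line_split",
        "conditional_block",
        "identifier_name",
    ] {
        println!("{t}: {}", describe_fim_type(t));
    }
}
```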
lib.rs
extern crate yassy; extern crate gnuplot; use yassy::utils; use yassy::utils::*; use self::gnuplot::*; pub fn
(nt: usize, nppt: usize, nn: usize, fs: f64, fhabs: &[f64], outname: &str) { // The axis of fhabs has nn/2+1 points, representing frequencies from 0 to fl/2, // or i*(fl/2)/(nn/2) = i*fl/nn = i*fs*(nppt-1)/nn for i=0..nn/2. (Because // fl=1/Tl=fs*(nppt-1)) We are only interested in // frequencies up to around fi=60KHz, or i= 60KHz*nn/(fs*(nppt-1)). let npptf64=nppt as f64; let ntf64=nt as f64; // Find index such that the horizontal axis of the plot is fmax, i.e. // i = fmax*nn/(fs*(nppt-1)) let fac = (nn as f64)/(fs*(npptf64-1f64)); let i_fi = (60000f64*fac).round(); println!("fac: {}", fac); println!("i_fi: {}", i_fi); let mut f = vec![0f64; nn/2+1]; // display kHz in plot utils::linspace(&mut f, 0f64, ((nn/2+1) as f64)/fac/1000f64); let f_cut = &f[..i_fi as usize]; let fhabs_cut = &fhabs[..i_fi as usize]; let mut fg = gnuplot::Figure::new(); fg.set_terminal("svg", outname); // yticks let yticks = [0.00001f64,0.0001f64,0.001f64,0.01f64,0.1f64,1f64]; fg.axes2d() .set_y_log(Some(10f64)) .lines(f_cut.iter(), fhabs_cut.iter(), &[Color("blue")]) .lines(&[20f64,20f64], &[0f64, 1f64], &[Color("green")]) .lines(&[fs/1000f64,fs/1000f64], &[0f64, 1f64], &[Color("red")]) .lines(&[fs/1000f64-20f64,fs/1000f64-20f64], &[0f64, 1f64], &[Color("red")]) .set_y_range(Fix(0.00001f64), Fix(1f64)) .set_y_ticks_custom(yticks.iter().map(|x| Major(*x as f64, Fix("10^{%T}".to_string()))),&[],&[]) .set_x_label("Frequency in kHz",&[]) .set_title("Amplitude spectrum",&[]); fg.show(); }
plot_ampl_spec
identifier_name
lib.rs
extern crate yassy; extern crate gnuplot; use yassy::utils; use yassy::utils::*; use self::gnuplot::*; pub fn plot_ampl_spec(nt: usize, nppt: usize, nn: usize, fs: f64, fhabs: &[f64], outname: &str)
let fhabs_cut = &fhabs[..i_fi as usize]; let mut fg = gnuplot::Figure::new(); fg.set_terminal("svg", outname); // yticks let yticks = [0.00001f64,0.0001f64,0.001f64,0.01f64,0.1f64,1f64]; fg.axes2d() .set_y_log(Some(10f64)) .lines(f_cut.iter(), fhabs_cut.iter(), &[Color("blue")]) .lines(&[20f64,20f64], &[0f64, 1f64], &[Color("green")]) .lines(&[fs/1000f64,fs/1000f64], &[0f64, 1f64], &[Color("red")]) .lines(&[fs/1000f64-20f64,fs/1000f64-20f64], &[0f64, 1f64], &[Color("red")]) .set_y_range(Fix(0.00001f64), Fix(1f64)) .set_y_ticks_custom(yticks.iter().map(|x| Major(*x as f64, Fix("10^{%T}".to_string()))),&[],&[]) .set_x_label("Frequency in kHz",&[]) .set_title("Amplitude spectrum",&[]); fg.show(); }
{ // The axis of fhabs has nn/2+1 points, representing frequencies from 0 to fl/2, // or i*(fl/2)/(nn/2) = i*fl/nn = i*fs*(nppt-1)/nn for i=0..nn/2. (Because // fl=1/Tl=fs*(nppt-1)) We are only interested in // frequencies up to around fi=60KHz, or i= 60KHz*nn/(fs*(nppt-1)). let npptf64=nppt as f64; let ntf64=nt as f64; // Find index such that the horizontal axis of the plot is fmax, i.e. // i = fmax*nn/(fs*(nppt-1)) let fac = (nn as f64)/(fs*(npptf64-1f64)); let i_fi = (60000f64*fac).round(); println!("fac: {}", fac); println!("i_fi: {}", i_fi); let mut f = vec![0f64; nn/2+1]; // display kHz in plot utils::linspace(&mut f, 0f64, ((nn/2+1) as f64)/fac/1000f64); let f_cut = &f[..i_fi as usize];
identifier_body
lib.rs
extern crate yassy; extern crate gnuplot; use yassy::utils; use yassy::utils::*; use self::gnuplot::*; pub fn plot_ampl_spec(nt: usize, nppt: usize, nn: usize, fs: f64, fhabs: &[f64], outname: &str) { // The axis of fhabs has nn/2+1 points, representing frequencies from 0 to fl/2, // or i*(fl/2)/(nn/2) = i*fl/nn = i*fs*(nppt-1)/nn for i=0..nn/2. (Because // fl=1/Tl=fs*(nppt-1)) We are only interested in
let ntf64=nt as f64; // Find index such that the horizontal axis of the plot is fmax, i.e. // i = fmax*nn/(fs*(nppt-1)) let fac = (nn as f64)/(fs*(npptf64-1f64)); let i_fi = (60000f64*fac).round(); println!("fac: {}", fac); println!("i_fi: {}", i_fi); let mut f = vec![0f64; nn/2+1]; // display kHz in plot utils::linspace(&mut f, 0f64, ((nn/2+1) as f64)/fac/1000f64); let f_cut = &f[..i_fi as usize]; let fhabs_cut = &fhabs[..i_fi as usize]; let mut fg = gnuplot::Figure::new(); fg.set_terminal("svg", outname); // yticks let yticks = [0.00001f64,0.0001f64,0.001f64,0.01f64,0.1f64,1f64]; fg.axes2d() .set_y_log(Some(10f64)) .lines(f_cut.iter(), fhabs_cut.iter(), &[Color("blue")]) .lines(&[20f64,20f64], &[0f64, 1f64], &[Color("green")]) .lines(&[fs/1000f64,fs/1000f64], &[0f64, 1f64], &[Color("red")]) .lines(&[fs/1000f64-20f64,fs/1000f64-20f64], &[0f64, 1f64], &[Color("red")]) .set_y_range(Fix(0.00001f64), Fix(1f64)) .set_y_ticks_custom(yticks.iter().map(|x| Major(*x as f64, Fix("10^{%T}".to_string()))),&[],&[]) .set_x_label("Frequency in kHz",&[]) .set_title("Amplitude spectrum",&[]); fg.show(); }
// frequencies up to around fi=60KHz, or i= 60KHz*nn/(fs*(nppt-1)). let npptf64=nppt as f64;
random_line_split
ed25519.rs
use digest::Digest; use sha2::{Sha512}; use curve25519::{GeP2, GeP3, ge_scalarmult_base, sc_reduce, sc_muladd, curve25519, Fe}; use util::{fixed_time_eq}; use std::ops::{Add, Sub, Mul}; pub fn keypair(seed: &[u8]) -> ([u8; 64], [u8; 32]) { let mut secret: [u8; 64] = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(seed); hasher.result(&mut hash_output); hash_output[0] &= 248; hash_output[31] &= 63; hash_output[31] |= 64; hash_output }; let a = ge_scalarmult_base(&secret[0..32]); let public_key = a.to_bytes(); for (dest, src) in (&mut secret[32..64]).iter_mut().zip(public_key.iter()) { *dest = *src; } for (dest, src) in (&mut secret[0..32]).iter_mut().zip(seed.iter()) { *dest = *src; } (secret, public_key) } pub fn signature(message: &[u8], secret_key: &[u8]) -> [u8; 64] { let seed = &secret_key[0..32]; let public_key = &secret_key[32..64]; let az: [u8; 64] = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(seed); hasher.result(&mut hash_output); hash_output[0] &= 248; hash_output[31] &= 63; hash_output[31] |= 64; hash_output }; let nonce = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(&az[32..64]); hasher.input(message); hasher.result(&mut hash_output); sc_reduce(&mut hash_output[0..64]); hash_output }; let mut signature: [u8; 64] = [0; 64]; let r: GeP3 = ge_scalarmult_base(&nonce[0..32]); for (result_byte, source_byte) in (&mut signature[0..32]).iter_mut().zip(r.to_bytes().iter()) { *result_byte = *source_byte; } for (result_byte, source_byte) in (&mut signature[32..64]).iter_mut().zip(public_key.iter()) { *result_byte = *source_byte; } { let mut hasher = Sha512::new(); hasher.input(signature.as_ref()); hasher.input(message); let mut hram: [u8; 64] = [0; 64]; hasher.result(&mut hram); sc_reduce(&mut hram); sc_muladd(&mut signature[32..64], &hram[0..32], &az[0..32], &nonce[0..32]); } signature } fn check_s_lt_l(s: &[u8]) -> bool { let l: [u8; 32] = [ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0xde, 0xf9, 0xde, 0xa2, 0xf7, 0x9c, 0xd6, 0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5, 0xd3, 0xed ]; let mut c: u8 = 0; let mut n: u8 = 1; let mut i = 31; loop { c |= ((((s[i] as i32) - (l[i] as i32)) >> 8) as u8) & n; n &= (((((s[i] ^ l[i]) as i32)) - 1) >> 8) as u8; if i == 0 { break; } else { i -= 1; } } c == 0 } pub fn verify(message: &[u8], public_key: &[u8], signature: &[u8]) -> bool { if check_s_lt_l(&signature[32..64]) { return false; } let a = match GeP3::from_bytes_negate_vartime(public_key) { Some(g) => g, None => { return false; } }; let mut d = 0; for pk_byte in public_key.iter() { d |= *pk_byte; } if d == 0 { return false; } let mut hasher = Sha512::new(); hasher.input(&signature[0..32]); hasher.input(public_key); hasher.input(message); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); sc_reduce(&mut hash); let r = GeP2::double_scalarmult_vartime(hash.as_ref(), a, &signature[32..64]); let rcheck = r.to_bytes(); fixed_time_eq(rcheck.as_ref(), &signature[0..32]) } pub fn exchange(public_key: &[u8], private_key: &[u8]) -> [u8; 32] { let ed_y = Fe::from_bytes(&public_key); // Produce public key in Montgomery form. let mont_x = edwards_to_montgomery_x(ed_y); // Produce private key from seed component (bytes 0 to 32) // of the Ed25519 extended private key (64 bytes). 
let mut hasher = Sha512::new(); hasher.input(&private_key[0..32]); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); // Clamp the hash such that it is a valid private key hash[0] &= 248; hash[31] &= 127; hash[31] |= 64; let shared_mont_x : [u8; 32] = curve25519(&hash, &mont_x.to_bytes()); // priv., pub. shared_mont_x } fn edwards_to_montgomery_x(ed_y: Fe) -> Fe { let ed_z = Fe([1,0,0,0,0,0,0,0,0,0]); let temp_x = ed_z.add(ed_y); let temp_z = ed_z.sub(ed_y); let temp_z_inv = temp_z.invert(); let mont_x = temp_x.mul(temp_z_inv); mont_x } #[cfg(test)] mod tests { use ed25519::{keypair, signature, verify, exchange}; use curve25519::{curve25519_base, curve25519}; use digest::Digest; use sha2::{Sha512}; fn do_keypair_case(seed: [u8; 32], expected_secret: [u8; 64], expected_public: [u8; 32]) { let (actual_secret, actual_public) = keypair(seed.as_ref()); assert_eq!(actual_secret.to_vec(), expected_secret.to_vec()); assert_eq!(actual_public.to_vec(), expected_public.to_vec()); } #[test] fn keypair_cases() { do_keypair_case( [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e], [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e, 0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4, 0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66], [0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4, 0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66]); do_keypair_case( [0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb, 0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde], [0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb, 0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde, 0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd, 0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8], [0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd, 0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8]); } #[test] fn keypair_matches_mont() { let seed = [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e]; let (ed_private, ed_public) = keypair(seed.as_ref()); let mut hasher = Sha512::new(); hasher.input(&ed_private[0..32]); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); hash[0] &= 248; hash[31] &= 127; hash[31] |= 64; let cv_public = curve25519_base(&hash); let edx_ss = exchange(&ed_public, &ed_private); let cv_ss = curve25519(&hash, &cv_public);
fn do_sign_verify_case(seed: [u8; 32], message: &[u8], expected_signature: [u8; 64]) { let (secret_key, public_key) = keypair(seed.as_ref()); let mut actual_signature = signature(message, secret_key.as_ref()); assert_eq!(expected_signature.to_vec(), actual_signature.to_vec()); assert!(verify(message, public_key.as_ref(), actual_signature.as_ref())); for &(index, flip) in [(0, 1), (31, 0x80), (20, 0xff)].iter() { actual_signature[index] ^= flip; assert!(!verify(message, public_key.as_ref(), actual_signature.as_ref())); actual_signature[index] ^= flip; } let mut public_key_corrupt = public_key; public_key_corrupt[0] ^= 1; assert!(!verify(message, public_key_corrupt.as_ref(), actual_signature.as_ref())); } #[test] fn sign_verify_cases() { do_sign_verify_case( [0x2d, 0x20, 0x86, 0x83, 0x2c, 0xc2, 0xfe, 0x3f, 0xd1, 0x8c, 0xb5, 0x1d, 0x6c, 0x5e, 0x99, 0xa5, 0x75, 0x9f, 0x02, 0x21, 0x1f, 0x85, 0xe5, 0xff, 0x2f, 0x90, 0x4a, 0x78, 0x0f, 0x58, 0x00, 0x6f], [0x89, 0x8f, 0x9c, 0x4b, 0x2c, 0x6e, 0xe9, 0xe2, 0x28, 0x76, 0x1c, 0xa5, 0x08, 0x97, 0xb7, 0x1f, 0xfe, 0xca, 0x1c, 0x35, 0x28, 0x46, 0xf5, 0xfe, 0x13, 0xf7, 0xd3, 0xd5, 0x7e, 0x2c, 0x15, 0xac, 0x60, 0x90, 0x0c, 0xa3, 0x2c, 0x5b, 0x5d, 0xd9, 0x53, 0xc9, 0xa6, 0x81, 0x0a, 0xcc, 0x64, 0x39, 0x4f, 0xfd, 0x14, 0x98, 0x26, 0xd9, 0x98, 0x06, 0x29, 0x2a, 0xdd, 0xd1, 0x3f, 0xc3, 0xbb, 0x7d, 0xac, 0x70, 0x1c, 0x5b, 0x4a, 0x2d, 0x61, 0x5d, 0x15, 0x96, 0x01, 0x28, 0xed, 0x9f, 0x73, 0x6b, 0x98, 0x85, 0x4f, 0x6f, 0x07, 0x05, 0xb0, 0xf0, 0xda, 0xcb, 0xdc, 0x2c, 0x26, 0x2d, 0x27, 0x39, 0x75, 0x19, 0x14, 0x9b, 0x0e, 0x4c, 0xbe, 0x16, 0x77, 0xc5, 0x76, 0xc1, 0x39, 0x7a, 0xae, 0x5c, 0xe3, 0x49, 0x16, 0xe3, 0x51, 0x31, 0x04, 0x63, 0x2e, 0xc2, 0x19, 0x0d, 0xb8, 0xd2, 0x22, 0x89, 0xc3, 0x72, 0x3c, 0x8d, 0x01, 0x21, 0x3c, 0xad, 0x80, 0x3f, 0x4d, 0x75, 0x74, 0xc4, 0xdb, 0xb5, 0x37, 0x31, 0xb0, 0x1c, 0x8e, 0xc7, 0x5d, 0x08, 0x2e, 0xf7, 0xdc, 0x9d, 0x7f, 0x1b, 0x73, 0x15, 0x9f, 0x63, 0xdb, 0x56, 0xaa, 0x12, 0xa2, 0xca, 0x39, 0xea, 0xce, 0x6b, 0x28, 0xe4, 0xc3, 0x1d, 0x9d, 0x25, 0x67, 0x41, 0x45, 0x2e, 0x83, 0x87, 0xe1, 0x53, 0x6d, 0x03, 0x02, 0x6e, 0xe4, 0x84, 0x10, 0xd4, 0x3b, 0x21, 0x91, 0x88, 0xba, 0x14, 0xa8, 0xaf].as_ref(), [0x91, 0x20, 0x91, 0x66, 0x1e, 0xed, 0x18, 0xa4, 0x03, 0x4b, 0xc7, 0xdb, 0x4b, 0xd6, 0x0f, 0xe2, 0xde, 0xeb, 0xf3, 0xff, 0x3b, 0x6b, 0x99, 0x8d, 0xae, 0x20, 0x94, 0xb6, 0x09, 0x86, 0x5c, 0x20, 0x19, 0xec, 0x67, 0x22, 0xbf, 0xdc, 0x87, 0xbd, 0xa5, 0x40, 0x91, 0x92, 0x2e, 0x11, 0xe3, 0x93, 0xf5, 0xfd, 0xce, 0xea, 0x3e, 0x09, 0x1f, 0x2e, 0xe6, 0xbc, 0x62, 0xdf, 0x94, 0x8e, 0x99, 0x09] ); do_sign_verify_case( [0x33, 0x19, 0x17, 0x82, 0xc1, 0x70, 0x4f, 0x60, 0xd0, 0x84, 0x8d, 0x75, 0x62, 0xa2, 0xfa, 0x19, 0xf9, 0x92, 0x4f, 0xea, 0x4e, 0x77, 0x33, 0xcd, 0x45, 0xf6, 0xc3, 0x2f, 0x21, 0x9a, 0x72, 0x91], [0x77, 0x13, 0x43, 0x5a, 0x0e, 0x34, 0x6f, 0x67, 0x71, 0xae, 0x5a, 0xde, 0xa8, 0x7a, 0xe7, 0xa4, 0x52, 0xc6, 0x5d, 0x74, 0x8f, 0x48, 0x69, 0xd3, 0x1e, 0xd3, 0x67, 0x47, 0xc3, 0x28, 0xdd, 0xc4, 0xec, 0x0e, 0x48, 0x67, 0x93, 0xa5, 0x1c, 0x67, 0x66, 0xf7, 0x06, 0x48, 0x26, 0xd0, 0x74, 0x51, 0x4d, 0xd0, 0x57, 0x41, 0xf3, 0xbe, 0x27, 0x3e, 0xf2, 0x1f, 0x28, 0x0e, 0x49, 0x07, 0xed, 0x89, 0xbe, 0x30, 0x1a, 0x4e, 0xc8, 0x49, 0x6e, 0xb6, 0xab, 0x90, 0x00, 0x06, 0xe5, 0xa3, 0xc8, 0xe9, 0xc9, 0x93, 0x62, 0x1d, 0x6a, 0x3b, 0x0f, 0x6c, 0xba, 0xd0, 0xfd, 0xde, 0xf3, 0xb9, 0xc8, 0x2d].as_ref(), [0x4b, 0x8d, 0x9b, 0x1e, 0xca, 0x54, 0x00, 0xea, 0xc6, 0xf5, 0xcc, 0x0c, 0x94, 0x39, 0x63, 0x00, 0x52, 0xf7, 0x34, 0xce, 0x45, 0x3e, 0x94, 0x26, 0xf3, 0x19, 0xdd, 0x96, 0x03, 0xb6, 
0xae, 0xae, 0xb9, 0xd2, 0x3a, 0x5f, 0x93, 0xf0, 0x6a, 0x46, 0x00, 0x18, 0xf0, 0x69, 0xdf, 0x19, 0x44, 0x48, 0xf5, 0x60, 0x51, 0xab, 0x9e, 0x6b, 0xfa, 0xeb, 0x64, 0x10, 0x16, 0xf7, 0xa9, 0x0b, 0xe2, 0x0c] ); } }
assert_eq!(edx_ss.to_vec(), cv_ss.to_vec()); }
random_line_split
ed25519.rs
use digest::Digest; use sha2::{Sha512}; use curve25519::{GeP2, GeP3, ge_scalarmult_base, sc_reduce, sc_muladd, curve25519, Fe}; use util::{fixed_time_eq}; use std::ops::{Add, Sub, Mul}; pub fn keypair(seed: &[u8]) -> ([u8; 64], [u8; 32]) { let mut secret: [u8; 64] = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(seed); hasher.result(&mut hash_output); hash_output[0] &= 248; hash_output[31] &= 63; hash_output[31] |= 64; hash_output }; let a = ge_scalarmult_base(&secret[0..32]); let public_key = a.to_bytes(); for (dest, src) in (&mut secret[32..64]).iter_mut().zip(public_key.iter()) { *dest = *src; } for (dest, src) in (&mut secret[0..32]).iter_mut().zip(seed.iter()) { *dest = *src; } (secret, public_key) } pub fn signature(message: &[u8], secret_key: &[u8]) -> [u8; 64] { let seed = &secret_key[0..32]; let public_key = &secret_key[32..64]; let az: [u8; 64] = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(seed); hasher.result(&mut hash_output); hash_output[0] &= 248; hash_output[31] &= 63; hash_output[31] |= 64; hash_output }; let nonce = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(&az[32..64]); hasher.input(message); hasher.result(&mut hash_output); sc_reduce(&mut hash_output[0..64]); hash_output }; let mut signature: [u8; 64] = [0; 64]; let r: GeP3 = ge_scalarmult_base(&nonce[0..32]); for (result_byte, source_byte) in (&mut signature[0..32]).iter_mut().zip(r.to_bytes().iter()) { *result_byte = *source_byte; } for (result_byte, source_byte) in (&mut signature[32..64]).iter_mut().zip(public_key.iter()) { *result_byte = *source_byte; } { let mut hasher = Sha512::new(); hasher.input(signature.as_ref()); hasher.input(message); let mut hram: [u8; 64] = [0; 64]; hasher.result(&mut hram); sc_reduce(&mut hram); sc_muladd(&mut signature[32..64], &hram[0..32], &az[0..32], &nonce[0..32]); } signature } fn check_s_lt_l(s: &[u8]) -> bool { let l: [u8; 32] = [ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0xde, 0xf9, 0xde, 0xa2, 0xf7, 0x9c, 0xd6, 0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5, 0xd3, 0xed ]; let mut c: u8 = 0; let mut n: u8 = 1; let mut i = 31; loop { c |= ((((s[i] as i32) - (l[i] as i32)) >> 8) as u8) & n; n &= (((((s[i] ^ l[i]) as i32)) - 1) >> 8) as u8; if i == 0 { break; } else { i -= 1; } } c == 0 } pub fn verify(message: &[u8], public_key: &[u8], signature: &[u8]) -> bool { if check_s_lt_l(&signature[32..64]) { return false; } let a = match GeP3::from_bytes_negate_vartime(public_key) { Some(g) => g, None => { return false; } }; let mut d = 0; for pk_byte in public_key.iter() { d |= *pk_byte; } if d == 0 { return false; } let mut hasher = Sha512::new(); hasher.input(&signature[0..32]); hasher.input(public_key); hasher.input(message); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); sc_reduce(&mut hash); let r = GeP2::double_scalarmult_vartime(hash.as_ref(), a, &signature[32..64]); let rcheck = r.to_bytes(); fixed_time_eq(rcheck.as_ref(), &signature[0..32]) } pub fn exchange(public_key: &[u8], private_key: &[u8]) -> [u8; 32] { let ed_y = Fe::from_bytes(&public_key); // Produce public key in Montgomery form. let mont_x = edwards_to_montgomery_x(ed_y); // Produce private key from seed component (bytes 0 to 32) // of the Ed25519 extended private key (64 bytes). 
let mut hasher = Sha512::new(); hasher.input(&private_key[0..32]); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); // Clamp the hash such that it is a valid private key hash[0] &= 248; hash[31] &= 127; hash[31] |= 64; let shared_mont_x : [u8; 32] = curve25519(&hash, &mont_x.to_bytes()); // priv., pub. shared_mont_x } fn edwards_to_montgomery_x(ed_y: Fe) -> Fe { let ed_z = Fe([1,0,0,0,0,0,0,0,0,0]); let temp_x = ed_z.add(ed_y); let temp_z = ed_z.sub(ed_y); let temp_z_inv = temp_z.invert(); let mont_x = temp_x.mul(temp_z_inv); mont_x } #[cfg(test)] mod tests { use ed25519::{keypair, signature, verify, exchange}; use curve25519::{curve25519_base, curve25519}; use digest::Digest; use sha2::{Sha512}; fn do_keypair_case(seed: [u8; 32], expected_secret: [u8; 64], expected_public: [u8; 32]) { let (actual_secret, actual_public) = keypair(seed.as_ref()); assert_eq!(actual_secret.to_vec(), expected_secret.to_vec()); assert_eq!(actual_public.to_vec(), expected_public.to_vec()); } #[test] fn keypair_cases() { do_keypair_case( [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e], [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e, 0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4, 0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66], [0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4, 0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66]); do_keypair_case( [0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb, 0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde], [0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb, 0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde, 0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd, 0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8], [0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd, 0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8]); } #[test] fn
() { let seed = [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e]; let (ed_private, ed_public) = keypair(seed.as_ref()); let mut hasher = Sha512::new(); hasher.input(&ed_private[0..32]); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); hash[0] &= 248; hash[31] &= 127; hash[31] |= 64; let cv_public = curve25519_base(&hash); let edx_ss = exchange(&ed_public, &ed_private); let cv_ss = curve25519(&hash, &cv_public); assert_eq!(edx_ss.to_vec(), cv_ss.to_vec()); } fn do_sign_verify_case(seed: [u8; 32], message: &[u8], expected_signature: [u8; 64]) { let (secret_key, public_key) = keypair(seed.as_ref()); let mut actual_signature = signature(message, secret_key.as_ref()); assert_eq!(expected_signature.to_vec(), actual_signature.to_vec()); assert!(verify(message, public_key.as_ref(), actual_signature.as_ref())); for &(index, flip) in [(0, 1), (31, 0x80), (20, 0xff)].iter() { actual_signature[index] ^= flip; assert!(!verify(message, public_key.as_ref(), actual_signature.as_ref())); actual_signature[index] ^= flip; } let mut public_key_corrupt = public_key; public_key_corrupt[0] ^= 1; assert!(!verify(message, public_key_corrupt.as_ref(), actual_signature.as_ref())); } #[test] fn sign_verify_cases() { do_sign_verify_case( [0x2d, 0x20, 0x86, 0x83, 0x2c, 0xc2, 0xfe, 0x3f, 0xd1, 0x8c, 0xb5, 0x1d, 0x6c, 0x5e, 0x99, 0xa5, 0x75, 0x9f, 0x02, 0x21, 0x1f, 0x85, 0xe5, 0xff, 0x2f, 0x90, 0x4a, 0x78, 0x0f, 0x58, 0x00, 0x6f], [0x89, 0x8f, 0x9c, 0x4b, 0x2c, 0x6e, 0xe9, 0xe2, 0x28, 0x76, 0x1c, 0xa5, 0x08, 0x97, 0xb7, 0x1f, 0xfe, 0xca, 0x1c, 0x35, 0x28, 0x46, 0xf5, 0xfe, 0x13, 0xf7, 0xd3, 0xd5, 0x7e, 0x2c, 0x15, 0xac, 0x60, 0x90, 0x0c, 0xa3, 0x2c, 0x5b, 0x5d, 0xd9, 0x53, 0xc9, 0xa6, 0x81, 0x0a, 0xcc, 0x64, 0x39, 0x4f, 0xfd, 0x14, 0x98, 0x26, 0xd9, 0x98, 0x06, 0x29, 0x2a, 0xdd, 0xd1, 0x3f, 0xc3, 0xbb, 0x7d, 0xac, 0x70, 0x1c, 0x5b, 0x4a, 0x2d, 0x61, 0x5d, 0x15, 0x96, 0x01, 0x28, 0xed, 0x9f, 0x73, 0x6b, 0x98, 0x85, 0x4f, 0x6f, 0x07, 0x05, 0xb0, 0xf0, 0xda, 0xcb, 0xdc, 0x2c, 0x26, 0x2d, 0x27, 0x39, 0x75, 0x19, 0x14, 0x9b, 0x0e, 0x4c, 0xbe, 0x16, 0x77, 0xc5, 0x76, 0xc1, 0x39, 0x7a, 0xae, 0x5c, 0xe3, 0x49, 0x16, 0xe3, 0x51, 0x31, 0x04, 0x63, 0x2e, 0xc2, 0x19, 0x0d, 0xb8, 0xd2, 0x22, 0x89, 0xc3, 0x72, 0x3c, 0x8d, 0x01, 0x21, 0x3c, 0xad, 0x80, 0x3f, 0x4d, 0x75, 0x74, 0xc4, 0xdb, 0xb5, 0x37, 0x31, 0xb0, 0x1c, 0x8e, 0xc7, 0x5d, 0x08, 0x2e, 0xf7, 0xdc, 0x9d, 0x7f, 0x1b, 0x73, 0x15, 0x9f, 0x63, 0xdb, 0x56, 0xaa, 0x12, 0xa2, 0xca, 0x39, 0xea, 0xce, 0x6b, 0x28, 0xe4, 0xc3, 0x1d, 0x9d, 0x25, 0x67, 0x41, 0x45, 0x2e, 0x83, 0x87, 0xe1, 0x53, 0x6d, 0x03, 0x02, 0x6e, 0xe4, 0x84, 0x10, 0xd4, 0x3b, 0x21, 0x91, 0x88, 0xba, 0x14, 0xa8, 0xaf].as_ref(), [0x91, 0x20, 0x91, 0x66, 0x1e, 0xed, 0x18, 0xa4, 0x03, 0x4b, 0xc7, 0xdb, 0x4b, 0xd6, 0x0f, 0xe2, 0xde, 0xeb, 0xf3, 0xff, 0x3b, 0x6b, 0x99, 0x8d, 0xae, 0x20, 0x94, 0xb6, 0x09, 0x86, 0x5c, 0x20, 0x19, 0xec, 0x67, 0x22, 0xbf, 0xdc, 0x87, 0xbd, 0xa5, 0x40, 0x91, 0x92, 0x2e, 0x11, 0xe3, 0x93, 0xf5, 0xfd, 0xce, 0xea, 0x3e, 0x09, 0x1f, 0x2e, 0xe6, 0xbc, 0x62, 0xdf, 0x94, 0x8e, 0x99, 0x09] ); do_sign_verify_case( [0x33, 0x19, 0x17, 0x82, 0xc1, 0x70, 0x4f, 0x60, 0xd0, 0x84, 0x8d, 0x75, 0x62, 0xa2, 0xfa, 0x19, 0xf9, 0x92, 0x4f, 0xea, 0x4e, 0x77, 0x33, 0xcd, 0x45, 0xf6, 0xc3, 0x2f, 0x21, 0x9a, 0x72, 0x91], [0x77, 0x13, 0x43, 0x5a, 0x0e, 0x34, 0x6f, 0x67, 0x71, 0xae, 0x5a, 0xde, 0xa8, 0x7a, 0xe7, 0xa4, 0x52, 0xc6, 0x5d, 0x74, 0x8f, 0x48, 0x69, 0xd3, 0x1e, 
0xd3, 0x67, 0x47, 0xc3, 0x28, 0xdd, 0xc4, 0xec, 0x0e, 0x48, 0x67, 0x93, 0xa5, 0x1c, 0x67, 0x66, 0xf7, 0x06, 0x48, 0x26, 0xd0, 0x74, 0x51, 0x4d, 0xd0, 0x57, 0x41, 0xf3, 0xbe, 0x27, 0x3e, 0xf2, 0x1f, 0x28, 0x0e, 0x49, 0x07, 0xed, 0x89, 0xbe, 0x30, 0x1a, 0x4e, 0xc8, 0x49, 0x6e, 0xb6, 0xab, 0x90, 0x00, 0x06, 0xe5, 0xa3, 0xc8, 0xe9, 0xc9, 0x93, 0x62, 0x1d, 0x6a, 0x3b, 0x0f, 0x6c, 0xba, 0xd0, 0xfd, 0xde, 0xf3, 0xb9, 0xc8, 0x2d].as_ref(), [0x4b, 0x8d, 0x9b, 0x1e, 0xca, 0x54, 0x00, 0xea, 0xc6, 0xf5, 0xcc, 0x0c, 0x94, 0x39, 0x63, 0x00, 0x52, 0xf7, 0x34, 0xce, 0x45, 0x3e, 0x94, 0x26, 0xf3, 0x19, 0xdd, 0x96, 0x03, 0xb6, 0xae, 0xae, 0xb9, 0xd2, 0x3a, 0x5f, 0x93, 0xf0, 0x6a, 0x46, 0x00, 0x18, 0xf0, 0x69, 0xdf, 0x19, 0x44, 0x48, 0xf5, 0x60, 0x51, 0xab, 0x9e, 0x6b, 0xfa, 0xeb, 0x64, 0x10, 0x16, 0xf7, 0xa9, 0x0b, 0xe2, 0x0c] ); } }
keypair_matches_mont
identifier_name
ed25519.rs
use digest::Digest; use sha2::{Sha512}; use curve25519::{GeP2, GeP3, ge_scalarmult_base, sc_reduce, sc_muladd, curve25519, Fe}; use util::{fixed_time_eq}; use std::ops::{Add, Sub, Mul}; pub fn keypair(seed: &[u8]) -> ([u8; 64], [u8; 32]) { let mut secret: [u8; 64] = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(seed); hasher.result(&mut hash_output); hash_output[0] &= 248; hash_output[31] &= 63; hash_output[31] |= 64; hash_output }; let a = ge_scalarmult_base(&secret[0..32]); let public_key = a.to_bytes(); for (dest, src) in (&mut secret[32..64]).iter_mut().zip(public_key.iter()) { *dest = *src; } for (dest, src) in (&mut secret[0..32]).iter_mut().zip(seed.iter()) { *dest = *src; } (secret, public_key) } pub fn signature(message: &[u8], secret_key: &[u8]) -> [u8; 64] { let seed = &secret_key[0..32]; let public_key = &secret_key[32..64]; let az: [u8; 64] = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(seed); hasher.result(&mut hash_output); hash_output[0] &= 248; hash_output[31] &= 63; hash_output[31] |= 64; hash_output }; let nonce = { let mut hash_output: [u8; 64] = [0; 64]; let mut hasher = Sha512::new(); hasher.input(&az[32..64]); hasher.input(message); hasher.result(&mut hash_output); sc_reduce(&mut hash_output[0..64]); hash_output }; let mut signature: [u8; 64] = [0; 64]; let r: GeP3 = ge_scalarmult_base(&nonce[0..32]); for (result_byte, source_byte) in (&mut signature[0..32]).iter_mut().zip(r.to_bytes().iter()) { *result_byte = *source_byte; } for (result_byte, source_byte) in (&mut signature[32..64]).iter_mut().zip(public_key.iter()) { *result_byte = *source_byte; } { let mut hasher = Sha512::new(); hasher.input(signature.as_ref()); hasher.input(message); let mut hram: [u8; 64] = [0; 64]; hasher.result(&mut hram); sc_reduce(&mut hram); sc_muladd(&mut signature[32..64], &hram[0..32], &az[0..32], &nonce[0..32]); } signature } fn check_s_lt_l(s: &[u8]) -> bool { let l: [u8; 32] = [ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0xde, 0xf9, 0xde, 0xa2, 0xf7, 0x9c, 0xd6, 0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5, 0xd3, 0xed ]; let mut c: u8 = 0; let mut n: u8 = 1; let mut i = 31; loop { c |= ((((s[i] as i32) - (l[i] as i32)) >> 8) as u8) & n; n &= (((((s[i] ^ l[i]) as i32)) - 1) >> 8) as u8; if i == 0 { break; } else { i -= 1; } } c == 0 } pub fn verify(message: &[u8], public_key: &[u8], signature: &[u8]) -> bool { if check_s_lt_l(&signature[32..64]) { return false; } let a = match GeP3::from_bytes_negate_vartime(public_key) { Some(g) => g, None => { return false; } }; let mut d = 0; for pk_byte in public_key.iter() { d |= *pk_byte; } if d == 0 { return false; } let mut hasher = Sha512::new(); hasher.input(&signature[0..32]); hasher.input(public_key); hasher.input(message); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); sc_reduce(&mut hash); let r = GeP2::double_scalarmult_vartime(hash.as_ref(), a, &signature[32..64]); let rcheck = r.to_bytes(); fixed_time_eq(rcheck.as_ref(), &signature[0..32]) } pub fn exchange(public_key: &[u8], private_key: &[u8]) -> [u8; 32] { let ed_y = Fe::from_bytes(&public_key); // Produce public key in Montgomery form. let mont_x = edwards_to_montgomery_x(ed_y); // Produce private key from seed component (bytes 0 to 32) // of the Ed25519 extended private key (64 bytes). 
let mut hasher = Sha512::new(); hasher.input(&private_key[0..32]); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); // Clamp the hash such that it is a valid private key hash[0] &= 248; hash[31] &= 127; hash[31] |= 64; let shared_mont_x : [u8; 32] = curve25519(&hash, &mont_x.to_bytes()); // priv., pub. shared_mont_x } fn edwards_to_montgomery_x(ed_y: Fe) -> Fe { let ed_z = Fe([1,0,0,0,0,0,0,0,0,0]); let temp_x = ed_z.add(ed_y); let temp_z = ed_z.sub(ed_y); let temp_z_inv = temp_z.invert(); let mont_x = temp_x.mul(temp_z_inv); mont_x } #[cfg(test)] mod tests { use ed25519::{keypair, signature, verify, exchange}; use curve25519::{curve25519_base, curve25519}; use digest::Digest; use sha2::{Sha512}; fn do_keypair_case(seed: [u8; 32], expected_secret: [u8; 64], expected_public: [u8; 32]) { let (actual_secret, actual_public) = keypair(seed.as_ref()); assert_eq!(actual_secret.to_vec(), expected_secret.to_vec()); assert_eq!(actual_public.to_vec(), expected_public.to_vec()); } #[test] fn keypair_cases() { do_keypair_case( [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e], [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e, 0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4, 0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66], [0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4, 0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66]); do_keypair_case( [0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb, 0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde], [0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb, 0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde, 0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd, 0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8], [0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd, 0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8]); } #[test] fn keypair_matches_mont() { let seed = [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31, 0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e]; let (ed_private, ed_public) = keypair(seed.as_ref()); let mut hasher = Sha512::new(); hasher.input(&ed_private[0..32]); let mut hash: [u8; 64] = [0; 64]; hasher.result(&mut hash); hash[0] &= 248; hash[31] &= 127; hash[31] |= 64; let cv_public = curve25519_base(&hash); let edx_ss = exchange(&ed_public, &ed_private); let cv_ss = curve25519(&hash, &cv_public); assert_eq!(edx_ss.to_vec(), cv_ss.to_vec()); } fn do_sign_verify_case(seed: [u8; 32], message: &[u8], expected_signature: [u8; 64])
#[test] fn sign_verify_cases() { do_sign_verify_case( [0x2d, 0x20, 0x86, 0x83, 0x2c, 0xc2, 0xfe, 0x3f, 0xd1, 0x8c, 0xb5, 0x1d, 0x6c, 0x5e, 0x99, 0xa5, 0x75, 0x9f, 0x02, 0x21, 0x1f, 0x85, 0xe5, 0xff, 0x2f, 0x90, 0x4a, 0x78, 0x0f, 0x58, 0x00, 0x6f], [0x89, 0x8f, 0x9c, 0x4b, 0x2c, 0x6e, 0xe9, 0xe2, 0x28, 0x76, 0x1c, 0xa5, 0x08, 0x97, 0xb7, 0x1f, 0xfe, 0xca, 0x1c, 0x35, 0x28, 0x46, 0xf5, 0xfe, 0x13, 0xf7, 0xd3, 0xd5, 0x7e, 0x2c, 0x15, 0xac, 0x60, 0x90, 0x0c, 0xa3, 0x2c, 0x5b, 0x5d, 0xd9, 0x53, 0xc9, 0xa6, 0x81, 0x0a, 0xcc, 0x64, 0x39, 0x4f, 0xfd, 0x14, 0x98, 0x26, 0xd9, 0x98, 0x06, 0x29, 0x2a, 0xdd, 0xd1, 0x3f, 0xc3, 0xbb, 0x7d, 0xac, 0x70, 0x1c, 0x5b, 0x4a, 0x2d, 0x61, 0x5d, 0x15, 0x96, 0x01, 0x28, 0xed, 0x9f, 0x73, 0x6b, 0x98, 0x85, 0x4f, 0x6f, 0x07, 0x05, 0xb0, 0xf0, 0xda, 0xcb, 0xdc, 0x2c, 0x26, 0x2d, 0x27, 0x39, 0x75, 0x19, 0x14, 0x9b, 0x0e, 0x4c, 0xbe, 0x16, 0x77, 0xc5, 0x76, 0xc1, 0x39, 0x7a, 0xae, 0x5c, 0xe3, 0x49, 0x16, 0xe3, 0x51, 0x31, 0x04, 0x63, 0x2e, 0xc2, 0x19, 0x0d, 0xb8, 0xd2, 0x22, 0x89, 0xc3, 0x72, 0x3c, 0x8d, 0x01, 0x21, 0x3c, 0xad, 0x80, 0x3f, 0x4d, 0x75, 0x74, 0xc4, 0xdb, 0xb5, 0x37, 0x31, 0xb0, 0x1c, 0x8e, 0xc7, 0x5d, 0x08, 0x2e, 0xf7, 0xdc, 0x9d, 0x7f, 0x1b, 0x73, 0x15, 0x9f, 0x63, 0xdb, 0x56, 0xaa, 0x12, 0xa2, 0xca, 0x39, 0xea, 0xce, 0x6b, 0x28, 0xe4, 0xc3, 0x1d, 0x9d, 0x25, 0x67, 0x41, 0x45, 0x2e, 0x83, 0x87, 0xe1, 0x53, 0x6d, 0x03, 0x02, 0x6e, 0xe4, 0x84, 0x10, 0xd4, 0x3b, 0x21, 0x91, 0x88, 0xba, 0x14, 0xa8, 0xaf].as_ref(), [0x91, 0x20, 0x91, 0x66, 0x1e, 0xed, 0x18, 0xa4, 0x03, 0x4b, 0xc7, 0xdb, 0x4b, 0xd6, 0x0f, 0xe2, 0xde, 0xeb, 0xf3, 0xff, 0x3b, 0x6b, 0x99, 0x8d, 0xae, 0x20, 0x94, 0xb6, 0x09, 0x86, 0x5c, 0x20, 0x19, 0xec, 0x67, 0x22, 0xbf, 0xdc, 0x87, 0xbd, 0xa5, 0x40, 0x91, 0x92, 0x2e, 0x11, 0xe3, 0x93, 0xf5, 0xfd, 0xce, 0xea, 0x3e, 0x09, 0x1f, 0x2e, 0xe6, 0xbc, 0x62, 0xdf, 0x94, 0x8e, 0x99, 0x09] ); do_sign_verify_case( [0x33, 0x19, 0x17, 0x82, 0xc1, 0x70, 0x4f, 0x60, 0xd0, 0x84, 0x8d, 0x75, 0x62, 0xa2, 0xfa, 0x19, 0xf9, 0x92, 0x4f, 0xea, 0x4e, 0x77, 0x33, 0xcd, 0x45, 0xf6, 0xc3, 0x2f, 0x21, 0x9a, 0x72, 0x91], [0x77, 0x13, 0x43, 0x5a, 0x0e, 0x34, 0x6f, 0x67, 0x71, 0xae, 0x5a, 0xde, 0xa8, 0x7a, 0xe7, 0xa4, 0x52, 0xc6, 0x5d, 0x74, 0x8f, 0x48, 0x69, 0xd3, 0x1e, 0xd3, 0x67, 0x47, 0xc3, 0x28, 0xdd, 0xc4, 0xec, 0x0e, 0x48, 0x67, 0x93, 0xa5, 0x1c, 0x67, 0x66, 0xf7, 0x06, 0x48, 0x26, 0xd0, 0x74, 0x51, 0x4d, 0xd0, 0x57, 0x41, 0xf3, 0xbe, 0x27, 0x3e, 0xf2, 0x1f, 0x28, 0x0e, 0x49, 0x07, 0xed, 0x89, 0xbe, 0x30, 0x1a, 0x4e, 0xc8, 0x49, 0x6e, 0xb6, 0xab, 0x90, 0x00, 0x06, 0xe5, 0xa3, 0xc8, 0xe9, 0xc9, 0x93, 0x62, 0x1d, 0x6a, 0x3b, 0x0f, 0x6c, 0xba, 0xd0, 0xfd, 0xde, 0xf3, 0xb9, 0xc8, 0x2d].as_ref(), [0x4b, 0x8d, 0x9b, 0x1e, 0xca, 0x54, 0x00, 0xea, 0xc6, 0xf5, 0xcc, 0x0c, 0x94, 0x39, 0x63, 0x00, 0x52, 0xf7, 0x34, 0xce, 0x45, 0x3e, 0x94, 0x26, 0xf3, 0x19, 0xdd, 0x96, 0x03, 0xb6, 0xae, 0xae, 0xb9, 0xd2, 0x3a, 0x5f, 0x93, 0xf0, 0x6a, 0x46, 0x00, 0x18, 0xf0, 0x69, 0xdf, 0x19, 0x44, 0x48, 0xf5, 0x60, 0x51, 0xab, 0x9e, 0x6b, 0xfa, 0xeb, 0x64, 0x10, 0x16, 0xf7, 0xa9, 0x0b, 0xe2, 0x0c] ); } }
{ let (secret_key, public_key) = keypair(seed.as_ref()); let mut actual_signature = signature(message, secret_key.as_ref()); assert_eq!(expected_signature.to_vec(), actual_signature.to_vec()); assert!(verify(message, public_key.as_ref(), actual_signature.as_ref())); for &(index, flip) in [(0, 1), (31, 0x80), (20, 0xff)].iter() { actual_signature[index] ^= flip; assert!(!verify(message, public_key.as_ref(), actual_signature.as_ref())); actual_signature[index] ^= flip; } let mut public_key_corrupt = public_key; public_key_corrupt[0] ^= 1; assert!(!verify(message, public_key_corrupt.as_ref(), actual_signature.as_ref())); }
identifier_body
magenta.rs
extern crate drm; use std::io::Result as IoResult; use std::thread::sleep; use std::time::Duration; fn main() -> IoResult<()> { let mut dev0 = drm::Device::first_card().unwrap(); let dev = dev0.set_master() .map_err(|err| { eprintln!("Failed to set master: {:?}", err); err })?; let res = dev.get_resources()?; let connector = res.connectors().iter() .filter_map(|id| dev.get(*id).ok()) .find(|conn| conn.encoder_id().is_some()) .expect("No active connectors"); let encoder_id = connector.encoder_id().unwrap(); let encoder = dev.get(encoder_id) .expect("failed get encoder"); let crtc_id = encoder.crtc_id().unwrap(); let crtc = dev.get(crtc_id) .expect("failed get crtc"); let old_fbid = crtc.fb_id().expect("Currently no fb"); let mode = crtc.mode().expect("mode") .clone();
let mut buffer = drm::mode::DumbBuf::create_with_depth( &dev, mode.hdisplay as u32, mode.vdisplay as u32, 32, 32) .expect("creating buffer"); dev.set_crtc(crtc.id(), Some(buffer.fb().id()), 0, 0, &[ connector.id() ], Some(&mode)) .expect("set_crtc 1"); fill_buffer(&mut buffer); sleep(Duration::new(1, 0)); dev.set_crtc(crtc.id(), Some(old_fbid), 0, 0, &[ connector.id() ], Some(&mode)) .expect("set_crtc 1"); Ok(()) } fn fill_buffer<B:AsMut<[u32]>>(mut buffer_ref: B) { let mut buffer = buffer_ref.as_mut(); for p in buffer.iter_mut() { *p = 0xffff00ff; } }
random_line_split
magenta.rs
extern crate drm; use std::io::Result as IoResult; use std::thread::sleep; use std::time::Duration; fn main() -> IoResult<()>
let encoder = dev.get(encoder_id) .expect("failed get encoder"); let crtc_id = encoder.crtc_id().unwrap(); let crtc = dev.get(crtc_id) .expect("failed get crtc"); let old_fbid = crtc.fb_id().expect("Currently no fb"); let mode = crtc.mode().expect("mode") .clone(); let mut buffer = drm::mode::DumbBuf::create_with_depth( &dev, mode.hdisplay as u32, mode.vdisplay as u32, 32, 32) .expect("creating buffer"); dev.set_crtc(crtc.id(), Some(buffer.fb().id()), 0, 0, &[ connector.id() ], Some(&mode)) .expect("set_crtc 1"); fill_buffer(&mut buffer); sleep(Duration::new(1, 0)); dev.set_crtc(crtc.id(), Some(old_fbid), 0, 0, &[ connector.id() ], Some(&mode)) .expect("set_crtc 1"); Ok(()) } fn fill_buffer<B:AsMut<[u32]>>(mut buffer_ref: B) { let mut buffer = buffer_ref.as_mut(); for p in buffer.iter_mut() { *p = 0xffff00ff; } }
{ let mut dev0 = drm::Device::first_card().unwrap(); let dev = dev0.set_master() .map_err(|err| { eprintln!("Failed to set master: {:?}", err); err })?; let res = dev.get_resources()?; let connector = res.connectors().iter() .filter_map(|id| dev.get(*id).ok()) .find(|conn| conn.encoder_id().is_some()) .expect("No active connectors"); let encoder_id = connector.encoder_id().unwrap();
identifier_body
magenta.rs
extern crate drm; use std::io::Result as IoResult; use std::thread::sleep; use std::time::Duration; fn main() -> IoResult<()> { let mut dev0 = drm::Device::first_card().unwrap(); let dev = dev0.set_master() .map_err(|err| { eprintln!("Failed to set master: {:?}", err); err })?; let res = dev.get_resources()?; let connector = res.connectors().iter() .filter_map(|id| dev.get(*id).ok()) .find(|conn| conn.encoder_id().is_some()) .expect("No active connectors"); let encoder_id = connector.encoder_id().unwrap(); let encoder = dev.get(encoder_id) .expect("failed get encoder"); let crtc_id = encoder.crtc_id().unwrap(); let crtc = dev.get(crtc_id) .expect("failed get crtc"); let old_fbid = crtc.fb_id().expect("Currently no fb"); let mode = crtc.mode().expect("mode") .clone(); let mut buffer = drm::mode::DumbBuf::create_with_depth( &dev, mode.hdisplay as u32, mode.vdisplay as u32, 32, 32) .expect("creating buffer"); dev.set_crtc(crtc.id(), Some(buffer.fb().id()), 0, 0, &[ connector.id() ], Some(&mode)) .expect("set_crtc 1"); fill_buffer(&mut buffer); sleep(Duration::new(1, 0)); dev.set_crtc(crtc.id(), Some(old_fbid), 0, 0, &[ connector.id() ], Some(&mode)) .expect("set_crtc 1"); Ok(()) } fn
<B:AsMut<[u32]>>(mut buffer_ref: B) { let mut buffer = buffer_ref.as_mut(); for p in buffer.iter_mut() { *p = 0xffff00ff; } }
fill_buffer
identifier_name
check_const.rs
bl: ast::Mutability) { match self.rvalue_borrows.entry(id) { Entry::Occupied(mut entry) => { // Merge the two borrows, taking the most demanding // one, mutability-wise. if mutbl == ast::MutMutable { entry.insert(mutbl); } } Entry::Vacant(entry) => { entry.insert(mutbl); } } } fn msg(&self) -> &'static str { match self.mode { Mode::Const => "constant", Mode::StaticMut | Mode::Static => "static", Mode::Var => unreachable!(), }
let tcontents = ty::type_contents(self.tcx, node_ty); let suffix = if tcontents.has_dtor() { "destructors" } else if tcontents.owns_owned() { "owned pointers" } else { return }; self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \ to have {}", suffix)); } fn check_static_type(&self, e: &ast::Expr) { let ty = ty::node_id_to_type(self.tcx, e.id); let infcx = infer::new_infer_ctxt(self.tcx); let mut fulfill_cx = traits::FulfillmentContext::new(); let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic); fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); let env = ty::empty_parameter_environment(self.tcx); match fulfill_cx.select_all_or_error(&infcx, &env) { Ok(()) => { }, Err(ref errors) => { traits::report_fulfillment_errors(&infcx, errors); } } } } impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { fn visit_item(&mut self, i: &ast::Item) { debug!("visit_item(item={})", pprust::item_to_string(i)); match i.node { ast::ItemStatic(_, ast::MutImmutable, ref expr) => { self.check_static_type(&**expr); self.global_expr(Mode::Static, &**expr); } ast::ItemStatic(_, ast::MutMutable, ref expr) => { self.check_static_mut_type(&**expr); self.global_expr(Mode::StaticMut, &**expr); } ast::ItemConst(_, ref expr) => { self.global_expr(Mode::Const, &**expr); } ast::ItemEnum(ref enum_definition, _) => { for var in &enum_definition.variants { if let Some(ref ex) = var.node.disr_expr { self.global_expr(Mode::Const, &**ex); } } } _ => { self.with_mode(Mode::Var, |v| visit::walk_item(v, i)); } } } fn visit_fn(&mut self, fk: visit::FnKind<'v>, fd: &'v ast::FnDecl, b: &'v ast::Block, s: Span, fn_id: ast::NodeId) { assert!(self.mode == Mode::Var); self.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b)); visit::walk_fn(self, fk, fd, b, s); } fn visit_pat(&mut self, p: &ast::Pat) { match p.node { ast::PatLit(ref lit) => { self.global_expr(Mode::Const, &**lit); } ast::PatRange(ref start, ref end) => { self.global_expr(Mode::Const, &**start); self.global_expr(Mode::Const, &**end); } _ => visit::walk_pat(self, p) } } fn visit_expr(&mut self, ex: &ast::Expr) { let mut outer = self.qualif; self.qualif = PURE_CONST; let node_ty = ty::node_id_to_type(self.tcx, ex.id); check_expr(self, ex, node_ty); // Special-case some expressions to avoid certain flags bubbling up. match ex.node { ast::ExprCall(ref callee, ref args) => { for arg in args.iter() { self.visit_expr(&**arg) } let inner = self.qualif; self.visit_expr(&**callee); // The callee's size doesn't count in the call. let added = self.qualif - inner; self.qualif = inner | (added - NON_ZERO_SIZED); } ast::ExprRepeat(ref element, _) => { self.visit_expr(&**element); // The count is checked elsewhere (typeck). let count = match node_ty.sty { ty::ty_vec(_, Some(n)) => n, _ => unreachable!() }; // [element; 0] is always zero-sized. if count == 0 { self.qualif = self.qualif - (NON_ZERO_SIZED | PREFER_IN_PLACE); } } ast::ExprMatch(ref discr, ref arms, _) => { // Compute the most demanding borrow from all the arms' // patterns and set that on the discriminator. let mut borrow = None; for pat in arms.iter().flat_map(|arm| arm.pats.iter()) { let pat_borrow = self.rvalue_borrows.remove(&pat.id); match (borrow, pat_borrow) { (None, _) | (_, Some(ast::MutMutable)) => { borrow = pat_borrow; } _ => {} } } if let Some(mutbl) = borrow { self.record_borrow(discr.id, mutbl); } visit::walk_expr(self, ex); } // Division by zero and overflow checking. 
ast::ExprBinary(op, _, _) => { visit::walk_expr(self, ex); let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem; match node_ty.sty { ty::ty_uint(_) | ty::ty_int(_) if div_or_rem => { if!self.qualif.intersects(NOT_CONST) { match const_eval::eval_const_expr_partial(self.tcx, ex, None) { Ok(_) => {} Err(msg) => { span_err!(self.tcx.sess, ex.span, E0020, "{} in a constant expression", msg) } } } } _ => {} } } _ => visit::walk_expr(self, ex) } // Handle borrows on (or inside the autorefs of) this expression. match self.rvalue_borrows.remove(&ex.id) { Some(ast::MutImmutable) => { // Constants cannot be borrowed if they contain interior mutability as // it means that our "silent insertion of statics" could change // initializer values (very bad). // If the type doesn't have interior mutability, then `MUTABLE_MEM` has // propagated from another error, so erroring again would be just noise. let tc = ty::type_contents(self.tcx, node_ty); if self.qualif.intersects(MUTABLE_MEM) && tc.interior_unsafe() { outer = outer | NOT_CONST; if self.mode!= Mode::Var { self.tcx.sess.span_err(ex.span, "cannot borrow a constant which contains \ interior mutability, create a static instead"); } } // If the reference has to be'static, avoid in-place initialization // as that will end up pointing to the stack instead. if!self.qualif.intersects(NON_STATIC_BORROWS) { self.qualif = self.qualif - PREFER_IN_PLACE; self.add_qualif(HAS_STATIC_BORROWS); } } Some(ast::MutMutable) => { // `&mut expr` means expr could be mutated, unless it's zero-sized. if self.qualif.intersects(NON_ZERO_SIZED) { if self.mode == Mode::Var { outer = outer | NOT_CONST; self.add_qualif(MUTABLE_MEM); } else { span_err!(self.tcx.sess, ex.span, E0017, "references in {}s may only refer \ to immutable values", self.msg()) } } if!self.qualif.intersects(NON_STATIC_BORROWS) { self.add_qualif(HAS_STATIC_BORROWS); } } None => {} } self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif); // Don't propagate certain flags. self.qualif = outer | (self.qualif - HAS_STATIC_BORROWS); } } /// This function is used to enforce the constraints on /// const/static items. It walks through the *value* /// of the item walking down the expression and evaluating /// every nested expression. If the expression is not part /// of a const/static item, it is qualified for promotion /// instead of producing errors. fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &ast::Expr, node_ty: Ty<'tcx>) { match node_ty.sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => { v.add_qualif(NEEDS_DROP); if v.mode!= Mode::Var { v.tcx.sess.span_err(e.span, &format!("{}s are not allowed to have destructors", v.msg())); } } _ => {} } let method_call = ty::MethodCall::expr(e.id); match e.node { ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0011, "user-defined operators are not allowed in {}s", v.msg()); } } ast::ExprBox(..) | ast::ExprUnary(ast::UnUniq, _) => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0010, "allocations are not allowed in {}s", v.msg()); } } ast::ExprUnary(ast::UnDeref, ref ptr) => { match ty::node_id_to_type(v.tcx, ptr.id).sty { ty::ty_ptr(_) => { // This shouldn't be allowed in constants at all. 
v.add_qualif(NOT_CONST); } _ => {} } } ast::ExprCast(ref from, _) => { let toty = ty::expr_ty(v.tcx, e); let fromty = ty::expr_ty(v.tcx, &**from); let is_legal_cast = ty::type_is_numeric(toty) || ty::type_is_unsafe_ptr(toty) || (ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty)); if!is_legal_cast { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0012, "can not cast to `{}` in {}s", ppaux::ty_to_string(v.tcx, toty), v.msg()); } } if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0018, "can not cast a pointer to an integer in {}s", v.msg()); } } } ast::ExprPath(_) | ast::ExprQPath(_) => { let def = v.tcx.def_map.borrow().get(&e.id).cloned(); match def { Some(def::DefVariant(_, _, _)) => { // Count the discriminator or function pointer. v.add_qualif(NON_ZERO_SIZED); } Some(def::DefStruct(_)) => { if let ty::ty_bare_fn(..) = node_ty.sty { // Count the function pointer. v.add_qualif(NON_ZERO_SIZED); } } Some(def::DefFn(..)) | Some(def::DefStaticMethod(..)) | Some(def::DefMethod(..)) => { // Count the function pointer. v.add_qualif(NON_ZERO_SIZED); } Some(def::DefStatic(..)) => { match v.mode { Mode::Static | Mode::StaticMut => {} Mode::Const => { span_err!(v.tcx.sess, e.span, E0013, "constants cannot refer to other statics, \ insert an intermediate constant instead"); } Mode::Var => v.add_qualif(NOT_CONST) } } Some(def::DefConst(did)) => { if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did) { let inner = v.global_expr(Mode::Const, expr); v.add_qualif(inner); } else { v.tcx.sess.span_bug(e.span, "DefConst doesn't point \ to an ItemConst"); } } def => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { debug!("(checking const) found bad def: {:?}", def); span_err!(v.tcx.sess, e.span, E0014, "paths in {}s may only refer to constants \ or functions", v.msg()); } } } } ast::ExprCall(ref callee, _) => {
} fn check_static_mut_type(&self, e: &ast::Expr) { let node_ty = ty::node_id_to_type(self.tcx, e.id);
random_line_split
check_const.rs
to immutable values", self.msg()) } } if!self.qualif.intersects(NON_STATIC_BORROWS) { self.add_qualif(HAS_STATIC_BORROWS); } } None => {} } self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif); // Don't propagate certain flags. self.qualif = outer | (self.qualif - HAS_STATIC_BORROWS); } } /// This function is used to enforce the constraints on /// const/static items. It walks through the *value* /// of the item walking down the expression and evaluating /// every nested expression. If the expression is not part /// of a const/static item, it is qualified for promotion /// instead of producing errors. fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &ast::Expr, node_ty: Ty<'tcx>) { match node_ty.sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => { v.add_qualif(NEEDS_DROP); if v.mode!= Mode::Var { v.tcx.sess.span_err(e.span, &format!("{}s are not allowed to have destructors", v.msg())); } } _ => {} } let method_call = ty::MethodCall::expr(e.id); match e.node { ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0011, "user-defined operators are not allowed in {}s", v.msg()); } } ast::ExprBox(..) | ast::ExprUnary(ast::UnUniq, _) => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0010, "allocations are not allowed in {}s", v.msg()); } } ast::ExprUnary(ast::UnDeref, ref ptr) => { match ty::node_id_to_type(v.tcx, ptr.id).sty { ty::ty_ptr(_) => { // This shouldn't be allowed in constants at all. v.add_qualif(NOT_CONST); } _ => {} } } ast::ExprCast(ref from, _) => { let toty = ty::expr_ty(v.tcx, e); let fromty = ty::expr_ty(v.tcx, &**from); let is_legal_cast = ty::type_is_numeric(toty) || ty::type_is_unsafe_ptr(toty) || (ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty)); if!is_legal_cast { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0012, "can not cast to `{}` in {}s", ppaux::ty_to_string(v.tcx, toty), v.msg()); } } if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0018, "can not cast a pointer to an integer in {}s", v.msg()); } } } ast::ExprPath(_) | ast::ExprQPath(_) => { let def = v.tcx.def_map.borrow().get(&e.id).cloned(); match def { Some(def::DefVariant(_, _, _)) => { // Count the discriminator or function pointer. v.add_qualif(NON_ZERO_SIZED); } Some(def::DefStruct(_)) => { if let ty::ty_bare_fn(..) = node_ty.sty { // Count the function pointer. v.add_qualif(NON_ZERO_SIZED); } } Some(def::DefFn(..)) | Some(def::DefStaticMethod(..)) | Some(def::DefMethod(..)) => { // Count the function pointer. 
v.add_qualif(NON_ZERO_SIZED); } Some(def::DefStatic(..)) => { match v.mode { Mode::Static | Mode::StaticMut => {} Mode::Const => { span_err!(v.tcx.sess, e.span, E0013, "constants cannot refer to other statics, \ insert an intermediate constant instead"); } Mode::Var => v.add_qualif(NOT_CONST) } } Some(def::DefConst(did)) => { if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did) { let inner = v.global_expr(Mode::Const, expr); v.add_qualif(inner); } else { v.tcx.sess.span_bug(e.span, "DefConst doesn't point \ to an ItemConst"); } } def => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { debug!("(checking const) found bad def: {:?}", def); span_err!(v.tcx.sess, e.span, E0014, "paths in {}s may only refer to constants \ or functions", v.msg()); } } } } ast::ExprCall(ref callee, _) => { let mut callee = &**callee; loop { callee = match callee.node { ast::ExprParen(ref inner) => &**inner, ast::ExprBlock(ref block) => match block.expr { Some(ref tail) => &**tail, None => break }, _ => break }; } let def = v.tcx.def_map.borrow().get(&callee.id).cloned(); match def { Some(def::DefStruct(..)) => {} Some(def::DefVariant(..)) => { // Count the discriminator. v.add_qualif(NON_ZERO_SIZED); } _ => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0015, "function calls in {}s are limited to \ struct and enum constructors", v.msg()); } } } } ast::ExprBlock(ref block) => { // Check all statements in the block let mut block_span_err = |span| { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, span, E0016, "blocks in {}s are limited to items and \ tail expressions", v.msg()); } }; for stmt in &block.stmts { match stmt.node { ast::StmtDecl(ref decl, _) => { match decl.node { ast::DeclLocal(_) => block_span_err(decl.span), // Item statements are allowed ast::DeclItem(_) => {} } } ast::StmtExpr(ref expr, _) => block_span_err(expr.span), ast::StmtSemi(ref semi, _) => block_span_err(semi.span), ast::StmtMac(..) => { v.tcx.sess.span_bug(e.span, "unexpanded statement \ macro in const?!") } } } } ast::ExprStruct(..) => { let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id()); if did == v.tcx.lang_items.unsafe_cell_type() { v.add_qualif(MUTABLE_MEM); } } ast::ExprLit(_) | ast::ExprAddrOf(..) => { v.add_qualif(NON_ZERO_SIZED); } ast::ExprRepeat(..) => { v.add_qualif(PREFER_IN_PLACE); } ast::ExprClosure(..) => { // Paths in constant constexts cannot refer to local variables, // as there are none, and thus closures can't have upvars there. if ty::with_freevars(v.tcx, e.id, |fv|!fv.is_empty()) { assert!(v.mode == Mode::Var, "global closures can't capture anything"); v.add_qualif(NOT_CONST); } } ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) | ast::ExprField(..) | ast::ExprTupField(..) | ast::ExprVec(_) | ast::ExprParen(..) | ast::ExprTup(..) => {} // Conditional control flow (possible to implement). ast::ExprMatch(..) | ast::ExprIf(..) | ast::ExprIfLet(..) | // Loops (not very meaningful in constants). ast::ExprWhile(..) | ast::ExprWhileLet(..) | ast::ExprForLoop(..) | ast::ExprLoop(..) | // More control flow (also not very meaningful). ast::ExprBreak(_) | ast::ExprAgain(_) | ast::ExprRet(_) | // Miscellaneous expressions that could be implemented. ast::ExprRange(..) | // Various other expressions. ast::ExprMethodCall(..) | ast::ExprAssign(..) | ast::ExprAssignOp(..) 
| ast::ExprInlineAsm(_) | ast::ExprMac(_) => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0019, "{} contains unimplemented expression type", v.msg()); } } } } pub fn check_crate(tcx: &ty::ctxt) { visit::walk_crate(&mut CheckCrateVisitor { tcx: tcx, mode: Mode::Var, qualif: NOT_CONST, rvalue_borrows: NodeMap() }, tcx.map.krate()); tcx.sess.abort_if_errors(); } impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { fn consume(&mut self, _consume_id: ast::NodeId, consume_span: Span, cmt: mc::cmt, _mode: euv::ConsumeMode) { let mut cur = &cmt; loop { match cur.cat { mc::cat_static_item => { if self.mode!= Mode::Var { // statics cannot be consumed by value at any time, that would imply // that they're an initializer (what a const is for) or kept in sync // over time (not feasible), so deny it outright. self.tcx.sess.span_err(consume_span, "cannot refer to other statics by value, use the \ address-of operator or a constant instead"); } break; } mc::cat_deref(ref cmt, _, _) | mc::cat_downcast(ref cmt, _) | mc::cat_interior(ref cmt, _) => cur = cmt, mc::cat_rvalue(..) | mc::cat_upvar(..) | mc::cat_local(..) => break } } } fn borrow(&mut self, borrow_id: ast::NodeId, borrow_span: Span, cmt: mc::cmt<'tcx>, _loan_region: ty::Region, bk: ty::BorrowKind, loan_cause: euv::LoanCause) { let mut cur = &cmt; let mut is_interior = false; loop { match cur.cat { mc::cat_rvalue(..) => { if loan_cause == euv::MatchDiscriminant { // Ignore the dummy immutable borrow created by EUV. break; } let mutbl = bk.to_mutbl_lossy(); if mutbl == ast::MutMutable && self.mode == Mode::StaticMut { // Mutable slices are the only `&mut` allowed in globals, // but only in `static mut`, nowhere else. match cmt.ty.sty { ty::ty_vec(_, _) => break, _ => {} } } self.record_borrow(borrow_id, mutbl); break; } mc::cat_static_item => { if is_interior && self.mode!= Mode::Var { // Borrowed statics can specifically *only* have their address taken, // not any number of other borrows such as borrowing fields, reading // elements of an array, etc. self.tcx.sess.span_err(borrow_span, "cannot refer to the interior of another \ static, use a constant instead"); } break; } mc::cat_deref(ref cmt, _, _) | mc::cat_downcast(ref cmt, _) | mc::cat_interior(ref cmt, _) => { is_interior = true; cur = cmt; } mc::cat_upvar(..) | mc::cat_local(..) => break } } } fn decl_without_init(&mut self, _id: ast::NodeId, _span: Span) {} fn mutate(&mut self, _assignment_id: ast::NodeId, _assignment_span: Span, _assignee_cmt: mc::cmt, _mode: euv::MutateMode) {} fn
matched_pat
identifier_name
check_const.rs
: ast::Mutability) { match self.rvalue_borrows.entry(id) { Entry::Occupied(mut entry) => { // Merge the two borrows, taking the most demanding // one, mutability-wise. if mutbl == ast::MutMutable { entry.insert(mutbl); } } Entry::Vacant(entry) => { entry.insert(mutbl); } } } fn msg(&self) -> &'static str { match self.mode { Mode::Const => "constant", Mode::StaticMut | Mode::Static => "static", Mode::Var => unreachable!(), } } fn check_static_mut_type(&self, e: &ast::Expr) { let node_ty = ty::node_id_to_type(self.tcx, e.id); let tcontents = ty::type_contents(self.tcx, node_ty); let suffix = if tcontents.has_dtor() { "destructors" } else if tcontents.owns_owned() { "owned pointers" } else { return }; self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \ to have {}", suffix)); } fn check_static_type(&self, e: &ast::Expr) { let ty = ty::node_id_to_type(self.tcx, e.id); let infcx = infer::new_infer_ctxt(self.tcx); let mut fulfill_cx = traits::FulfillmentContext::new(); let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic); fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); let env = ty::empty_parameter_environment(self.tcx); match fulfill_cx.select_all_or_error(&infcx, &env) { Ok(()) => { }, Err(ref errors) => { traits::report_fulfillment_errors(&infcx, errors); } } } } impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { fn visit_item(&mut self, i: &ast::Item) { debug!("visit_item(item={})", pprust::item_to_string(i)); match i.node { ast::ItemStatic(_, ast::MutImmutable, ref expr) => { self.check_static_type(&**expr); self.global_expr(Mode::Static, &**expr); } ast::ItemStatic(_, ast::MutMutable, ref expr) => { self.check_static_mut_type(&**expr); self.global_expr(Mode::StaticMut, &**expr); } ast::ItemConst(_, ref expr) => { self.global_expr(Mode::Const, &**expr); } ast::ItemEnum(ref enum_definition, _) => { for var in &enum_definition.variants { if let Some(ref ex) = var.node.disr_expr { self.global_expr(Mode::Const, &**ex); } } } _ => { self.with_mode(Mode::Var, |v| visit::walk_item(v, i)); } } } fn visit_fn(&mut self, fk: visit::FnKind<'v>, fd: &'v ast::FnDecl, b: &'v ast::Block, s: Span, fn_id: ast::NodeId)
fn visit_pat(&mut self, p: &ast::Pat) { match p.node { ast::PatLit(ref lit) => { self.global_expr(Mode::Const, &**lit); } ast::PatRange(ref start, ref end) => { self.global_expr(Mode::Const, &**start); self.global_expr(Mode::Const, &**end); } _ => visit::walk_pat(self, p) } } fn visit_expr(&mut self, ex: &ast::Expr) { let mut outer = self.qualif; self.qualif = PURE_CONST; let node_ty = ty::node_id_to_type(self.tcx, ex.id); check_expr(self, ex, node_ty); // Special-case some expressions to avoid certain flags bubbling up. match ex.node { ast::ExprCall(ref callee, ref args) => { for arg in args.iter() { self.visit_expr(&**arg) } let inner = self.qualif; self.visit_expr(&**callee); // The callee's size doesn't count in the call. let added = self.qualif - inner; self.qualif = inner | (added - NON_ZERO_SIZED); } ast::ExprRepeat(ref element, _) => { self.visit_expr(&**element); // The count is checked elsewhere (typeck). let count = match node_ty.sty { ty::ty_vec(_, Some(n)) => n, _ => unreachable!() }; // [element; 0] is always zero-sized. if count == 0 { self.qualif = self.qualif - (NON_ZERO_SIZED | PREFER_IN_PLACE); } } ast::ExprMatch(ref discr, ref arms, _) => { // Compute the most demanding borrow from all the arms' // patterns and set that on the discriminator. let mut borrow = None; for pat in arms.iter().flat_map(|arm| arm.pats.iter()) { let pat_borrow = self.rvalue_borrows.remove(&pat.id); match (borrow, pat_borrow) { (None, _) | (_, Some(ast::MutMutable)) => { borrow = pat_borrow; } _ => {} } } if let Some(mutbl) = borrow { self.record_borrow(discr.id, mutbl); } visit::walk_expr(self, ex); } // Division by zero and overflow checking. ast::ExprBinary(op, _, _) => { visit::walk_expr(self, ex); let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem; match node_ty.sty { ty::ty_uint(_) | ty::ty_int(_) if div_or_rem => { if!self.qualif.intersects(NOT_CONST) { match const_eval::eval_const_expr_partial(self.tcx, ex, None) { Ok(_) => {} Err(msg) => { span_err!(self.tcx.sess, ex.span, E0020, "{} in a constant expression", msg) } } } } _ => {} } } _ => visit::walk_expr(self, ex) } // Handle borrows on (or inside the autorefs of) this expression. match self.rvalue_borrows.remove(&ex.id) { Some(ast::MutImmutable) => { // Constants cannot be borrowed if they contain interior mutability as // it means that our "silent insertion of statics" could change // initializer values (very bad). // If the type doesn't have interior mutability, then `MUTABLE_MEM` has // propagated from another error, so erroring again would be just noise. let tc = ty::type_contents(self.tcx, node_ty); if self.qualif.intersects(MUTABLE_MEM) && tc.interior_unsafe() { outer = outer | NOT_CONST; if self.mode!= Mode::Var { self.tcx.sess.span_err(ex.span, "cannot borrow a constant which contains \ interior mutability, create a static instead"); } } // If the reference has to be'static, avoid in-place initialization // as that will end up pointing to the stack instead. if!self.qualif.intersects(NON_STATIC_BORROWS) { self.qualif = self.qualif - PREFER_IN_PLACE; self.add_qualif(HAS_STATIC_BORROWS); } } Some(ast::MutMutable) => { // `&mut expr` means expr could be mutated, unless it's zero-sized. 
if self.qualif.intersects(NON_ZERO_SIZED) { if self.mode == Mode::Var { outer = outer | NOT_CONST; self.add_qualif(MUTABLE_MEM); } else { span_err!(self.tcx.sess, ex.span, E0017, "references in {}s may only refer \ to immutable values", self.msg()) } } if!self.qualif.intersects(NON_STATIC_BORROWS) { self.add_qualif(HAS_STATIC_BORROWS); } } None => {} } self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif); // Don't propagate certain flags. self.qualif = outer | (self.qualif - HAS_STATIC_BORROWS); } } /// This function is used to enforce the constraints on /// const/static items. It walks through the *value* /// of the item walking down the expression and evaluating /// every nested expression. If the expression is not part /// of a const/static item, it is qualified for promotion /// instead of producing errors. fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &ast::Expr, node_ty: Ty<'tcx>) { match node_ty.sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => { v.add_qualif(NEEDS_DROP); if v.mode!= Mode::Var { v.tcx.sess.span_err(e.span, &format!("{}s are not allowed to have destructors", v.msg())); } } _ => {} } let method_call = ty::MethodCall::expr(e.id); match e.node { ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0011, "user-defined operators are not allowed in {}s", v.msg()); } } ast::ExprBox(..) | ast::ExprUnary(ast::UnUniq, _) => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0010, "allocations are not allowed in {}s", v.msg()); } } ast::ExprUnary(ast::UnDeref, ref ptr) => { match ty::node_id_to_type(v.tcx, ptr.id).sty { ty::ty_ptr(_) => { // This shouldn't be allowed in constants at all. v.add_qualif(NOT_CONST); } _ => {} } } ast::ExprCast(ref from, _) => { let toty = ty::expr_ty(v.tcx, e); let fromty = ty::expr_ty(v.tcx, &**from); let is_legal_cast = ty::type_is_numeric(toty) || ty::type_is_unsafe_ptr(toty) || (ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty)); if!is_legal_cast { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0012, "can not cast to `{}` in {}s", ppaux::ty_to_string(v.tcx, toty), v.msg()); } } if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { span_err!(v.tcx.sess, e.span, E0018, "can not cast a pointer to an integer in {}s", v.msg()); } } } ast::ExprPath(_) | ast::ExprQPath(_) => { let def = v.tcx.def_map.borrow().get(&e.id).cloned(); match def { Some(def::DefVariant(_, _, _)) => { // Count the discriminator or function pointer. v.add_qualif(NON_ZERO_SIZED); } Some(def::DefStruct(_)) => { if let ty::ty_bare_fn(..) = node_ty.sty { // Count the function pointer. v.add_qualif(NON_ZERO_SIZED); } } Some(def::DefFn(..)) | Some(def::DefStaticMethod(..)) | Some(def::DefMethod(..)) => { // Count the function pointer. 
v.add_qualif(NON_ZERO_SIZED); } Some(def::DefStatic(..)) => { match v.mode { Mode::Static | Mode::StaticMut => {} Mode::Const => { span_err!(v.tcx.sess, e.span, E0013, "constants cannot refer to other statics, \ insert an intermediate constant instead"); } Mode::Var => v.add_qualif(NOT_CONST) } } Some(def::DefConst(did)) => { if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did) { let inner = v.global_expr(Mode::Const, expr); v.add_qualif(inner); } else { v.tcx.sess.span_bug(e.span, "DefConst doesn't point \ to an ItemConst"); } } def => { v.add_qualif(NOT_CONST); if v.mode!= Mode::Var { debug!("(checking const) found bad def: {:?}", def); span_err!(v.tcx.sess, e.span, E0014, "paths in {}s may only refer to constants \ or functions", v.msg()); } } } } ast::ExprCall(ref callee, _) => {
{ assert!(self.mode == Mode::Var); self.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b)); visit::walk_fn(self, fk, fd, b, s); }
identifier_body
get_exact_record.rs
use uuid::Uuid; use rustorm::dao::{Dao, IsDao}; use rustorm::pool::ManagedPool; use rustorm::em::EntityManager; use rustorm::table::{Table, Column}; use rustorm::table::IsTable; #[derive(Debug, Clone)] pub struct Product { pub product_id: Uuid, pub name: String, pub description: Option<String>, } impl IsDao for Product{ fn from_dao(dao: &Dao) -> Self { Product { product_id: dao.get("product_id"), name: dao.get("name"), description: dao.get_opt("description"), } } fn to_dao(&self) -> Dao { let mut dao = Dao::new(); dao.set("product_id", &self.product_id); dao.set("name", &self.name); match self.description { Some(ref _value) => dao.set("description", _value), None => dao.set_null("description"), } dao } } impl IsTable for Product{ fn table() -> Table { Table { schema: "bazaar".to_string(), name: "product".to_string(), parent_table: None, sub_table: vec![], comment: None, columns: vec![ Column{ name:"product_id".to_string(), data_type:"Uuid".to_string(), db_data_type:"uuid".to_string(), is_primary:true, is_unique:false, not_null:true, is_inherited:false, default:Some("uuid_generate_v4()".to_string()), comment:None, foreign:None, }, Column{ name:"name".to_string(), data_type:"String".to_string(), db_data_type:"character varying".to_string(), is_primary:false, is_unique:false, not_null:true, is_inherited:false, default:None, comment:None, foreign:None, }, Column{ name:"description".to_string(), data_type:"String".to_string(), db_data_type:"character varying".to_string(), is_primary:false, is_unique:false, not_null:false, is_inherited:true, default:None, comment:None, foreign:None, }, ], is_view: false, } } } fn main() { let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6"; let pool = ManagedPool::init(&url, 1).unwrap(); let db = pool.connect().unwrap(); let em = EntityManager::new(db.as_ref()); let pid = Uuid::parse_str("6db712e6-cc50-4c3a-8269-451c98ace5ad").unwrap(); let prod: Product = em.get_exact(&pid).unwrap(); println!("{} {} {:?}", prod.product_id, prod.name, prod.description); //pool.release(db); }
extern crate rustorm; extern crate uuid; extern crate chrono; extern crate rustc_serialize;
random_line_split
get_exact_record.rs
extern crate rustorm; extern crate uuid; extern crate chrono; extern crate rustc_serialize; use uuid::Uuid; use rustorm::dao::{Dao, IsDao}; use rustorm::pool::ManagedPool; use rustorm::em::EntityManager; use rustorm::table::{Table, Column}; use rustorm::table::IsTable; #[derive(Debug, Clone)] pub struct Product { pub product_id: Uuid, pub name: String, pub description: Option<String>, } impl IsDao for Product{ fn from_dao(dao: &Dao) -> Self { Product { product_id: dao.get("product_id"), name: dao.get("name"), description: dao.get_opt("description"), } } fn to_dao(&self) -> Dao { let mut dao = Dao::new(); dao.set("product_id", &self.product_id); dao.set("name", &self.name); match self.description { Some(ref _value) => dao.set("description", _value), None => dao.set_null("description"), } dao } } impl IsTable for Product{ fn table() -> Table
data_type:"String".to_string(), db_data_type:"character varying".to_string(), is_primary:false, is_unique:false, not_null:true, is_inherited:false, default:None, comment:None, foreign:None, }, Column{ name:"description".to_string(), data_type:"String".to_string(), db_data_type:"character varying".to_string(), is_primary:false, is_unique:false, not_null:false, is_inherited:true, default:None, comment:None, foreign:None, }, ], is_view: false, } } } fn main() { let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6"; let pool = ManagedPool::init(&url, 1).unwrap(); let db = pool.connect().unwrap(); let em = EntityManager::new(db.as_ref()); let pid = Uuid::parse_str("6db712e6-cc50-4c3a-8269-451c98ace5ad").unwrap(); let prod: Product = em.get_exact(&pid).unwrap(); println!("{} {} {:?}", prod.product_id, prod.name, prod.description); //pool.release(db); }
{ Table { schema: "bazaar".to_string(), name: "product".to_string(), parent_table: None, sub_table: vec![], comment: None, columns: vec![ Column{ name:"product_id".to_string(), data_type:"Uuid".to_string(), db_data_type:"uuid".to_string(), is_primary:true, is_unique:false, not_null:true, is_inherited:false, default:Some("uuid_generate_v4()".to_string()), comment:None, foreign:None, }, Column{ name:"name".to_string(),
identifier_body
get_exact_record.rs
extern crate rustorm; extern crate uuid; extern crate chrono; extern crate rustc_serialize; use uuid::Uuid; use rustorm::dao::{Dao, IsDao}; use rustorm::pool::ManagedPool; use rustorm::em::EntityManager; use rustorm::table::{Table, Column}; use rustorm::table::IsTable; #[derive(Debug, Clone)] pub struct Product { pub product_id: Uuid, pub name: String, pub description: Option<String>, } impl IsDao for Product{ fn from_dao(dao: &Dao) -> Self { Product { product_id: dao.get("product_id"), name: dao.get("name"), description: dao.get_opt("description"), } } fn to_dao(&self) -> Dao { let mut dao = Dao::new(); dao.set("product_id", &self.product_id); dao.set("name", &self.name); match self.description { Some(ref _value) => dao.set("description", _value), None => dao.set_null("description"), } dao } } impl IsTable for Product{ fn table() -> Table { Table { schema: "bazaar".to_string(), name: "product".to_string(), parent_table: None, sub_table: vec![], comment: None, columns: vec![ Column{ name:"product_id".to_string(), data_type:"Uuid".to_string(), db_data_type:"uuid".to_string(), is_primary:true, is_unique:false, not_null:true, is_inherited:false, default:Some("uuid_generate_v4()".to_string()), comment:None, foreign:None, }, Column{ name:"name".to_string(), data_type:"String".to_string(), db_data_type:"character varying".to_string(), is_primary:false, is_unique:false, not_null:true, is_inherited:false, default:None, comment:None, foreign:None, }, Column{ name:"description".to_string(), data_type:"String".to_string(), db_data_type:"character varying".to_string(), is_primary:false, is_unique:false, not_null:false, is_inherited:true, default:None, comment:None, foreign:None, }, ], is_view: false, } } } fn
() { let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6"; let pool = ManagedPool::init(&url, 1).unwrap(); let db = pool.connect().unwrap(); let em = EntityManager::new(db.as_ref()); let pid = Uuid::parse_str("6db712e6-cc50-4c3a-8269-451c98ace5ad").unwrap(); let prod: Product = em.get_exact(&pid).unwrap(); println!("{} {} {:?}", prod.product_id, prod.name, prod.description); //pool.release(db); }
main
identifier_name
statics-and-consts.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-tidy-linelength // compile-flags:-Zprint-mono-items=eager #![deny(dead_code)] #![feature(start)] static STATIC1: i64 = { const STATIC1_CONST1: i64 = 2; 1 + CONST1 as i64 + STATIC1_CONST1 }; const CONST1: i64 = { const CONST1_1: i64 = { const CONST1_1_1: i64 = 2; CONST1_1_1 + 1 }; 1 + CONST1_1 as i64 }; fn foo() { let _ = { const CONST2: i64 = 0; static STATIC2: i64 = CONST2; let x = { const CONST2: i64 = 1; static STATIC2: i64 = CONST2; STATIC2 }; x + STATIC2 }; let _ = { const CONST2: i64 = 0; static STATIC2: i64 = CONST2; STATIC2 }; } //~ MONO_ITEM fn statics_and_consts::start[0] #[start] fn start(_: isize, _: *const *const u8) -> isize
//~ MONO_ITEM static statics_and_consts::STATIC1[0] //~ MONO_ITEM fn statics_and_consts::foo[0] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[0] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[1] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[2]
{ foo(); let _ = STATIC1; 0 }
identifier_body
statics-and-consts.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-tidy-linelength // compile-flags:-Zprint-mono-items=eager #![deny(dead_code)] #![feature(start)] static STATIC1: i64 = { const STATIC1_CONST1: i64 = 2; 1 + CONST1 as i64 + STATIC1_CONST1 }; const CONST1: i64 = { const CONST1_1: i64 = { const CONST1_1_1: i64 = 2; CONST1_1_1 + 1 }; 1 + CONST1_1 as i64 }; fn foo() { let _ = { const CONST2: i64 = 0; static STATIC2: i64 = CONST2; let x = { const CONST2: i64 = 1; static STATIC2: i64 = CONST2; STATIC2 }; x + STATIC2 }; let _ = { const CONST2: i64 = 0; static STATIC2: i64 = CONST2; STATIC2 }; } //~ MONO_ITEM fn statics_and_consts::start[0] #[start] fn
(_: isize, _: *const *const u8) -> isize { foo(); let _ = STATIC1; 0 } //~ MONO_ITEM static statics_and_consts::STATIC1[0] //~ MONO_ITEM fn statics_and_consts::foo[0] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[0] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[1] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[2]
start
identifier_name
statics-and-consts.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms.
static STATIC1: i64 = { const STATIC1_CONST1: i64 = 2; 1 + CONST1 as i64 + STATIC1_CONST1 }; const CONST1: i64 = { const CONST1_1: i64 = { const CONST1_1_1: i64 = 2; CONST1_1_1 + 1 }; 1 + CONST1_1 as i64 }; fn foo() { let _ = { const CONST2: i64 = 0; static STATIC2: i64 = CONST2; let x = { const CONST2: i64 = 1; static STATIC2: i64 = CONST2; STATIC2 }; x + STATIC2 }; let _ = { const CONST2: i64 = 0; static STATIC2: i64 = CONST2; STATIC2 }; } //~ MONO_ITEM fn statics_and_consts::start[0] #[start] fn start(_: isize, _: *const *const u8) -> isize { foo(); let _ = STATIC1; 0 } //~ MONO_ITEM static statics_and_consts::STATIC1[0] //~ MONO_ITEM fn statics_and_consts::foo[0] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[0] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[1] //~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[2]
// ignore-tidy-linelength // compile-flags:-Zprint-mono-items=eager #![deny(dead_code)] #![feature(start)]
random_line_split
issue-23442.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-pass #![allow(dead_code)] use std::marker::PhantomData; pub struct UnionedKeys<'a,K> where K: UnifyKey + 'a { table: &'a mut UnificationTable<K>, root_key: K, stack: Vec<K>, } pub trait UnifyKey { type Value; } pub struct
<K:UnifyKey> { values: Delegate<K>, } pub struct Delegate<K>(PhantomData<K>); fn main() {}
UnificationTable
identifier_name
issue-23442.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-pass #![allow(dead_code)] use std::marker::PhantomData; pub struct UnionedKeys<'a,K> where K: UnifyKey + 'a { table: &'a mut UnificationTable<K>, root_key: K, stack: Vec<K>, } pub trait UnifyKey { type Value; } pub struct UnificationTable<K:UnifyKey> { values: Delegate<K>, } pub struct Delegate<K>(PhantomData<K>); fn main() {}
// file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.
random_line_split
issue-23442.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-pass #![allow(dead_code)] use std::marker::PhantomData; pub struct UnionedKeys<'a,K> where K: UnifyKey + 'a { table: &'a mut UnificationTable<K>, root_key: K, stack: Vec<K>, } pub trait UnifyKey { type Value; } pub struct UnificationTable<K:UnifyKey> { values: Delegate<K>, } pub struct Delegate<K>(PhantomData<K>); fn main()
{}
identifier_body
robots_txt.rs
use crate::model::Path; use crate::model::RequestRate; use crate::model::RobotsTxt; use crate::service::RobotsTxtService; use std::time::Duration; use url::Url; impl RobotsTxtService for RobotsTxt { fn can_fetch(&self, user_agent: &str, url: &Url) -> bool { if url.origin() != *self.get_origin() { return false; } let path = Path::from_url(url); let rule_decision = self.find_in_group(user_agent, |group| { let rules = group.get_rules_sorted_by_path_len_desc(); for rule in rules.iter() { if rule.applies_to(&path) { return Some(rule.get_allowance()); } } None }); if let Some(rule_decision) = rule_decision { return rule_decision; } // Empty robots.txt allows crawling. Everything that was not denied must be allowed. true } fn get_crawl_delay(&self, user_agent: &str) -> Option<Duration> { self.find_in_group(user_agent, |group| group.get_crawl_delay()) } fn normalize_url(&self, url: &mut Url) -> bool
fn normalize_url_ignore_origin(&self, url: &mut Url) { if url.query().is_none() { return; } let mut query_params_to_filter = Vec::new(); let path = Path::from_url(url); for clean_params in self.get_clean_params().iter() { if clean_params.get_path_pattern().applies_to(&path) { query_params_to_filter.extend_from_slice(clean_params.get_params()) } } let mut pairs: Vec<(String, String)> = url .query_pairs() .map(|(key, value)| (key.into(), value.into())) .collect(); { let mut query_pairs_mut = url.query_pairs_mut(); query_pairs_mut.clear(); for (key, value) in pairs.drain(..) { if !query_params_to_filter.contains(&key) { query_pairs_mut.append_pair(&key, &value); } } } if url.query() == Some("") { url.set_query(None); } } fn get_sitemaps(&self) -> &[Url] { self.get_sitemaps_slice() } fn get_req_rate(&self, user_agent: &str) -> Option<RequestRate> { self.find_in_group(user_agent, |group| group.get_req_rate()) } }
{ if url.origin() != *self.get_origin() { return false; } self.normalize_url_ignore_origin(url); true }
identifier_body
robots_txt.rs
use crate::model::Path; use crate::model::RequestRate; use crate::model::RobotsTxt; use crate::service::RobotsTxtService; use std::time::Duration; use url::Url; impl RobotsTxtService for RobotsTxt { fn can_fetch(&self, user_agent: &str, url: &Url) -> bool { if url.origin() != *self.get_origin() { return false; } let path = Path::from_url(url); let rule_decision = self.find_in_group(user_agent, |group| { let rules = group.get_rules_sorted_by_path_len_desc(); for rule in rules.iter() { if rule.applies_to(&path) { return Some(rule.get_allowance()); } } None }); if let Some(rule_decision) = rule_decision { return rule_decision; } // Empty robots.txt allows crawling. Everything that was not denied must be allowed. true } fn get_crawl_delay(&self, user_agent: &str) -> Option<Duration> { self.find_in_group(user_agent, |group| group.get_crawl_delay()) } fn normalize_url(&self, url: &mut Url) -> bool { if url.origin() != *self.get_origin() { return false; } self.normalize_url_ignore_origin(url); true } fn
(&self, url: &mut Url) { if url.query().is_none() { return; } let mut query_params_to_filter = Vec::new(); let path = Path::from_url(url); for clean_params in self.get_clean_params().iter() { if clean_params.get_path_pattern().applies_to(&path) { query_params_to_filter.extend_from_slice(clean_params.get_params()) } } let mut pairs: Vec<(String, String)> = url .query_pairs() .map(|(key, value)| (key.into(), value.into())) .collect(); { let mut query_pairs_mut = url.query_pairs_mut(); query_pairs_mut.clear(); for (key, value) in pairs.drain(..) { if !query_params_to_filter.contains(&key) { query_pairs_mut.append_pair(&key, &value); } } } if url.query() == Some("") { url.set_query(None); } } fn get_sitemaps(&self) -> &[Url] { self.get_sitemaps_slice() } fn get_req_rate(&self, user_agent: &str) -> Option<RequestRate> { self.find_in_group(user_agent, |group| group.get_req_rate()) } }
normalize_url_ignore_origin
identifier_name
robots_txt.rs
use crate::model::Path; use crate::model::RequestRate; use crate::model::RobotsTxt; use crate::service::RobotsTxtService; use std::time::Duration; use url::Url; impl RobotsTxtService for RobotsTxt { fn can_fetch(&self, user_agent: &str, url: &Url) -> bool { if url.origin() != *self.get_origin() { return false; } let path = Path::from_url(url); let rule_decision = self.find_in_group(user_agent, |group| { let rules = group.get_rules_sorted_by_path_len_desc(); for rule in rules.iter() { if rule.applies_to(&path) { return Some(rule.get_allowance()); } } None }); if let Some(rule_decision) = rule_decision { return rule_decision; } // Empty robots.txt allows crawling. Everything that was not denied must be allowed. true } fn get_crawl_delay(&self, user_agent: &str) -> Option<Duration> { self.find_in_group(user_agent, |group| group.get_crawl_delay()) } fn normalize_url(&self, url: &mut Url) -> bool { if url.origin() != *self.get_origin() { return false; } self.normalize_url_ignore_origin(url); true } fn normalize_url_ignore_origin(&self, url: &mut Url) { if url.query().is_none() { return; } let mut query_params_to_filter = Vec::new(); let path = Path::from_url(url); for clean_params in self.get_clean_params().iter() { if clean_params.get_path_pattern().applies_to(&path)
} let mut pairs: Vec<(String, String)> = url .query_pairs() .map(|(key, value)| (key.into(), value.into())) .collect(); { let mut query_pairs_mut = url.query_pairs_mut(); query_pairs_mut.clear(); for (key, value) in pairs.drain(..) { if !query_params_to_filter.contains(&key) { query_pairs_mut.append_pair(&key, &value); } } } if url.query() == Some("") { url.set_query(None); } } fn get_sitemaps(&self) -> &[Url] { self.get_sitemaps_slice() } fn get_req_rate(&self, user_agent: &str) -> Option<RequestRate> { self.find_in_group(user_agent, |group| group.get_req_rate()) } }
{ query_params_to_filter.extend_from_slice(clean_params.get_params()) }
conditional_block
robots_txt.rs
use crate::model::Path; use crate::model::RequestRate; use crate::model::RobotsTxt; use crate::service::RobotsTxtService; use std::time::Duration; use url::Url; impl RobotsTxtService for RobotsTxt { fn can_fetch(&self, user_agent: &str, url: &Url) -> bool { if url.origin() != *self.get_origin() { return false; } let path = Path::from_url(url); let rule_decision = self.find_in_group(user_agent, |group| { let rules = group.get_rules_sorted_by_path_len_desc(); for rule in rules.iter() { if rule.applies_to(&path) {
} None }); if let Some(rule_decision) = rule_decision { return rule_decision; } // Empty robots.txt allows crawling. Everything that was not denied must be allowed. true } fn get_crawl_delay(&self, user_agent: &str) -> Option<Duration> { self.find_in_group(user_agent, |group| group.get_crawl_delay()) } fn normalize_url(&self, url: &mut Url) -> bool { if url.origin() != *self.get_origin() { return false; } self.normalize_url_ignore_origin(url); true } fn normalize_url_ignore_origin(&self, url: &mut Url) { if url.query().is_none() { return; } let mut query_params_to_filter = Vec::new(); let path = Path::from_url(url); for clean_params in self.get_clean_params().iter() { if clean_params.get_path_pattern().applies_to(&path) { query_params_to_filter.extend_from_slice(clean_params.get_params()) } } let mut pairs: Vec<(String, String)> = url .query_pairs() .map(|(key, value)| (key.into(), value.into())) .collect(); { let mut query_pairs_mut = url.query_pairs_mut(); query_pairs_mut.clear(); for (key, value) in pairs.drain(..) { if !query_params_to_filter.contains(&key) { query_pairs_mut.append_pair(&key, &value); } } } if url.query() == Some("") { url.set_query(None); } } fn get_sitemaps(&self) -> &[Url] { self.get_sitemaps_slice() } fn get_req_rate(&self, user_agent: &str) -> Option<RequestRate> { self.find_in_group(user_agent, |group| group.get_req_rate()) } }
return Some(rule.get_allowance()); }
random_line_split
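Note: the robots_txt.rs rows above show the RobotsTxtService implementation but no caller. The following is a minimal caller sketch, not part of the dataset; it uses only the trait methods shown in the rows, and the generic helper name, the "examplebot" user-agent string, and the one-second fallback delay are illustrative assumptions.

use std::time::Duration;
use url::Url;

// Caller-side sketch; `RobotsTxtService` is assumed to be imported from the
// crate's service module referenced in the rows above.
fn crawl_decision<T: RobotsTxtService>(robots: &T, raw: &str) -> Option<Url> {
    // Reject malformed URLs up front.
    let mut url = Url::parse(raw).ok()?;
    // Honor the Allow/Disallow rules for this user agent.
    if !robots.can_fetch("examplebot", &url) {
        return None;
    }
    // Drop Clean-param query parameters before the URL is queued.
    robots.normalize_url(&mut url);
    // Fall back to a one-second politeness delay when none is declared (assumed default).
    let _delay = robots
        .get_crawl_delay("examplebot")
        .unwrap_or(Duration::from_secs(1));
    Some(url)
}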
hygiene_example_codegen.rs
// force-host // no-prefer-dynamic #![feature(proc_macro_quote)] #![crate_type = "proc-macro"] extern crate proc_macro as proc_macro_renamed; // This does not break `quote!` use proc_macro_renamed::{TokenStream, quote}; #[proc_macro] pub fn
(input: TokenStream) -> TokenStream { quote!(hello_helper!($input)) //^ `hello_helper!` always resolves to the following proc macro, //| no matter where `hello!` is used. } #[proc_macro] pub fn hello_helper(input: TokenStream) -> TokenStream { quote! { extern crate hygiene_example; // This is never a conflict error let string = format!("hello {}", $input); //^ `format!` always resolves to the prelude macro, //| even if a different `format!` is in scope where `hello!` is used. hygiene_example::print(&string) } }
hello
identifier_name
hygiene_example_codegen.rs
// force-host
#![feature(proc_macro_quote)] #![crate_type = "proc-macro"] extern crate proc_macro as proc_macro_renamed; // This does not break `quote!` use proc_macro_renamed::{TokenStream, quote}; #[proc_macro] pub fn hello(input: TokenStream) -> TokenStream { quote!(hello_helper!($input)) //^ `hello_helper!` always resolves to the following proc macro, //| no matter where `hello!` is used. } #[proc_macro] pub fn hello_helper(input: TokenStream) -> TokenStream { quote! { extern crate hygiene_example; // This is never a conflict error let string = format!("hello {}", $input); //^ `format!` always resolves to the prelude macro, //| even if a different `format!` is in scope where `hello!` is used. hygiene_example::print(&string) } }
// no-prefer-dynamic
random_line_split
hygiene_example_codegen.rs
// force-host // no-prefer-dynamic #![feature(proc_macro_quote)] #![crate_type = "proc-macro"] extern crate proc_macro as proc_macro_renamed; // This does not break `quote!` use proc_macro_renamed::{TokenStream, quote}; #[proc_macro] pub fn hello(input: TokenStream) -> TokenStream { quote!(hello_helper!($input)) //^ `hello_helper!` always resolves to the following proc macro, //| no matter where `hello!` is used. } #[proc_macro] pub fn hello_helper(input: TokenStream) -> TokenStream
{ quote! { extern crate hygiene_example; // This is never a conflict error let string = format!("hello {}", $input); //^ `format!` always resolves to the prelude macro, //| even if a different `format!` is in scope where `hello!` is used. hygiene_example::print(&string) } }
identifier_body
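Note: the hygiene_example_codegen.rs rows define the hello! and hello_helper! proc macros but include no call site. The sketch below is not part of the dataset; it assumes a client crate that depends on hygiene_example_codegen and on the hygiene_example helper crate referenced by the generated code, with the Cargo wiring left out.

use hygiene_example_codegen::hello;

fn main() {
    // `hello!("world")` expands to `hello_helper!("world")`, which emits code that
    // calls `hygiene_example::print(&format!("hello {}", "world"))`, i.e. it prints
    // "hello world" regardless of what `format!` or `print` mean at this call site.
    hello!("world");
}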
req_handler_unary.rs
use crate::error; use crate::result; use crate::server::req_handler::ServerRequestStreamHandler; use crate::server::req_handler::ServerRequestUnaryHandler; use httpbis::IncreaseInWindow; use std::marker; pub(crate) struct RequestHandlerUnaryToStream<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { pub(crate) increase_in_window: IncreaseInWindow, pub(crate) handler: H, pub(crate) message: Option<M>, pub(crate) _marker: marker::PhantomData<M>, } impl<M, H> ServerRequestStreamHandler<M> for RequestHandlerUnaryToStream<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { fn grpc_message(&mut self, message: M, frame_size: usize) -> result::Result<()> { self.increase_in_window.data_frame_processed(frame_size); self.increase_in_window.increase_window_auto()?; if let Some(_) = self.message { return Err(error::Error::Other("more than one message in a stream")); } self.message = Some(message); Ok(()) } fn end_stream(&mut self) -> result::Result<()> { match self.message.take() { Some(message) => self.handler.grpc_message(message), None => Err(error::Error::Other("no message, end of stream")), } } fn buffer_processed(&mut self, buffered: usize) -> result::Result<()>
}
{ // TODO: overflow check self.increase_in_window .increase_window_auto_above(buffered as u32)?; Ok(()) }
identifier_body
req_handler_unary.rs
use crate::error; use crate::result; use crate::server::req_handler::ServerRequestStreamHandler; use crate::server::req_handler::ServerRequestUnaryHandler; use httpbis::IncreaseInWindow; use std::marker; pub(crate) struct RequestHandlerUnaryToStream<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { pub(crate) increase_in_window: IncreaseInWindow, pub(crate) handler: H, pub(crate) message: Option<M>, pub(crate) _marker: marker::PhantomData<M>, } impl<M, H> ServerRequestStreamHandler<M> for RequestHandlerUnaryToStream<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { fn grpc_message(&mut self, message: M, frame_size: usize) -> result::Result<()> { self.increase_in_window.data_frame_processed(frame_size); self.increase_in_window.increase_window_auto()?; if let Some(_) = self.message
self.message = Some(message); Ok(()) } fn end_stream(&mut self) -> result::Result<()> { match self.message.take() { Some(message) => self.handler.grpc_message(message), None => Err(error::Error::Other("no message, end of stream")), } } fn buffer_processed(&mut self, buffered: usize) -> result::Result<()> { // TODO: overflow check self.increase_in_window .increase_window_auto_above(buffered as u32)?; Ok(()) } }
{ return Err(error::Error::Other("more than one message in a stream")); }
conditional_block
req_handler_unary.rs
use crate::error; use crate::result; use crate::server::req_handler::ServerRequestStreamHandler; use crate::server::req_handler::ServerRequestUnaryHandler; use httpbis::IncreaseInWindow; use std::marker; pub(crate) struct
<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { pub(crate) increase_in_window: IncreaseInWindow, pub(crate) handler: H, pub(crate) message: Option<M>, pub(crate) _marker: marker::PhantomData<M>, } impl<M, H> ServerRequestStreamHandler<M> for RequestHandlerUnaryToStream<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { fn grpc_message(&mut self, message: M, frame_size: usize) -> result::Result<()> { self.increase_in_window.data_frame_processed(frame_size); self.increase_in_window.increase_window_auto()?; if let Some(_) = self.message { return Err(error::Error::Other("more than one message in a stream")); } self.message = Some(message); Ok(()) } fn end_stream(&mut self) -> result::Result<()> { match self.message.take() { Some(message) => self.handler.grpc_message(message), None => Err(error::Error::Other("no message, end of stream")), } } fn buffer_processed(&mut self, buffered: usize) -> result::Result<()> { // TODO: overflow check self.increase_in_window .increase_window_auto_above(buffered as u32)?; Ok(()) } }
RequestHandlerUnaryToStream
identifier_name
req_handler_unary.rs
use crate::error; use crate::result; use crate::server::req_handler::ServerRequestStreamHandler; use crate::server::req_handler::ServerRequestUnaryHandler; use httpbis::IncreaseInWindow; use std::marker; pub(crate) struct RequestHandlerUnaryToStream<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { pub(crate) increase_in_window: IncreaseInWindow, pub(crate) handler: H, pub(crate) message: Option<M>, pub(crate) _marker: marker::PhantomData<M>, } impl<M, H> ServerRequestStreamHandler<M> for RequestHandlerUnaryToStream<M, H> where H: ServerRequestUnaryHandler<M>, M: Send + 'static, { fn grpc_message(&mut self, message: M, frame_size: usize) -> result::Result<()> { self.increase_in_window.data_frame_processed(frame_size); self.increase_in_window.increase_window_auto()?; if let Some(_) = self.message { return Err(error::Error::Other("more than one message in a stream")); } self.message = Some(message); Ok(()) } fn end_stream(&mut self) -> result::Result<()> { match self.message.take() {
fn buffer_processed(&mut self, buffered: usize) -> result::Result<()> { // TODO: overflow check self.increase_in_window .increase_window_auto_above(buffered as u32)?; Ok(()) } }
Some(message) => self.handler.grpc_message(message), None => Err(error::Error::Other("no message, end of stream")), } }
random_line_split
storage.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::StorageBinding; use dom::bindings::codegen::Bindings::StorageBinding::StorageMethods; use dom::bindings::global::{GlobalRef, GlobalField}; use dom::bindings::js::{JSRef, Temporary, Rootable, RootedReference}; use dom::bindings::refcounted::Trusted; use dom::bindings::utils::{Reflector, reflect_dom_object}; use dom::bindings::codegen::InheritTypes::{EventCast, EventTargetCast}; use dom::event::{Event, EventHelpers, EventBubbles, EventCancelable}; use dom::eventtarget::{EventTarget}; use dom::storageevent::StorageEvent; use dom::urlhelper::UrlHelper; use dom::window::WindowHelpers; use util::str::DOMString; use page::IterablePage; use net_traits::storage_task::{StorageTask, StorageTaskMsg, StorageType}; use std::sync::mpsc::channel; use url::Url; use script_task::{ScriptTask, ScriptMsg, MainThreadRunnable}; use collections::borrow::ToOwned; #[dom_struct] pub struct Storage { reflector_: Reflector, global: GlobalField, storage_type: StorageType } impl Storage { fn new_inherited(global: &GlobalRef, storage_type: StorageType) -> Storage { Storage { reflector_: Reflector::new(), global: GlobalField::from_rooted(global), storage_type: storage_type } } pub fn new(global: &GlobalRef, storage_type: StorageType) -> Temporary<Storage> { reflect_dom_object(box Storage::new_inherited(global, storage_type), *global, StorageBinding::Wrap) } fn get_url(&self) -> Url { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.get_url() } fn get_storage_task(&self) -> StorageTask { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.as_window().storage_task() } } impl<'a> StorageMethods for JSRef<'a, Storage> { fn Length(self) -> u32 { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Length(sender, self.get_url(), self.storage_type)).unwrap(); receiver.recv().unwrap() as u32 } fn Key(self, index: u32) -> Option<DOMString> { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Key(sender, self.get_url(), self.storage_type, index)).unwrap(); receiver.recv().unwrap() } fn GetItem(self, name: DOMString) -> Option<DOMString> { let (sender, receiver) = channel(); let msg = StorageTaskMsg::GetItem(sender, self.get_url(), self.storage_type, name); self.get_storage_task().send(msg).unwrap(); receiver.recv().unwrap() } fn NamedGetter(self, name: DOMString, found: &mut bool) -> Option<DOMString> { let item = self.GetItem(name); *found = item.is_some(); item } fn SetItem(self, name: DOMString, value: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::SetItem(sender, self.get_url(), self.storage_type, name.clone(), value.clone()); self.get_storage_task().send(msg).unwrap(); let (changed, old_value) = receiver.recv().unwrap(); if changed { self.broadcast_change_notification(Some(name), old_value, Some(value)); } } fn NamedSetter(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn NamedCreator(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn RemoveItem(self, name: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::RemoveItem(sender, self.get_url(), self.storage_type, name.clone()); self.get_storage_task().send(msg).unwrap(); if let Some(old_value) = receiver.recv().unwrap() { 
self.broadcast_change_notification(Some(name), Some(old_value), None); } } fn NamedDeleter(self, name: DOMString) { self.RemoveItem(name); } fn Clear(self) { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Clear(sender, self.get_url(), self.storage_type)).unwrap(); if receiver.recv().unwrap() { self.broadcast_change_notification(None, None, None); } } } trait PrivateStorageHelpers { fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>); } impl<'a> PrivateStorageHelpers for JSRef<'a, Storage> { /// https://html.spec.whatwg.org/multipage/#send-a-storage-notification fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>){ let global_root = self.global.root(); let global_ref = global_root.r(); let script_chan = global_ref.script_chan(); let trusted_storage = Trusted::new(global_ref.get_cx(), self, script_chan.clone()); script_chan.send(ScriptMsg::MainThreadRunnableMsg( box StorageEventRunnable::new(trusted_storage, key, old_value, new_value))).unwrap(); } } pub struct StorageEventRunnable { element: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString> }
new_value: Option<DOMString>) -> StorageEventRunnable { StorageEventRunnable { element: storage, key: key, old_value: old_value, new_value: new_value } } } impl MainThreadRunnable for StorageEventRunnable { fn handler(self: Box<StorageEventRunnable>, script_task: &ScriptTask) { let this = *self; let storage_root = this.element.to_temporary().root(); let storage = storage_root.r(); let global_root = storage.global.root(); let global_ref = global_root.r(); let ev_window = global_ref.as_window(); let ev_url = storage.get_url(); let storage_event = StorageEvent::new( global_ref, "storage".to_owned(), EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, this.key, this.old_value, this.new_value, ev_url.to_string(), Some(storage) ).root(); let event: JSRef<Event> = EventCast::from_ref(storage_event.r()); let root_page = script_task.root_page(); for it_page in root_page.iter() { let it_window_root = it_page.window().root(); let it_window = it_window_root.r(); assert!(UrlHelper::SameOrigin(&ev_url, &it_window.get_url())); // TODO: Such a Document object is not necessarily fully active, but events fired on such // objects are ignored by the event loop until the Document becomes fully active again. if ev_window.pipeline()!= it_window.pipeline() { let target: JSRef<EventTarget> = EventTargetCast::from_ref(it_window); event.fire(target); } } } }
impl StorageEventRunnable { fn new(storage: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>,
random_line_split
storage.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::StorageBinding; use dom::bindings::codegen::Bindings::StorageBinding::StorageMethods; use dom::bindings::global::{GlobalRef, GlobalField}; use dom::bindings::js::{JSRef, Temporary, Rootable, RootedReference}; use dom::bindings::refcounted::Trusted; use dom::bindings::utils::{Reflector, reflect_dom_object}; use dom::bindings::codegen::InheritTypes::{EventCast, EventTargetCast}; use dom::event::{Event, EventHelpers, EventBubbles, EventCancelable}; use dom::eventtarget::{EventTarget}; use dom::storageevent::StorageEvent; use dom::urlhelper::UrlHelper; use dom::window::WindowHelpers; use util::str::DOMString; use page::IterablePage; use net_traits::storage_task::{StorageTask, StorageTaskMsg, StorageType}; use std::sync::mpsc::channel; use url::Url; use script_task::{ScriptTask, ScriptMsg, MainThreadRunnable}; use collections::borrow::ToOwned; #[dom_struct] pub struct Storage { reflector_: Reflector, global: GlobalField, storage_type: StorageType } impl Storage { fn new_inherited(global: &GlobalRef, storage_type: StorageType) -> Storage { Storage { reflector_: Reflector::new(), global: GlobalField::from_rooted(global), storage_type: storage_type } } pub fn new(global: &GlobalRef, storage_type: StorageType) -> Temporary<Storage> { reflect_dom_object(box Storage::new_inherited(global, storage_type), *global, StorageBinding::Wrap) } fn get_url(&self) -> Url { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.get_url() } fn get_storage_task(&self) -> StorageTask { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.as_window().storage_task() } } impl<'a> StorageMethods for JSRef<'a, Storage> { fn Length(self) -> u32
fn Key(self, index: u32) -> Option<DOMString> { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Key(sender, self.get_url(), self.storage_type, index)).unwrap(); receiver.recv().unwrap() } fn GetItem(self, name: DOMString) -> Option<DOMString> { let (sender, receiver) = channel(); let msg = StorageTaskMsg::GetItem(sender, self.get_url(), self.storage_type, name); self.get_storage_task().send(msg).unwrap(); receiver.recv().unwrap() } fn NamedGetter(self, name: DOMString, found: &mut bool) -> Option<DOMString> { let item = self.GetItem(name); *found = item.is_some(); item } fn SetItem(self, name: DOMString, value: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::SetItem(sender, self.get_url(), self.storage_type, name.clone(), value.clone()); self.get_storage_task().send(msg).unwrap(); let (changed, old_value) = receiver.recv().unwrap(); if changed { self.broadcast_change_notification(Some(name), old_value, Some(value)); } } fn NamedSetter(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn NamedCreator(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn RemoveItem(self, name: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::RemoveItem(sender, self.get_url(), self.storage_type, name.clone()); self.get_storage_task().send(msg).unwrap(); if let Some(old_value) = receiver.recv().unwrap() { self.broadcast_change_notification(Some(name), Some(old_value), None); } } fn NamedDeleter(self, name: DOMString) { self.RemoveItem(name); } fn Clear(self) { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Clear(sender, self.get_url(), self.storage_type)).unwrap(); if receiver.recv().unwrap() { self.broadcast_change_notification(None, None, None); } } } trait PrivateStorageHelpers { fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>); } impl<'a> PrivateStorageHelpers for JSRef<'a, Storage> { /// https://html.spec.whatwg.org/multipage/#send-a-storage-notification fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>){ let global_root = self.global.root(); let global_ref = global_root.r(); let script_chan = global_ref.script_chan(); let trusted_storage = Trusted::new(global_ref.get_cx(), self, script_chan.clone()); script_chan.send(ScriptMsg::MainThreadRunnableMsg( box StorageEventRunnable::new(trusted_storage, key, old_value, new_value))).unwrap(); } } pub struct StorageEventRunnable { element: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString> } impl StorageEventRunnable { fn new(storage: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>) -> StorageEventRunnable { StorageEventRunnable { element: storage, key: key, old_value: old_value, new_value: new_value } } } impl MainThreadRunnable for StorageEventRunnable { fn handler(self: Box<StorageEventRunnable>, script_task: &ScriptTask) { let this = *self; let storage_root = this.element.to_temporary().root(); let storage = storage_root.r(); let global_root = storage.global.root(); let global_ref = global_root.r(); let ev_window = global_ref.as_window(); let ev_url = storage.get_url(); let storage_event = StorageEvent::new( global_ref, "storage".to_owned(), EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, this.key, this.old_value, this.new_value, 
ev_url.to_string(), Some(storage) ).root(); let event: JSRef<Event> = EventCast::from_ref(storage_event.r()); let root_page = script_task.root_page(); for it_page in root_page.iter() { let it_window_root = it_page.window().root(); let it_window = it_window_root.r(); assert!(UrlHelper::SameOrigin(&ev_url, &it_window.get_url())); // TODO: Such a Document object is not necessarily fully active, but events fired on such // objects are ignored by the event loop until the Document becomes fully active again. if ev_window.pipeline()!= it_window.pipeline() { let target: JSRef<EventTarget> = EventTargetCast::from_ref(it_window); event.fire(target); } } } }
{ let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Length(sender, self.get_url(), self.storage_type)).unwrap(); receiver.recv().unwrap() as u32 }
identifier_body
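Each record in this dump pairs a split label such as identifier_body or random_line_split with the code on either side of the removed span, and reassembly is plain concatenation. A minimal sketch of that reassembly follows; the field names prefix, middle, and suffix are assumptions about the dump's schema, not something the records themselves spell out.

/// Illustrative sketch only: rebuild the original source from one
/// fill-in-the-middle record. Field names are assumed.
fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    // The split label (identifier_body, conditional_block, ...) only
    // records how the cut point was chosen; the original file is
    // always prefix + middle + suffix.
    format!("{}{}{}", prefix, middle, suffix)
}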
storage.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::StorageBinding; use dom::bindings::codegen::Bindings::StorageBinding::StorageMethods; use dom::bindings::global::{GlobalRef, GlobalField}; use dom::bindings::js::{JSRef, Temporary, Rootable, RootedReference}; use dom::bindings::refcounted::Trusted; use dom::bindings::utils::{Reflector, reflect_dom_object}; use dom::bindings::codegen::InheritTypes::{EventCast, EventTargetCast}; use dom::event::{Event, EventHelpers, EventBubbles, EventCancelable}; use dom::eventtarget::{EventTarget}; use dom::storageevent::StorageEvent; use dom::urlhelper::UrlHelper; use dom::window::WindowHelpers; use util::str::DOMString; use page::IterablePage; use net_traits::storage_task::{StorageTask, StorageTaskMsg, StorageType}; use std::sync::mpsc::channel; use url::Url; use script_task::{ScriptTask, ScriptMsg, MainThreadRunnable}; use collections::borrow::ToOwned; #[dom_struct] pub struct Storage { reflector_: Reflector, global: GlobalField, storage_type: StorageType } impl Storage { fn new_inherited(global: &GlobalRef, storage_type: StorageType) -> Storage { Storage { reflector_: Reflector::new(), global: GlobalField::from_rooted(global), storage_type: storage_type } } pub fn
(global: &GlobalRef, storage_type: StorageType) -> Temporary<Storage> { reflect_dom_object(box Storage::new_inherited(global, storage_type), *global, StorageBinding::Wrap) } fn get_url(&self) -> Url { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.get_url() } fn get_storage_task(&self) -> StorageTask { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.as_window().storage_task() } } impl<'a> StorageMethods for JSRef<'a, Storage> { fn Length(self) -> u32 { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Length(sender, self.get_url(), self.storage_type)).unwrap(); receiver.recv().unwrap() as u32 } fn Key(self, index: u32) -> Option<DOMString> { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Key(sender, self.get_url(), self.storage_type, index)).unwrap(); receiver.recv().unwrap() } fn GetItem(self, name: DOMString) -> Option<DOMString> { let (sender, receiver) = channel(); let msg = StorageTaskMsg::GetItem(sender, self.get_url(), self.storage_type, name); self.get_storage_task().send(msg).unwrap(); receiver.recv().unwrap() } fn NamedGetter(self, name: DOMString, found: &mut bool) -> Option<DOMString> { let item = self.GetItem(name); *found = item.is_some(); item } fn SetItem(self, name: DOMString, value: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::SetItem(sender, self.get_url(), self.storage_type, name.clone(), value.clone()); self.get_storage_task().send(msg).unwrap(); let (changed, old_value) = receiver.recv().unwrap(); if changed { self.broadcast_change_notification(Some(name), old_value, Some(value)); } } fn NamedSetter(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn NamedCreator(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn RemoveItem(self, name: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::RemoveItem(sender, self.get_url(), self.storage_type, name.clone()); self.get_storage_task().send(msg).unwrap(); if let Some(old_value) = receiver.recv().unwrap() { self.broadcast_change_notification(Some(name), Some(old_value), None); } } fn NamedDeleter(self, name: DOMString) { self.RemoveItem(name); } fn Clear(self) { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Clear(sender, self.get_url(), self.storage_type)).unwrap(); if receiver.recv().unwrap() { self.broadcast_change_notification(None, None, None); } } } trait PrivateStorageHelpers { fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>); } impl<'a> PrivateStorageHelpers for JSRef<'a, Storage> { /// https://html.spec.whatwg.org/multipage/#send-a-storage-notification fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>){ let global_root = self.global.root(); let global_ref = global_root.r(); let script_chan = global_ref.script_chan(); let trusted_storage = Trusted::new(global_ref.get_cx(), self, script_chan.clone()); script_chan.send(ScriptMsg::MainThreadRunnableMsg( box StorageEventRunnable::new(trusted_storage, key, old_value, new_value))).unwrap(); } } pub struct StorageEventRunnable { element: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString> } impl StorageEventRunnable { fn new(storage: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: 
Option<DOMString>) -> StorageEventRunnable { StorageEventRunnable { element: storage, key: key, old_value: old_value, new_value: new_value } } } impl MainThreadRunnable for StorageEventRunnable { fn handler(self: Box<StorageEventRunnable>, script_task: &ScriptTask) { let this = *self; let storage_root = this.element.to_temporary().root(); let storage = storage_root.r(); let global_root = storage.global.root(); let global_ref = global_root.r(); let ev_window = global_ref.as_window(); let ev_url = storage.get_url(); let storage_event = StorageEvent::new( global_ref, "storage".to_owned(), EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, this.key, this.old_value, this.new_value, ev_url.to_string(), Some(storage) ).root(); let event: JSRef<Event> = EventCast::from_ref(storage_event.r()); let root_page = script_task.root_page(); for it_page in root_page.iter() { let it_window_root = it_page.window().root(); let it_window = it_window_root.r(); assert!(UrlHelper::SameOrigin(&ev_url, &it_window.get_url())); // TODO: Such a Document object is not necessarily fully active, but events fired on such // objects are ignored by the event loop until the Document becomes fully active again. if ev_window.pipeline()!= it_window.pipeline() { let target: JSRef<EventTarget> = EventTargetCast::from_ref(it_window); event.fire(target); } } } }
new
identifier_name
storage.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::StorageBinding; use dom::bindings::codegen::Bindings::StorageBinding::StorageMethods; use dom::bindings::global::{GlobalRef, GlobalField}; use dom::bindings::js::{JSRef, Temporary, Rootable, RootedReference}; use dom::bindings::refcounted::Trusted; use dom::bindings::utils::{Reflector, reflect_dom_object}; use dom::bindings::codegen::InheritTypes::{EventCast, EventTargetCast}; use dom::event::{Event, EventHelpers, EventBubbles, EventCancelable}; use dom::eventtarget::{EventTarget}; use dom::storageevent::StorageEvent; use dom::urlhelper::UrlHelper; use dom::window::WindowHelpers; use util::str::DOMString; use page::IterablePage; use net_traits::storage_task::{StorageTask, StorageTaskMsg, StorageType}; use std::sync::mpsc::channel; use url::Url; use script_task::{ScriptTask, ScriptMsg, MainThreadRunnable}; use collections::borrow::ToOwned; #[dom_struct] pub struct Storage { reflector_: Reflector, global: GlobalField, storage_type: StorageType } impl Storage { fn new_inherited(global: &GlobalRef, storage_type: StorageType) -> Storage { Storage { reflector_: Reflector::new(), global: GlobalField::from_rooted(global), storage_type: storage_type } } pub fn new(global: &GlobalRef, storage_type: StorageType) -> Temporary<Storage> { reflect_dom_object(box Storage::new_inherited(global, storage_type), *global, StorageBinding::Wrap) } fn get_url(&self) -> Url { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.get_url() } fn get_storage_task(&self) -> StorageTask { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.as_window().storage_task() } } impl<'a> StorageMethods for JSRef<'a, Storage> { fn Length(self) -> u32 { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Length(sender, self.get_url(), self.storage_type)).unwrap(); receiver.recv().unwrap() as u32 } fn Key(self, index: u32) -> Option<DOMString> { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Key(sender, self.get_url(), self.storage_type, index)).unwrap(); receiver.recv().unwrap() } fn GetItem(self, name: DOMString) -> Option<DOMString> { let (sender, receiver) = channel(); let msg = StorageTaskMsg::GetItem(sender, self.get_url(), self.storage_type, name); self.get_storage_task().send(msg).unwrap(); receiver.recv().unwrap() } fn NamedGetter(self, name: DOMString, found: &mut bool) -> Option<DOMString> { let item = self.GetItem(name); *found = item.is_some(); item } fn SetItem(self, name: DOMString, value: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::SetItem(sender, self.get_url(), self.storage_type, name.clone(), value.clone()); self.get_storage_task().send(msg).unwrap(); let (changed, old_value) = receiver.recv().unwrap(); if changed { self.broadcast_change_notification(Some(name), old_value, Some(value)); } } fn NamedSetter(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn NamedCreator(self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn RemoveItem(self, name: DOMString) { let (sender, receiver) = channel(); let msg = StorageTaskMsg::RemoveItem(sender, self.get_url(), self.storage_type, name.clone()); self.get_storage_task().send(msg).unwrap(); if let Some(old_value) = receiver.recv().unwrap() { 
self.broadcast_change_notification(Some(name), Some(old_value), None); } } fn NamedDeleter(self, name: DOMString) { self.RemoveItem(name); } fn Clear(self) { let (sender, receiver) = channel(); self.get_storage_task().send(StorageTaskMsg::Clear(sender, self.get_url(), self.storage_type)).unwrap(); if receiver.recv().unwrap() { self.broadcast_change_notification(None, None, None); } } } trait PrivateStorageHelpers { fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>); } impl<'a> PrivateStorageHelpers for JSRef<'a, Storage> { /// https://html.spec.whatwg.org/multipage/#send-a-storage-notification fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>){ let global_root = self.global.root(); let global_ref = global_root.r(); let script_chan = global_ref.script_chan(); let trusted_storage = Trusted::new(global_ref.get_cx(), self, script_chan.clone()); script_chan.send(ScriptMsg::MainThreadRunnableMsg( box StorageEventRunnable::new(trusted_storage, key, old_value, new_value))).unwrap(); } } pub struct StorageEventRunnable { element: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString> } impl StorageEventRunnable { fn new(storage: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>) -> StorageEventRunnable { StorageEventRunnable { element: storage, key: key, old_value: old_value, new_value: new_value } } } impl MainThreadRunnable for StorageEventRunnable { fn handler(self: Box<StorageEventRunnable>, script_task: &ScriptTask) { let this = *self; let storage_root = this.element.to_temporary().root(); let storage = storage_root.r(); let global_root = storage.global.root(); let global_ref = global_root.r(); let ev_window = global_ref.as_window(); let ev_url = storage.get_url(); let storage_event = StorageEvent::new( global_ref, "storage".to_owned(), EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, this.key, this.old_value, this.new_value, ev_url.to_string(), Some(storage) ).root(); let event: JSRef<Event> = EventCast::from_ref(storage_event.r()); let root_page = script_task.root_page(); for it_page in root_page.iter() { let it_window_root = it_page.window().root(); let it_window = it_window_root.r(); assert!(UrlHelper::SameOrigin(&ev_url, &it_window.get_url())); // TODO: Such a Document object is not necessarily fully active, but events fired on such // objects are ignored by the event loop until the Document becomes fully active again. if ev_window.pipeline()!= it_window.pipeline()
} } }
{ let target: JSRef<EventTarget> = EventTargetCast::from_ref(it_window); event.fire(target); }
conditional_block
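The Storage methods in the storage.rs records above all follow one request/reply shape: build a std::sync::mpsc channel, embed the Sender in the message sent to the storage task, then block on recv() for the answer. Below is a minimal, self-contained sketch of that pattern; the Request enum and the worker thread are invented for illustration and are not Servo's actual StorageTask API.

use std::sync::mpsc::{channel, Sender};
use std::thread;

// Hypothetical request type, mirroring the shape of StorageTaskMsg.
enum Request {
    Length(Sender<usize>),
}

fn main() {
    let (req_tx, req_rx) = channel::<Request>();

    // Stand-in for the storage task: answer each request over the
    // per-request reply channel it carries.
    let worker = thread::spawn(move || {
        for req in req_rx {
            match req {
                Request::Length(reply) => reply.send(3).unwrap(),
            }
        }
    });

    // Caller side, as in Storage::Length above: make a reply channel,
    // ship the Sender inside the message, then block on the answer.
    let (reply_tx, reply_rx) = channel();
    req_tx.send(Request::Length(reply_tx)).unwrap();
    assert_eq!(reply_rx.recv().unwrap(), 3);

    drop(req_tx);           // closing the request channel ends the worker's loop...
    worker.join().unwrap(); // ...so it joins cleanly.
}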
color.rs
// Copyright (c) 2015-2019 William (B.J.) Snow Orvis // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Color-related constants and functions. /// Colors that work with `graphics` functions, which want color as vectors of f32. #[derive(Debug, Clone, Copy, PartialEq)] pub struct ColorF32(pub [f32; 4]); /// Colors that work with `image` functions, which want color as vectors of u8. #[derive(Debug, Clone, Copy, PartialEq)] pub struct
(pub [u8; 4]); /// Black for use with `graphics`' functions pub const BLACK_F32: ColorF32 = ColorF32([0.0, 0.0, 0.0, 1.0]); /// Grey for use with `graphics`' functions pub const GREY_F32: ColorF32 = ColorF32([0.5, 0.5, 0.5, 1.0]); /// White for use with `graphics`' functions pub const WHITE_F32: ColorF32 = ColorF32([1.0, 1.0, 1.0, 1.0]); /// Dark blue for use with `image`' functions pub const AEBLUE_U8: ColorU8 = ColorU8([0, 0, 48, 255]); /// Black for use with `image`' functions pub const BLACK_U8: ColorU8 = ColorU8([0, 0, 0, 255]); /// White for use with `image`' functions pub const WHITE_U8: ColorU8 = ColorU8([255, 255, 255, 255]); /// Generates a linear range of RGBA colors from a start color to a final color. /// /// /// Eg, to create a spectrum from white to black: /// /// ``` /// use fractal_lib::color::{ColorU8, color_range_linear}; /// /// let black = ColorU8([0,0,0,255]); /// let white = ColorU8([255,255,255,255]); /// /// let range = color_range_linear(black, white, 256); /// /// assert_eq!(range[0], black); /// assert_eq!(range[255], white); /// assert_eq!(range[10], ColorU8([10,10,10,255])); /// ``` /// /// If you want to simulate a cutoff/saturation point where the gradients reach the peak color /// before some maximium index value, then you can use `std::cmp::min` to prevent an out of bounds /// error: /// /// ``` /// use fractal_lib::color::{ColorU8, color_range_linear}; /// use std::cmp::min; /// /// let black = ColorU8([0,0,0,255]); /// let white = ColorU8([255,255,255,255]); /// let gradient_count = 128; /// let range = color_range_linear(black, white, gradient_count); /// /// assert_eq!(range[min(gradient_count-1, 0)], black); /// assert_eq!(range[min(gradient_count-1, gradient_count-1)], white); /// assert_eq!(range[min(gradient_count-1, 255)], white); /// assert_eq!(range[min(gradient_count-1, 127)], white); /// assert_eq!(range[min(gradient_count-1, 10)], ColorU8([20,20,20,255])); /// ``` pub fn color_range_linear(first: ColorU8, last: ColorU8, count: usize) -> Vec<ColorU8> { if count < 2 { panic!("Count must be 2 or more: {}", count); } let deltas = [ (f32::from(last.0[0]) - f32::from(first.0[0])) / f32::from((count as u16) - 1), (f32::from(last.0[1]) - f32::from(first.0[1])) / f32::from((count as u16) - 1), (f32::from(last.0[2]) - f32::from(first.0[2])) / f32::from((count as u16) - 1), (f32::from(last.0[3]) - f32::from(first.0[3])) / f32::from((count as u16) - 1), ]; (0..count) .map(|i| { ColorU8([ (f32::from(first.0[0]) + f32::from(i as u16) * deltas[0]) as u8, (f32::from(first.0[1]) + f32::from(i as u16) * deltas[1]) as u8, (f32::from(first.0[2]) + f32::from(i as u16) * deltas[2]) as u8, (f32::from(first.0[3]) + f32::from(i as u16) * deltas[3]) as u8, ]) }) .collect() } #[cfg(test)] mod test { use super::*; #[test] #[should_panic(expected = "Count must be 2 or more")] fn test_linear_zero() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 0); assert!(range.len() == 0); } #[test] #[should_panic(expected = "Count must be 2 or more")] fn test_linear_one() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 1); assert!(range.len() == 1); } #[test] fn test_linear_two() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 2); assert_eq!(black, range[0]); assert_eq!(white, range[1]); } }
ColorU8
identifier_name
color.rs
// Copyright (c) 2015-2019 William (B.J.) Snow Orvis // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Color-related constants and functions. /// Colors that work with `graphics` functions, which want color as vectors of f32. #[derive(Debug, Clone, Copy, PartialEq)] pub struct ColorF32(pub [f32; 4]); /// Colors that work with `image` functions, which want color as vectors of u8. #[derive(Debug, Clone, Copy, PartialEq)] pub struct ColorU8(pub [u8; 4]); /// Black for use with `graphics`' functions pub const BLACK_F32: ColorF32 = ColorF32([0.0, 0.0, 0.0, 1.0]); /// Grey for use with `graphics`' functions pub const GREY_F32: ColorF32 = ColorF32([0.5, 0.5, 0.5, 1.0]); /// White for use with `graphics`' functions pub const WHITE_F32: ColorF32 = ColorF32([1.0, 1.0, 1.0, 1.0]); /// Dark blue for use with `image`' functions pub const AEBLUE_U8: ColorU8 = ColorU8([0, 0, 48, 255]); /// Black for use with `image`' functions pub const BLACK_U8: ColorU8 = ColorU8([0, 0, 0, 255]); /// White for use with `image`' functions pub const WHITE_U8: ColorU8 = ColorU8([255, 255, 255, 255]); /// Generates a linear range of RGBA colors from a start color to a final color. /// /// /// Eg, to create a spectrum from white to black: /// /// ``` /// use fractal_lib::color::{ColorU8, color_range_linear}; /// /// let black = ColorU8([0,0,0,255]); /// let white = ColorU8([255,255,255,255]); /// /// let range = color_range_linear(black, white, 256); /// /// assert_eq!(range[0], black); /// assert_eq!(range[255], white); /// assert_eq!(range[10], ColorU8([10,10,10,255])); /// ``` /// /// If you want to simulate a cutoff/saturation point where the gradients reach the peak color /// before some maximium index value, then you can use `std::cmp::min` to prevent an out of bounds
/// /// ``` /// use fractal_lib::color::{ColorU8, color_range_linear}; /// use std::cmp::min; /// /// let black = ColorU8([0,0,0,255]); /// let white = ColorU8([255,255,255,255]); /// let gradient_count = 128; /// let range = color_range_linear(black, white, gradient_count); /// /// assert_eq!(range[min(gradient_count-1, 0)], black); /// assert_eq!(range[min(gradient_count-1, gradient_count-1)], white); /// assert_eq!(range[min(gradient_count-1, 255)], white); /// assert_eq!(range[min(gradient_count-1, 127)], white); /// assert_eq!(range[min(gradient_count-1, 10)], ColorU8([20,20,20,255])); /// ``` pub fn color_range_linear(first: ColorU8, last: ColorU8, count: usize) -> Vec<ColorU8> { if count < 2 { panic!("Count must be 2 or more: {}", count); } let deltas = [ (f32::from(last.0[0]) - f32::from(first.0[0])) / f32::from((count as u16) - 1), (f32::from(last.0[1]) - f32::from(first.0[1])) / f32::from((count as u16) - 1), (f32::from(last.0[2]) - f32::from(first.0[2])) / f32::from((count as u16) - 1), (f32::from(last.0[3]) - f32::from(first.0[3])) / f32::from((count as u16) - 1), ]; (0..count) .map(|i| { ColorU8([ (f32::from(first.0[0]) + f32::from(i as u16) * deltas[0]) as u8, (f32::from(first.0[1]) + f32::from(i as u16) * deltas[1]) as u8, (f32::from(first.0[2]) + f32::from(i as u16) * deltas[2]) as u8, (f32::from(first.0[3]) + f32::from(i as u16) * deltas[3]) as u8, ]) }) .collect() } #[cfg(test)] mod test { use super::*; #[test] #[should_panic(expected = "Count must be 2 or more")] fn test_linear_zero() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 0); assert!(range.len() == 0); } #[test] #[should_panic(expected = "Count must be 2 or more")] fn test_linear_one() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 1); assert!(range.len() == 1); } #[test] fn test_linear_two() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 2); assert_eq!(black, range[0]); assert_eq!(white, range[1]); } }
/// error:
random_line_split
color.rs
// Copyright (c) 2015-2019 William (B.J.) Snow Orvis // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Color-related constants and functions. /// Colors that work with `graphics` functions, which want color as vectors of f32. #[derive(Debug, Clone, Copy, PartialEq)] pub struct ColorF32(pub [f32; 4]); /// Colors that work with `image` functions, which want color as vectors of u8. #[derive(Debug, Clone, Copy, PartialEq)] pub struct ColorU8(pub [u8; 4]); /// Black for use with `graphics`' functions pub const BLACK_F32: ColorF32 = ColorF32([0.0, 0.0, 0.0, 1.0]); /// Grey for use with `graphics`' functions pub const GREY_F32: ColorF32 = ColorF32([0.5, 0.5, 0.5, 1.0]); /// White for use with `graphics`' functions pub const WHITE_F32: ColorF32 = ColorF32([1.0, 1.0, 1.0, 1.0]); /// Dark blue for use with `image`' functions pub const AEBLUE_U8: ColorU8 = ColorU8([0, 0, 48, 255]); /// Black for use with `image`' functions pub const BLACK_U8: ColorU8 = ColorU8([0, 0, 0, 255]); /// White for use with `image`' functions pub const WHITE_U8: ColorU8 = ColorU8([255, 255, 255, 255]); /// Generates a linear range of RGBA colors from a start color to a final color. /// /// /// Eg, to create a spectrum from white to black: /// /// ``` /// use fractal_lib::color::{ColorU8, color_range_linear}; /// /// let black = ColorU8([0,0,0,255]); /// let white = ColorU8([255,255,255,255]); /// /// let range = color_range_linear(black, white, 256); /// /// assert_eq!(range[0], black); /// assert_eq!(range[255], white); /// assert_eq!(range[10], ColorU8([10,10,10,255])); /// ``` /// /// If you want to simulate a cutoff/saturation point where the gradients reach the peak color /// before some maximium index value, then you can use `std::cmp::min` to prevent an out of bounds /// error: /// /// ``` /// use fractal_lib::color::{ColorU8, color_range_linear}; /// use std::cmp::min; /// /// let black = ColorU8([0,0,0,255]); /// let white = ColorU8([255,255,255,255]); /// let gradient_count = 128; /// let range = color_range_linear(black, white, gradient_count); /// /// assert_eq!(range[min(gradient_count-1, 0)], black); /// assert_eq!(range[min(gradient_count-1, gradient_count-1)], white); /// assert_eq!(range[min(gradient_count-1, 255)], white); /// assert_eq!(range[min(gradient_count-1, 127)], white); /// assert_eq!(range[min(gradient_count-1, 10)], ColorU8([20,20,20,255])); /// ``` pub fn color_range_linear(first: ColorU8, last: ColorU8, count: usize) -> Vec<ColorU8> { if count < 2
let deltas = [ (f32::from(last.0[0]) - f32::from(first.0[0])) / f32::from((count as u16) - 1), (f32::from(last.0[1]) - f32::from(first.0[1])) / f32::from((count as u16) - 1), (f32::from(last.0[2]) - f32::from(first.0[2])) / f32::from((count as u16) - 1), (f32::from(last.0[3]) - f32::from(first.0[3])) / f32::from((count as u16) - 1), ]; (0..count) .map(|i| { ColorU8([ (f32::from(first.0[0]) + f32::from(i as u16) * deltas[0]) as u8, (f32::from(first.0[1]) + f32::from(i as u16) * deltas[1]) as u8, (f32::from(first.0[2]) + f32::from(i as u16) * deltas[2]) as u8, (f32::from(first.0[3]) + f32::from(i as u16) * deltas[3]) as u8, ]) }) .collect() } #[cfg(test)] mod test { use super::*; #[test] #[should_panic(expected = "Count must be 2 or more")] fn test_linear_zero() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 0); assert!(range.len() == 0); } #[test] #[should_panic(expected = "Count must be 2 or more")] fn test_linear_one() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 1); assert!(range.len() == 1); } #[test] fn test_linear_two() { let black = ColorU8([0, 0, 0, 255]); let white = ColorU8([255, 255, 255, 255]); let range = color_range_linear(black, white, 2); assert_eq!(black, range[0]); assert_eq!(white, range[1]); } }
{ panic!("Count must be 2 or more: {}", count); }
conditional_block
or-patterns-syntactic-fail.rs
// Test some cases where or-patterns may ostensibly be allowed but are in fact not. // This is not a semantic test. We only test parsing. fn
() {} enum E { A, B } use E::*; fn no_top_level_or_patterns() { // We do *not* allow or-patterns at the top level of lambdas... let _ = |A | B: E| (); //~ ERROR no implementation for `E | ()` // -------- This looks like an or-pattern but is in fact `|A| (B: E | ())`. //...and for now neither do we allow or-patterns at the top level of functions. fn fun1(A | B: E) {} //~^ ERROR top-level or-patterns are not allowed fn fun2(| A | B: E) {} //~^ ERROR top-level or-patterns are not allowed // We don't allow top-level or-patterns before type annotation in let-statements because we // want to reserve this syntactic space for possible future type ascription. let A | B: E = A; //~^ ERROR top-level or-patterns are not allowed let | A | B: E = A; //~^ ERROR top-level or-patterns are not allowed let (A | B): E = A; // ok -- wrapped in parens }
main
identifier_name
or-patterns-syntactic-fail.rs
// Test some cases where or-patterns may ostensibly be allowed but are in fact not. // This is not a semantic test. We only test parsing. fn main() {} enum E { A, B } use E::*; fn no_top_level_or_patterns() { // We do *not* allow or-patterns at the top level of lambdas... let _ = |A | B: E| (); //~ ERROR no implementation for `E | ()` // -------- This looks like an or-pattern but is in fact `|A| (B: E | ())`. //...and for now neither do we allow or-patterns at the top level of functions. fn fun1(A | B: E)
//~^ ERROR top-level or-patterns are not allowed fn fun2(| A | B: E) {} //~^ ERROR top-level or-patterns are not allowed // We don't allow top-level or-patterns before type annotation in let-statements because we // want to reserve this syntactic space for possible future type ascription. let A | B: E = A; //~^ ERROR top-level or-patterns are not allowed let | A | B: E = A; //~^ ERROR top-level or-patterns are not allowed let (A | B): E = A; // ok -- wrapped in parens }
{}
identifier_body
or-patterns-syntactic-fail.rs
// Test some cases where or-patterns may ostensibly be allowed but are in fact not. // This is not a semantic test. We only test parsing. fn main() {} enum E { A, B }
// We do *not* allow or-patterns at the top level of lambdas... let _ = |A | B: E| (); //~ ERROR no implementation for `E | ()` // -------- This looks like an or-pattern but is in fact `|A| (B: E | ())`. //...and for now neither do we allow or-patterns at the top level of functions. fn fun1(A | B: E) {} //~^ ERROR top-level or-patterns are not allowed fn fun2(| A | B: E) {} //~^ ERROR top-level or-patterns are not allowed // We don't allow top-level or-patterns before type annotation in let-statements because we // want to reserve this syntactic space for possible future type ascription. let A | B: E = A; //~^ ERROR top-level or-patterns are not allowed let | A | B: E = A; //~^ ERROR top-level or-patterns are not allowed let (A | B): E = A; // ok -- wrapped in parens }
use E::*; fn no_top_level_or_patterns() {
random_line_split
array_slice.rs
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #[macro_use] extern crate criterion; use criterion::Criterion; extern crate arrow; use arrow::array::*; use std::sync::Arc; fn create_array_slice(array: &ArrayRef, length: usize) -> ArrayRef { array.slice(0, length) } fn create_array_with_nulls(size: usize) -> ArrayRef { let array: Float64Array = (0..size) .map(|i| if i % 2 == 0 { Some(1.0) } else
) .collect(); Arc::new(array) } fn array_slice_benchmark(c: &mut Criterion) { let array = create_array_with_nulls(4096); c.bench_function("array_slice 128", |b| { b.iter(|| create_array_slice(&array, 128)) }); c.bench_function("array_slice 512", |b| { b.iter(|| create_array_slice(&array, 512)) }); c.bench_function("array_slice 2048", |b| { b.iter(|| create_array_slice(&array, 2048)) }); } criterion_group!(benches, array_slice_benchmark); criterion_main!(benches);
{ None }
conditional_block
array_slice.rs
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #[macro_use] extern crate criterion; use criterion::Criterion; extern crate arrow; use arrow::array::*; use std::sync::Arc; fn create_array_slice(array: &ArrayRef, length: usize) -> ArrayRef { array.slice(0, length) } fn create_array_with_nulls(size: usize) -> ArrayRef { let array: Float64Array = (0..size) .map(|i| if i % 2 == 0 { Some(1.0) } else { None }) .collect(); Arc::new(array) } fn array_slice_benchmark(c: &mut Criterion) { let array = create_array_with_nulls(4096); c.bench_function("array_slice 128", |b| { b.iter(|| create_array_slice(&array, 128)) }); c.bench_function("array_slice 512", |b| {
}); c.bench_function("array_slice 2048", |b| { b.iter(|| create_array_slice(&array, 2048)) }); } criterion_group!(benches, array_slice_benchmark); criterion_main!(benches);
b.iter(|| create_array_slice(&array, 512))
random_line_split
array_slice.rs
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #[macro_use] extern crate criterion; use criterion::Criterion; extern crate arrow; use arrow::array::*; use std::sync::Arc; fn create_array_slice(array: &ArrayRef, length: usize) -> ArrayRef
fn create_array_with_nulls(size: usize) -> ArrayRef { let array: Float64Array = (0..size) .map(|i| if i % 2 == 0 { Some(1.0) } else { None }) .collect(); Arc::new(array) } fn array_slice_benchmark(c: &mut Criterion) { let array = create_array_with_nulls(4096); c.bench_function("array_slice 128", |b| { b.iter(|| create_array_slice(&array, 128)) }); c.bench_function("array_slice 512", |b| { b.iter(|| create_array_slice(&array, 512)) }); c.bench_function("array_slice 2048", |b| { b.iter(|| create_array_slice(&array, 2048)) }); } criterion_group!(benches, array_slice_benchmark); criterion_main!(benches);
{ array.slice(0, length) }
identifier_body
array_slice.rs
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #[macro_use] extern crate criterion; use criterion::Criterion; extern crate arrow; use arrow::array::*; use std::sync::Arc; fn create_array_slice(array: &ArrayRef, length: usize) -> ArrayRef { array.slice(0, length) } fn create_array_with_nulls(size: usize) -> ArrayRef { let array: Float64Array = (0..size) .map(|i| if i % 2 == 0 { Some(1.0) } else { None }) .collect(); Arc::new(array) } fn
(c: &mut Criterion) { let array = create_array_with_nulls(4096); c.bench_function("array_slice 128", |b| { b.iter(|| create_array_slice(&array, 128)) }); c.bench_function("array_slice 512", |b| { b.iter(|| create_array_slice(&array, 512)) }); c.bench_function("array_slice 2048", |b| { b.iter(|| create_array_slice(&array, 2048)) }); } criterion_group!(benches, array_slice_benchmark); criterion_main!(benches);
array_slice_benchmark
identifier_name
sha256_bench.rs
#![cfg_attr(all(feature = "nightly", test), feature(test))] #![cfg(all(feature = "nightly", test))] extern crate test; extern crate cxema; #[cfg(test)] use cxema::sha2::{Sha256}; use cxema::digest::Digest; use test::Bencher; #[bench] pub fn sha256_10(bh: &mut Bencher) { let mut sh = Sha256::new(); let bytes = [1u8; 10]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; } #[bench] pub fn
(bh: &mut Bencher) { let mut sh = Sha256::new(); let bytes = [1u8; 1024]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; } #[bench] pub fn sha256_64k(bh: &mut Bencher) { let mut sh = Sha256::new(); let bytes = [1u8; 65536]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; }
sha256_1k
identifier_name
sha256_bench.rs
#![cfg_attr(all(feature = "nightly", test), feature(test))] #![cfg(all(feature = "nightly", test))] extern crate test; extern crate cxema; #[cfg(test)] use cxema::sha2::{Sha256}; use cxema::digest::Digest; use test::Bencher; #[bench] pub fn sha256_10(bh: &mut Bencher) { let mut sh = Sha256::new();
bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; } #[bench] pub fn sha256_1k(bh: &mut Bencher) { let mut sh = Sha256::new(); let bytes = [1u8; 1024]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; } #[bench] pub fn sha256_64k(bh: &mut Bencher) { let mut sh = Sha256::new(); let bytes = [1u8; 65536]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; }
let bytes = [1u8; 10];
random_line_split
sha256_bench.rs
#![cfg_attr(all(feature = "nightly", test), feature(test))] #![cfg(all(feature = "nightly", test))] extern crate test; extern crate cxema; #[cfg(test)] use cxema::sha2::{Sha256}; use cxema::digest::Digest; use test::Bencher; #[bench] pub fn sha256_10(bh: &mut Bencher) { let mut sh = Sha256::new(); let bytes = [1u8; 10]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; } #[bench] pub fn sha256_1k(bh: &mut Bencher) { let mut sh = Sha256::new(); let bytes = [1u8; 1024]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; } #[bench] pub fn sha256_64k(bh: &mut Bencher)
{ let mut sh = Sha256::new(); let bytes = [1u8; 65536]; bh.iter(|| { sh.input(&bytes); }); bh.bytes = bytes.len() as u64; }
identifier_body
lib.rs
// Zinc, the bare metal stack for rust. // Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![feature(rustc_private, plugin_registrar, quote)] extern crate platformtree; extern crate rustc; extern crate serialize; extern crate syntax; extern crate rustc_plugin; use std::clone::Clone; use std::ops::Deref; use rustc_plugin::Registry; use syntax::ast; use syntax::tokenstream; use syntax::codemap::DUMMY_SP; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, MacResult, MultiModifier, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::print::pprust; use syntax::util::small_vector::SmallVector; use syntax::ptr::P; use platformtree::parser::Parser; use platformtree::builder::Builder; use platformtree::builder::meta_args::ToTyHash; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("platformtree", macro_platformtree); reg.register_macro("platformtree_verbose", macro_platformtree_verbose); reg.register_syntax_extension(syntax::parse::token::intern("zinc_task"), MultiModifier(Box::new(macro_zinc_task))); } pub fn macro_platformtree(cx: &mut ExtCtxt, _: Span, tts: &[tokenstream::TokenTree]) -> Box<MacResult+'static> { let pt = Parser::new(cx, tts).parse_platformtree(); let items = Builder::build(cx, pt.unwrap()) .expect(format!("Unexpected failure on {}", line!()).as_str()) .emit_items(cx); MacItems::new(items) } pub fn macro_platformtree_verbose(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<MacResult+'static> { let result = macro_platformtree(cx, sp, tts); println!("Platform Tree dump:"); for i in result.make_items().unwrap().as_slice().iter() { println!("{}", pprust::item_to_string(i.deref())); } macro_platformtree(cx, sp, tts) } fn macro_zinc_task(cx: &mut ExtCtxt, _: Span, _: &ast::MetaItem, ann: Annotatable) -> Annotatable { match ann { Annotatable::Item(it) => {Annotatable::Item(macro_zinc_task_item(cx, it))} other => {other} } } fn
(cx: &mut ExtCtxt, it: P<ast::Item>) -> P<ast::Item> { match it.node { ast::ItemKind::Fn(ref decl, style, constness, abi, _, ref block) => { let istr = it.ident.name.as_str(); let fn_name = &*istr; let ty_params = platformtree::builder::meta_args::get_ty_params_for_task(cx, fn_name); let params = ty_params.iter().map(|ty| { cx.typaram( DUMMY_SP, cx.ident_of(ty.to_tyhash().as_str()), P::from_vec(vec!(cx.typarambound( cx.path(DUMMY_SP, ty.as_str().split("::").map(|t| cx.ident_of(t)).collect())))), None) }).collect(); let new_arg = cx.arg(DUMMY_SP, cx.ident_of("args"), cx.ty_rptr( DUMMY_SP, cx.ty_path( cx.path_all( DUMMY_SP, false, ["pt".to_string(), fn_name.to_string() + "_args"].iter().map(|t| cx.ident_of(t.as_str())).collect(), vec!(), ty_params.iter().map(|ty| { cx.ty_path(cx.path_ident(DUMMY_SP, cx.ident_of(ty.to_tyhash().as_str()))) }).collect(), vec!())), None, ast::Mutability::Immutable)); let new_decl = P(ast::FnDecl { inputs: vec!(new_arg), ..decl.deref().clone() }); let new_generics = ast::Generics { lifetimes: vec!(), ty_params: P::from_vec(params), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: vec!(), }, span : DUMMY_SP, }; let new_node = ast::ItemKind::Fn(new_decl, style, constness, abi, new_generics, block.clone()); P(ast::Item {node: new_node,..it.deref().clone() }) }, _ => panic!(), } } pub struct MacItems { items: Vec<P<ast::Item>> } impl MacItems { pub fn new(items: Vec<P<ast::Item>>) -> Box<MacResult+'static> { Box::new(MacItems { items: items }) } } impl MacResult for MacItems { fn make_items(self: Box<MacItems>) -> Option<SmallVector<P<ast::Item>>> { Some(SmallVector::many(self.items.clone())) } }
macro_zinc_task_item
identifier_name
lib.rs
// Zinc, the bare metal stack for rust. // Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![feature(rustc_private, plugin_registrar, quote)] extern crate platformtree; extern crate rustc; extern crate serialize; extern crate syntax; extern crate rustc_plugin; use std::clone::Clone; use std::ops::Deref; use rustc_plugin::Registry; use syntax::ast; use syntax::tokenstream; use syntax::codemap::DUMMY_SP; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, MacResult, MultiModifier, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::print::pprust; use syntax::util::small_vector::SmallVector; use syntax::ptr::P; use platformtree::parser::Parser; use platformtree::builder::Builder; use platformtree::builder::meta_args::ToTyHash; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("platformtree", macro_platformtree); reg.register_macro("platformtree_verbose", macro_platformtree_verbose); reg.register_syntax_extension(syntax::parse::token::intern("zinc_task"), MultiModifier(Box::new(macro_zinc_task))); } pub fn macro_platformtree(cx: &mut ExtCtxt, _: Span, tts: &[tokenstream::TokenTree]) -> Box<MacResult+'static> { let pt = Parser::new(cx, tts).parse_platformtree(); let items = Builder::build(cx, pt.unwrap()) .expect(format!("Unexpected failure on {}", line!()).as_str()) .emit_items(cx); MacItems::new(items) } pub fn macro_platformtree_verbose(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<MacResult+'static> { let result = macro_platformtree(cx, sp, tts); println!("Platform Tree dump:"); for i in result.make_items().unwrap().as_slice().iter() { println!("{}", pprust::item_to_string(i.deref()));
fn macro_zinc_task(cx: &mut ExtCtxt, _: Span, _: &ast::MetaItem, ann: Annotatable) -> Annotatable { match ann { Annotatable::Item(it) => {Annotatable::Item(macro_zinc_task_item(cx, it))} other => {other} } } fn macro_zinc_task_item(cx: &mut ExtCtxt, it: P<ast::Item>) -> P<ast::Item> { match it.node { ast::ItemKind::Fn(ref decl, style, constness, abi, _, ref block) => { let istr = it.ident.name.as_str(); let fn_name = &*istr; let ty_params = platformtree::builder::meta_args::get_ty_params_for_task(cx, fn_name); let params = ty_params.iter().map(|ty| { cx.typaram( DUMMY_SP, cx.ident_of(ty.to_tyhash().as_str()), P::from_vec(vec!(cx.typarambound( cx.path(DUMMY_SP, ty.as_str().split("::").map(|t| cx.ident_of(t)).collect())))), None) }).collect(); let new_arg = cx.arg(DUMMY_SP, cx.ident_of("args"), cx.ty_rptr( DUMMY_SP, cx.ty_path( cx.path_all( DUMMY_SP, false, ["pt".to_string(), fn_name.to_string() + "_args"].iter().map(|t| cx.ident_of(t.as_str())).collect(), vec!(), ty_params.iter().map(|ty| { cx.ty_path(cx.path_ident(DUMMY_SP, cx.ident_of(ty.to_tyhash().as_str()))) }).collect(), vec!())), None, ast::Mutability::Immutable)); let new_decl = P(ast::FnDecl { inputs: vec!(new_arg), ..decl.deref().clone() }); let new_generics = ast::Generics { lifetimes: vec!(), ty_params: P::from_vec(params), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: vec!(), }, span : DUMMY_SP, }; let new_node = ast::ItemKind::Fn(new_decl, style, constness, abi, new_generics, block.clone()); P(ast::Item {node: new_node,..it.deref().clone() }) }, _ => panic!(), } } pub struct MacItems { items: Vec<P<ast::Item>> } impl MacItems { pub fn new(items: Vec<P<ast::Item>>) -> Box<MacResult+'static> { Box::new(MacItems { items: items }) } } impl MacResult for MacItems { fn make_items(self: Box<MacItems>) -> Option<SmallVector<P<ast::Item>>> { Some(SmallVector::many(self.items.clone())) } }
} macro_platformtree(cx, sp, tts) }
random_line_split
lib.rs
// Zinc, the bare metal stack for rust. // Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![feature(rustc_private, plugin_registrar, quote)] extern crate platformtree; extern crate rustc; extern crate serialize; extern crate syntax; extern crate rustc_plugin; use std::clone::Clone; use std::ops::Deref; use rustc_plugin::Registry; use syntax::ast; use syntax::tokenstream; use syntax::codemap::DUMMY_SP; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, MacResult, MultiModifier, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::print::pprust; use syntax::util::small_vector::SmallVector; use syntax::ptr::P; use platformtree::parser::Parser; use platformtree::builder::Builder; use platformtree::builder::meta_args::ToTyHash; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("platformtree", macro_platformtree); reg.register_macro("platformtree_verbose", macro_platformtree_verbose); reg.register_syntax_extension(syntax::parse::token::intern("zinc_task"), MultiModifier(Box::new(macro_zinc_task))); } pub fn macro_platformtree(cx: &mut ExtCtxt, _: Span, tts: &[tokenstream::TokenTree]) -> Box<MacResult+'static> { let pt = Parser::new(cx, tts).parse_platformtree(); let items = Builder::build(cx, pt.unwrap()) .expect(format!("Unexpected failure on {}", line!()).as_str()) .emit_items(cx); MacItems::new(items) } pub fn macro_platformtree_verbose(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<MacResult+'static> { let result = macro_platformtree(cx, sp, tts); println!("Platform Tree dump:"); for i in result.make_items().unwrap().as_slice().iter() { println!("{}", pprust::item_to_string(i.deref())); } macro_platformtree(cx, sp, tts) } fn macro_zinc_task(cx: &mut ExtCtxt, _: Span, _: &ast::MetaItem, ann: Annotatable) -> Annotatable { match ann { Annotatable::Item(it) => {Annotatable::Item(macro_zinc_task_item(cx, it))} other => {other} } } fn macro_zinc_task_item(cx: &mut ExtCtxt, it: P<ast::Item>) -> P<ast::Item> { match it.node { ast::ItemKind::Fn(ref decl, style, constness, abi, _, ref block) =>
["pt".to_string(), fn_name.to_string() + "_args"].iter().map(|t| cx.ident_of(t.as_str())).collect(), vec!(), ty_params.iter().map(|ty| { cx.ty_path(cx.path_ident(DUMMY_SP, cx.ident_of(ty.to_tyhash().as_str()))) }).collect(), vec!())), None, ast::Mutability::Immutable)); let new_decl = P(ast::FnDecl { inputs: vec!(new_arg), ..decl.deref().clone() }); let new_generics = ast::Generics { lifetimes: vec!(), ty_params: P::from_vec(params), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: vec!(), }, span : DUMMY_SP, }; let new_node = ast::ItemKind::Fn(new_decl, style, constness, abi, new_generics, block.clone()); P(ast::Item {node: new_node,..it.deref().clone() }) } , _ => panic!(), } } pub struct MacItems { items: Vec<P<ast::Item>> } impl MacItems { pub fn new(items: Vec<P<ast::Item>>) -> Box<MacResult+'static> { Box::new(MacItems { items: items }) } } impl MacResult for MacItems { fn make_items(self: Box<MacItems>) -> Option<SmallVector<P<ast::Item>>> { Some(SmallVector::many(self.items.clone())) } }
{ let istr = it.ident.name.as_str(); let fn_name = &*istr; let ty_params = platformtree::builder::meta_args::get_ty_params_for_task(cx, fn_name); let params = ty_params.iter().map(|ty| { cx.typaram( DUMMY_SP, cx.ident_of(ty.to_tyhash().as_str()), P::from_vec(vec!(cx.typarambound( cx.path(DUMMY_SP, ty.as_str().split("::").map(|t| cx.ident_of(t)).collect())))), None) }).collect(); let new_arg = cx.arg(DUMMY_SP, cx.ident_of("args"), cx.ty_rptr( DUMMY_SP, cx.ty_path( cx.path_all( DUMMY_SP, false,
conditional_block
main.rs
//! Module with the entry point of the binary. extern crate case; extern crate clap; extern crate conv; #[macro_use] extern crate log; extern crate rush; mod args; mod logging; mod rcfile; use std::error::Error; // for.cause() method use std::io::{self, Write}; use std::iter::repeat; use std::process::exit; use conv::TryFrom; use rush::Context; use args::InputMode; fn main() { logging::init().unwrap(); let opts = args::parse(); let before = opts.before.as_ref().map(|b| b as &str); let exprs: Vec<&str> = opts.expressions.iter().map(|e| e as &str).collect(); let after = opts.after.as_ref().map(|a| a as &str); match opts.input_mode { Some(mode) => { if let Err(error) = process_input(mode, before, &exprs, after) { handle_error(error); exit(1); } }, None => { if let Some(before) = before { println!("--before expression:"); print_ast(before); println!(""); } for expr in exprs { print_ast(expr); } if let Some(after) = after { println!(""); println!("--after expression:"); print_ast(after); } }, } } /// Process standard input through given expressions, writing results to stdout. fn process_input(mode: InputMode, before: Option<&str>, exprs: &[&str], after: Option<&str>) -> io::Result<()> { // Prepare a Context for the processing. // This includes evaluating any "before" expression within it. let mut context = Context::new(); try!(rcfile::load_into(&mut context) .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, format!("Error processing startup file: {}", err)))); if let Some(before) = before { try!(rush::exec(before, &mut context)); } // Do the processing. // // If there is an "after" expression provided, it is that expression that should produce // the only output of the program. So we'll just consume whatever results would normally // be printed otherwise. if after.is_some() { // HACK: Because the intermediate results have to be printed out -- even if only to /dev/null // -- we have to ensure there is always a non-empty value to use as the intermediate result. // This is necessary especially since with --after (and --before), intermediate expressions // are likely to be just assignments (and the result of an assignment is empty). // // We can make sure there is always a value to print simply by adding one more expression // at the end of the chain. It so happens that zero (or any number) is compatible with // all the input modes, so let's use that. let mut exprs = exprs.to_vec(); exprs.push("0"); try!(apply_multi_ctx(mode, &mut context, &exprs, &mut io::sink())); } else { try!(apply_multi_ctx(mode, &mut context, exprs, &mut io::stdout())); } // Evaluate the "after" expression, if provided, and return it as the result. if let Some(after) = after
Ok(()) } /// Apply the expressions to the standard input with given mode. /// This forms the bulk of the input processing. #[inline] fn apply_multi_ctx(mode: InputMode, context: &mut Context, exprs: &[&str], mut output: &mut Write) -> io::Result<()> { let func: fn(_, _, _, _) -> _ = match mode { InputMode::String => rush::apply_string_multi_ctx, InputMode::Lines => rush::map_lines_multi_ctx, InputMode::Words => rush::map_words_multi_ctx, InputMode::Chars => rush::map_chars_multi_ctx, InputMode::Bytes => rush::map_bytes_multi_ctx, InputMode::Files => rush::map_files_multi_ctx, }; func(context, exprs, io::stdin(), &mut output) } /// Handle an error that occurred while processing the input. fn handle_error(error: io::Error) { writeln!(&mut io::stderr(), "error: {}", error).unwrap(); // Print the error causes as an indented "tree". let mut cause = error.cause(); let mut indent = 0; while let Some(error) = cause { writeln!(&mut io::stderr(), "{}{}{}", repeat(" ").take(CAUSE_PREFIX.len() * indent).collect::<String>(), CAUSE_PREFIX, error).unwrap(); indent += 1; cause = error.cause(); } } const CAUSE_PREFIX: &'static str = "└ "; // U+2514 /// Print the AST for given expression to stdout. fn print_ast(expr: &str) { debug!("Printing the AST of: {}", expr); match rush::parse(expr) { Ok(ast) => println!("{:?}", ast), Err(error) => { error!("{:?}", error); exit(1); }, } }
{ let result = try!(rush::eval(after, &mut context)); let result_string = try!(String::try_from(result) .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))); // Make it so that the output always ends with a newline, // regardless whether it consists of a single value or multiple lines. return if result_string.ends_with("\n") { write!(&mut io::stdout(), "{}", result_string) } else { write!(&mut io::stdout(), "{}\n", result_string) }; }
conditional_block
main.rs
//! Module with the entry point of the binary. extern crate case; extern crate clap; extern crate conv; #[macro_use] extern crate log; extern crate rush; mod args; mod logging; mod rcfile; use std::error::Error; // for.cause() method use std::io::{self, Write}; use std::iter::repeat; use std::process::exit; use conv::TryFrom; use rush::Context; use args::InputMode; fn main() { logging::init().unwrap(); let opts = args::parse(); let before = opts.before.as_ref().map(|b| b as &str); let exprs: Vec<&str> = opts.expressions.iter().map(|e| e as &str).collect(); let after = opts.after.as_ref().map(|a| a as &str); match opts.input_mode { Some(mode) => { if let Err(error) = process_input(mode, before, &exprs, after) { handle_error(error); exit(1); } }, None => { if let Some(before) = before { println!("--before expression:"); print_ast(before); println!(""); } for expr in exprs { print_ast(expr); } if let Some(after) = after { println!(""); println!("--after expression:"); print_ast(after); } }, } } /// Process standard input through given expressions, writing results to stdout. fn process_input(mode: InputMode, before: Option<&str>, exprs: &[&str], after: Option<&str>) -> io::Result<()> { // Prepare a Context for the processing. // This includes evaluating any "before" expression within it. let mut context = Context::new(); try!(rcfile::load_into(&mut context) .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, format!("Error processing startup file: {}", err))));
// Do the processing. // // If there is an "after" expression provided, it is that expression that should produce // the only output of the program. So we'll just consume whatever results would normally // be printed otherwise. if after.is_some() { // HACK: Because the intermediate results have to be printed out -- even if only to /dev/null // -- we have to ensure there is always a non-empty value to use as the intermediate result. // This is necessary especially since with --after (and --before), intermediate expressions // are likely to be just assignments (and the result of an assignment is empty). // // We can make sure there is always a value to print simply by adding one more expression // at the end of the chain. It so happens that zero (or any number) is compatible with // all the input modes, so let's use that. let mut exprs = exprs.to_vec(); exprs.push("0"); try!(apply_multi_ctx(mode, &mut context, &exprs, &mut io::sink())); } else { try!(apply_multi_ctx(mode, &mut context, exprs, &mut io::stdout())); } // Evaluate the "after" expression, if provided, and return it as the result. if let Some(after) = after { let result = try!(rush::eval(after, &mut context)); let result_string = try!(String::try_from(result) .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))); // Make it so that the output always ends with a newline, // regardless whether it consists of a single value or multiple lines. return if result_string.ends_with("\n") { write!(&mut io::stdout(), "{}", result_string) } else { write!(&mut io::stdout(), "{}\n", result_string) }; } Ok(()) } /// Apply the expressions to the standard input with given mode. /// This forms the bulk of the input processing. #[inline] fn apply_multi_ctx(mode: InputMode, context: &mut Context, exprs: &[&str], mut output: &mut Write) -> io::Result<()> { let func: fn(_, _, _, _) -> _ = match mode { InputMode::String => rush::apply_string_multi_ctx, InputMode::Lines => rush::map_lines_multi_ctx, InputMode::Words => rush::map_words_multi_ctx, InputMode::Chars => rush::map_chars_multi_ctx, InputMode::Bytes => rush::map_bytes_multi_ctx, InputMode::Files => rush::map_files_multi_ctx, }; func(context, exprs, io::stdin(), &mut output) } /// Handle an error that occurred while processing the input. fn handle_error(error: io::Error) { writeln!(&mut io::stderr(), "error: {}", error).unwrap(); // Print the error causes as an indented "tree". let mut cause = error.cause(); let mut indent = 0; while let Some(error) = cause { writeln!(&mut io::stderr(), "{}{}{}", repeat(" ").take(CAUSE_PREFIX.len() * indent).collect::<String>(), CAUSE_PREFIX, error).unwrap(); indent += 1; cause = error.cause(); } } const CAUSE_PREFIX: &'static str = "└ "; // U+2514 /// Print the AST for given expression to stdout. fn print_ast(expr: &str) { debug!("Printing the AST of: {}", expr); match rush::parse(expr) { Ok(ast) => println!("{:?}", ast), Err(error) => { error!("{:?}", error); exit(1); }, } }
if let Some(before) = before { try!(rush::exec(before, &mut context)); }
random_line_split
main.rs
//! Module with the entry point of the binary. extern crate case; extern crate clap; extern crate conv; #[macro_use] extern crate log; extern crate rush; mod args; mod logging; mod rcfile; use std::error::Error; // for.cause() method use std::io::{self, Write}; use std::iter::repeat; use std::process::exit; use conv::TryFrom; use rush::Context; use args::InputMode; fn main() { logging::init().unwrap(); let opts = args::parse(); let before = opts.before.as_ref().map(|b| b as &str); let exprs: Vec<&str> = opts.expressions.iter().map(|e| e as &str).collect(); let after = opts.after.as_ref().map(|a| a as &str); match opts.input_mode { Some(mode) => { if let Err(error) = process_input(mode, before, &exprs, after) { handle_error(error); exit(1); } }, None => { if let Some(before) = before { println!("--before expression:"); print_ast(before); println!(""); } for expr in exprs { print_ast(expr); } if let Some(after) = after { println!(""); println!("--after expression:"); print_ast(after); } }, } } /// Process standard input through given expressions, writing results to stdout. fn
(mode: InputMode, before: Option<&str>, exprs: &[&str], after: Option<&str>) -> io::Result<()> { // Prepare a Context for the processing. // This includes evaluating any "before" expression within it. let mut context = Context::new(); try!(rcfile::load_into(&mut context) .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, format!("Error processing startup file: {}", err)))); if let Some(before) = before { try!(rush::exec(before, &mut context)); } // Do the processing. // // If there is an "after" expression provided, it is that expression that should produce // the only output of the program. So we'll just consume whatever results would normally // be printed otherwise. if after.is_some() { // HACK: Because the intermediate results have to be printed out -- even if only to /dev/null // -- we have to ensure there is always a non-empty value to use as the intermediate result. // This is necessary especially since with --after (and --before), intermediate expressions // are likely to be just assignments (and the result of an assignment is empty). // // We can make sure there is always a value to print simply by adding one more expression // at the end of the chain. It so happens that zero (or any number) is compatible with // all the input modes, so let's use that. let mut exprs = exprs.to_vec(); exprs.push("0"); try!(apply_multi_ctx(mode, &mut context, &exprs, &mut io::sink())); } else { try!(apply_multi_ctx(mode, &mut context, exprs, &mut io::stdout())); } // Evaluate the "after" expression, if provided, and return it as the result. if let Some(after) = after { let result = try!(rush::eval(after, &mut context)); let result_string = try!(String::try_from(result) .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))); // Make it so that the output always ends with a newline, // regardless whether it consists of a single value or multiple lines. return if result_string.ends_with("\n") { write!(&mut io::stdout(), "{}", result_string) } else { write!(&mut io::stdout(), "{}\n", result_string) }; } Ok(()) } /// Apply the expressions to the standard input with given mode. /// This forms the bulk of the input processing. #[inline] fn apply_multi_ctx(mode: InputMode, context: &mut Context, exprs: &[&str], mut output: &mut Write) -> io::Result<()> { let func: fn(_, _, _, _) -> _ = match mode { InputMode::String => rush::apply_string_multi_ctx, InputMode::Lines => rush::map_lines_multi_ctx, InputMode::Words => rush::map_words_multi_ctx, InputMode::Chars => rush::map_chars_multi_ctx, InputMode::Bytes => rush::map_bytes_multi_ctx, InputMode::Files => rush::map_files_multi_ctx, }; func(context, exprs, io::stdin(), &mut output) } /// Handle an error that occurred while processing the input. fn handle_error(error: io::Error) { writeln!(&mut io::stderr(), "error: {}", error).unwrap(); // Print the error causes as an indented "tree". let mut cause = error.cause(); let mut indent = 0; while let Some(error) = cause { writeln!(&mut io::stderr(), "{}{}{}", repeat(" ").take(CAUSE_PREFIX.len() * indent).collect::<String>(), CAUSE_PREFIX, error).unwrap(); indent += 1; cause = error.cause(); } } const CAUSE_PREFIX: &'static str = "└ "; // U+2514 /// Print the AST for given expression to stdout. fn print_ast(expr: &str) { debug!("Printing the AST of: {}", expr); match rush::parse(expr) { Ok(ast) => println!("{:?}", ast), Err(error) => { error!("{:?}", error); exit(1); }, } }
process_input
identifier_name
main.rs
//! Module with the entry point of the binary. extern crate case; extern crate clap; extern crate conv; #[macro_use] extern crate log; extern crate rush; mod args; mod logging; mod rcfile; use std::error::Error; // for.cause() method use std::io::{self, Write}; use std::iter::repeat; use std::process::exit; use conv::TryFrom; use rush::Context; use args::InputMode; fn main() { logging::init().unwrap(); let opts = args::parse(); let before = opts.before.as_ref().map(|b| b as &str); let exprs: Vec<&str> = opts.expressions.iter().map(|e| e as &str).collect(); let after = opts.after.as_ref().map(|a| a as &str); match opts.input_mode { Some(mode) => { if let Err(error) = process_input(mode, before, &exprs, after) { handle_error(error); exit(1); } }, None => { if let Some(before) = before { println!("--before expression:"); print_ast(before); println!(""); } for expr in exprs { print_ast(expr); } if let Some(after) = after { println!(""); println!("--after expression:"); print_ast(after); } }, } } /// Process standard input through given expressions, writing results to stdout. fn process_input(mode: InputMode, before: Option<&str>, exprs: &[&str], after: Option<&str>) -> io::Result<()> { // Prepare a Context for the processing. // This includes evaluating any "before" expression within it. let mut context = Context::new(); try!(rcfile::load_into(&mut context) .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, format!("Error processing startup file: {}", err)))); if let Some(before) = before { try!(rush::exec(before, &mut context)); } // Do the processing. // // If there is an "after" expression provided, it is that expression that should produce // the only output of the program. So we'll just consume whatever results would normally // be printed otherwise. if after.is_some() { // HACK: Because the intermediate results have to be printed out -- even if only to /dev/null // -- we have to ensure there is always a non-empty value to use as the intermediate result. // This is necessary especially since with --after (and --before), intermediate expressions // are likely to be just assignments (and the result of an assignment is empty). // // We can make sure there is always a value to print simply by adding one more expression // at the end of the chain. It so happens that zero (or any number) is compatible with // all the input modes, so let's use that. let mut exprs = exprs.to_vec(); exprs.push("0"); try!(apply_multi_ctx(mode, &mut context, &exprs, &mut io::sink())); } else { try!(apply_multi_ctx(mode, &mut context, exprs, &mut io::stdout())); } // Evaluate the "after" expression, if provided, and return it as the result. if let Some(after) = after { let result = try!(rush::eval(after, &mut context)); let result_string = try!(String::try_from(result) .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))); // Make it so that the output always ends with a newline, // regardless whether it consists of a single value or multiple lines. return if result_string.ends_with("\n") { write!(&mut io::stdout(), "{}", result_string) } else { write!(&mut io::stdout(), "{}\n", result_string) }; } Ok(()) } /// Apply the expressions to the standard input with given mode. /// This forms the bulk of the input processing. #[inline] fn apply_multi_ctx(mode: InputMode, context: &mut Context, exprs: &[&str], mut output: &mut Write) -> io::Result<()>
/// Handle an error that occurred while processing the input. fn handle_error(error: io::Error) { writeln!(&mut io::stderr(), "error: {}", error).unwrap(); // Print the error causes as an indented "tree". let mut cause = error.cause(); let mut indent = 0; while let Some(error) = cause { writeln!(&mut io::stderr(), "{}{}{}", repeat(" ").take(CAUSE_PREFIX.len() * indent).collect::<String>(), CAUSE_PREFIX, error).unwrap(); indent += 1; cause = error.cause(); } } const CAUSE_PREFIX: &'static str = "└ "; // U+2514 /// Print the AST for given expression to stdout. fn print_ast(expr: &str) { debug!("Printing the AST of: {}", expr); match rush::parse(expr) { Ok(ast) => println!("{:?}", ast), Err(error) => { error!("{:?}", error); exit(1); }, } }
{ let func: fn(_, _, _, _) -> _ = match mode { InputMode::String => rush::apply_string_multi_ctx, InputMode::Lines => rush::map_lines_multi_ctx, InputMode::Words => rush::map_words_multi_ctx, InputMode::Chars => rush::map_chars_multi_ctx, InputMode::Bytes => rush::map_bytes_multi_ctx, InputMode::Files => rush::map_files_multi_ctx, }; func(context, exprs, io::stdin(), &mut output) }
identifier_body
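The `apply_multi_ctx` helper in the rows above picks a plain `fn` pointer by matching on an enum and then calls it uniformly. A minimal, self-contained sketch of that dispatch pattern — the `Mode`, `upper`, and `lower` names here are hypothetical and not part of rush:

#[derive(Clone, Copy)]
enum Mode { Upper, Lower }

fn upper(s: &str) -> String { s.to_uppercase() }
fn lower(s: &str) -> String { s.to_lowercase() }

fn apply(mode: Mode, input: &str) -> String {
    // Every arm has the same signature, so each name coerces to the one `fn` type.
    let func: fn(&str) -> String = match mode {
        Mode::Upper => upper,
        Mode::Lower => lower,
    };
    func(input)
}

fn main() {
    assert_eq!(apply(Mode::Upper, "rush"), "RUSH");
}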
config.rs
// In heavy WIP use std::fs::{ self, File }; use std::io::Read; use std::path::Path; use toml; use error::PackageError; use repository::RepositoryUrls; /// Represents the config file. #[derive(Debug, Deserialize)] pub struct Config { pub buffer_size: Option<u64>, // pub config_path: Option<String>, pub download_path: Option<String>, pub unpack_path: Option<String>, pub repository: Option<RepositoryUrls>, } impl Default for Config { fn default() -> Self { Self { buffer_size: Some(65536), // config_path: Some(String::from("~/.wiz/config/")), download_path: Some(String::from("~/.wiz/downloads/")), unpack_path: Some(String::from("~/.wiz/downloads/unpacked/")), repository: Some(RepositoryUrls(Vec::new())), } } } impl Config { /// Read the config file from the config path specified in `path`. /// If the config file is read & parsed properly, it should return /// a `Config`. pub fn read_from<P: AsRef<Path>>(path: P) -> Result<Self, PackageError> { let mut content = String::new(); // Check whether there are tons of configs in the path if path.as_ref().is_dir() { // Read every config files and put them into a string, if it is for entry in fs::read_dir(path)? { let entry = entry?; let mut config = File::open(entry.path())?; config.read_to_string(&mut content)?; } } else { // Read the config file into a string, if it isn't let mut config = File::open(path)?; config.read_to_string(&mut content)?; } // Try to parse the string, and convert it into a `Config`. let config = content.parse::<toml::Value>()?; let config = config.try_into::<Self>()?; Ok(config) } /// This function sets the None(s) in the config, to the default values. pub fn fill_with_default(mut self) -> Self
// Wait for RFC-2086 to be implemented first. /* [allow(irrefutable_let_pattern)] if let Config { $key: mut x,.. } = $dest { if let None = x { x = $value } } */ ); }; // If there are None(s), set them to the default value. set_on_none!(self, buffer_size, buf_size); set_on_none!(self, download_path, dl_path); set_on_none!(self, unpack_path, unpk_path); set_on_none!(self, repository, repo); self } }
{ // Destructuring the default configs into individual variables. let Self { buffer_size: buf_size, download_path: dl_path, unpack_path: unpk_path, repository: repo, } = Self::default(); /// Internal macro, to ease the implementation of `fill_with_default`. macro_rules! set_on_none { ($dest:ident, $key:ident, $value:expr) => ( match $dest { Self { $key: ref mut x, .. } => { match x { &mut Some(_) => {}, &mut None => *x = $value, } } }
identifier_body
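A hedged usage sketch of the `Config` API shown in the config.rs rows above — reading a config path and back-filling missing fields with defaults. The function name and path are illustrative (the path is just the default from the snippet), and the crate's `Config` and `PackageError` types are assumed to be in scope:

fn load_config() -> Result<Config, PackageError> {
    // Keys missing from the TOML deserialize as `None`; `fill_with_default`
    // then swaps each `None` for the corresponding value in `Config::default()`.
    let config = Config::read_from("~/.wiz/config/")?.fill_with_default();
    Ok(config)
}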
config.rs
// In heavy WIP use std::fs::{ self, File }; use std::io::Read; use std::path::Path; use toml; use error::PackageError; use repository::RepositoryUrls; /// Represents the config file. #[derive(Debug, Deserialize)] pub struct Config { pub buffer_size: Option<u64>, // pub config_path: Option<String>, pub download_path: Option<String>, pub unpack_path: Option<String>, pub repository: Option<RepositoryUrls>, } impl Default for Config { fn default() -> Self { Self { buffer_size: Some(65536), // config_path: Some(String::from("~/.wiz/config/")), download_path: Some(String::from("~/.wiz/downloads/")), unpack_path: Some(String::from("~/.wiz/downloads/unpacked/")), repository: Some(RepositoryUrls(Vec::new())), } } } impl Config { /// Read the config file from the config path specified in `path`. /// If the config file is read & parsed properly, it should return /// a `Config`. pub fn
<P: AsRef<Path>>(path: P) -> Result<Self, PackageError> { let mut content = String::new(); // Check whether there are tons of configs in the path if path.as_ref().is_dir() { // Read every config files and put them into a string, if it is for entry in fs::read_dir(path)? { let entry = entry?; let mut config = File::open(entry.path())?; config.read_to_string(&mut content)?; } } else { // Read the config file into a string, if it isn't let mut config = File::open(path)?; config.read_to_string(&mut content)?; } // Try to parse the string, and convert it into a `Config`. let config = content.parse::<toml::Value>()?; let config = config.try_into::<Self>()?; Ok(config) } /// This function sets the None(s) in the config, to the default values. pub fn fill_with_default(mut self) -> Self { // Destructuring the default configs into individual variables. let Self { buffer_size: buf_size, download_path: dl_path, unpack_path: unpk_path, repository: repo, } = Self::default(); /// Internal macro, to ease the implementation of `fill_with_default`. macro_rules! set_on_none { ($dest:ident, $key:ident, $value:expr) => ( match $dest { Self { $key: ref mut x,.. } => { match x { &mut Some(_) => {}, &mut None => *x = $value, } } } // Wait for RFC-2086 to be implemented first. /* [allow(irrefutable_let_pattern)] if let Config { $key: mut x,.. } = $dest { if let None = x { x = $value } } */ ); }; // If there are None(s), set them to the default value. set_on_none!(self, buffer_size, buf_size); set_on_none!(self, download_path, dl_path); set_on_none!(self, unpack_path, unpk_path); set_on_none!(self, repository, repo); self } }
read_from
identifier_name
config.rs
// In heavy WIP use std::fs::{ self, File }; use std::io::Read; use std::path::Path; use toml; use error::PackageError; use repository::RepositoryUrls; /// Represents the config file. #[derive(Debug, Deserialize)] pub struct Config { pub buffer_size: Option<u64>, // pub config_path: Option<String>, pub download_path: Option<String>, pub unpack_path: Option<String>, pub repository: Option<RepositoryUrls>, } impl Default for Config { fn default() -> Self { Self { buffer_size: Some(65536), // config_path: Some(String::from("~/.wiz/config/")), download_path: Some(String::from("~/.wiz/downloads/")), unpack_path: Some(String::from("~/.wiz/downloads/unpacked/")), repository: Some(RepositoryUrls(Vec::new())), } } } impl Config { /// Read the config file from the config path specified in `path`. /// If the config file is read & parsed properly, it should return /// a `Config`. pub fn read_from<P: AsRef<Path>>(path: P) -> Result<Self, PackageError> { let mut content = String::new(); // Check whether there are tons of configs in the path if path.as_ref().is_dir()
else { // Read the config file into a string, if it isn't let mut config = File::open(path)?; config.read_to_string(&mut content)?; } // Try to parse the string, and convert it into a `Config`. let config = content.parse::<toml::Value>()?; let config = config.try_into::<Self>()?; Ok(config) } /// This function sets the None(s) in the config, to the default values. pub fn fill_with_default(mut self) -> Self { // Destructuring the default configs into individual variables. let Self { buffer_size: buf_size, download_path: dl_path, unpack_path: unpk_path, repository: repo, } = Self::default(); /// Internal macro, to ease the implementation of `fill_with_default`. macro_rules! set_on_none { ($dest:ident, $key:ident, $value:expr) => ( match $dest { Self { $key: ref mut x,.. } => { match x { &mut Some(_) => {}, &mut None => *x = $value, } } } // Wait for RFC-2086 to be implemented first. /* [allow(irrefutable_let_pattern)] if let Config { $key: mut x,.. } = $dest { if let None = x { x = $value } } */ ); }; // If there are None(s), set them to the default value. set_on_none!(self, buffer_size, buf_size); set_on_none!(self, download_path, dl_path); set_on_none!(self, unpack_path, unpk_path); set_on_none!(self, repository, repo); self } }
{ // Read every config files and put them into a string, if it is for entry in fs::read_dir(path)? { let entry = entry?; let mut config = File::open(entry.path())?; config.read_to_string(&mut content)?; } }
conditional_block
config.rs
// In heavy WIP use std::fs::{ self, File }; use std::io::Read; use std::path::Path; use toml; use error::PackageError; use repository::RepositoryUrls; /// Represents the config file. #[derive(Debug, Deserialize)] pub struct Config { pub buffer_size: Option<u64>, // pub config_path: Option<String>, pub download_path: Option<String>, pub unpack_path: Option<String>, pub repository: Option<RepositoryUrls>, } impl Default for Config { fn default() -> Self { Self { buffer_size: Some(65536), // config_path: Some(String::from("~/.wiz/config/")), download_path: Some(String::from("~/.wiz/downloads/")), unpack_path: Some(String::from("~/.wiz/downloads/unpacked/")), repository: Some(RepositoryUrls(Vec::new())), } } } impl Config { /// Read the config file from the config path specified in `path`. /// If the config file is read & parsed properly, it should return /// a `Config`. pub fn read_from<P: AsRef<Path>>(path: P) -> Result<Self, PackageError> { let mut content = String::new(); // Check whether there are tons of configs in the path if path.as_ref().is_dir() { // Read every config files and put them into a string, if it is for entry in fs::read_dir(path)? { let entry = entry?; let mut config = File::open(entry.path())?; config.read_to_string(&mut content)?; } } else { // Read the config file into a string, if it isn't let mut config = File::open(path)?; config.read_to_string(&mut content)?; } // Try to parse the string, and convert it into a `Config`. let config = content.parse::<toml::Value>()?; let config = config.try_into::<Self>()?; Ok(config) } /// This function sets the None(s) in the config, to the default values. pub fn fill_with_default(mut self) -> Self { // Destructuring the default configs into individual variables. let Self { buffer_size: buf_size, download_path: dl_path, unpack_path: unpk_path, repository: repo, } = Self::default(); /// Internal macro, to ease the implementation of `fill_with_default`.
match $dest { Self { $key: ref mut x,.. } => { match x { &mut Some(_) => {}, &mut None => *x = $value, } } } // Wait for RFC-2086 to be implemented first. /* [allow(irrefutable_let_pattern)] if let Config { $key: mut x,.. } = $dest { if let None = x { x = $value } } */ ); }; // If there are None(s), set them to the default value. set_on_none!(self, buffer_size, buf_size); set_on_none!(self, download_path, dl_path); set_on_none!(self, unpack_path, unpk_path); set_on_none!(self, repository, repo); self } }
macro_rules! set_on_none { ($dest:ident, $key:ident, $value:expr) => (
random_line_split
lib.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![feature(macro_rules, plugin_registrar, quote, phase)] #![deny(unused_imports, unused_variable)] //! Exports macros for use in other Servo crates. extern crate syntax; #[phase(plugin, link)] extern crate rustc; #[cfg(test)] extern crate sync; use syntax::ast; use syntax::attr::AttrMetaMethods; use rustc::lint::{Context, LintPass, LintPassObject, LintArray}; use rustc::plugin::Registry; use rustc::middle::ty::expr_ty; use rustc::middle::{ty, def}; use rustc::middle::typeck::astconv::AstConv; use rustc::util::ppaux::Repr; declare_lint!(TRANSMUTE_TYPE_LINT, Allow, "Warn and report types being transmuted") declare_lint!(UNROOTED_MUST_ROOT, Deny, "Warn and report usage of unrooted jsmanaged objects") struct TransmutePass; struct UnrootedPass; impl LintPass for TransmutePass { fn get_lints(&self) -> LintArray { lint_array!(TRANSMUTE_TYPE_LINT) } fn check_expr(&mut self, cx: &Context, ex: &ast::Expr) { match ex.node { ast::ExprCall(ref expr, ref args) => { match expr.node { ast::ExprPath(ref path) => { if path.segments.last() .map_or(false, |ref segment| segment.identifier.name.as_str() == "transmute") && args.len() == 1 { let tcx = cx.tcx(); cx.span_lint(TRANSMUTE_TYPE_LINT, ex.span, format!("Transmute from {} to {} detected", expr_ty(tcx, ex).repr(tcx), expr_ty(tcx, &**args.get(0)).repr(tcx) ).as_slice()); } } _ => {} } } _ => {} } } } fn lint_unrooted_ty(cx: &Context, ty: &ast::Ty, warning: &str)
impl LintPass for UnrootedPass { fn get_lints(&self) -> LintArray { lint_array!(UNROOTED_MUST_ROOT) } fn check_struct_def(&mut self, cx: &Context, def: &ast::StructDef, _i: ast::Ident, _gen: &ast::Generics, id: ast::NodeId) { if cx.tcx.map.expect_item(id).attrs.iter().all(|a|!a.check_name("must_root")) { for ref field in def.fields.iter() { lint_unrooted_ty(cx, &*field.node.ty, "Type must be rooted, use #[must_root] on the struct definition to propagate"); } } } fn check_variant(&mut self, cx: &Context, var: &ast::Variant, _gen: &ast::Generics) { let ref map = cx.tcx.map; if map.expect_item(map.get_parent(var.node.id)).attrs.iter().all(|a|!a.check_name("must_root")) { match var.node.kind { ast::TupleVariantKind(ref vec) => { for ty in vec.iter() { lint_unrooted_ty(cx, &*ty.ty, "Type must be rooted, use #[must_root] on the enum definition to propagate") } } _ => () // Struct variants already caught by check_struct_def } } } fn check_fn(&mut self, cx: &Context, kind: syntax::visit::FnKind, decl: &ast::FnDecl, block: &ast::Block, _span: syntax::codemap::Span, _id: ast::NodeId) { match kind { syntax::visit::FkItemFn(i, _, _, _) | syntax::visit::FkMethod(i, _, _) if i.as_str() == "new" || i.as_str() == "new_inherited" => { return; } _ => () } match block.rules { ast::DefaultBlock => { for arg in decl.inputs.iter() { lint_unrooted_ty(cx, &*arg.ty, "Type must be rooted, use #[must_root] on the fn definition to propagate") } } _ => () // fn is `unsafe` } } // Partially copied from rustc::middle::lint::builtin // Catches `let` statements which store a #[must_root] value // Expressions which return out of blocks eventually end up in a `let` // statement or a function return (which will be caught when it is used elsewhere) fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { // Catch the let binding let expr = match s.node { ast::StmtDecl(ref decl, _) => match decl.node { ast::DeclLocal(ref loc) => match loc.init { Some(ref e) => &**e, _ => return }, _ => return }, _ => return }; let t = expr_ty(cx.tcx, &*expr); match ty::get(t).sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) => { if ty::has_attr(cx.tcx, did, "must_root") { cx.span_lint(UNROOTED_MUST_ROOT, expr.span, format!("Expression of type {} must be rooted", t.repr(cx.tcx)).as_slice()); } } _ => {} } } } #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_lint_pass(box TransmutePass as LintPassObject); reg.register_lint_pass(box UnrootedPass as LintPassObject); } #[macro_export] macro_rules! bitfield( ($bitfieldname:ident, $getter:ident, $setter:ident, $value:expr) => ( impl $bitfieldname { #[inline] pub fn $getter(self) -> bool { let $bitfieldname(this) = self; (this & $value)!= 0 } #[inline] pub fn $setter(&mut self, value: bool) { let $bitfieldname(this) = *self; *self = $bitfieldname((this &!$value) | (if value { $value } else { 0 })) } } ) )
{ match ty.node { ast::TyBox(ref t) | ast::TyUniq(ref t) | ast::TyVec(ref t) | ast::TyFixedLengthVec(ref t, _) | ast::TyPtr(ast::MutTy { ty: ref t, ..}) | ast::TyRptr(_, ast::MutTy { ty: ref t, ..}) => lint_unrooted_ty(cx, &**t, warning), ast::TyPath(_, _, id) => { match cx.tcx.def_map.borrow().get_copy(&id) { def::DefTy(def_id) => { if ty::has_attr(cx.tcx, def_id, "must_root") { cx.span_lint(UNROOTED_MUST_ROOT, ty.span, warning); } } _ => (), } } _ => (), }; }
identifier_body
lib.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![feature(macro_rules, plugin_registrar, quote, phase)] #![deny(unused_imports, unused_variable)] //! Exports macros for use in other Servo crates. extern crate syntax; #[phase(plugin, link)] extern crate rustc; #[cfg(test)] extern crate sync; use syntax::ast; use syntax::attr::AttrMetaMethods; use rustc::lint::{Context, LintPass, LintPassObject, LintArray}; use rustc::plugin::Registry; use rustc::middle::ty::expr_ty; use rustc::middle::{ty, def}; use rustc::middle::typeck::astconv::AstConv; use rustc::util::ppaux::Repr; declare_lint!(TRANSMUTE_TYPE_LINT, Allow, "Warn and report types being transmuted") declare_lint!(UNROOTED_MUST_ROOT, Deny, "Warn and report usage of unrooted jsmanaged objects") struct TransmutePass; struct UnrootedPass; impl LintPass for TransmutePass { fn get_lints(&self) -> LintArray { lint_array!(TRANSMUTE_TYPE_LINT) } fn check_expr(&mut self, cx: &Context, ex: &ast::Expr) { match ex.node { ast::ExprCall(ref expr, ref args) => { match expr.node { ast::ExprPath(ref path) => { if path.segments.last() .map_or(false, |ref segment| segment.identifier.name.as_str() == "transmute") && args.len() == 1 { let tcx = cx.tcx(); cx.span_lint(TRANSMUTE_TYPE_LINT, ex.span, format!("Transmute from {} to {} detected", expr_ty(tcx, ex).repr(tcx), expr_ty(tcx, &**args.get(0)).repr(tcx) ).as_slice()); } } _ => {} } } _ => {} } } } fn lint_unrooted_ty(cx: &Context, ty: &ast::Ty, warning: &str) { match ty.node { ast::TyBox(ref t) | ast::TyUniq(ref t) | ast::TyVec(ref t) | ast::TyFixedLengthVec(ref t, _) | ast::TyPtr(ast::MutTy { ty: ref t,..}) | ast::TyRptr(_, ast::MutTy { ty: ref t,..}) => lint_unrooted_ty(cx, &**t, warning), ast::TyPath(_, _, id) => { match cx.tcx.def_map.borrow().get_copy(&id) { def::DefTy(def_id) => { if ty::has_attr(cx.tcx, def_id, "must_root") { cx.span_lint(UNROOTED_MUST_ROOT, ty.span, warning); } } _ => (), } } _ => (), }; } impl LintPass for UnrootedPass { fn get_lints(&self) -> LintArray { lint_array!(UNROOTED_MUST_ROOT) } fn check_struct_def(&mut self, cx: &Context, def: &ast::StructDef, _i: ast::Ident, _gen: &ast::Generics, id: ast::NodeId) { if cx.tcx.map.expect_item(id).attrs.iter().all(|a|!a.check_name("must_root")) { for ref field in def.fields.iter() { lint_unrooted_ty(cx, &*field.node.ty,
fn check_variant(&mut self, cx: &Context, var: &ast::Variant, _gen: &ast::Generics) { let ref map = cx.tcx.map; if map.expect_item(map.get_parent(var.node.id)).attrs.iter().all(|a|!a.check_name("must_root")) { match var.node.kind { ast::TupleVariantKind(ref vec) => { for ty in vec.iter() { lint_unrooted_ty(cx, &*ty.ty, "Type must be rooted, use #[must_root] on the enum definition to propagate") } } _ => () // Struct variants already caught by check_struct_def } } } fn check_fn(&mut self, cx: &Context, kind: syntax::visit::FnKind, decl: &ast::FnDecl, block: &ast::Block, _span: syntax::codemap::Span, _id: ast::NodeId) { match kind { syntax::visit::FkItemFn(i, _, _, _) | syntax::visit::FkMethod(i, _, _) if i.as_str() == "new" || i.as_str() == "new_inherited" => { return; } _ => () } match block.rules { ast::DefaultBlock => { for arg in decl.inputs.iter() { lint_unrooted_ty(cx, &*arg.ty, "Type must be rooted, use #[must_root] on the fn definition to propagate") } } _ => () // fn is `unsafe` } } // Partially copied from rustc::middle::lint::builtin // Catches `let` statements which store a #[must_root] value // Expressions which return out of blocks eventually end up in a `let` // statement or a function return (which will be caught when it is used elsewhere) fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { // Catch the let binding let expr = match s.node { ast::StmtDecl(ref decl, _) => match decl.node { ast::DeclLocal(ref loc) => match loc.init { Some(ref e) => &**e, _ => return }, _ => return }, _ => return }; let t = expr_ty(cx.tcx, &*expr); match ty::get(t).sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) => { if ty::has_attr(cx.tcx, did, "must_root") { cx.span_lint(UNROOTED_MUST_ROOT, expr.span, format!("Expression of type {} must be rooted", t.repr(cx.tcx)).as_slice()); } } _ => {} } } } #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_lint_pass(box TransmutePass as LintPassObject); reg.register_lint_pass(box UnrootedPass as LintPassObject); } #[macro_export] macro_rules! bitfield( ($bitfieldname:ident, $getter:ident, $setter:ident, $value:expr) => ( impl $bitfieldname { #[inline] pub fn $getter(self) -> bool { let $bitfieldname(this) = self; (this & $value)!= 0 } #[inline] pub fn $setter(&mut self, value: bool) { let $bitfieldname(this) = *self; *self = $bitfieldname((this &!$value) | (if value { $value } else { 0 })) } } ) )
"Type must be rooted, use #[must_root] on the struct definition to propagate"); } } }
random_line_split
lib.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![feature(macro_rules, plugin_registrar, quote, phase)] #![deny(unused_imports, unused_variable)] //! Exports macros for use in other Servo crates. extern crate syntax; #[phase(plugin, link)] extern crate rustc; #[cfg(test)] extern crate sync; use syntax::ast; use syntax::attr::AttrMetaMethods; use rustc::lint::{Context, LintPass, LintPassObject, LintArray}; use rustc::plugin::Registry; use rustc::middle::ty::expr_ty; use rustc::middle::{ty, def}; use rustc::middle::typeck::astconv::AstConv; use rustc::util::ppaux::Repr; declare_lint!(TRANSMUTE_TYPE_LINT, Allow, "Warn and report types being transmuted") declare_lint!(UNROOTED_MUST_ROOT, Deny, "Warn and report usage of unrooted jsmanaged objects") struct TransmutePass; struct UnrootedPass; impl LintPass for TransmutePass { fn get_lints(&self) -> LintArray { lint_array!(TRANSMUTE_TYPE_LINT) } fn check_expr(&mut self, cx: &Context, ex: &ast::Expr) { match ex.node { ast::ExprCall(ref expr, ref args) => { match expr.node { ast::ExprPath(ref path) => { if path.segments.last() .map_or(false, |ref segment| segment.identifier.name.as_str() == "transmute") && args.len() == 1 { let tcx = cx.tcx(); cx.span_lint(TRANSMUTE_TYPE_LINT, ex.span, format!("Transmute from {} to {} detected", expr_ty(tcx, ex).repr(tcx), expr_ty(tcx, &**args.get(0)).repr(tcx) ).as_slice()); } } _ => {} } } _ => {} } } } fn lint_unrooted_ty(cx: &Context, ty: &ast::Ty, warning: &str) { match ty.node { ast::TyBox(ref t) | ast::TyUniq(ref t) | ast::TyVec(ref t) | ast::TyFixedLengthVec(ref t, _) | ast::TyPtr(ast::MutTy { ty: ref t,..}) | ast::TyRptr(_, ast::MutTy { ty: ref t,..}) => lint_unrooted_ty(cx, &**t, warning), ast::TyPath(_, _, id) => { match cx.tcx.def_map.borrow().get_copy(&id) { def::DefTy(def_id) => { if ty::has_attr(cx.tcx, def_id, "must_root") { cx.span_lint(UNROOTED_MUST_ROOT, ty.span, warning); } } _ => (), } } _ => (), }; } impl LintPass for UnrootedPass { fn get_lints(&self) -> LintArray { lint_array!(UNROOTED_MUST_ROOT) } fn check_struct_def(&mut self, cx: &Context, def: &ast::StructDef, _i: ast::Ident, _gen: &ast::Generics, id: ast::NodeId) { if cx.tcx.map.expect_item(id).attrs.iter().all(|a|!a.check_name("must_root")) { for ref field in def.fields.iter() { lint_unrooted_ty(cx, &*field.node.ty, "Type must be rooted, use #[must_root] on the struct definition to propagate"); } } } fn check_variant(&mut self, cx: &Context, var: &ast::Variant, _gen: &ast::Generics) { let ref map = cx.tcx.map; if map.expect_item(map.get_parent(var.node.id)).attrs.iter().all(|a|!a.check_name("must_root")) { match var.node.kind { ast::TupleVariantKind(ref vec) => { for ty in vec.iter() { lint_unrooted_ty(cx, &*ty.ty, "Type must be rooted, use #[must_root] on the enum definition to propagate") } } _ => () // Struct variants already caught by check_struct_def } } } fn check_fn(&mut self, cx: &Context, kind: syntax::visit::FnKind, decl: &ast::FnDecl, block: &ast::Block, _span: syntax::codemap::Span, _id: ast::NodeId) { match kind { syntax::visit::FkItemFn(i, _, _, _) | syntax::visit::FkMethod(i, _, _) if i.as_str() == "new" || i.as_str() == "new_inherited" => { return; } _ => () } match block.rules { ast::DefaultBlock => { for arg in decl.inputs.iter() { lint_unrooted_ty(cx, &*arg.ty, "Type must be rooted, use #[must_root] on the fn definition to propagate") } } _ => () // fn is `unsafe` 
} } // Partially copied from rustc::middle::lint::builtin // Catches `let` statements which store a #[must_root] value // Expressions which return out of blocks eventually end up in a `let` // statement or a function return (which will be caught when it is used elsewhere) fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { // Catch the let binding let expr = match s.node { ast::StmtDecl(ref decl, _) => match decl.node { ast::DeclLocal(ref loc) => match loc.init { Some(ref e) => &**e, _ => return }, _ => return }, _ => return }; let t = expr_ty(cx.tcx, &*expr); match ty::get(t).sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) => { if ty::has_attr(cx.tcx, did, "must_root") { cx.span_lint(UNROOTED_MUST_ROOT, expr.span, format!("Expression of type {} must be rooted", t.repr(cx.tcx)).as_slice()); } } _ => {} } } } #[plugin_registrar] pub fn
(reg: &mut Registry) { reg.register_lint_pass(box TransmutePass as LintPassObject); reg.register_lint_pass(box UnrootedPass as LintPassObject); } #[macro_export] macro_rules! bitfield( ($bitfieldname:ident, $getter:ident, $setter:ident, $value:expr) => ( impl $bitfieldname { #[inline] pub fn $getter(self) -> bool { let $bitfieldname(this) = self; (this & $value)!= 0 } #[inline] pub fn $setter(&mut self, value: bool) { let $bitfieldname(this) = *self; *self = $bitfieldname((this &!$value) | (if value { $value } else { 0 })) } } ) )
plugin_registrar
identifier_name
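For context, the `bitfield!` macro at the end of the lib.rs rows above generates a boolean getter/setter pair over a one-field tuple struct. A schematic usage sketch — the `NodeFlags` name and the `0x01` mask are invented here, and since the snippet targets a 2014-era compiler, the invocation may need tweaks (trailing semicolon, explicit `Copy` derive) on a modern toolchain:

#[derive(Clone, Copy)]
struct NodeFlags(u32); // the macro destructures this one-field tuple struct

bitfield!(NodeFlags, is_dirty, set_dirty, 0x01);

fn example() {
    let mut flags = NodeFlags(0);
    flags.set_dirty(true);      // sets bit 0x01
    assert!(flags.is_dirty());  // reads bit 0x01
    flags.set_dirty(false);
    assert!(!flags.is_dirty());
}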
mod.rs
pub mod counts; mod hashing; pub mod mash; pub mod scaled; use needletail::parser::SequenceRecord; use serde::{Deserialize, Serialize}; use crate::bail; use crate::errors::FinchResult; use crate::filtering::FilterParams; use crate::serialization::Sketch; pub use hashing::ItemHash; #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Hash, Serialize)] pub struct KmerCount { pub hash: ItemHash, pub kmer: Vec<u8>, pub count: u32, pub extra_count: u32, pub label: Option<Vec<u8>>, } pub trait SketchScheme { fn process(&mut self, seq: SequenceRecord); fn total_bases_and_kmers(&self) -> (u64, u64); fn to_vec(&self) -> Vec<KmerCount>; fn parameters(&self) -> SketchParams; fn to_sketch(&self) -> Sketch { // TODO: maybe this should be the primary teardown method for // sketching and sketch_stream should wrap it? // TODO: this doesn't really use filtering // TODO: also the pass-through for the post-filtering trimming is // weird for SketchParams::Mash let (seq_length, num_valid_kmers) = self.total_bases_and_kmers(); let hashes = self.to_vec(); Sketch { name: "".to_string(), seq_length, num_valid_kmers, comment: "".to_string(), hashes, filter_params: FilterParams::default(), sketch_params: self.parameters(), } } } #[derive(Clone, Debug, PartialEq)] pub enum SketchParams { Mash { kmers_to_sketch: usize, final_size: usize, no_strict: bool, kmer_length: u8, hash_seed: u64, }, Scaled { kmers_to_sketch: usize, kmer_length: u8, scale: f64, hash_seed: u64, }, AllCounts { kmer_length: u8, }, } impl Default for SketchParams { fn default() -> Self { SketchParams::Mash { kmers_to_sketch: 1000, final_size: 1000, no_strict: false, kmer_length: 21, hash_seed: 0, } } } impl SketchParams { pub fn create_sketcher(&self) -> Box<dyn SketchScheme> { match self { SketchParams::Mash { kmers_to_sketch, kmer_length, hash_seed, .. } => Box::new(mash::MashSketcher::new( *kmers_to_sketch, *kmer_length, *hash_seed, )), SketchParams::Scaled { kmers_to_sketch, kmer_length, scale, hash_seed, } => Box::new(scaled::ScaledSketcher::new( *kmers_to_sketch, *scale, *kmer_length, *hash_seed, )), SketchParams::AllCounts { kmer_length } => { Box::new(counts::AllCountsSketcher::new(*kmer_length)) } } } pub fn process_post_filter(&self, kmers: &mut Vec<KmerCount>, name: &str) -> FinchResult<()> { if let SketchParams::Mash { final_size, no_strict, .. } = self { kmers.truncate(*final_size); if!no_strict && kmers.len() < *final_size { bail!("{} had too few kmers ({}) to sketch", name, kmers.len(),); } } Ok(()) } pub fn k(&self) -> u8 { match self { SketchParams::Mash { kmer_length,.. } => *kmer_length, SketchParams::Scaled { kmer_length,.. } => *kmer_length, SketchParams::AllCounts { kmer_length,.. } => *kmer_length, } } pub fn hash_info(&self) -> (&str, u16, u64, Option<f64>) { match self { SketchParams::Mash { hash_seed,.. } => ("MurmurHash3_x64_128", 64, *hash_seed, None), SketchParams::Scaled { hash_seed, scale,.. } => ("MurmurHash3_x64_128", 64, *hash_seed, Some(*scale)), SketchParams::AllCounts {.. } => ("None", 0, 0, None), } } pub fn expected_size(&self) -> usize { match self { SketchParams::Mash { final_size,.. } => *final_size, SketchParams::Scaled { kmers_to_sketch,.. } => *kmers_to_sketch, SketchParams::AllCounts { kmer_length,.. 
} => 4usize.pow(u32::from(*kmer_length)), } } pub fn from_sketches(sketches: &[Sketch]) -> FinchResult<Self> { let first_params = sketches[0].sketch_params.clone(); for (ix, sketch) in sketches.iter().enumerate().skip(1) { let params = &sketch.sketch_params; if let Some((mismatched_param, v1, v2)) = first_params.check_compatibility(&params) { bail!( "First sketch has {} {}, but sketch {} has {0} {}", mismatched_param, v1, ix + 1, v2, ); } // TODO: harmonize scaled/non-scaled sketches?
Ok(first_params) } /// Return any sketch parameter difference that would make comparisons /// between sketches generated by these parameter sets not work. /// /// Note this doesn't actually check the enum variants themselves, but it /// should still break if there are different variants because the hash /// types should be different. pub fn check_compatibility(&self, other: &SketchParams) -> Option<(&str, String, String)> { if self.k()!= other.k() { return Some(("k", self.k().to_string(), other.k().to_string())); } if self.hash_info().0!= other.hash_info().0 { return Some(( "hash type", self.hash_info().0.to_string(), other.hash_info().0.to_string(), )); } if self.hash_info().1!= other.hash_info().1 { return Some(( "hash bits", self.hash_info().1.to_string(), other.hash_info().1.to_string(), )); } if self.hash_info().2!= other.hash_info().2 { return Some(( "hash seed", self.hash_info().2.to_string(), other.hash_info().2.to_string(), )); } None } }
// TODO: harmonize sketch sizes? // TODO: do something with no_strict and final_size }

random_line_split
mod.rs
pub mod counts; mod hashing; pub mod mash; pub mod scaled; use needletail::parser::SequenceRecord; use serde::{Deserialize, Serialize}; use crate::bail; use crate::errors::FinchResult; use crate::filtering::FilterParams; use crate::serialization::Sketch; pub use hashing::ItemHash; #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Hash, Serialize)] pub struct KmerCount { pub hash: ItemHash, pub kmer: Vec<u8>, pub count: u32, pub extra_count: u32, pub label: Option<Vec<u8>>, } pub trait SketchScheme { fn process(&mut self, seq: SequenceRecord); fn total_bases_and_kmers(&self) -> (u64, u64); fn to_vec(&self) -> Vec<KmerCount>; fn parameters(&self) -> SketchParams; fn to_sketch(&self) -> Sketch { // TODO: maybe this should be the primary teardown method for // sketching and sketch_stream should wrap it? // TODO: this doesn't really use filtering // TODO: also the pass-through for the post-filtering trimming is // weird for SketchParams::Mash let (seq_length, num_valid_kmers) = self.total_bases_and_kmers(); let hashes = self.to_vec(); Sketch { name: "".to_string(), seq_length, num_valid_kmers, comment: "".to_string(), hashes, filter_params: FilterParams::default(), sketch_params: self.parameters(), } } } #[derive(Clone, Debug, PartialEq)] pub enum SketchParams { Mash { kmers_to_sketch: usize, final_size: usize, no_strict: bool, kmer_length: u8, hash_seed: u64, }, Scaled { kmers_to_sketch: usize, kmer_length: u8, scale: f64, hash_seed: u64, }, AllCounts { kmer_length: u8, }, } impl Default for SketchParams { fn default() -> Self { SketchParams::Mash { kmers_to_sketch: 1000, final_size: 1000, no_strict: false, kmer_length: 21, hash_seed: 0, } } } impl SketchParams { pub fn create_sketcher(&self) -> Box<dyn SketchScheme> { match self { SketchParams::Mash { kmers_to_sketch, kmer_length, hash_seed, .. } => Box::new(mash::MashSketcher::new( *kmers_to_sketch, *kmer_length, *hash_seed, )), SketchParams::Scaled { kmers_to_sketch, kmer_length, scale, hash_seed, } => Box::new(scaled::ScaledSketcher::new( *kmers_to_sketch, *scale, *kmer_length, *hash_seed, )), SketchParams::AllCounts { kmer_length } => { Box::new(counts::AllCountsSketcher::new(*kmer_length)) } } } pub fn process_post_filter(&self, kmers: &mut Vec<KmerCount>, name: &str) -> FinchResult<()> { if let SketchParams::Mash { final_size, no_strict, .. } = self { kmers.truncate(*final_size); if!no_strict && kmers.len() < *final_size { bail!("{} had too few kmers ({}) to sketch", name, kmers.len(),); } } Ok(()) } pub fn k(&self) -> u8 { match self { SketchParams::Mash { kmer_length,.. } => *kmer_length, SketchParams::Scaled { kmer_length,.. } => *kmer_length, SketchParams::AllCounts { kmer_length,.. } => *kmer_length, } } pub fn
(&self) -> (&str, u16, u64, Option<f64>) { match self { SketchParams::Mash { hash_seed,.. } => ("MurmurHash3_x64_128", 64, *hash_seed, None), SketchParams::Scaled { hash_seed, scale,.. } => ("MurmurHash3_x64_128", 64, *hash_seed, Some(*scale)), SketchParams::AllCounts {.. } => ("None", 0, 0, None), } } pub fn expected_size(&self) -> usize { match self { SketchParams::Mash { final_size,.. } => *final_size, SketchParams::Scaled { kmers_to_sketch,.. } => *kmers_to_sketch, SketchParams::AllCounts { kmer_length,.. } => 4usize.pow(u32::from(*kmer_length)), } } pub fn from_sketches(sketches: &[Sketch]) -> FinchResult<Self> { let first_params = sketches[0].sketch_params.clone(); for (ix, sketch) in sketches.iter().enumerate().skip(1) { let params = &sketch.sketch_params; if let Some((mismatched_param, v1, v2)) = first_params.check_compatibility(&params) { bail!( "First sketch has {} {}, but sketch {} has {0} {}", mismatched_param, v1, ix + 1, v2, ); } // TODO: harmonize scaled/non-scaled sketches? // TODO: harminize sketch sizes? // TODO: do something with no_strict and final_size } Ok(first_params) } /// Return any sketch parameter difference that would make comparisons /// between sketches generated by these parameter sets not work. /// /// Note this doesn't actually check the enum variants themselves, but it /// should still break if there are different variants because the hash /// types should be different. pub fn check_compatibility(&self, other: &SketchParams) -> Option<(&str, String, String)> { if self.k()!= other.k() { return Some(("k", self.k().to_string(), other.k().to_string())); } if self.hash_info().0!= other.hash_info().0 { return Some(( "hash type", self.hash_info().0.to_string(), other.hash_info().0.to_string(), )); } if self.hash_info().1!= other.hash_info().1 { return Some(( "hash bits", self.hash_info().1.to_string(), other.hash_info().1.to_string(), )); } if self.hash_info().2!= other.hash_info().2 { return Some(( "hash seed", self.hash_info().2.to_string(), other.hash_info().2.to_string(), )); } None } }
hash_info
identifier_name
mod.rs
pub mod counts; mod hashing; pub mod mash; pub mod scaled; use needletail::parser::SequenceRecord; use serde::{Deserialize, Serialize}; use crate::bail; use crate::errors::FinchResult; use crate::filtering::FilterParams; use crate::serialization::Sketch; pub use hashing::ItemHash; #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Hash, Serialize)] pub struct KmerCount { pub hash: ItemHash, pub kmer: Vec<u8>, pub count: u32, pub extra_count: u32, pub label: Option<Vec<u8>>, } pub trait SketchScheme { fn process(&mut self, seq: SequenceRecord); fn total_bases_and_kmers(&self) -> (u64, u64); fn to_vec(&self) -> Vec<KmerCount>; fn parameters(&self) -> SketchParams; fn to_sketch(&self) -> Sketch { // TODO: maybe this should be the primary teardown method for // sketching and sketch_stream should wrap it? // TODO: this doesn't really use filtering // TODO: also the pass-through for the post-filtering trimming is // weird for SketchParams::Mash let (seq_length, num_valid_kmers) = self.total_bases_and_kmers(); let hashes = self.to_vec(); Sketch { name: "".to_string(), seq_length, num_valid_kmers, comment: "".to_string(), hashes, filter_params: FilterParams::default(), sketch_params: self.parameters(), } } } #[derive(Clone, Debug, PartialEq)] pub enum SketchParams { Mash { kmers_to_sketch: usize, final_size: usize, no_strict: bool, kmer_length: u8, hash_seed: u64, }, Scaled { kmers_to_sketch: usize, kmer_length: u8, scale: f64, hash_seed: u64, }, AllCounts { kmer_length: u8, }, } impl Default for SketchParams { fn default() -> Self { SketchParams::Mash { kmers_to_sketch: 1000, final_size: 1000, no_strict: false, kmer_length: 21, hash_seed: 0, } } } impl SketchParams { pub fn create_sketcher(&self) -> Box<dyn SketchScheme>
*kmer_length, *hash_seed, )), SketchParams::AllCounts { kmer_length } => { Box::new(counts::AllCountsSketcher::new(*kmer_length)) } } } pub fn process_post_filter(&self, kmers: &mut Vec<KmerCount>, name: &str) -> FinchResult<()> { if let SketchParams::Mash { final_size, no_strict, .. } = self { kmers.truncate(*final_size); if!no_strict && kmers.len() < *final_size { bail!("{} had too few kmers ({}) to sketch", name, kmers.len(),); } } Ok(()) } pub fn k(&self) -> u8 { match self { SketchParams::Mash { kmer_length,.. } => *kmer_length, SketchParams::Scaled { kmer_length,.. } => *kmer_length, SketchParams::AllCounts { kmer_length,.. } => *kmer_length, } } pub fn hash_info(&self) -> (&str, u16, u64, Option<f64>) { match self { SketchParams::Mash { hash_seed,.. } => ("MurmurHash3_x64_128", 64, *hash_seed, None), SketchParams::Scaled { hash_seed, scale,.. } => ("MurmurHash3_x64_128", 64, *hash_seed, Some(*scale)), SketchParams::AllCounts {.. } => ("None", 0, 0, None), } } pub fn expected_size(&self) -> usize { match self { SketchParams::Mash { final_size,.. } => *final_size, SketchParams::Scaled { kmers_to_sketch,.. } => *kmers_to_sketch, SketchParams::AllCounts { kmer_length,.. } => 4usize.pow(u32::from(*kmer_length)), } } pub fn from_sketches(sketches: &[Sketch]) -> FinchResult<Self> { let first_params = sketches[0].sketch_params.clone(); for (ix, sketch) in sketches.iter().enumerate().skip(1) { let params = &sketch.sketch_params; if let Some((mismatched_param, v1, v2)) = first_params.check_compatibility(&params) { bail!( "First sketch has {} {}, but sketch {} has {0} {}", mismatched_param, v1, ix + 1, v2, ); } // TODO: harmonize scaled/non-scaled sketches? // TODO: harminize sketch sizes? // TODO: do something with no_strict and final_size } Ok(first_params) } /// Return any sketch parameter difference that would make comparisons /// between sketches generated by these parameter sets not work. /// /// Note this doesn't actually check the enum variants themselves, but it /// should still break if there are different variants because the hash /// types should be different. pub fn check_compatibility(&self, other: &SketchParams) -> Option<(&str, String, String)> { if self.k()!= other.k() { return Some(("k", self.k().to_string(), other.k().to_string())); } if self.hash_info().0!= other.hash_info().0 { return Some(( "hash type", self.hash_info().0.to_string(), other.hash_info().0.to_string(), )); } if self.hash_info().1!= other.hash_info().1 { return Some(( "hash bits", self.hash_info().1.to_string(), other.hash_info().1.to_string(), )); } if self.hash_info().2!= other.hash_info().2 { return Some(( "hash seed", self.hash_info().2.to_string(), other.hash_info().2.to_string(), )); } None } }
{ match self { SketchParams::Mash { kmers_to_sketch, kmer_length, hash_seed, .. } => Box::new(mash::MashSketcher::new( *kmers_to_sketch, *kmer_length, *hash_seed, )), SketchParams::Scaled { kmers_to_sketch, kmer_length, scale, hash_seed, } => Box::new(scaled::ScaledSketcher::new( *kmers_to_sketch, *scale,
identifier_body
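A rough usage sketch of the `SketchParams` API from the mod.rs rows above — building a sketcher from the default Mash parameters and checking compatibility against a scaled parameter set. The concrete values (scale 0.001, seed 0) are illustrative, and the crate's `SketchParams`/`SketchScheme` types are assumed to be in scope:

fn example() {
    let mash = SketchParams::default(); // k = 21, 1000 hashes, seed 0
    let scaled = SketchParams::Scaled {
        kmers_to_sketch: 1000,
        kmer_length: 21,
        scale: 0.001,
        hash_seed: 0,
    };
    // Same k, hash type, hash bits, and seed, so no mismatched parameter is reported.
    assert!(mash.check_compatibility(&scaled).is_none());

    // `create_sketcher` returns a boxed `SketchScheme` ready to consume sequence records.
    let _sketcher = mash.create_sketcher();
}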
htmltablecaptionelement.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::codegen::Bindings::HTMLTableCaptionElementBinding; use crate::dom::bindings::root::DomRoot; use crate::dom::document::Document; use crate::dom::htmlelement::HTMLElement; use crate::dom::node::Node; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; #[dom_struct] pub struct HTMLTableCaptionElement { htmlelement: HTMLElement, }
document: &Document, ) -> HTMLTableCaptionElement { HTMLTableCaptionElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document), } } #[allow(unrooted_must_root)] pub fn new( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> DomRoot<HTMLTableCaptionElement> { Node::reflect_node( Box::new(HTMLTableCaptionElement::new_inherited( local_name, prefix, document, )), document, HTMLTableCaptionElementBinding::Wrap, ) } }
impl HTMLTableCaptionElement { fn new_inherited( local_name: LocalName, prefix: Option<Prefix>,
random_line_split
htmltablecaptionelement.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::codegen::Bindings::HTMLTableCaptionElementBinding; use crate::dom::bindings::root::DomRoot; use crate::dom::document::Document; use crate::dom::htmlelement::HTMLElement; use crate::dom::node::Node; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; #[dom_struct] pub struct HTMLTableCaptionElement { htmlelement: HTMLElement, } impl HTMLTableCaptionElement { fn new_inherited( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> HTMLTableCaptionElement { HTMLTableCaptionElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document), } } #[allow(unrooted_must_root)] pub fn
( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> DomRoot<HTMLTableCaptionElement> { Node::reflect_node( Box::new(HTMLTableCaptionElement::new_inherited( local_name, prefix, document, )), document, HTMLTableCaptionElementBinding::Wrap, ) } }
new
identifier_name
main.rs
#![feature(async_await)] use futures_util::stream::StreamExt; use std::env; use std::io::*; use std::process::Stdio; use tokio::codec::{FramedRead, LinesCodec}; use tokio::prelude::*; use tokio::process::{Child, Command}; const USAGE: &str = "args: config_file"; struct Qemu { pub process: Child, } impl Qemu { fn new(disk_file: &str) -> Self { let cmd: &str = &vec![ "qemu-system-x86_64.exe", "-m", "4G", "-no-reboot", "-no-shutdown", "-drive", &format!("file={},format=raw,if=ide", disk_file), "-monitor", "stdio", "-s", "-S", ] .join(" "); let process = Command::new("sh") .args(&["-c", cmd]) .stdin(Stdio::piped()) .stdout(Stdio::null()) .stderr(Stdio::null()) .spawn() .expect("Unable to start qemu"); Self { process } } fn
(mut self) { { self.process .stdin() .as_mut() .unwrap() .write_all(b"q\n") .unwrap(); } let ecode = self.process.wait().expect("failed to wait on child"); assert!(ecode.success()); } } struct Gdb { pub process: Child, stdout: FramedRead<Vec<u8>, LinesCodec>, } impl Gdb { fn new() -> Self { let process = Command::new("gdb") .stdin(Stdio::piped()) .stdout(Stdio::null()) //.stderr(Stdio::null()) .spawn() .expect("Unable to start gdb"); let stdout = process.stdout().take().unwrap(); Self { process, stdout: FramedRead::new(stdout, LinesCodec::new()), } } fn read(&mut self) -> Vec<u8> { let mut result = Vec::new(); self.process .stdout .as_mut() .unwrap() .read_to_end(&mut result) .unwrap(); result } fn write(&mut self, bytes: &[u8]) { self.process .stdin .as_mut() .unwrap() .write_all(bytes) .unwrap(); } fn start(&mut self) {} fn terminate(mut self) { self.write(b"q\n"); let ecode = self.process.wait().expect("failed to wait on child"); assert!(ecode.success()); } } #[tokio::main] async fn main() { let _args: Vec<_> = env::args().skip(1).collect(); let mut qemu = Qemu::new("build/test_disk.img"); let mut gdb = Gdb::new(); gdb.start(); std::thread::sleep_ms(1000); gdb.terminate(); qemu.terminate(); println!("DONE") }
terminate
identifier_name
main.rs
#![feature(async_await)] use futures_util::stream::StreamExt; use std::env; use std::io::*; use std::process::Stdio; use tokio::codec::{FramedRead, LinesCodec}; use tokio::prelude::*; use tokio::process::{Child, Command}; const USAGE: &str = "args: config_file"; struct Qemu { pub process: Child, } impl Qemu { fn new(disk_file: &str) -> Self { let cmd: &str = &vec![ "qemu-system-x86_64.exe", "-m", "4G", "-no-reboot", "-no-shutdown", "-drive", &format!("file={},format=raw,if=ide", disk_file), "-monitor", "stdio", "-s", "-S", ] .join(" "); let process = Command::new("sh") .args(&["-c", cmd]) .stdin(Stdio::piped()) .stdout(Stdio::null()) .stderr(Stdio::null()) .spawn() .expect("Unable to start qemu"); Self { process } } fn terminate(mut self) { { self.process .stdin() .as_mut() .unwrap() .write_all(b"q\n") .unwrap(); } let ecode = self.process.wait().expect("failed to wait on child"); assert!(ecode.success()); } } struct Gdb { pub process: Child, stdout: FramedRead<Vec<u8>, LinesCodec>, } impl Gdb { fn new() -> Self
fn read(&mut self) -> Vec<u8> { let mut result = Vec::new(); self.process .stdout .as_mut() .unwrap() .read_to_end(&mut result) .unwrap(); result } fn write(&mut self, bytes: &[u8]) { self.process .stdin .as_mut() .unwrap() .write_all(bytes) .unwrap(); } fn start(&mut self) {} fn terminate(mut self) { self.write(b"q\n"); let ecode = self.process.wait().expect("failed to wait on child"); assert!(ecode.success()); } } #[tokio::main] async fn main() { let _args: Vec<_> = env::args().skip(1).collect(); let mut qemu = Qemu::new("build/test_disk.img"); let mut gdb = Gdb::new(); gdb.start(); std::thread::sleep_ms(1000); gdb.terminate(); qemu.terminate(); println!("DONE") }
{ let process = Command::new("gdb") .stdin(Stdio::piped()) .stdout(Stdio::null()) // .stderr(Stdio::null()) .spawn() .expect("Unable to start gdb"); let stdout = process.stdout().take().unwrap(); Self { process, stdout: FramedRead::new(stdout, LinesCodec::new()), } }
identifier_body
main.rs
#![feature(async_await)] use futures_util::stream::StreamExt; use std::env; use std::io::*; use std::process::Stdio; use tokio::codec::{FramedRead, LinesCodec}; use tokio::prelude::*; use tokio::process::{Child, Command}; const USAGE: &str = "args: config_file"; struct Qemu { pub process: Child, } impl Qemu { fn new(disk_file: &str) -> Self { let cmd: &str = &vec![ "qemu-system-x86_64.exe", "-m", "4G", "-no-reboot", "-no-shutdown", "-drive", &format!("file={},format=raw,if=ide", disk_file), "-monitor", "stdio", "-s", "-S", ] .join(" "); let process = Command::new("sh") .args(&["-c", cmd]) .stdin(Stdio::piped()) .stdout(Stdio::null()) .stderr(Stdio::null()) .spawn() .expect("Unable to start qemu"); Self { process } } fn terminate(mut self) { { self.process .stdin() .as_mut() .unwrap() .write_all(b"q\n") .unwrap(); } let ecode = self.process.wait().expect("failed to wait on child"); assert!(ecode.success()); } } struct Gdb { pub process: Child, stdout: FramedRead<Vec<u8>, LinesCodec>, } impl Gdb { fn new() -> Self { let process = Command::new("gdb") .stdin(Stdio::piped()) .stdout(Stdio::null()) //.stderr(Stdio::null()) .spawn() .expect("Unable to start gdb"); let stdout = process.stdout().take().unwrap(); Self { process, stdout: FramedRead::new(stdout, LinesCodec::new()), } } fn read(&mut self) -> Vec<u8> { let mut result = Vec::new(); self.process .stdout .as_mut() .unwrap() .read_to_end(&mut result) .unwrap(); result } fn write(&mut self, bytes: &[u8]) { self.process .stdin .as_mut() .unwrap() .write_all(bytes) .unwrap(); } fn start(&mut self) {} fn terminate(mut self) {
let ecode = self.process.wait().expect("failed to wait on child"); assert!(ecode.success()); } } #[tokio::main] async fn main() { let _args: Vec<_> = env::args().skip(1).collect(); let mut qemu = Qemu::new("build/test_disk.img"); let mut gdb = Gdb::new(); gdb.start(); std::thread::sleep_ms(1000); gdb.terminate(); qemu.terminate(); println!("DONE") }
self.write(b"q\n");
random_line_split
devlinks.rs
// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. use std::collections::HashSet; use std::io::ErrorKind; use std::os::unix::fs::symlink; use std::path::{Path, PathBuf}; use std::{fs, str}; use crate::engine::Pool; use crate::stratis::StratisResult; use crate::engine::engine::DEV_PATH; use crate::engine::types::{Name, PoolUuid}; /// Set up the root Stratis directory, where dev links as well as temporary /// MDV mounts will be created. This must occur before any pools are setup. pub fn setup_dev_path() -> StratisResult<()> { if let Err(err) = fs::create_dir(DEV_PATH) { if err.kind()!= ErrorKind::AlreadyExists { return Err(From::from(err)); } } Ok(()) } /// Setup the pool directory and the symlinks in /stratis for the specified pool and filesystems /// it contains. // Don't just remove and recreate everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn setup_pool_devlinks(pool_name: &str, pool: &Pool) { if let Err(err) = || -> StratisResult<()> { let pool_path = pool_directory(pool_name); if!pool_path.exists() { pool_added(pool_name); } let mut existing_files = fs::read_dir(pool_path)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for (fs_name, _, fs) in pool.filesystems() { filesystem_added(pool_name, &fs_name, &fs.devnode()); existing_files.remove(&fs_name.to_owned()); } for leftover in existing_files { filesystem_removed(pool_name, &leftover); } Ok(()) }() { warn!( "setup_pool_devlinks failed for /stratis/{}, reason {:?}", pool_name, err ); }; } /// Clean up directories and symlinks under /stratis based on current /// config. Clear out any directory or file that doesn't correspond to a pool. // Don't just remove everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn cleanup_devlinks<'a, I: Iterator<Item = &'a (Name, PoolUuid, &'a Pool)>>(pools: I) { if let Err(err) = || -> StratisResult<()> { let mut existing_dirs = fs::read_dir(DEV_PATH)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for &(ref pool_name, _, _) in pools { existing_dirs.remove(&pool_name.to_owned()); } for leftover in existing_dirs { pool_removed(&Name::new(leftover)); } Ok(()) }() { warn!("cleanup_devlinks failed, reason {:?}", err); } } /// Create a directory when a pool is added. pub fn pool_added(pool: &str) { let p = pool_directory(pool); if let Err(e) = fs::create_dir(&p) { warn!("unable to create pool directory {:?}, reason {:?}", p, e); } } /// Remove the directory and its contents when the pool is removed. pub fn pool_removed(pool: &str) { let p = pool_directory(pool); if let Err(e) = fs::remove_dir_all(&p) { warn!("unable to remove pool directory {:?}, reason {:?}", p, e); } } /// Rename the directory to match the pool's new name. pub fn pool_renamed(old_name: &str, new_name: &str) { let old = pool_directory(old_name); let new = pool_directory(new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename pool directory old {:?}, new {:?}, reason {:?}", old, new, e ); } } /// Create a symlink to the new filesystem's block device within its pool's /// directory. pub fn
(pool_name: &str, fs_name: &str, devnode: &Path) { let p = filesystem_mount_path(pool_name, fs_name); // Remove existing and recreate to ensure it points to the correct devnode let _ = fs::remove_file(&p); if let Err(e) = symlink(devnode, &p) { warn!( "unable to create symlink for {:?} -> {:?}, reason {:?}", devnode, p, e ); } } /// Remove the symlink when the filesystem is destroyed. pub fn filesystem_removed(pool_name: &str, fs_name: &str) { let p = filesystem_mount_path(pool_name, fs_name); if let Err(e) = fs::remove_file(&p) { warn!( "unable to remove symlink for filesystem {:?}, reason {:?}", p, e ); } } /// Rename the symlink to track the filesystem's new name. pub fn filesystem_renamed(pool_name: &str, old_name: &str, new_name: &str) { let old = filesystem_mount_path(pool_name, old_name); let new = filesystem_mount_path(pool_name, new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename filesystem symlink for {:?} -> {:?}, reason {:?}", old, new, e ); } } /// Given a pool name, synthesize a pool directory name for storing filesystem /// mount paths. fn pool_directory<T: AsRef<str>>(pool_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref()].iter().collect() } /// Given a pool name and a filesystem name, return the path it should be /// available as a device for mounting. pub fn filesystem_mount_path<T: AsRef<str>>(pool_name: T, fs_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref(), fs_name.as_ref()] .iter() .collect() }
filesystem_added
identifier_name
devlinks.rs
// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. use std::collections::HashSet; use std::io::ErrorKind; use std::os::unix::fs::symlink; use std::path::{Path, PathBuf}; use std::{fs, str}; use crate::engine::Pool; use crate::stratis::StratisResult; use crate::engine::engine::DEV_PATH; use crate::engine::types::{Name, PoolUuid}; /// Set up the root Stratis directory, where dev links as well as temporary /// MDV mounts will be created. This must occur before any pools are setup.
if err.kind()!= ErrorKind::AlreadyExists { return Err(From::from(err)); } } Ok(()) } /// Setup the pool directory and the symlinks in /stratis for the specified pool and filesystems /// it contains. // Don't just remove and recreate everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn setup_pool_devlinks(pool_name: &str, pool: &Pool) { if let Err(err) = || -> StratisResult<()> { let pool_path = pool_directory(pool_name); if!pool_path.exists() { pool_added(pool_name); } let mut existing_files = fs::read_dir(pool_path)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for (fs_name, _, fs) in pool.filesystems() { filesystem_added(pool_name, &fs_name, &fs.devnode()); existing_files.remove(&fs_name.to_owned()); } for leftover in existing_files { filesystem_removed(pool_name, &leftover); } Ok(()) }() { warn!( "setup_pool_devlinks failed for /stratis/{}, reason {:?}", pool_name, err ); }; } /// Clean up directories and symlinks under /stratis based on current /// config. Clear out any directory or file that doesn't correspond to a pool. // Don't just remove everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn cleanup_devlinks<'a, I: Iterator<Item = &'a (Name, PoolUuid, &'a Pool)>>(pools: I) { if let Err(err) = || -> StratisResult<()> { let mut existing_dirs = fs::read_dir(DEV_PATH)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for &(ref pool_name, _, _) in pools { existing_dirs.remove(&pool_name.to_owned()); } for leftover in existing_dirs { pool_removed(&Name::new(leftover)); } Ok(()) }() { warn!("cleanup_devlinks failed, reason {:?}", err); } } /// Create a directory when a pool is added. pub fn pool_added(pool: &str) { let p = pool_directory(pool); if let Err(e) = fs::create_dir(&p) { warn!("unable to create pool directory {:?}, reason {:?}", p, e); } } /// Remove the directory and its contents when the pool is removed. pub fn pool_removed(pool: &str) { let p = pool_directory(pool); if let Err(e) = fs::remove_dir_all(&p) { warn!("unable to remove pool directory {:?}, reason {:?}", p, e); } } /// Rename the directory to match the pool's new name. pub fn pool_renamed(old_name: &str, new_name: &str) { let old = pool_directory(old_name); let new = pool_directory(new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename pool directory old {:?}, new {:?}, reason {:?}", old, new, e ); } } /// Create a symlink to the new filesystem's block device within its pool's /// directory. pub fn filesystem_added(pool_name: &str, fs_name: &str, devnode: &Path) { let p = filesystem_mount_path(pool_name, fs_name); // Remove existing and recreate to ensure it points to the correct devnode let _ = fs::remove_file(&p); if let Err(e) = symlink(devnode, &p) { warn!( "unable to create symlink for {:?} -> {:?}, reason {:?}", devnode, p, e ); } } /// Remove the symlink when the filesystem is destroyed. pub fn filesystem_removed(pool_name: &str, fs_name: &str) { let p = filesystem_mount_path(pool_name, fs_name); if let Err(e) = fs::remove_file(&p) { warn!( "unable to remove symlink for filesystem {:?}, reason {:?}", p, e ); } } /// Rename the symlink to track the filesystem's new name. 
pub fn filesystem_renamed(pool_name: &str, old_name: &str, new_name: &str) { let old = filesystem_mount_path(pool_name, old_name); let new = filesystem_mount_path(pool_name, new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename filesystem symlink for {:?} -> {:?}, reason {:?}", old, new, e ); } } /// Given a pool name, synthesize a pool directory name for storing filesystem /// mount paths. fn pool_directory<T: AsRef<str>>(pool_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref()].iter().collect() } /// Given a pool name and a filesystem name, return the path it should be /// available as a device for mounting. pub fn filesystem_mount_path<T: AsRef<str>>(pool_name: T, fs_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref(), fs_name.as_ref()] .iter() .collect() }
pub fn setup_dev_path() -> StratisResult<()> { if let Err(err) = fs::create_dir(DEV_PATH) {
random_line_split
devlinks.rs
// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. use std::collections::HashSet; use std::io::ErrorKind; use std::os::unix::fs::symlink; use std::path::{Path, PathBuf}; use std::{fs, str}; use crate::engine::Pool; use crate::stratis::StratisResult; use crate::engine::engine::DEV_PATH; use crate::engine::types::{Name, PoolUuid}; /// Set up the root Stratis directory, where dev links as well as temporary /// MDV mounts will be created. This must occur before any pools are setup. pub fn setup_dev_path() -> StratisResult<()> { if let Err(err) = fs::create_dir(DEV_PATH) { if err.kind()!= ErrorKind::AlreadyExists { return Err(From::from(err)); } } Ok(()) } /// Setup the pool directory and the symlinks in /stratis for the specified pool and filesystems /// it contains. // Don't just remove and recreate everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn setup_pool_devlinks(pool_name: &str, pool: &Pool) { if let Err(err) = || -> StratisResult<()> { let pool_path = pool_directory(pool_name); if!pool_path.exists() { pool_added(pool_name); } let mut existing_files = fs::read_dir(pool_path)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for (fs_name, _, fs) in pool.filesystems() { filesystem_added(pool_name, &fs_name, &fs.devnode()); existing_files.remove(&fs_name.to_owned()); } for leftover in existing_files { filesystem_removed(pool_name, &leftover); } Ok(()) }() { warn!( "setup_pool_devlinks failed for /stratis/{}, reason {:?}", pool_name, err ); }; } /// Clean up directories and symlinks under /stratis based on current /// config. Clear out any directory or file that doesn't correspond to a pool. // Don't just remove everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn cleanup_devlinks<'a, I: Iterator<Item = &'a (Name, PoolUuid, &'a Pool)>>(pools: I) { if let Err(err) = || -> StratisResult<()> { let mut existing_dirs = fs::read_dir(DEV_PATH)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for &(ref pool_name, _, _) in pools { existing_dirs.remove(&pool_name.to_owned()); } for leftover in existing_dirs { pool_removed(&Name::new(leftover)); } Ok(()) }()
} /// Create a directory when a pool is added. pub fn pool_added(pool: &str) { let p = pool_directory(pool); if let Err(e) = fs::create_dir(&p) { warn!("unable to create pool directory {:?}, reason {:?}", p, e); } } /// Remove the directory and its contents when the pool is removed. pub fn pool_removed(pool: &str) { let p = pool_directory(pool); if let Err(e) = fs::remove_dir_all(&p) { warn!("unable to remove pool directory {:?}, reason {:?}", p, e); } } /// Rename the directory to match the pool's new name. pub fn pool_renamed(old_name: &str, new_name: &str) { let old = pool_directory(old_name); let new = pool_directory(new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename pool directory old {:?}, new {:?}, reason {:?}", old, new, e ); } } /// Create a symlink to the new filesystem's block device within its pool's /// directory. pub fn filesystem_added(pool_name: &str, fs_name: &str, devnode: &Path) { let p = filesystem_mount_path(pool_name, fs_name); // Remove existing and recreate to ensure it points to the correct devnode let _ = fs::remove_file(&p); if let Err(e) = symlink(devnode, &p) { warn!( "unable to create symlink for {:?} -> {:?}, reason {:?}", devnode, p, e ); } } /// Remove the symlink when the filesystem is destroyed. pub fn filesystem_removed(pool_name: &str, fs_name: &str) { let p = filesystem_mount_path(pool_name, fs_name); if let Err(e) = fs::remove_file(&p) { warn!( "unable to remove symlink for filesystem {:?}, reason {:?}", p, e ); } } /// Rename the symlink to track the filesystem's new name. pub fn filesystem_renamed(pool_name: &str, old_name: &str, new_name: &str) { let old = filesystem_mount_path(pool_name, old_name); let new = filesystem_mount_path(pool_name, new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename filesystem symlink for {:?} -> {:?}, reason {:?}", old, new, e ); } } /// Given a pool name, synthesize a pool directory name for storing filesystem /// mount paths. fn pool_directory<T: AsRef<str>>(pool_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref()].iter().collect() } /// Given a pool name and a filesystem name, return the path it should be /// available as a device for mounting. pub fn filesystem_mount_path<T: AsRef<str>>(pool_name: T, fs_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref(), fs_name.as_ref()] .iter() .collect() }
{ warn!("cleanup_devlinks failed, reason {:?}", err); }
conditional_block
devlinks.rs
// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. use std::collections::HashSet; use std::io::ErrorKind; use std::os::unix::fs::symlink; use std::path::{Path, PathBuf}; use std::{fs, str}; use crate::engine::Pool; use crate::stratis::StratisResult; use crate::engine::engine::DEV_PATH; use crate::engine::types::{Name, PoolUuid}; /// Set up the root Stratis directory, where dev links as well as temporary /// MDV mounts will be created. This must occur before any pools are setup. pub fn setup_dev_path() -> StratisResult<()> { if let Err(err) = fs::create_dir(DEV_PATH) { if err.kind()!= ErrorKind::AlreadyExists { return Err(From::from(err)); } } Ok(()) } /// Setup the pool directory and the symlinks in /stratis for the specified pool and filesystems /// it contains. // Don't just remove and recreate everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn setup_pool_devlinks(pool_name: &str, pool: &Pool) { if let Err(err) = || -> StratisResult<()> { let pool_path = pool_directory(pool_name); if!pool_path.exists() { pool_added(pool_name); } let mut existing_files = fs::read_dir(pool_path)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for (fs_name, _, fs) in pool.filesystems() { filesystem_added(pool_name, &fs_name, &fs.devnode()); existing_files.remove(&fs_name.to_owned()); } for leftover in existing_files { filesystem_removed(pool_name, &leftover); } Ok(()) }() { warn!( "setup_pool_devlinks failed for /stratis/{}, reason {:?}", pool_name, err ); }; } /// Clean up directories and symlinks under /stratis based on current /// config. Clear out any directory or file that doesn't correspond to a pool. // Don't just remove everything in case there are processes // (e.g. user shells) with the current working directory within the tree. pub fn cleanup_devlinks<'a, I: Iterator<Item = &'a (Name, PoolUuid, &'a Pool)>>(pools: I) { if let Err(err) = || -> StratisResult<()> { let mut existing_dirs = fs::read_dir(DEV_PATH)? .map(|dir_e| { dir_e.and_then(|d| Ok(d.file_name().into_string().expect("Unix is utf-8"))) }) .collect::<Result<HashSet<_>, _>>()?; for &(ref pool_name, _, _) in pools { existing_dirs.remove(&pool_name.to_owned()); } for leftover in existing_dirs { pool_removed(&Name::new(leftover)); } Ok(()) }() { warn!("cleanup_devlinks failed, reason {:?}", err); } } /// Create a directory when a pool is added. pub fn pool_added(pool: &str)
/// Remove the directory and its contents when the pool is removed. pub fn pool_removed(pool: &str) { let p = pool_directory(pool); if let Err(e) = fs::remove_dir_all(&p) { warn!("unable to remove pool directory {:?}, reason {:?}", p, e); } } /// Rename the directory to match the pool's new name. pub fn pool_renamed(old_name: &str, new_name: &str) { let old = pool_directory(old_name); let new = pool_directory(new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename pool directory old {:?}, new {:?}, reason {:?}", old, new, e ); } } /// Create a symlink to the new filesystem's block device within its pool's /// directory. pub fn filesystem_added(pool_name: &str, fs_name: &str, devnode: &Path) { let p = filesystem_mount_path(pool_name, fs_name); // Remove existing and recreate to ensure it points to the correct devnode let _ = fs::remove_file(&p); if let Err(e) = symlink(devnode, &p) { warn!( "unable to create symlink for {:?} -> {:?}, reason {:?}", devnode, p, e ); } } /// Remove the symlink when the filesystem is destroyed. pub fn filesystem_removed(pool_name: &str, fs_name: &str) { let p = filesystem_mount_path(pool_name, fs_name); if let Err(e) = fs::remove_file(&p) { warn!( "unable to remove symlink for filesystem {:?}, reason {:?}", p, e ); } } /// Rename the symlink to track the filesystem's new name. pub fn filesystem_renamed(pool_name: &str, old_name: &str, new_name: &str) { let old = filesystem_mount_path(pool_name, old_name); let new = filesystem_mount_path(pool_name, new_name); if let Err(e) = fs::rename(&old, &new) { warn!( "unable to rename filesystem symlink for {:?} -> {:?}, reason {:?}", old, new, e ); } } /// Given a pool name, synthesize a pool directory name for storing filesystem /// mount paths. fn pool_directory<T: AsRef<str>>(pool_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref()].iter().collect() } /// Given a pool name and a filesystem name, return the path it should be /// available as a device for mounting. pub fn filesystem_mount_path<T: AsRef<str>>(pool_name: T, fs_name: T) -> PathBuf { vec![DEV_PATH, pool_name.as_ref(), fs_name.as_ref()] .iter() .collect() }
{ let p = pool_directory(pool); if let Err(e) = fs::create_dir(&p) { warn!("unable to create pool directory {:?}, reason {:?}", p, e); } }
identifier_body
fence_rmw.rs
//! This mod provides orderings to use with RMW operations //! that optimally handle the case when all loads and stores //! after an RMW operation must be ordered after the operation. //! # Example: //! ``` //! use std::sync::atomic::{AtomicUsize, fence, Ordering}; //! use atomic_utilities::fence_rmw::{RMWOrder, fence_rmw}; //! //! let atomic_refcnt = AtomicUsize::new(0); //! atomic_refcnt.fetch_add(1, RMWOrder); //! //! //... do work here //! // This will be ordered after the store of the fetch_add //! // and will use minimal fences for various hardware platforms //! atomic_refcnt.fetch_sub(1, Ordering::Release); //! ``` use std::sync::atomic::Ordering; #[cfg(any(target_platform = "x86", target_platform = "x86_64"))] mod internal_ordering { use std::sync::atomic::Ordering; pub const RMW_O: Ordering = Ordering::Acquire; #[inline(always)] pub fn the_fence() {} } #[cfg(not(any(target_platform = "x86", target_platform = "x86_64")))] mod internal_ordering { use std::sync::atomic::{Ordering, fence}; pub const RMW_O: Ordering = Ordering::Relaxed; pub fn the_fence()
} /// The ordering to be used for RMW operations in this fencing scheme #[allow(non_upper_case_globals)] pub const RMWOrder: Ordering = internal_ordering::RMW_O; /// The fence to be used after the RMW #[inline(always)] pub fn fence_rmw() { internal_ordering::the_fence() }
{ fence(Ordering::SeqCst) }
identifier_body
fence_rmw.rs
//! This mod provides orderings to use with RMW operations //! that optimally handle the case when all loads and stores //! after an RMW operation must be ordered after the operation. //! # Example: //! ``` //! use std::sync::atomic::{AtomicUsize, fence, Ordering}; //! use atomic_utilities::fence_rmw::{RMWOrder, fence_rmw}; //! //! let atomic_refcnt = AtomicUsize::new(0); //! atomic_refcnt.fetch_add(1, RMWOrder); //! //! //... do work here //! // This will be ordered after the store of the fetch_add //! // and will use minimal fences for various hardware platforms //! atomic_refcnt.fetch_sub(1, Ordering::Release); //! ``` use std::sync::atomic::Ordering; #[cfg(any(target_platform = "x86", target_platform = "x86_64"))] mod internal_ordering { use std::sync::atomic::Ordering; pub const RMW_O: Ordering = Ordering::Acquire; #[inline(always)] pub fn the_fence() {} } #[cfg(not(any(target_platform = "x86", target_platform = "x86_64")))]
mod internal_ordering { use std::sync::atomic::{Ordering, fence}; pub const RMW_O: Ordering = Ordering::Relaxed; pub fn the_fence() { fence(Ordering::SeqCst) } } /// The ordering to be used for RMW operations in this fencing scheme #[allow(non_upper_case_globals)] pub const RMWOrder: Ordering = internal_ordering::RMW_O; /// The fence to be used after the RMW #[inline(always)] pub fn fence_rmw() { internal_ordering::the_fence() }
random_line_split
fence_rmw.rs
//! This mod provides orderings to use with RMW operations //! that optimally handle the case when all loads and stores //! after an RMW operation must be ordered after the operation. //! # Example: //! ``` //! use std::sync::atomic::{AtomicUsize, fence, Ordering}; //! use atomic_utilities::fence_rmw::{RMWOrder, fence_rmw}; //! //! let atomic_refcnt = AtomicUsize::new(0); //! atomic_refcnt.fetch_add(1, RMWOrder); //! //! //... do work here //! // This will be ordered after the store of the fetch_add //! // and will use minimal fences for various hardware platforms //! atomic_refcnt.fetch_sub(1, Ordering::Release); //! ``` use std::sync::atomic::Ordering; #[cfg(any(target_platform = "x86", target_platform = "x86_64"))] mod internal_ordering { use std::sync::atomic::Ordering; pub const RMW_O: Ordering = Ordering::Acquire; #[inline(always)] pub fn the_fence() {} } #[cfg(not(any(target_platform = "x86", target_platform = "x86_64")))] mod internal_ordering { use std::sync::atomic::{Ordering, fence}; pub const RMW_O: Ordering = Ordering::Relaxed; pub fn the_fence() { fence(Ordering::SeqCst) } } /// The ordering to be used for RMW operations in this fencing scheme #[allow(non_upper_case_globals)] pub const RMWOrder: Ordering = internal_ordering::RMW_O; /// The fence to be used after the RMW #[inline(always)] pub fn
() { internal_ordering::the_fence() }
fence_rmw
identifier_name
mock_executor.rs
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. use tipb::FieldType; use crate::interface::*; use tidb_query_common::storage::IntervalRange; use tidb_query_datatype::codec::batch::LazyBatchColumnVec; use tidb_query_datatype::codec::data_type::VectorValue; use tidb_query_datatype::expr::EvalWarnings; /// A simple mock executor that will return batch data according to a fixture without any /// modification. /// /// Normally this should be only used in tests. pub struct MockExecutor { schema: Vec<FieldType>, results: std::vec::IntoIter<BatchExecuteResult>, } impl MockExecutor { pub fn new(schema: Vec<FieldType>, results: Vec<BatchExecuteResult>) -> Self { assert!(!results.is_empty()); Self { schema, results: results.into_iter(), } } } impl BatchExecutor for MockExecutor { type StorageStats = (); fn schema(&self) -> &[FieldType] { &self.schema } fn next_batch(&mut self, _scan_rows: usize) -> BatchExecuteResult { self.results.next().unwrap() } fn collect_exec_stats(&mut self, _dest: &mut ExecuteStats) { // Do nothing } fn collect_storage_stats(&mut self, _dest: &mut Self::StorageStats) { // Do nothing } fn take_scanned_range(&mut self) -> IntervalRange { // Do nothing unreachable!() } fn can_be_cached(&self) -> bool { false } } pub struct MockScanExecutor { pub rows: Vec<i64>, pub pos: usize, schema: Vec<FieldType>, } impl MockScanExecutor { pub fn new(rows: Vec<i64>, schema: Vec<FieldType>) -> Self { MockScanExecutor { rows, pos: 0, schema, } } } impl BatchExecutor for MockScanExecutor { type StorageStats = (); fn schema(&self) -> &[FieldType] { &self.schema } fn next_batch(&mut self, scan_rows: usize) -> BatchExecuteResult { let real_scan_rows = std::cmp::min(scan_rows, self.rows.len()); // just one column let mut res_col = Vec::new(); let mut res_logical_rows = Vec::new(); let mut cur_row_idx = 0; while self.pos < self.rows.len() && cur_row_idx < real_scan_rows { res_col.push(Some(self.rows[self.pos])); res_logical_rows.push(cur_row_idx); self.pos += 1; cur_row_idx += 1; } let is_drained = self.pos >= self.rows.len(); BatchExecuteResult { physical_columns: LazyBatchColumnVec::from(vec![VectorValue::Int(res_col.into())]), logical_rows: res_logical_rows, warnings: EvalWarnings::default(), is_drained: Ok(is_drained), } } fn collect_exec_stats(&mut self, _dest: &mut ExecuteStats) { // Do nothing } fn
(&mut self, _dest: &mut Self::StorageStats) { // Do nothing } fn take_scanned_range(&mut self) -> IntervalRange { // Do nothing unreachable!() } fn can_be_cached(&self) -> bool { false } }
collect_storage_stats
identifier_name
mock_executor.rs
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. use tipb::FieldType; use crate::interface::*; use tidb_query_common::storage::IntervalRange; use tidb_query_datatype::codec::batch::LazyBatchColumnVec; use tidb_query_datatype::codec::data_type::VectorValue; use tidb_query_datatype::expr::EvalWarnings; /// A simple mock executor that will return batch data according to a fixture without any /// modification. /// /// Normally this should be only used in tests. pub struct MockExecutor { schema: Vec<FieldType>, results: std::vec::IntoIter<BatchExecuteResult>, } impl MockExecutor { pub fn new(schema: Vec<FieldType>, results: Vec<BatchExecuteResult>) -> Self { assert!(!results.is_empty()); Self { schema, results: results.into_iter(), } } } impl BatchExecutor for MockExecutor { type StorageStats = (); fn schema(&self) -> &[FieldType] { &self.schema } fn next_batch(&mut self, _scan_rows: usize) -> BatchExecuteResult { self.results.next().unwrap() } fn collect_exec_stats(&mut self, _dest: &mut ExecuteStats) { // Do nothing } fn collect_storage_stats(&mut self, _dest: &mut Self::StorageStats) { // Do nothing } fn take_scanned_range(&mut self) -> IntervalRange { // Do nothing
fn can_be_cached(&self) -> bool { false } } pub struct MockScanExecutor { pub rows: Vec<i64>, pub pos: usize, schema: Vec<FieldType>, } impl MockScanExecutor { pub fn new(rows: Vec<i64>, schema: Vec<FieldType>) -> Self { MockScanExecutor { rows, pos: 0, schema, } } } impl BatchExecutor for MockScanExecutor { type StorageStats = (); fn schema(&self) -> &[FieldType] { &self.schema } fn next_batch(&mut self, scan_rows: usize) -> BatchExecuteResult { let real_scan_rows = std::cmp::min(scan_rows, self.rows.len()); // just one column let mut res_col = Vec::new(); let mut res_logical_rows = Vec::new(); let mut cur_row_idx = 0; while self.pos < self.rows.len() && cur_row_idx < real_scan_rows { res_col.push(Some(self.rows[self.pos])); res_logical_rows.push(cur_row_idx); self.pos += 1; cur_row_idx += 1; } let is_drained = self.pos >= self.rows.len(); BatchExecuteResult { physical_columns: LazyBatchColumnVec::from(vec![VectorValue::Int(res_col.into())]), logical_rows: res_logical_rows, warnings: EvalWarnings::default(), is_drained: Ok(is_drained), } } fn collect_exec_stats(&mut self, _dest: &mut ExecuteStats) { // Do nothing } fn collect_storage_stats(&mut self, _dest: &mut Self::StorageStats) { // Do nothing } fn take_scanned_range(&mut self) -> IntervalRange { // Do nothing unreachable!() } fn can_be_cached(&self) -> bool { false } }
unreachable!() }
random_line_split
group.rs
extern crate kiss3d; extern crate nalgebra as na; use na::Vec3; use kiss3d::window::Window; use kiss3d::light::Light; fn main()
while window.render() { g1.prepend_to_local_rotation(&Vec3::new(0.0f32, 0.014, 0.0)); g2.prepend_to_local_rotation(&Vec3::new(0.014f32, 0.0, 0.0)); } }
{ let mut window = Window::new("Kiss3d: cube"); let mut g1 = window.add_group(); let mut g2 = window.add_group(); g1.append_translation(&Vec3::new(2.0f32, 0.0, 0.0)); g2.append_translation(&Vec3::new(-2.0f32, 0.0, 0.0)); g1.add_cube(1.0, 5.0, 1.0); g1.add_cube(5.0, 1.0, 1.0); g2.add_cube(1.0, 5.0, 1.0); g2.add_cube(1.0, 1.0, 5.0); g1.set_color(1.0, 0.0, 0.0); g2.set_color(0.0, 1.0, 0.0); window.set_light(Light::StickToCamera);
identifier_body
group.rs
use kiss3d::window::Window; use kiss3d::light::Light; fn main() { let mut window = Window::new("Kiss3d: cube"); let mut g1 = window.add_group(); let mut g2 = window.add_group(); g1.append_translation(&Vec3::new(2.0f32, 0.0, 0.0)); g2.append_translation(&Vec3::new(-2.0f32, 0.0, 0.0)); g1.add_cube(1.0, 5.0, 1.0); g1.add_cube(5.0, 1.0, 1.0); g2.add_cube(1.0, 5.0, 1.0); g2.add_cube(1.0, 1.0, 5.0); g1.set_color(1.0, 0.0, 0.0); g2.set_color(0.0, 1.0, 0.0); window.set_light(Light::StickToCamera); while window.render() { g1.prepend_to_local_rotation(&Vec3::new(0.0f32, 0.014, 0.0)); g2.prepend_to_local_rotation(&Vec3::new(0.014f32, 0.0, 0.0)); } }
extern crate kiss3d; extern crate nalgebra as na; use na::Vec3;
random_line_split
group.rs
extern crate kiss3d; extern crate nalgebra as na; use na::Vec3; use kiss3d::window::Window; use kiss3d::light::Light; fn
() { let mut window = Window::new("Kiss3d: cube"); let mut g1 = window.add_group(); let mut g2 = window.add_group(); g1.append_translation(&Vec3::new(2.0f32, 0.0, 0.0)); g2.append_translation(&Vec3::new(-2.0f32, 0.0, 0.0)); g1.add_cube(1.0, 5.0, 1.0); g1.add_cube(5.0, 1.0, 1.0); g2.add_cube(1.0, 5.0, 1.0); g2.add_cube(1.0, 1.0, 5.0); g1.set_color(1.0, 0.0, 0.0); g2.set_color(0.0, 1.0, 0.0); window.set_light(Light::StickToCamera); while window.render() { g1.prepend_to_local_rotation(&Vec3::new(0.0f32, 0.014, 0.0)); g2.prepend_to_local_rotation(&Vec3::new(0.014f32, 0.0, 0.0)); } }
main
identifier_name
record_backend.rs
extern crate chrono; extern crate mysql; use self::chrono::UTC; use self::chrono::offset::TimeZone; use self::mysql::conn::MyOpts; use self::mysql::conn::pool::MyPool; use self::mysql::error::MyResult; use self::mysql::value::from_row; use self::mysql::value::Value; use std::clone::Clone; use std::default::Default; use worker::Record; pub trait RecordRepository { fn store (&self, record: Record) -> Result<(), RecordRepositoryError>; fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError>; fn fetch_limit (&self, size: u32, offset: u32) -> Result<(Vec<Record>), RecordRepositoryError>; } #[derive(Debug)] pub enum RecordRepositoryError { CannotStoreRecord, CannotFetchRecord, CannotDenormalizeRecord, RecordNotFound } #[derive(Debug, Clone, RustcDecodable, RustcEncodable)] pub struct MysqlConfig { address: String, username: String, password: String, database: String } impl MysqlConfig { pub fn to_connection (&self) -> MysqlRepository { let opts = MyOpts { tcp_addr: Some(self.address.clone()), user: Some(self.username.clone()), pass: Some(self.password.clone()), db_name: Some(self.database.to_string()), ..Default::default() }; MysqlRepository::new(MyPool::new(opts).unwrap()) } } #[derive(Clone, Debug)] pub struct MysqlRepository { pool: MyPool } impl MysqlRepository { pub fn new (pool: MyPool) -> MysqlRepository { MysqlRepository { pool: pool } } fn row_to_record (&self, row: MyResult<Vec<Value>>) -> Record { let (id, command, cwd, status, stderr, stdout, started_at_col, finished_at_col) = from_row::<(String, String, String, i32, String, String, String, String)>(row.unwrap()); let started_at = UTC.datetime_from_str(&started_at_col, "%Y-%m-%d %H:%M:%S").unwrap(); let finished_at = UTC.datetime_from_str(&finished_at_col, "%Y-%m-%d %H:%M:%S").unwrap(); let optimized_uuid = MysqlOptimizedUuid { uuid: id.to_string() }; Record { id: optimized_uuid.to_uuid(), command: command, cwd: cwd, status: status, stderr: stderr, stdout: stdout,
finished_at: finished_at } } } #[derive(Clone, Debug)] pub struct MysqlOptimizedUuid { uuid: String } impl MysqlOptimizedUuid { pub fn from_uuid (uuid: String) -> MysqlOptimizedUuid { // the optimized way https://www.percona.com/blog/2014/12/19/store-uuid-optimized-way/ let mut ordered_uuid = uuid[14..18].to_string(); ordered_uuid.push_str(&uuid[9..13]); ordered_uuid.push_str(&uuid[0..8]); ordered_uuid.push_str(&uuid[19..23]); ordered_uuid.push_str(&uuid[24..]); MysqlOptimizedUuid { uuid: ordered_uuid } } pub fn to_uuid (&self) -> String { let mut uuid = self.uuid[8..16].to_string(); uuid.push_str("-"); uuid.push_str(&self.uuid[4..8]); uuid.push_str("-"); uuid.push_str(&self.uuid[0..4]); uuid.push_str("-"); uuid.push_str(&self.uuid[16..20]); uuid.push_str("-"); uuid.push_str(&self.uuid[20..]); uuid } } impl RecordRepository for MysqlRepository { fn store (&self, record: Record) -> Result<(), RecordRepositoryError> { let uuid_optimized = MysqlOptimizedUuid::from_uuid(record.id.clone()); let query = r"INSERT INTO results (id, command, cwd, status, stderr, stdout, started_at, finished_at) VALUES (UNHEX(?),?,?,?,?,?,?,?)"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotStoreRecord) }; let result = match stmt.execute( (uuid_optimized.clone().uuid, record.command, record.cwd, record.status, record.stderr, record.stdout, record.started_at.format("%Y-%m-%d %H:%M:%S").to_string(), record.finished_at.format("%Y-%m-%d %H:%M:%S").to_string() ) ) { Ok(_) => Ok(()), Err(err) => { error!("[{:?}] error storing in mysql {:?}", uuid_optimized.clone().uuid, err); return Err(RecordRepositoryError::CannotStoreRecord); } }; result } fn fetch_limit (&self, size: u32, limit: u32) -> Result<(Vec<Record>), RecordRepositoryError> { let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results ORDER BY started_at DESC LIMIT? 
OFFSET?"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotFetchRecord) }; let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt .execute((size, limit)) .map(|result| { result.map(|row| { self.row_to_record(row) }).collect() }) { Ok(records) => Ok(records), Err(err) => { error!("error fetching from mysql {:?}", err); return Err(RecordRepositoryError::CannotDenormalizeRecord) } }; results } fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError> { let uuid_optimized = MysqlOptimizedUuid::from_uuid(id.clone()); let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results WHERE HEX(id) =?"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotFetchRecord) }; let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt .execute((uuid_optimized.uuid, )) .map(|result| { result.map(|row| { self.row_to_record(row) }).collect() }) { Ok(records) => Ok(records), Err(err) => { error!("error fetching from mysql {:?}", err); return Err(RecordRepositoryError::CannotDenormalizeRecord) } }; let records: Vec<Record> = results.unwrap(); let result: Result<(Record), RecordRepositoryError> = match records.len() { 1 => { Ok(records[0].clone()) }, _ => return Err(RecordRepositoryError::RecordNotFound) }; result } } #[cfg(test)] mod tests { use super::MysqlOptimizedUuid; #[test] fn optimized_uuid() { let uuid = String::from("58e0a7d7-eebc-11d8-9669-0800200c9a66"); let optimized_uuid = MysqlOptimizedUuid::from_uuid(uuid); assert_eq!("11d8eebc58e0a7d796690800200c9a66", optimized_uuid.uuid); assert_eq!("58e0a7d7-eebc-11d8-9669-0800200c9a66", optimized_uuid.to_uuid()); } }
started_at: started_at,
random_line_split
record_backend.rs
extern crate chrono; extern crate mysql; use self::chrono::UTC; use self::chrono::offset::TimeZone; use self::mysql::conn::MyOpts; use self::mysql::conn::pool::MyPool; use self::mysql::error::MyResult; use self::mysql::value::from_row; use self::mysql::value::Value; use std::clone::Clone; use std::default::Default; use worker::Record; pub trait RecordRepository { fn store (&self, record: Record) -> Result<(), RecordRepositoryError>; fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError>; fn fetch_limit (&self, size: u32, offset: u32) -> Result<(Vec<Record>), RecordRepositoryError>; } #[derive(Debug)] pub enum
{ CannotStoreRecord, CannotFetchRecord, CannotDenormalizeRecord, RecordNotFound } #[derive(Debug, Clone, RustcDecodable, RustcEncodable)] pub struct MysqlConfig { address: String, username: String, password: String, database: String } impl MysqlConfig { pub fn to_connection (&self) -> MysqlRepository { let opts = MyOpts { tcp_addr: Some(self.address.clone()), user: Some(self.username.clone()), pass: Some(self.password.clone()), db_name: Some(self.database.to_string()), ..Default::default() }; MysqlRepository::new(MyPool::new(opts).unwrap()) } } #[derive(Clone, Debug)] pub struct MysqlRepository { pool: MyPool } impl MysqlRepository { pub fn new (pool: MyPool) -> MysqlRepository { MysqlRepository { pool: pool } } fn row_to_record (&self, row: MyResult<Vec<Value>>) -> Record { let (id, command, cwd, status, stderr, stdout, started_at_col, finished_at_col) = from_row::<(String, String, String, i32, String, String, String, String)>(row.unwrap()); let started_at = UTC.datetime_from_str(&started_at_col, "%Y-%m-%d %H:%M:%S").unwrap(); let finished_at = UTC.datetime_from_str(&finished_at_col, "%Y-%m-%d %H:%M:%S").unwrap(); let optimized_uuid = MysqlOptimizedUuid { uuid: id.to_string() }; Record { id: optimized_uuid.to_uuid(), command: command, cwd: cwd, status: status, stderr: stderr, stdout: stdout, started_at: started_at, finished_at: finished_at } } } #[derive(Clone, Debug)] pub struct MysqlOptimizedUuid { uuid: String } impl MysqlOptimizedUuid { pub fn from_uuid (uuid: String) -> MysqlOptimizedUuid { // the optimized way https://www.percona.com/blog/2014/12/19/store-uuid-optimized-way/ let mut ordered_uuid = uuid[14..18].to_string(); ordered_uuid.push_str(&uuid[9..13]); ordered_uuid.push_str(&uuid[0..8]); ordered_uuid.push_str(&uuid[19..23]); ordered_uuid.push_str(&uuid[24..]); MysqlOptimizedUuid { uuid: ordered_uuid } } pub fn to_uuid (&self) -> String { let mut uuid = self.uuid[8..16].to_string(); uuid.push_str("-"); uuid.push_str(&self.uuid[4..8]); uuid.push_str("-"); uuid.push_str(&self.uuid[0..4]); uuid.push_str("-"); uuid.push_str(&self.uuid[16..20]); uuid.push_str("-"); uuid.push_str(&self.uuid[20..]); uuid } } impl RecordRepository for MysqlRepository { fn store (&self, record: Record) -> Result<(), RecordRepositoryError> { let uuid_optimized = MysqlOptimizedUuid::from_uuid(record.id.clone()); let query = r"INSERT INTO results (id, command, cwd, status, stderr, stdout, started_at, finished_at) VALUES (UNHEX(?),?,?,?,?,?,?,?)"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotStoreRecord) }; let result = match stmt.execute( (uuid_optimized.clone().uuid, record.command, record.cwd, record.status, record.stderr, record.stdout, record.started_at.format("%Y-%m-%d %H:%M:%S").to_string(), record.finished_at.format("%Y-%m-%d %H:%M:%S").to_string() ) ) { Ok(_) => Ok(()), Err(err) => { error!("[{:?}] error storing in mysql {:?}", uuid_optimized.clone().uuid, err); return Err(RecordRepositoryError::CannotStoreRecord); } }; result } fn fetch_limit (&self, size: u32, limit: u32) -> Result<(Vec<Record>), RecordRepositoryError> { let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results ORDER BY started_at DESC LIMIT? 
OFFSET?"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotFetchRecord) }; let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt .execute((size, limit)) .map(|result| { result.map(|row| { self.row_to_record(row) }).collect() }) { Ok(records) => Ok(records), Err(err) => { error!("error fetching from mysql {:?}", err); return Err(RecordRepositoryError::CannotDenormalizeRecord) } }; results } fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError> { let uuid_optimized = MysqlOptimizedUuid::from_uuid(id.clone()); let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results WHERE HEX(id) =?"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotFetchRecord) }; let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt .execute((uuid_optimized.uuid, )) .map(|result| { result.map(|row| { self.row_to_record(row) }).collect() }) { Ok(records) => Ok(records), Err(err) => { error!("error fetching from mysql {:?}", err); return Err(RecordRepositoryError::CannotDenormalizeRecord) } }; let records: Vec<Record> = results.unwrap(); let result: Result<(Record), RecordRepositoryError> = match records.len() { 1 => { Ok(records[0].clone()) }, _ => return Err(RecordRepositoryError::RecordNotFound) }; result } } #[cfg(test)] mod tests { use super::MysqlOptimizedUuid; #[test] fn optimized_uuid() { let uuid = String::from("58e0a7d7-eebc-11d8-9669-0800200c9a66"); let optimized_uuid = MysqlOptimizedUuid::from_uuid(uuid); assert_eq!("11d8eebc58e0a7d796690800200c9a66", optimized_uuid.uuid); assert_eq!("58e0a7d7-eebc-11d8-9669-0800200c9a66", optimized_uuid.to_uuid()); } }
RecordRepositoryError
identifier_name
record_backend.rs
extern crate chrono; extern crate mysql; use self::chrono::UTC; use self::chrono::offset::TimeZone; use self::mysql::conn::MyOpts; use self::mysql::conn::pool::MyPool; use self::mysql::error::MyResult; use self::mysql::value::from_row; use self::mysql::value::Value; use std::clone::Clone; use std::default::Default; use worker::Record; pub trait RecordRepository { fn store (&self, record: Record) -> Result<(), RecordRepositoryError>; fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError>; fn fetch_limit (&self, size: u32, offset: u32) -> Result<(Vec<Record>), RecordRepositoryError>; } #[derive(Debug)] pub enum RecordRepositoryError { CannotStoreRecord, CannotFetchRecord, CannotDenormalizeRecord, RecordNotFound } #[derive(Debug, Clone, RustcDecodable, RustcEncodable)] pub struct MysqlConfig { address: String, username: String, password: String, database: String } impl MysqlConfig { pub fn to_connection (&self) -> MysqlRepository { let opts = MyOpts { tcp_addr: Some(self.address.clone()), user: Some(self.username.clone()), pass: Some(self.password.clone()), db_name: Some(self.database.to_string()), ..Default::default() }; MysqlRepository::new(MyPool::new(opts).unwrap()) } } #[derive(Clone, Debug)] pub struct MysqlRepository { pool: MyPool } impl MysqlRepository { pub fn new (pool: MyPool) -> MysqlRepository { MysqlRepository { pool: pool } } fn row_to_record (&self, row: MyResult<Vec<Value>>) -> Record { let (id, command, cwd, status, stderr, stdout, started_at_col, finished_at_col) = from_row::<(String, String, String, i32, String, String, String, String)>(row.unwrap()); let started_at = UTC.datetime_from_str(&started_at_col, "%Y-%m-%d %H:%M:%S").unwrap(); let finished_at = UTC.datetime_from_str(&finished_at_col, "%Y-%m-%d %H:%M:%S").unwrap(); let optimized_uuid = MysqlOptimizedUuid { uuid: id.to_string() }; Record { id: optimized_uuid.to_uuid(), command: command, cwd: cwd, status: status, stderr: stderr, stdout: stdout, started_at: started_at, finished_at: finished_at } } } #[derive(Clone, Debug)] pub struct MysqlOptimizedUuid { uuid: String } impl MysqlOptimizedUuid { pub fn from_uuid (uuid: String) -> MysqlOptimizedUuid { // the optimized way https://www.percona.com/blog/2014/12/19/store-uuid-optimized-way/ let mut ordered_uuid = uuid[14..18].to_string(); ordered_uuid.push_str(&uuid[9..13]); ordered_uuid.push_str(&uuid[0..8]); ordered_uuid.push_str(&uuid[19..23]); ordered_uuid.push_str(&uuid[24..]); MysqlOptimizedUuid { uuid: ordered_uuid } } pub fn to_uuid (&self) -> String { let mut uuid = self.uuid[8..16].to_string(); uuid.push_str("-"); uuid.push_str(&self.uuid[4..8]); uuid.push_str("-"); uuid.push_str(&self.uuid[0..4]); uuid.push_str("-"); uuid.push_str(&self.uuid[16..20]); uuid.push_str("-"); uuid.push_str(&self.uuid[20..]); uuid } } impl RecordRepository for MysqlRepository { fn store (&self, record: Record) -> Result<(), RecordRepositoryError> { let uuid_optimized = MysqlOptimizedUuid::from_uuid(record.id.clone()); let query = r"INSERT INTO results (id, command, cwd, status, stderr, stdout, started_at, finished_at) VALUES (UNHEX(?),?,?,?,?,?,?,?)"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotStoreRecord) }; let result = match stmt.execute( (uuid_optimized.clone().uuid, record.command, record.cwd, record.status, record.stderr, record.stdout, record.started_at.format("%Y-%m-%d %H:%M:%S").to_string(), record.finished_at.format("%Y-%m-%d %H:%M:%S").to_string() ) ) { Ok(_) => Ok(()), Err(err) => { 
error!("[{:?}] error storing in mysql {:?}", uuid_optimized.clone().uuid, err); return Err(RecordRepositoryError::CannotStoreRecord); } }; result } fn fetch_limit (&self, size: u32, limit: u32) -> Result<(Vec<Record>), RecordRepositoryError> { let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results ORDER BY started_at DESC LIMIT? OFFSET?"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotFetchRecord) }; let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt .execute((size, limit)) .map(|result| { result.map(|row| { self.row_to_record(row) }).collect() }) { Ok(records) => Ok(records), Err(err) =>
}; results } fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError> { let uuid_optimized = MysqlOptimizedUuid::from_uuid(id.clone()); let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results WHERE HEX(id) =?"; let mut stmt = match self.pool.prepare(query) { Ok(s) => s, Err(_) => return Err(RecordRepositoryError::CannotFetchRecord) }; let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt .execute((uuid_optimized.uuid, )) .map(|result| { result.map(|row| { self.row_to_record(row) }).collect() }) { Ok(records) => Ok(records), Err(err) => { error!("error fetching from mysql {:?}", err); return Err(RecordRepositoryError::CannotDenormalizeRecord) } }; let records: Vec<Record> = results.unwrap(); let result: Result<(Record), RecordRepositoryError> = match records.len() { 1 => { Ok(records[0].clone()) }, _ => return Err(RecordRepositoryError::RecordNotFound) }; result } } #[cfg(test)] mod tests { use super::MysqlOptimizedUuid; #[test] fn optimized_uuid() { let uuid = String::from("58e0a7d7-eebc-11d8-9669-0800200c9a66"); let optimized_uuid = MysqlOptimizedUuid::from_uuid(uuid); assert_eq!("11d8eebc58e0a7d796690800200c9a66", optimized_uuid.uuid); assert_eq!("58e0a7d7-eebc-11d8-9669-0800200c9a66", optimized_uuid.to_uuid()); } }
{ error!("error fetching from mysql {:?}", err); return Err(RecordRepositoryError::CannotDenormalizeRecord) }
conditional_block
mozmap.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `MozMap` (open-ended dictionary) type. use dom::bindings::conversions::jsid_to_string; use dom::bindings::str::DOMString; use js::conversions::{FromJSValConvertible, ToJSValConvertible, ConversionResult}; use js::jsapi::GetPropertyKeys; use js::jsapi::HandleValue; use js::jsapi::JSContext; use js::jsapi::JSITER_OWNONLY; use js::jsapi::JSPROP_ENUMERATE; use js::jsapi::JS_DefineUCProperty2; use js::jsapi::JS_GetPropertyById; use js::jsapi::JS_NewPlainObject; use js::jsapi::MutableHandleValue; use js::jsval::ObjectValue; use js::jsval::UndefinedValue; use js::rust::IdVector; use std::collections::HashMap; use std::ops::Deref; /// The `MozMap` (open-ended dictionary) type. #[derive(Clone)] pub struct MozMap<T> { map: HashMap<DOMString, T>, } impl<T> MozMap<T> { /// Create an empty `MozMap`. pub fn new() -> Self { MozMap { map: HashMap::new(), } } } impl<T> Deref for MozMap<T> { type Target = HashMap<DOMString, T>; fn deref(&self) -> &HashMap<DOMString, T> { &self.map } } impl<T, C> FromJSValConvertible for MozMap<T> where T: FromJSValConvertible<Config=C>, C: Clone, { type Config = C; unsafe fn from_jsval(cx: *mut JSContext, value: HandleValue, config: C) -> Result<ConversionResult<Self>, ()>
ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)), }; let key = jsid_to_string(cx, id.handle()).unwrap(); map.insert(key, property); } Ok(ConversionResult::Success(MozMap { map: map, })) } } impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> { #[inline] unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) { rooted!(in(cx) let js_object = JS_NewPlainObject(cx)); assert!(!js_object.handle().is_null()); rooted!(in(cx) let mut js_value = UndefinedValue()); for (key, value) in &self.map { let key = key.encode_utf16().collect::<Vec<_>>(); value.to_jsval(cx, js_value.handle_mut()); assert!(JS_DefineUCProperty2(cx, js_object.handle(), key.as_ptr(), key.len(), js_value.handle(), JSPROP_ENUMERATE, None, None)); } rval.set(ObjectValue(js_object.handle().get())); } }
{ if !value.is_object() { return Ok(ConversionResult::Failure("MozMap value was not an object".into())); } rooted!(in(cx) let object = value.to_object()); let ids = IdVector::new(cx); assert!(GetPropertyKeys(cx, object.handle(), JSITER_OWNONLY, ids.get())); let mut map = HashMap::new(); for id in &*ids { rooted!(in(cx) let id = *id); rooted!(in(cx) let mut property = UndefinedValue()); if !JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) { return Err(()); } let property = match try!(T::from_jsval(cx, property.handle(), config.clone())) { ConversionResult::Success(property) => property,
identifier_body
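
Every record in this dump carries a prefix, a suffix, the removed middle, and a fim_type label. A quick way to sanity-check a record is to confirm that prefix + middle + suffix reproduces the original source; the helper below is a sketch written for that purpose and is not part of any record.

/// Rebuild the original source text from one fill-in-the-middle record.
fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    let mut out = String::with_capacity(prefix.len() + middle.len() + suffix.len());
    out.push_str(prefix);
    out.push_str(middle);
    out.push_str(suffix);
    out
}

fn main() {
    // Tiny stand-in for a real record; for the mozmap.rs record above, the
    // middle slots back between the from_jsval signature and the Failure arm.
    assert_eq!(reassemble("fn new", "() -> Self ", "{ todo!() }"),
               "fn new() -> Self { todo!() }");
}
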
mozmap.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `MozMap` (open-ended dictionary) type. use dom::bindings::conversions::jsid_to_string; use dom::bindings::str::DOMString; use js::conversions::{FromJSValConvertible, ToJSValConvertible, ConversionResult}; use js::jsapi::GetPropertyKeys; use js::jsapi::HandleValue; use js::jsapi::JSContext; use js::jsapi::JSITER_OWNONLY; use js::jsapi::JSPROP_ENUMERATE; use js::jsapi::JS_DefineUCProperty2; use js::jsapi::JS_GetPropertyById; use js::jsapi::JS_NewPlainObject; use js::jsapi::MutableHandleValue; use js::jsval::ObjectValue; use js::jsval::UndefinedValue; use js::rust::IdVector; use std::collections::HashMap; use std::ops::Deref; /// The `MozMap` (open-ended dictionary) type. #[derive(Clone)] pub struct MozMap<T> { map: HashMap<DOMString, T>, } impl<T> MozMap<T> { /// Create an empty `MozMap`. pub fn new() -> Self { MozMap { map: HashMap::new(), } } } impl<T> Deref for MozMap<T> { type Target = HashMap<DOMString, T>; fn deref(&self) -> &HashMap<DOMString, T> { &self.map } }
where T: FromJSValConvertible<Config=C>, C: Clone, { type Config = C; unsafe fn from_jsval(cx: *mut JSContext, value: HandleValue, config: C) -> Result<ConversionResult<Self>, ()> { if !value.is_object() { return Ok(ConversionResult::Failure("MozMap value was not an object".into())); } rooted!(in(cx) let object = value.to_object()); let ids = IdVector::new(cx); assert!(GetPropertyKeys(cx, object.handle(), JSITER_OWNONLY, ids.get())); let mut map = HashMap::new(); for id in &*ids { rooted!(in(cx) let id = *id); rooted!(in(cx) let mut property = UndefinedValue()); if !JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) { return Err(()); } let property = match try!(T::from_jsval(cx, property.handle(), config.clone())) { ConversionResult::Success(property) => property, ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)), }; let key = jsid_to_string(cx, id.handle()).unwrap(); map.insert(key, property); } Ok(ConversionResult::Success(MozMap { map: map, })) } } impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> { #[inline] unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) { rooted!(in(cx) let js_object = JS_NewPlainObject(cx)); assert!(!js_object.handle().is_null()); rooted!(in(cx) let mut js_value = UndefinedValue()); for (key, value) in &self.map { let key = key.encode_utf16().collect::<Vec<_>>(); value.to_jsval(cx, js_value.handle_mut()); assert!(JS_DefineUCProperty2(cx, js_object.handle(), key.as_ptr(), key.len(), js_value.handle(), JSPROP_ENUMERATE, None, None)); } rval.set(ObjectValue(js_object.handle().get())); } }
impl<T, C> FromJSValConvertible for MozMap<T>
random_line_split
mozmap.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `MozMap` (open-ended dictionary) type. use dom::bindings::conversions::jsid_to_string; use dom::bindings::str::DOMString; use js::conversions::{FromJSValConvertible, ToJSValConvertible, ConversionResult}; use js::jsapi::GetPropertyKeys; use js::jsapi::HandleValue; use js::jsapi::JSContext; use js::jsapi::JSITER_OWNONLY; use js::jsapi::JSPROP_ENUMERATE; use js::jsapi::JS_DefineUCProperty2; use js::jsapi::JS_GetPropertyById; use js::jsapi::JS_NewPlainObject; use js::jsapi::MutableHandleValue; use js::jsval::ObjectValue; use js::jsval::UndefinedValue; use js::rust::IdVector; use std::collections::HashMap; use std::ops::Deref; /// The `MozMap` (open-ended dictionary) type. #[derive(Clone)] pub struct MozMap<T> { map: HashMap<DOMString, T>, } impl<T> MozMap<T> { /// Create an empty `MozMap`. pub fn
() -> Self { MozMap { map: HashMap::new(), } } } impl<T> Deref for MozMap<T> { type Target = HashMap<DOMString, T>; fn deref(&self) -> &HashMap<DOMString, T> { &self.map } } impl<T, C> FromJSValConvertible for MozMap<T> where T: FromJSValConvertible<Config=C>, C: Clone, { type Config = C; unsafe fn from_jsval(cx: *mut JSContext, value: HandleValue, config: C) -> Result<ConversionResult<Self>, ()> { if !value.is_object() { return Ok(ConversionResult::Failure("MozMap value was not an object".into())); } rooted!(in(cx) let object = value.to_object()); let ids = IdVector::new(cx); assert!(GetPropertyKeys(cx, object.handle(), JSITER_OWNONLY, ids.get())); let mut map = HashMap::new(); for id in &*ids { rooted!(in(cx) let id = *id); rooted!(in(cx) let mut property = UndefinedValue()); if !JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) { return Err(()); } let property = match try!(T::from_jsval(cx, property.handle(), config.clone())) { ConversionResult::Success(property) => property, ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)), }; let key = jsid_to_string(cx, id.handle()).unwrap(); map.insert(key, property); } Ok(ConversionResult::Success(MozMap { map: map, })) } } impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> { #[inline] unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) { rooted!(in(cx) let js_object = JS_NewPlainObject(cx)); assert!(!js_object.handle().is_null()); rooted!(in(cx) let mut js_value = UndefinedValue()); for (key, value) in &self.map { let key = key.encode_utf16().collect::<Vec<_>>(); value.to_jsval(cx, js_value.handle_mut()); assert!(JS_DefineUCProperty2(cx, js_object.handle(), key.as_ptr(), key.len(), js_value.handle(), JSPROP_ENUMERATE, None, None)); } rval.set(ObjectValue(js_object.handle().get())); } }
new
identifier_name
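
Judging only from the middles seen so far, the four fim_type labels describe what kind of span was cut out: a match arm or branch body (conditional_block), a whole function body (identifier_body), a bare name such as new or test (identifier_name), or whatever falls at an arbitrary line boundary (random_line_split). The enum below encodes that inferred reading; it is an assumption drawn from these records, not taken from the dataset's own documentation.

/// Inferred meaning of the fim_type labels seen in these records.
#[derive(Debug)]
enum FimType {
    ConditionalBlock, // middle was a match arm / branch body, as in the mysql record
    IdentifierBody,   // middle was a whole function body, as in from_jsval above
    IdentifierName,   // middle was a single name such as `new`
    RandomLineSplit,  // middle was whatever fell between two arbitrary line cuts
}

fn parse_fim_type(label: &str) -> Option<FimType> {
    match label {
        "conditional_block" => Some(FimType::ConditionalBlock),
        "identifier_body" => Some(FimType::IdentifierBody),
        "identifier_name" => Some(FimType::IdentifierName),
        "random_line_split" => Some(FimType::RandomLineSplit),
        _ => None,
    }
}

fn main() {
    println!("{:?}", parse_fim_type("identifier_name"));
}
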
lib.rs
// Copyright 2013-2015, The Rust-GNOME Project Developers. // See the COPYRIGHT file at the top-level directory of this distribution. // Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT> /*! Bindings and wrappers for __GTK__ To implement __GTK+__ inheritance in rust, we implemented gtk superclasses as traits located in `rgtk::self::traits::*`. The various widgets implement these traits and live in `rgtk::gtk::widgets::*` and are rexported into `rgtk::gtk::*`. GTK Inheritance in rgtk ====================== You probably know but __Gtk+__ uses its own GObject system: inherited class and interface. To respect this design I follow a special design on __rgtk__: * Interface -> Implement them on a trait with only default methods. * Class -> Implement the construct on the class impl and other methods on a traits. * Sub-class -> Implement all the methods on the class. Exemple for GtkOrientable, GtkBox, GtkButtonBox: GtkOrientable is an interface with all the methods implemented as default method of the trait self::traits::Orientable. GtkBox is a class with constructors implemented on the struct `gtk::Box`, and the other method as default methods of the trait `self::traits::Box`. So `gtk::Box` implements `self::traits::Orientable` and `self::traits::Box`. GtkButtonBox is a sub-class of GtkBox, the struct `gtk::ButtonBox` implements all the methods of GtkButtonBox and the traits `self::traits::Orientable` and `self::traits::Box`. Finally all the gtk widgets implement the trait self::traits::Widget. */ //#![macro_use] #![allow(dead_code)] // TODO: drop this #![allow(raw_pointer_derive)] extern crate libc; extern crate glib_sys as glib_ffi; extern crate gobject_sys as gobject_ffi; extern crate gio_sys as gio_ffi; extern crate gdk_sys as gdk_ffi; extern crate gdk_pixbuf_sys as gdk_pixbuf_ffi; extern crate gtk_sys as ffi; extern crate cairo_sys as cairo_ffi; extern crate pango_sys as pango_ffi; extern crate glib; extern crate gdk; extern crate cairo; extern crate pango; pub use glib::ValuePublic; // These are/should be inlined pub use self::rt::{ init, main, main_quit, main_level, main_iteration, main_iteration_do, get_major_version, get_minor_version, get_micro_version, get_binary_age, get_interface_age, check_version, events_pending }; /// GTK Widgets for all versions pub use self::widgets::{ CssProvider, StyleContext, Widget, Window, Label, Button, Box, ButtonBox, Frame, AspectFrame, Fixed, Separator, FontButton, ToggleButton, CheckButton, ColorButton, LinkButton, Adjustment, ScaleButton, VolumeButton, Grid, EntryBuffer, Entry, Switch,
ProgressBar, Arrow, Calendar, Alignment, Expander, Paned, InfoBar, Toolbar, ToolItem, SeparatorToolItem, ToolButton, ToggleToolButton, MenuToolButton, Dialog, AboutDialog, ColorChooserDialog, FontChooserDialog, MessageDialog, NoteBook, Overlay, Layout, FileFilter, FileChooserDialog, AppInfo, AppLaunchContext, AppChooserDialog, DrawingArea, PageSetup, PaperSize, PrintSettings, RecentChooserDialog, //PageSetupUnixDialog RecentInfo, RecentFilter, RecentFilterInfo, RecentData, RecentManager, TextView, TextBuffer, TextTagTable, ScrolledWindow, RadioButton, TreeView, TreeViewColumn, TreePath, TreeIter, TreeModel, ListStore, TreeStore, MenuItem, SeparatorMenuItem, CheckMenuItem, ScrollBar, Viewport, StatusBar, CellRendererText, CellRendererToggle, LockButton, EntryCompletion, IconView, TreeSelection, RecentChooserWidget, ComboBox, //g_type, ComboBoxText, TextMark, TextTag, TextAttributes, TextIter, TextChildAnchor, ToolPalette, ToolItemGroup, SizeGroup, AppChooserWidget, FileChooserWidget, ColorChooserWidget, FontChooserWidget, EventBox }; #[cfg(target_os = "linux")] pub use self::widgets::{Socket}; #[cfg(gtk_3_6)] /// GTK Widgets for versions since GTK 3.6 pub use self::widgets::{ MenuButton, LevelBar, }; #[cfg(gtk_3_10)] /// GTK Widgets for versions since GTK 3.10 pub use self::widgets::{ SearchEntry, SearchBar, Stack, StackSwitcher, Revealer, HeaderBar, ListBox, ListBoxRow, PlacesSidebar }; #[cfg(gtk_3_12)] /// GTK Widgets for versions since GTK 3.12 pub use self::widgets::{ FlowBox, FlowBoxChild, ActionBar, Popover }; /// GTK Enum types pub use ffi::GtkAccelFlags as AccelFlags; pub use ffi::GtkAlign as Align; pub use ffi::GtkArrowPlacement as ArrowPlacement; pub use ffi::GtkArrowType as ArrowType; pub use ffi::GtkAttachOptions as AttachOptions; pub use ffi::GtkBorderStyle as BorderStyle; pub use ffi::GtkBuilderError as BuilderError; pub use ffi::GtkButtonBoxStyle as ButtonBoxStyle; pub use ffi::GtkButtonsType as ButtonsType; pub use ffi::GtkCalendarDisplayOptions as CalendarDisplayOptions; pub use ffi::GtkCellRendererState as CellRendererState; pub use ffi::GtkCornerType as CornerType; pub use ffi::GtkDeleteType as DeleteType; pub use ffi::GtkDestDefaults as DestDefaults; pub use ffi::GtkDialogFlags as DialogFlags; pub use ffi::GtkDirectionType as DirectionType; pub use ffi::GtkDragResult as DragResult; pub use ffi::GtkEntryIconPosition as EntryIconPosition; pub use ffi::GtkExpanderStyle as ExpanderStyle; pub use ffi::GtkFileChooserAction as FileChooserAction; pub use ffi::GtkFileFilterFlags as FileFilterFlags; pub use ffi::GtkIMPreeditStyle as IMPreeditStyle; pub use ffi::GtkIMStatusStyle as IMStatusStyle; pub use ffi::GtkIconSize as IconSize; pub use ffi::GtkIconViewDropPosition as IconViewDropPosition; pub use ffi::GtkImageType as ImageType; pub use ffi::GtkInputHints as InputHints; pub use ffi::GtkInputPurpose as InputPurpose; pub use ffi::GtkJunctionSides as JunctionSides; pub use ffi::GtkJustification as Justification; pub use ffi::GtkLevelBarMode as LevelBarMode; pub use ffi::GtkLicense as License; pub use ffi::GtkMessageType as MessageType; pub use ffi::GtkMovementStep as MovementStep; pub use ffi::GtkNumberUpLayout as NumberUpLayout; pub use ffi::GtkOrientation as Orientation; pub use ffi::GtkPackType as PackType; pub use ffi::GtkPageOrientation as PageOrientation; pub use ffi::GtkPageSet as PageSet; pub use ffi::GtkPathPriorityType as PathPriorityType; pub use ffi::GtkPathType as PathType; pub use ffi::GtkPlacesOpenFlags as PlacesOpenFlags; pub use ffi::GtkPolicyType as PolicyType; 
pub use ffi::GtkPositionType as PositionType; pub use ffi::GtkPrintPages as PrintPages; pub use ffi::GtkRecentFilterFlags as RecentFilterFlags; pub use ffi::GtkRecentSortType as RecentSortType; pub use ffi::GtkRegionFlags as RegionFlags; pub use ffi::GtkReliefStyle as ReliefStyle; pub use ffi::GtkResizeMode as ResizeMode; pub use ffi::GtkResponseType as ResponseType; pub use ffi::GtkRevealerTransitionType as RevealerTransitionType; pub use ffi::GtkScrollStep as ScrollStep; pub use ffi::GtkScrollType as ScrollType; pub use ffi::GtkScrollablePolicy as ScrollablePolicy; pub use ffi::GtkSelectionMode as SelectionMode; pub use ffi::GtkSensitivityType as SensitivityType; pub use ffi::GtkShadowType as ShadowType; pub use ffi::GtkSizeGroupMode as SizeGroupMode; pub use ffi::GtkSizeRequestMode as SizeRequestMode; pub use ffi::GtkSortType as SortType; pub use ffi::GtkSpinButtonUpdatePolicy as SpinButtonUpdatePolicy; pub use ffi::GtkSpinType as SpinType; pub use ffi::GtkStackTransitionType as StackTransitionType; pub use ffi::GtkStateFlags as StateFlags; pub use ffi::GtkStateType as StateType; pub use ffi::GtkTextDirection as TextDirection; pub use ffi::GtkTextSearchFlags as TextSearchFlags; pub use ffi::GtkTextWindowType as TextWindowType; pub use ffi::GtkToolPaletteDragTargets as ToolPaletteDragTargets; pub use ffi::GtkToolbarStyle as ToolbarStyle; pub use ffi::GtkTreeModelFlags as TreeModelFlags; pub use ffi::GtkTreeViewColumnSizing as TreeViewColumnSizing; pub use ffi::GtkTreeViewGridLines as TreeViewGridLines; pub use ffi::GtkUnit as Unit; pub use ffi::GtkWidgetHelpType as WidgetHelpType; pub use ffi::GtkWindowPosition as WindowPosition; pub use ffi::GtkWindowType as WindowType; pub use ffi::GtkWrapMode as WrapMode; /// Gtk Traits pub use self::traits::FFIWidget; pub use self::traits::StyleProviderTrait; pub use self::traits::GObjectTrait; pub use self::traits::BoxTrait; pub use self::traits::ActionableTrait; pub use self::traits::AppChooserTrait; pub use self::traits::BinTrait; pub use self::traits::ButtonTrait; pub use self::traits::ButtonSignals; pub use self::traits::CellEditableTrait; pub use self::traits::CellLayoutTrait; pub use self::traits::CellRendererTrait; pub use self::traits::CheckMenuItemTrait; pub use self::traits::ColorChooserTrait; pub use self::traits::ComboBoxTrait; pub use self::traits::ContainerTrait; pub use self::traits::DialogButtons; pub use self::traits::DialogTrait; pub use self::traits::EditableTrait; pub use self::traits::EntryTrait; pub use self::traits::FileChooserTrait; pub use self::traits::FontChooserTrait; pub use self::traits::FrameTrait; pub use self::traits::LabelTrait; pub use self::traits::MenuItemTrait; pub use self::traits::MenuShellTrait; pub use self::traits::MiscTrait; pub use self::traits::OrientableTrait; pub use self::traits::RangeTrait; pub use self::traits::RecentChooserTrait; pub use self::traits::ScaleButtonTrait; pub use self::traits::ScrollableTrait; pub use self::traits::ScrolledWindowTrait; pub use self::traits::TextBufferTrait; pub use self::traits::ToggleButtonTrait; pub use self::traits::ToggleToolButtonTrait; pub use self::traits::ToolButtonTrait; pub use self::traits::ToolButtonSignals; pub use self::traits::ToolItemTrait; pub use self::traits::ToolShellTrait; pub use self::traits::WidgetTrait; pub use self::traits::WidgetSignals; pub use self::traits::WindowTrait; pub use self::traits::style_provider::{ STYLE_PROVIDER_PRIORITY_FALLBACK, STYLE_PROVIDER_PRIORITY_THEME, STYLE_PROVIDER_PRIORITY_SETTINGS, 
STYLE_PROVIDER_PRIORITY_APPLICATION, STYLE_PROVIDER_PRIORITY_USER }; pub const DIALOG_MODAL: DialogFlags = ffi::GTK_DIALOG_MODAL; /// GTK various struct pub use self::types::{ Tooltip, }; mod macros; mod cast; mod rt; pub mod traits; pub mod signal; pub mod widgets; pub mod types;
Range, Scale, SpinButton, Spinner, Image,
random_line_split
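
The lib.rs record above spells out rgtk's convention for mapping GObject inheritance onto Rust: interfaces become traits of default methods, classes pair a struct (holding the constructors) with a companion trait, and subclasses implement every parent trait. The sketch below illustrates only that shape with stand-in types — Orientation, Orientable, BoxTrait, GtkBox and ButtonBox here are simplified placeholders, not the real rgtk API.

#[derive(Clone, Copy, Debug, PartialEq)]
enum Orientation { Horizontal, Vertical }

// "Interface": behaviour lives in default methods over a small required surface.
trait Orientable {
    fn orientation(&self) -> Orientation;
    fn is_vertical(&self) -> bool {
        self.orientation() == Orientation::Vertical
    }
}

// "Class": constructor on the struct, shared behaviour on a companion trait.
struct GtkBox { orientation: Orientation }
impl GtkBox {
    fn new(orientation: Orientation) -> Self { GtkBox { orientation } }
}
trait BoxTrait: Orientable {
    fn pack_start(&self) { /* shared default behaviour would live here */ }
}
impl Orientable for GtkBox {
    fn orientation(&self) -> Orientation { self.orientation }
}
impl BoxTrait for GtkBox {}

// "Sub-class": implements its own methods plus every parent trait.
struct ButtonBox { inner: GtkBox }
impl Orientable for ButtonBox {
    fn orientation(&self) -> Orientation { self.inner.orientation() }
}
impl BoxTrait for ButtonBox {}

fn main() {
    let b = GtkBox::new(Orientation::Vertical);
    assert!(b.is_vertical());
    let bb = ButtonBox { inner: GtkBox::new(Orientation::Horizontal) };
    bb.pack_start();
    assert!(!bb.is_vertical());
}
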
privacy-struct-variant.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // aux-build:privacy-struct-variant.rs #![feature(struct_variant)] extern crate "privacy-struct-variant" as other; mod a { pub enum Foo { Bar { baz: int } } fn
() { let foo = Bar { baz: 42 }; let Bar { baz: _ } = foo; match foo { Bar { baz: _ } => {} } } } fn main() { let foo = a::Bar { baz: 42 }; //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private let a::Bar { baz: _ } = foo; //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private match foo { a::Bar { baz: _ } => {} } //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private // let foo = other::Bar { baz: 42 }; //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private let other::Bar { baz: _ } = foo; //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private match foo { other::Bar { baz: _ } => {} } //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private }
test
identifier_name
privacy-struct-variant.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // aux-build:privacy-struct-variant.rs #![feature(struct_variant)]
extern crate "privacy-struct-variant" as other; mod a { pub enum Foo { Bar { baz: int } } fn test() { let foo = Bar { baz: 42 }; let Bar { baz: _ } = foo; match foo { Bar { baz: _ } => {} } } } fn main() { let foo = a::Bar { baz: 42 }; //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private let a::Bar { baz: _ } = foo; //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private match foo { a::Bar { baz: _ } => {} } //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private // let foo = other::Bar { baz: 42 }; //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private let other::Bar { baz: _ } = foo; //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private match foo { other::Bar { baz: _ } => {} } //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private }
random_line_split
privacy-struct-variant.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // aux-build:privacy-struct-variant.rs #![feature(struct_variant)] extern crate "privacy-struct-variant" as other; mod a { pub enum Foo { Bar { baz: int } } fn test() { let foo = Bar { baz: 42 }; let Bar { baz: _ } = foo; match foo { Bar { baz: _ } => {} } } } fn main()
{ let foo = a::Bar { baz: 42 }; //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private let a::Bar { baz: _ } = foo; //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private match foo { a::Bar { baz: _ } => {} } //~^ ERROR: field `baz` of variant `Bar` of enum `a::Foo` is private // let foo = other::Bar { baz: 42 }; //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private let other::Bar { baz: _ } = foo; //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private match foo { other::Bar { baz: _ } => {} } //~^ ERROR: field `baz` of variant `Bar` of enum `privacy-struct-variant::Foo` is private }
identifier_body
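
The three privacy-struct-variant.rs records are compile-fail tests from pre-1.0 Rust (note int, #![feature(struct_variant)] and the string extern crate syntax), asserting that a struct variant's fields were private outside their module. Modern Rust dropped that rule: enum variant fields are as visible as the enum itself, and field privacy applies only to structs. A small sketch of the current behaviour, with illustrative module and type names:

mod a {
    pub struct Point {
        pub x: i32,
        y: i32, // private outside `a`
    }
    impl Point {
        pub fn new(x: i32, y: i32) -> Point { Point { x, y } }
    }

    pub enum Shape {
        // Variant fields inherit the enum's visibility, so `radius` is
        // constructible and matchable anywhere `Shape` is.
        Circle { radius: i32 },
    }
}

fn main() {
    let p = a::Point::new(1, 2);
    println!("x = {}", p.x);
    // println!("{}", p.y); // error: field `y` of struct `Point` is private

    let s = a::Shape::Circle { radius: 3 };
    if let a::Shape::Circle { radius } = s {
        println!("radius = {}", radius);
    }
}
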
chip8.rs
extern crate sdl2; use std::fs::File; use std::io::prelude::*; use std::time::{Duration, Instant}; #[cfg(feature="debugger")] use std::process::Command; use chip8::MEMORY_SIZE; use chip8::ROM_START_ADDRESS; use chip8::keyboard::Keyboard; use chip8::display::Display; #[cfg(feature="interpreter")] use chip8::interpreter::Interpreter; #[cfg(not(feature="interpreter"))] use chip8::recompiler::Recompiler; const STACK_SIZE: usize = 16; const V_REGISTERS_COUNT: usize = 16; pub struct Chip8 { pub memory: [u8; MEMORY_SIZE], pub stack: [u16; STACK_SIZE], pub register_v: [u8; V_REGISTERS_COUNT], pub register_i: u16, pub register_dt: u8, pub register_st: u8, pub register_pc: u16, pub register_sp: u8, pub keyboard: Keyboard, pub display: Display, time_last_frame: Instant } impl Chip8 { pub fn new() -> Chip8
chip8.memory[0x03] = 0x90; chip8.memory[0x04] = 0xF0; chip8.memory[0x05] = 0x20; chip8.memory[0x06] = 0x60; chip8.memory[0x07] = 0x20; chip8.memory[0x08] = 0x20; chip8.memory[0x09] = 0x70; chip8.memory[0x0A] = 0xF0; chip8.memory[0x0B] = 0x10; chip8.memory[0x0C] = 0xF0; chip8.memory[0x0D] = 0x80; chip8.memory[0x0E] = 0xF0; chip8.memory[0x0F] = 0xF0; chip8.memory[0x10] = 0x10; chip8.memory[0x11] = 0xF0; chip8.memory[0x12] = 0x10; chip8.memory[0x13] = 0xF0; chip8.memory[0x14] = 0x90; chip8.memory[0x15] = 0x90; chip8.memory[0x16] = 0xF0; chip8.memory[0x17] = 0x10; chip8.memory[0x18] = 0x10; chip8.memory[0x19] = 0xF0; chip8.memory[0x1A] = 0x80; chip8.memory[0x1B] = 0xF0; chip8.memory[0x1C] = 0x10; chip8.memory[0x1D] = 0xF0; chip8.memory[0x1E] = 0xF0; chip8.memory[0x1F] = 0x80; chip8.memory[0x20] = 0xF0; chip8.memory[0x21] = 0x90; chip8.memory[0x22] = 0xF0; chip8.memory[0x23] = 0xF0; chip8.memory[0x24] = 0x10; chip8.memory[0x25] = 0x20; chip8.memory[0x26] = 0x40; chip8.memory[0x27] = 0x40; chip8.memory[0x28] = 0xF0; chip8.memory[0x29] = 0x90; chip8.memory[0x2A] = 0xF0; chip8.memory[0x2B] = 0x90; chip8.memory[0x2C] = 0xF0; chip8.memory[0x2D] = 0xF0; chip8.memory[0x2E] = 0x90; chip8.memory[0x2F] = 0xF0; chip8.memory[0x30] = 0x10; chip8.memory[0x31] = 0xF0; chip8.memory[0x32] = 0xF0; chip8.memory[0x33] = 0x90; chip8.memory[0x34] = 0xF0; chip8.memory[0x35] = 0x90; chip8.memory[0x36] = 0x90; chip8.memory[0x37] = 0xE0; chip8.memory[0x38] = 0x90; chip8.memory[0x39] = 0xE0; chip8.memory[0x3A] = 0x90; chip8.memory[0x3B] = 0xE0; chip8.memory[0x3C] = 0xF0; chip8.memory[0x3D] = 0x80; chip8.memory[0x3E] = 0x80; chip8.memory[0x3F] = 0x80; chip8.memory[0x40] = 0xF0; chip8.memory[0x41] = 0xE0; chip8.memory[0x42] = 0x90; chip8.memory[0x43] = 0x90; chip8.memory[0x44] = 0x90; chip8.memory[0x45] = 0xE0; chip8.memory[0x46] = 0xF0; chip8.memory[0x47] = 0x80; chip8.memory[0x48] = 0xF0; chip8.memory[0x49] = 0x80; chip8.memory[0x4A] = 0xF0; chip8.memory[0x4B] = 0xF0; chip8.memory[0x4C] = 0x80; chip8.memory[0x4D] = 0xF0; chip8.memory[0x4E] = 0x80; chip8.memory[0x4F] = 0x80; chip8 } fn load_rom(&mut self, filename: String) { let mut file = File::open(filename).expect("file not found"); let mut buffer: Vec<u8> = Vec::new(); file.read_to_end(&mut buffer).expect("something went wrong reading the file"); self.memory[ROM_START_ADDRESS as usize..ROM_START_ADDRESS as usize + buffer.len()].copy_from_slice(&buffer); } #[cfg(feature="debugger")] fn print_registers(&self) { println!("PC= {:x}", self.register_pc); println!("I= {:x}", self.register_i); println!("DT= {:x}", self.register_dt); println!("ST= {:x}", self.register_st); println!("SP= {:x}", self.register_sp); for i in 0..16 as usize { println!("V{}= {:x}", i, self.register_v[i]); } let _ = Command::new("cmd.exe").arg("/c").arg("pause").status(); } pub extern "stdcall" fn refresh(&mut self) { // ~60Hz if self.time_last_frame.elapsed() >= Duration::from_millis(1000 / 60) { self.time_last_frame = Instant::now(); self.keyboard.update_key_states(); self.display.refresh(); if self.register_dt > 0 { self.register_dt -= 1 } if self.register_st > 0 { self.register_st -= 1; // TODO: beep } } } pub fn run(&mut self, filename: String) { self.load_rom(filename); #[cfg(not(feature="interpreter"))] let mut recompiler = Recompiler::new(&self.register_pc); loop { #[cfg(feature="debugger")] self.print_registers(); #[cfg(feature="interpreter")] Interpreter::execute_next_instruction(self); #[cfg(feature="interpreter")] self.refresh(); #[cfg(not(feature="interpreter"))] 
recompiler.execute_next_code_block(self); } } }
{ let sdl_context = sdl2::init().unwrap(); let mut chip8 = Chip8 { memory: [0; MEMORY_SIZE], stack: [0; STACK_SIZE], register_v: [0; V_REGISTERS_COUNT], register_i: 0, register_dt: 0, register_st: 0, register_pc: ROM_START_ADDRESS, register_sp: 0xFF, keyboard: Keyboard::new(&sdl_context), display: Display::new(&sdl_context), time_last_frame: Instant::now() }; chip8.memory[0x00] = 0xF0; chip8.memory[0x01] = 0x90; chip8.memory[0x02] = 0x90;
identifier_body
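
The chip8.rs record ends with a refresh routine that ticks the delay and sound timers and redraws only when time_last_frame.elapsed() reaches 1000/60 ms. The sketch below isolates that throttling pattern on its own; FrameLimiter and the placeholder work inside the loop are illustrative, not part of the emulator.

use std::time::{Duration, Instant};

struct FrameLimiter {
    last: Instant,
    period: Duration,
}

impl FrameLimiter {
    fn new(hz: u64) -> Self {
        FrameLimiter { last: Instant::now(), period: Duration::from_millis(1000 / hz) }
    }

    /// True (and the clock resets) at most once per period, mirroring the
    /// `time_last_frame.elapsed() >= Duration::from_millis(1000 / 60)` check.
    fn tick(&mut self) -> bool {
        if self.last.elapsed() >= self.period {
            self.last = Instant::now();
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut limiter = FrameLimiter::new(60);
    let start = Instant::now();
    let mut frames = 0u32;
    // Busy-wait for ~100 ms; a real emulator would execute CPU cycles here instead.
    while start.elapsed() < Duration::from_millis(100) {
        if limiter.tick() {
            frames += 1; // placeholder for: tick timers, redraw, poll the keyboard
        }
    }
    println!("ran {} frame(s) in ~100 ms", frames);
}
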