| file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
|---|---|---|---|---|
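Each row below is a fill-in-the-middle (FIM) split of one source file: concatenating `prefix` + `middle` + `suffix` reassembles the original text, and `fim_type` labels how the split was made. As a minimal sketch of how rows like these could be inspected, assuming the data sits in a Parquet file readable with the Hugging Face `datasets` library (the file name here is hypothetical, not part of this preview):

```python
# Minimal sketch: load FIM rows and rebuild each original source file.
# "fim_rows.parquet" is a hypothetical path; point it at the real data file.
from datasets import load_dataset

ds = load_dataset("parquet", data_files="fim_rows.parquet", split="train")

for row in ds:
    # Every row is one fill-in-the-middle split of a single file.
    original = row["prefix"] + row["middle"] + row["suffix"]
    print(row["file_name"], row["fim_type"], len(original))
```

Reassembling the three columns and comparing against the untouched file is also a cheap sanity check on a split.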
formatter.rs
|
use std::str;
use std::fmt;
use std::fmt::Write;
use std::iter::Iterator;
use std::string::String;
use types::*;
#[derive(Debug, PartialEq)]
pub struct Formatter<'a, 'b> {
pub key: &'a str,
fill: char,
align: Alignment, // default Right for numbers, Left for strings
sign: Sign,
alternate: bool,
width: Option<usize>,
thousands: bool,
precision: Option<usize>,
ty: Option<char>,
buff: &'b mut String,
pattern: &'a str,
}
fn is_alignment_token(c: char) -> bool {
match c {
'=' | '<' | '^' | '>' => true,
_ => false,
}
}
fn is_sign_element(c: char) -> bool {
match c {
' ' | '-' | '+' => true,
_ => false,
}
}
fn is_type_element(c: char) -> bool {
match c {
'b' |
'o' |
'x' |
'X' |
'e' |
'E' |
'f' |
'F' |
'%' |
's' |
'?' => true,
_ => false,
}
}
// get an integer from pos, returning the number of bytes
// consumed and the integer
fn get_integer(s: &[u8], pos: usize) -> (usize, Option<i64>) {
let (_, rest) = s.split_at(pos);
let mut consumed: usize = 0;
for b in rest {
match *b as char {
'0'...'9' => {}
_ => break,
};
consumed += 1;
}
if consumed == 0 {
(0, None)
} else {
let (intstr, _) = rest.split_at(consumed);
let val = unsafe {
// I think I can be reasonably sure that 0-9 chars are utf8 :)
match str::from_utf8_unchecked(intstr).parse::<i64>() {
Ok(v) => Some(v),
Err(_) => None,
}
};
(consumed, val)
}
}
#[derive(Debug)]
/// The format struct as it is defined in the python source
struct FmtPy {
pub fill: char,
pub align: char,
pub alternate: bool,
pub sign: char,
pub width: i64,
pub thousands: bool,
pub precision: i64,
pub ty: char,
}
fn parse_like_python(rest: &str) -> Result<FmtPy> {
// The rest of this was pretty much straight up copied from python's format parser
// All credit goes to python source file: formatter_unicode.c
//
let mut format = FmtPy {
fill: ' ',
align: '\0',
alternate: false,
sign: '\0',
width: -1,
thousands: false,
precision: -1,
ty: '\0',
};
let mut chars = rest.chars();
let fake_fill = match chars.next() {
Some(c) => c,
None => return Ok(format),
};
// from now on all format characters MUST be valid
// ASCII characters (fill and identifier were the
// only ones that weren't.
// Therefore we can use bytes for the rest
let rest = rest.as_bytes();
let mut align_specified = false;
let mut fill_specified = false;
let end: usize = rest.len();
let mut pos: usize = 0;
// If the second char is an alignment token,
// then fake_fill as fill
if end - pos >= 1 + fake_fill.len_utf8() &&
is_alignment_token(rest[pos + fake_fill.len_utf8()] as char) {
format.align = rest[pos + fake_fill.len_utf8()] as char;
format.fill = fake_fill;
fill_specified = true;
align_specified = true;
pos += 1 + fake_fill.len_utf8();
} else if end - pos >= 1 && is_alignment_token(fake_fill) {
format.align = fake_fill;
pos += fake_fill.len_utf8();
}
// Parse the various sign options
if end - pos >= 1 && is_sign_element(rest[pos] as char) {
format.sign = rest[pos] as char;
pos += 1;
}
// If the next character is #, we're in alternate mode. This only
// applies to integers.
if end - pos >= 1 && rest[pos] as char == '#' {
format.alternate = true;
pos += 1;
}
// The special case for 0-padding (backwards compat)
if !fill_specified && end - pos >= 1 && rest[pos] == '0' as u8 {
format.fill = '0';
if !align_specified {
format.align = '=';
}
pos += 1;
}
// check to make sure that val is good
let (consumed, val) = get_integer(rest, pos);
pos += consumed;
if consumed != 0 {
match val {
None => return Err(FmtError::Invalid("overflow error when parsing width".to_string())),
Some(v) => {
format.width = v;
}
}
}
// Comma signifies add thousands separators
if end - pos > 0 && rest[pos] as char == ',' {
format.thousands = true;
pos += 1;
}
// Parse field precision
if end - pos > 0 && rest[pos] as char == '.' {
pos += 1;
let (consumed, val) = get_integer(rest, pos);
if consumed != 0 {
match val {
None => {
return Err(FmtError::Invalid("overflow error when parsing precision"
.to_string()))
}
Some(v) => {
format.precision = v;
}
}
} else {
// Not having a precision after a dot is an error.
if consumed == 0 {
return Err(FmtError::Invalid("Format specifier missing precision".to_string()));
}
}
pos += consumed;
}
// Finally, parse the type field.
if end - pos > 1 {
// More than one char remain, invalid format specifier.
return Err(FmtError::Invalid("Invalid format specifier".to_string()));
}
if end - pos == 1 {
format.ty = rest[pos] as char;
if !is_type_element(format.ty) {
let mut msg = String::new();
write!(msg, "Invalid type specifier: {:?}", format.ty).unwrap();
return Err(FmtError::TypeError(msg));
}
// pos+=1;
}
// Do as much validating as we can, just by looking at the format
// specifier. Do not take into account what type of formatting
// we're doing (int, float, string).
if format.thousands {
match format.ty {
'd' |
'e' |
'f' |
'g' |
'E' |
'G' |
'%' |
'F' |
'\0' => {} /* These are allowed. See PEP 378.*/
_ => {
let mut msg = String::new();
write!(msg, "Invalid comma type: {}", format.ty).unwrap();
return Err(FmtError::Invalid(msg));
}
}
}
Ok(format)
}
impl<'a, 'b> Formatter<'a, 'b> {
/// create Formatter from format string
pub fn from_str(s: &'a str, buff: &'b mut String) -> Result<Formatter<'a, 'b>> {
let mut found_colon = false;
let mut chars = s.chars();
let mut c = match chars.next() {
Some(':') | None => {
return Err(FmtError::Invalid("must specify identifier".to_string()))
}
Some(c) => c,
};
let mut consumed = 0;
// find the identifier
loop {
consumed += c.len_utf8();
if c == ':' {
found_colon = true;
break;
}
c = match chars.next() {
Some(c) => c,
None => {
break;
}
};
}
let (identifier, rest) = s.split_at(consumed);
let identifier = if found_colon {
let (i, _) = identifier.split_at(identifier.len() - 1); // get rid of ':'
i
} else {
identifier
};
let format = try!(parse_like_python(rest));
Ok(Formatter {
key: identifier,
fill: format.fill,
align: match format.align {
'\0' => Alignment::Unspecified,
'<' => Alignment::Left,
'^' => Alignment::Center,
'>' => Alignment::Right,
'=' => Alignment::Equal,
_ => unreachable!(),
},
sign: match format.sign {
'\0' => Sign::Unspecified,
'+' => Sign::Plus,
'-' => Sign::Minus,
' ' => Sign::Space,
_ => unreachable!(),
},
alternate: format.alternate,
width: match format.width {
-1 => None,
_ => Some(format.width as usize),
},
thousands: format.thousands,
precision: match format.precision {
-1 => None,
_ => Some(format.precision as usize),
},
ty: match format.ty {
'\0' => None,
_ => Some(format.ty),
},
buff: buff,
pattern: s,
})
}
/// call this to re-write the original format string verbatim
/// back to the output
pub fn skip(mut self) -> Result<()> {
self.buff.push('{');
self.write_str(self.pattern).unwrap();
self.buff.push('}');
Ok(())
}
/// fill getter
pub fn fill(&self) -> char {
self.fill
}
/// align getter
pub fn align(&self) -> Alignment {
self.align.clone()
}
// provide default for unspecified alignment
pub fn set_default_align(&mut self, align: Alignment) {
if self.align == Alignment::Unspecified {
self.align = align
}
}
/// width getter
pub fn width(&self) -> Option<usize> {
self.width
}
/// thousands getter
pub fn thousands(&self) -> bool
|
/// precision getter
pub fn precision(&self) -> Option<usize> {
self.precision
}
/// set precision to None, used for formatting int, float, etc
pub fn set_precision(&mut self, precision: Option<usize>) {
self.precision = precision;
}
/// sign getter
pub fn sign(&self) -> Sign {
self.sign.clone()
}
/// sign plus getter
/// here because it is in fmt::Formatter
pub fn sign_plus(&self) -> bool {
self.sign == Sign::Plus
}
/// sign minus getter
/// here because it is in fmt::Formatter
pub fn sign_minus(&self) -> bool {
self.sign == Sign::Minus
}
/// alternate getter
pub fn alternate(&self) -> bool {
self.alternate
}
// sign_aware_zero_pad // Not supported
/// type getter
pub fn ty(&self) -> Option<char> {
self.ty
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting integers
pub fn is_int_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'b' | 'o' | 'x' | 'X' => true,
_ => false,
}
}
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting floats
pub fn is_float_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'f' | 'e' | 'E' => true,
_ => false,
}
}
}
}
impl<'a, 'b> fmt::Write for Formatter<'a, 'b> {
fn write_str(&mut self, s: &str) -> fmt::Result {
self.buff.write_str(s)
}
}
|
{
self.thousands
}
|
identifier_body
|
formatter.rs
|
use std::str;
use std::fmt;
use std::fmt::Write;
use std::iter::Iterator;
use std::string::String;
use types::*;
#[derive(Debug, PartialEq)]
pub struct Formatter<'a, 'b> {
pub key: &'a str,
fill: char,
align: Alignment, // default Right for numbers, Left for strings
sign: Sign,
alternate: bool,
width: Option<usize>,
thousands: bool,
precision: Option<usize>,
ty: Option<char>,
buff: &'b mut String,
pattern: &'a str,
}
fn is_alignment_token(c: char) -> bool {
match c {
'=' | '<' | '^' | '>' => true,
_ => false,
}
}
fn is_sign_element(c: char) -> bool {
match c {
' ' | '-' | '+' => true,
_ => false,
}
}
fn is_type_element(c: char) -> bool {
match c {
'b' |
'o' |
'x' |
'X' |
'e' |
'E' |
'f' |
'F' |
'%' |
's' |
'?' => true,
_ => false,
}
}
// get an integer from pos, returning the number of bytes
// consumed and the integer
fn get_integer(s: &[u8], pos: usize) -> (usize, Option<i64>) {
let (_, rest) = s.split_at(pos);
let mut consumed: usize = 0;
for b in rest {
match *b as char {
'0'...'9' => {}
_ => break,
};
consumed += 1;
}
if consumed == 0 {
(0, None)
} else {
let (intstr, _) = rest.split_at(consumed);
let val = unsafe {
// I think I can be reasonably sure that 0-9 chars are utf8 :)
match str::from_utf8_unchecked(intstr).parse::<i64>() {
Ok(v) => Some(v),
Err(_) => None,
}
};
(consumed, val)
}
}
#[derive(Debug)]
/// The format struct as it is defined in the python source
struct FmtPy {
pub fill: char,
pub align: char,
pub alternate: bool,
pub sign: char,
pub width: i64,
pub thousands: bool,
pub precision: i64,
pub ty: char,
}
fn parse_like_python(rest: &str) -> Result<FmtPy> {
// The rest of this was pretty much straight up copied from python's format parser
// All credit goes to python source file: formatter_unicode.c
//
let mut format = FmtPy {
fill: ' ',
align: '\0',
alternate: false,
sign: '\0',
width: -1,
thousands: false,
precision: -1,
ty: '\0',
};
let mut chars = rest.chars();
let fake_fill = match chars.next() {
Some(c) => c,
None => return Ok(format),
};
// from now on all format characters MUST be valid
// ASCII characters (fill and identifier were the
// only ones that weren't.
// Therefore we can use bytes for the rest
let rest = rest.as_bytes();
let mut align_specified = false;
let mut fill_specified = false;
let end: usize = rest.len();
let mut pos: usize = 0;
// If the second char is an alignment token,
// then fake_fill as fill
if end - pos >= 1 + fake_fill.len_utf8() &&
is_alignment_token(rest[pos + fake_fill.len_utf8()] as char) {
format.align = rest[pos + fake_fill.len_utf8()] as char;
format.fill = fake_fill;
fill_specified = true;
align_specified = true;
pos += 1 + fake_fill.len_utf8();
} else if end - pos >= 1 && is_alignment_token(fake_fill) {
format.align = fake_fill;
pos += fake_fill.len_utf8();
}
// Parse the various sign options
if end - pos >= 1 && is_sign_element(rest[pos] as char) {
format.sign = rest[pos] as char;
pos += 1;
}
// If the next character is #, we're in alternate mode. This only
// applies to integers.
if end - pos >= 1 && rest[pos] as char == '#' {
format.alternate = true;
pos += 1;
}
// The special case for 0-padding (backwards compat)
if !fill_specified && end - pos >= 1 && rest[pos] == '0' as u8 {
format.fill = '0';
if !align_specified {
format.align = '=';
}
pos += 1;
}
// check to make sure that val is good
let (consumed, val) = get_integer(rest, pos);
pos += consumed;
if consumed != 0 {
match val {
None => return Err(FmtError::Invalid("overflow error when parsing width".to_string())),
Some(v) => {
format.width = v;
}
}
}
// Comma signifies add thousands separators
if end - pos > 0 && rest[pos] as char == ',' {
format.thousands = true;
pos += 1;
}
// Parse field precision
if end - pos > 0 && rest[pos] as char == '.' {
pos += 1;
let (consumed, val) = get_integer(rest, pos);
if consumed != 0 {
match val {
None => {
return Err(FmtError::Invalid("overflow error when parsing precision"
.to_string()))
}
Some(v) => {
format.precision = v;
}
}
} else {
// Not having a precision after a dot is an error.
if consumed == 0 {
return Err(FmtError::Invalid("Format specifier missing precision".to_string()));
}
}
pos += consumed;
}
// Finally, parse the type field.
if end - pos > 1 {
// More than one char remain, invalid format specifier.
return Err(FmtError::Invalid("Invalid format specifier".to_string()));
}
if end - pos == 1 {
format.ty = rest[pos] as char;
if !is_type_element(format.ty) {
let mut msg = String::new();
write!(msg, "Invalid type specifier: {:?}", format.ty).unwrap();
return Err(FmtError::TypeError(msg));
}
// pos+=1;
}
// Do as much validating as we can, just by looking at the format
// specifier. Do not take into account what type of formatting
// we're doing (int, float, string).
if format.thousands {
match format.ty {
'd' |
'e' |
'f' |
'g' |
'E' |
'G' |
'%' |
'F' |
'\0' => {} /* These are allowed. See PEP 378.*/
_ => {
let mut msg = String::new();
write!(msg, "Invalid comma type: {}", format.ty).unwrap();
return Err(FmtError::Invalid(msg));
}
}
}
Ok(format)
}
impl<'a, 'b> Formatter<'a, 'b> {
/// create Formatter from format string
pub fn from_str(s: &'a str, buff: &'b mut String) -> Result<Formatter<'a, 'b>> {
let mut found_colon = false;
let mut chars = s.chars();
let mut c = match chars.next() {
Some(':') | None => {
return Err(FmtError::Invalid("must specify identifier".to_string()))
}
Some(c) => c,
};
let mut consumed = 0;
// find the identifier
loop {
consumed += c.len_utf8();
if c == ':'
|
c = match chars.next() {
Some(c) => c,
None => {
break;
}
};
}
let (identifier, rest) = s.split_at(consumed);
let identifier = if found_colon {
let (i, _) = identifier.split_at(identifier.len() - 1); // get rid of ':'
i
} else {
identifier
};
let format = try!(parse_like_python(rest));
Ok(Formatter {
key: identifier,
fill: format.fill,
align: match format.align {
'\0' => Alignment::Unspecified,
'<' => Alignment::Left,
'^' => Alignment::Center,
'>' => Alignment::Right,
'=' => Alignment::Equal,
_ => unreachable!(),
},
sign: match format.sign {
'\0' => Sign::Unspecified,
'+' => Sign::Plus,
'-' => Sign::Minus,
' ' => Sign::Space,
_ => unreachable!(),
},
alternate: format.alternate,
width: match format.width {
-1 => None,
_ => Some(format.width as usize),
},
thousands: format.thousands,
precision: match format.precision {
-1 => None,
_ => Some(format.precision as usize),
},
ty: match format.ty {
'\0' => None,
_ => Some(format.ty),
},
buff: buff,
pattern: s,
})
}
/// call this to re-write the original format string verbatim
/// back to the output
pub fn skip(mut self) -> Result<()> {
self.buff.push('{');
self.write_str(self.pattern).unwrap();
self.buff.push('}');
Ok(())
}
/// fill getter
pub fn fill(&self) -> char {
self.fill
}
/// align getter
pub fn align(&self) -> Alignment {
self.align.clone()
}
// provide default for unspecified alignment
pub fn set_default_align(&mut self, align: Alignment) {
if self.align == Alignment::Unspecified {
self.align = align
}
}
/// width getter
pub fn width(&self) -> Option<usize> {
self.width
}
/// thousands getter
pub fn thousands(&self) -> bool {
self.thousands
}
/// precision getter
pub fn precision(&self) -> Option<usize> {
self.precision
}
/// set precision to None, used for formatting int, float, etc
pub fn set_precision(&mut self, precision: Option<usize>) {
self.precision = precision;
}
/// sign getter
pub fn sign(&self) -> Sign {
self.sign.clone()
}
/// sign plus getter
/// here because it is in fmt::Formatter
pub fn sign_plus(&self) -> bool {
self.sign == Sign::Plus
}
/// sign minus getter
/// here because it is in fmt::Formatter
pub fn sign_minus(&self) -> bool {
self.sign == Sign::Minus
}
/// alternate getter
pub fn alternate(&self) -> bool {
self.alternate
}
// sign_aware_zero_pad // Not supported
/// type getter
pub fn ty(&self) -> Option<char> {
self.ty
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting integers
pub fn is_int_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'b' | 'o' | 'x' | 'X' => true,
_ => false,
}
}
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting floats
pub fn is_float_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'f' | 'e' | 'E' => true,
_ => false,
}
}
}
}
impl<'a, 'b> fmt::Write for Formatter<'a, 'b> {
fn write_str(&mut self, s: &str) -> fmt::Result {
self.buff.write_str(s)
}
}
|
{
found_colon = true;
break;
}
|
conditional_block
|
formatter.rs
|
use std::str;
use std::fmt;
use std::fmt::Write;
use std::iter::Iterator;
use std::string::String;
use types::*;
#[derive(Debug, PartialEq)]
pub struct Formatter<'a, 'b> {
pub key: &'a str,
fill: char,
align: Alignment, // default Right for numbers, Left for strings
sign: Sign,
alternate: bool,
width: Option<usize>,
thousands: bool,
precision: Option<usize>,
ty: Option<char>,
buff: &'b mut String,
pattern: &'a str,
}
fn is_alignment_token(c: char) -> bool {
match c {
'=' | '<' | '^' | '>' => true,
_ => false,
}
}
fn is_sign_element(c: char) -> bool {
match c {
' ' | '-' | '+' => true,
_ => false,
}
}
fn is_type_element(c: char) -> bool {
match c {
'b' |
'o' |
'x' |
'X' |
'e' |
'E' |
'f' |
'F' |
'%' |
's' |
'?' => true,
_ => false,
}
}
// get an integer from pos, returning the number of bytes
// consumed and the integer
fn get_integer(s: &[u8], pos: usize) -> (usize, Option<i64>) {
let (_, rest) = s.split_at(pos);
let mut consumed: usize = 0;
for b in rest {
match *b as char {
'0'...'9' => {}
_ => break,
};
consumed += 1;
}
if consumed == 0 {
(0, None)
} else {
let (intstr, _) = rest.split_at(consumed);
let val = unsafe {
// I think I can be reasonably sure that 0-9 chars are utf8 :)
match str::from_utf8_unchecked(intstr).parse::<i64>() {
Ok(v) => Some(v),
Err(_) => None,
}
};
(consumed, val)
}
}
#[derive(Debug)]
/// The format struct as it is defined in the python source
struct FmtPy {
pub fill: char,
pub align: char,
pub alternate: bool,
pub sign: char,
pub width: i64,
pub thousands: bool,
pub precision: i64,
pub ty: char,
}
fn parse_like_python(rest: &str) -> Result<FmtPy> {
// The rest of this was pretty much straight up copied from python's format parser
// All credit goes to python source file: formatter_unicode.c
//
let mut format = FmtPy {
fill: ' ',
align: '\0',
alternate: false,
sign: '\0',
width: -1,
thousands: false,
precision: -1,
ty: '\0',
};
let mut chars = rest.chars();
let fake_fill = match chars.next() {
Some(c) => c,
None => return Ok(format),
};
// from now on all format characters MUST be valid
// ASCII characters (fill and identifier were the
// only ones that weren't.
// Therefore we can use bytes for the rest
let rest = rest.as_bytes();
let mut align_specified = false;
let mut fill_specified = false;
let end: usize = rest.len();
let mut pos: usize = 0;
// If the second char is an alignment token,
// then fake_fill as fill
if end - pos >= 1 + fake_fill.len_utf8() &&
is_alignment_token(rest[pos + fake_fill.len_utf8()] as char) {
format.align = rest[pos + fake_fill.len_utf8()] as char;
format.fill = fake_fill;
fill_specified = true;
align_specified = true;
pos += 1 + fake_fill.len_utf8();
} else if end - pos >= 1 && is_alignment_token(fake_fill) {
format.align = fake_fill;
pos += fake_fill.len_utf8();
}
// Parse the various sign options
if end - pos >= 1 && is_sign_element(rest[pos] as char) {
format.sign = rest[pos] as char;
pos += 1;
}
// If the next character is #, we're in alternate mode. This only
// applies to integers.
if end - pos >= 1 && rest[pos] as char == '#' {
format.alternate = true;
pos += 1;
}
// The special case for 0-padding (backwards compat)
if !fill_specified && end - pos >= 1 && rest[pos] == '0' as u8 {
format.fill = '0';
if !align_specified {
format.align = '=';
}
pos += 1;
}
// check to make sure that val is good
let (consumed, val) = get_integer(rest, pos);
pos += consumed;
if consumed != 0 {
match val {
None => return Err(FmtError::Invalid("overflow error when parsing width".to_string())),
Some(v) => {
format.width = v;
}
}
}
// Comma signifies add thousands separators
if end - pos > 0 && rest[pos] as char == ',' {
format.thousands = true;
pos += 1;
}
// Parse field precision
if end - pos > 0 && rest[pos] as char == '.' {
pos += 1;
|
return Err(FmtError::Invalid("overflow error when parsing precision"
.to_string()))
}
Some(v) => {
format.precision = v;
}
}
} else {
// Not having a precision after a dot is an error.
if consumed == 0 {
return Err(FmtError::Invalid("Format specifier missing precision".to_string()));
}
}
pos += consumed;
}
// Finally, parse the type field.
if end - pos > 1 {
// More than one char remain, invalid format specifier.
return Err(FmtError::Invalid("Invalid format specifier".to_string()));
}
if end - pos == 1 {
format.ty = rest[pos] as char;
if !is_type_element(format.ty) {
let mut msg = String::new();
write!(msg, "Invalid type specifier: {:?}", format.ty).unwrap();
return Err(FmtError::TypeError(msg));
}
// pos+=1;
}
// Do as much validating as we can, just by looking at the format
// specifier. Do not take into account what type of formatting
// we're doing (int, float, string).
if format.thousands {
match format.ty {
'd' |
'e' |
'f' |
'g' |
'E' |
'G' |
'%' |
'F' |
'\0' => {} /* These are allowed. See PEP 378.*/
_ => {
let mut msg = String::new();
write!(msg, "Invalid comma type: {}", format.ty).unwrap();
return Err(FmtError::Invalid(msg));
}
}
}
Ok(format)
}
impl<'a, 'b> Formatter<'a, 'b> {
/// create Formatter from format string
pub fn from_str(s: &'a str, buff: &'b mut String) -> Result<Formatter<'a, 'b>> {
let mut found_colon = false;
let mut chars = s.chars();
let mut c = match chars.next() {
Some(':') | None => {
return Err(FmtError::Invalid("must specify identifier".to_string()))
}
Some(c) => c,
};
let mut consumed = 0;
// find the identifier
loop {
consumed += c.len_utf8();
if c == ':' {
found_colon = true;
break;
}
c = match chars.next() {
Some(c) => c,
None => {
break;
}
};
}
let (identifier, rest) = s.split_at(consumed);
let identifier = if found_colon {
let (i, _) = identifier.split_at(identifier.len() - 1); // get rid of ':'
i
} else {
identifier
};
let format = try!(parse_like_python(rest));
Ok(Formatter {
key: identifier,
fill: format.fill,
align: match format.align {
'\0' => Alignment::Unspecified,
'<' => Alignment::Left,
'^' => Alignment::Center,
'>' => Alignment::Right,
'=' => Alignment::Equal,
_ => unreachable!(),
},
sign: match format.sign {
'\0' => Sign::Unspecified,
'+' => Sign::Plus,
'-' => Sign::Minus,
' ' => Sign::Space,
_ => unreachable!(),
},
alternate: format.alternate,
width: match format.width {
-1 => None,
_ => Some(format.width as usize),
},
thousands: format.thousands,
precision: match format.precision {
-1 => None,
_ => Some(format.precision as usize),
},
ty: match format.ty {
'\0' => None,
_ => Some(format.ty),
},
buff: buff,
pattern: s,
})
}
/// call this to re-write the original format string verbatim
/// back to the output
pub fn skip(mut self) -> Result<()> {
self.buff.push('{');
self.write_str(self.pattern).unwrap();
self.buff.push('}');
Ok(())
}
/// fill getter
pub fn fill(&self) -> char {
self.fill
}
/// align getter
pub fn align(&self) -> Alignment {
self.align.clone()
}
// provide default for unspecified alignment
pub fn set_default_align(&mut self, align: Alignment) {
if self.align == Alignment::Unspecified {
self.align = align
}
}
/// width getter
pub fn width(&self) -> Option<usize> {
self.width
}
/// thousands getter
pub fn thousands(&self) -> bool {
self.thousands
}
/// precision getter
pub fn precision(&self) -> Option<usize> {
self.precision
}
/// set precision to None, used for formatting int, float, etc
pub fn set_precision(&mut self, precision: Option<usize>) {
self.precision = precision;
}
/// sign getter
pub fn sign(&self) -> Sign {
self.sign.clone()
}
/// sign plus getter
/// here because it is in fmt::Formatter
pub fn sign_plus(&self) -> bool {
self.sign == Sign::Plus
}
/// sign minus getter
/// here because it is in fmt::Formatter
pub fn sign_minus(&self) -> bool {
self.sign == Sign::Minus
}
/// alternate getter
pub fn alternate(&self) -> bool {
self.alternate
}
// sign_aware_zero_pad // Not supported
/// type getter
pub fn ty(&self) -> Option<char> {
self.ty
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting integers
pub fn is_int_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'b' | 'o' | 'x' | 'X' => true,
_ => false,
}
}
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting floats
pub fn is_float_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'f' | 'e' | 'E' => true,
_ => false,
}
}
}
}
impl<'a, 'b> fmt::Write for Formatter<'a, 'b> {
fn write_str(&mut self, s: &str) -> fmt::Result {
self.buff.write_str(s)
}
}
|
let (consumed, val) = get_integer(rest, pos);
if consumed != 0 {
match val {
None => {
|
random_line_split
|
formatter.rs
|
use std::str;
use std::fmt;
use std::fmt::Write;
use std::iter::Iterator;
use std::string::String;
use types::*;
#[derive(Debug, PartialEq)]
pub struct Formatter<'a, 'b> {
pub key: &'a str,
fill: char,
align: Alignment, // default Right for numbers, Left for strings
sign: Sign,
alternate: bool,
width: Option<usize>,
thousands: bool,
precision: Option<usize>,
ty: Option<char>,
buff: &'b mut String,
pattern: &'a str,
}
fn is_alignment_token(c: char) -> bool {
match c {
'=' | '<' | '^' | '>' => true,
_ => false,
}
}
fn is_sign_element(c: char) -> bool {
match c {
' ' | '-' | '+' => true,
_ => false,
}
}
fn is_type_element(c: char) -> bool {
match c {
'b' |
'o' |
'x' |
'X' |
'e' |
'E' |
'f' |
'F' |
'%' |
's' |
'?' => true,
_ => false,
}
}
// get an integer from pos, returning the number of bytes
// consumed and the integer
fn get_integer(s: &[u8], pos: usize) -> (usize, Option<i64>) {
let (_, rest) = s.split_at(pos);
let mut consumed: usize = 0;
for b in rest {
match *b as char {
'0'...'9' => {}
_ => break,
};
consumed += 1;
}
if consumed == 0 {
(0, None)
} else {
let (intstr, _) = rest.split_at(consumed);
let val = unsafe {
// I think I can be reasonably sure that 0-9 chars are utf8 :)
match str::from_utf8_unchecked(intstr).parse::<i64>() {
Ok(v) => Some(v),
Err(_) => None,
}
};
(consumed, val)
}
}
#[derive(Debug)]
/// The format struct as it is defined in the python source
struct FmtPy {
pub fill: char,
pub align: char,
pub alternate: bool,
pub sign: char,
pub width: i64,
pub thousands: bool,
pub precision: i64,
pub ty: char,
}
fn parse_like_python(rest: &str) -> Result<FmtPy> {
// The rest of this was pretty much straight up copied from python's format parser
// All credit goes to python source file: formatter_unicode.c
//
let mut format = FmtPy {
fill: ' ',
align: '\0',
alternate: false,
sign: '\0',
width: -1,
thousands: false,
precision: -1,
ty: '\0',
};
let mut chars = rest.chars();
let fake_fill = match chars.next() {
Some(c) => c,
None => return Ok(format),
};
// from now on all format characters MUST be valid
// ASCII characters (fill and identifier were the
// only ones that weren't.
// Therefore we can use bytes for the rest
let rest = rest.as_bytes();
let mut align_specified = false;
let mut fill_specified = false;
let end: usize = rest.len();
let mut pos: usize = 0;
// If the second char is an alignment token,
// then fake_fill as fill
if end - pos >= 1 + fake_fill.len_utf8() &&
is_alignment_token(rest[pos + fake_fill.len_utf8()] as char) {
format.align = rest[pos + fake_fill.len_utf8()] as char;
format.fill = fake_fill;
fill_specified = true;
align_specified = true;
pos += 1 + fake_fill.len_utf8();
} else if end - pos >= 1 && is_alignment_token(fake_fill) {
format.align = fake_fill;
pos += fake_fill.len_utf8();
}
// Parse the various sign options
if end - pos >= 1 && is_sign_element(rest[pos] as char) {
format.sign = rest[pos] as char;
pos += 1;
}
// If the next character is #, we're in alternate mode. This only
// applies to integers.
if end - pos >= 1 && rest[pos] as char == '#' {
format.alternate = true;
pos += 1;
}
// The special case for 0-padding (backwards compat)
if !fill_specified && end - pos >= 1 && rest[pos] == '0' as u8 {
format.fill = '0';
if !align_specified {
format.align = '=';
}
pos += 1;
}
// check to make sure that val is good
let (consumed, val) = get_integer(rest, pos);
pos += consumed;
if consumed != 0 {
match val {
None => return Err(FmtError::Invalid("overflow error when parsing width".to_string())),
Some(v) => {
format.width = v;
}
}
}
// Comma signifies add thousands separators
if end - pos > 0 && rest[pos] as char == ',' {
format.thousands = true;
pos += 1;
}
// Parse field precision
if end - pos > 0 && rest[pos] as char == '.' {
pos += 1;
let (consumed, val) = get_integer(rest, pos);
if consumed != 0 {
match val {
None => {
return Err(FmtError::Invalid("overflow error when parsing precision"
.to_string()))
}
Some(v) => {
format.precision = v;
}
}
} else {
// Not having a precision after a dot is an error.
if consumed == 0 {
return Err(FmtError::Invalid("Format specifier missing precision".to_string()));
}
}
pos += consumed;
}
// Finally, parse the type field.
if end - pos > 1 {
// More than one char remain, invalid format specifier.
return Err(FmtError::Invalid("Invalid format specifier".to_string()));
}
if end - pos == 1 {
format.ty = rest[pos] as char;
if !is_type_element(format.ty) {
let mut msg = String::new();
write!(msg, "Invalid type specifier: {:?}", format.ty).unwrap();
return Err(FmtError::TypeError(msg));
}
// pos+=1;
}
// Do as much validating as we can, just by looking at the format
// specifier. Do not take into account what type of formatting
// we're doing (int, float, string).
if format.thousands {
match format.ty {
'd' |
'e' |
'f' |
'g' |
'E' |
'G' |
'%' |
'F' |
'\0' => {} /* These are allowed. See PEP 378.*/
_ => {
let mut msg = String::new();
write!(msg, "Invalid comma type: {}", format.ty).unwrap();
return Err(FmtError::Invalid(msg));
}
}
}
Ok(format)
}
impl<'a, 'b> Formatter<'a, 'b> {
/// create Formatter from format string
pub fn
|
(s: &'a str, buff: &'b mut String) -> Result<Formatter<'a, 'b>> {
let mut found_colon = false;
let mut chars = s.chars();
let mut c = match chars.next() {
Some(':') | None => {
return Err(FmtError::Invalid("must specify identifier".to_string()))
}
Some(c) => c,
};
let mut consumed = 0;
// find the identifier
loop {
consumed += c.len_utf8();
if c == ':' {
found_colon = true;
break;
}
c = match chars.next() {
Some(c) => c,
None => {
break;
}
};
}
let (identifier, rest) = s.split_at(consumed);
let identifier = if found_colon {
let (i, _) = identifier.split_at(identifier.len() - 1); // get rid of ':'
i
} else {
identifier
};
let format = try!(parse_like_python(rest));
Ok(Formatter {
key: identifier,
fill: format.fill,
align: match format.align {
'\0' => Alignment::Unspecified,
'<' => Alignment::Left,
'^' => Alignment::Center,
'>' => Alignment::Right,
'=' => Alignment::Equal,
_ => unreachable!(),
},
sign: match format.sign {
'\0' => Sign::Unspecified,
'+' => Sign::Plus,
'-' => Sign::Minus,
' ' => Sign::Space,
_ => unreachable!(),
},
alternate: format.alternate,
width: match format.width {
-1 => None,
_ => Some(format.width as usize),
},
thousands: format.thousands,
precision: match format.precision {
-1 => None,
_ => Some(format.precision as usize),
},
ty: match format.ty {
'\0' => None,
_ => Some(format.ty),
},
buff: buff,
pattern: s,
})
}
/// call this to re-write the original format string verbatim
/// back to the output
pub fn skip(mut self) -> Result<()> {
self.buff.push('{');
self.write_str(self.pattern).unwrap();
self.buff.push('}');
Ok(())
}
/// fill getter
pub fn fill(&self) -> char {
self.fill
}
/// align getter
pub fn align(&self) -> Alignment {
self.align.clone()
}
// provide default for unspecified alignment
pub fn set_default_align(&mut self, align: Alignment) {
if self.align == Alignment::Unspecified {
self.align = align
}
}
/// width getter
pub fn width(&self) -> Option<usize> {
self.width
}
/// thousands getter
pub fn thousands(&self) -> bool {
self.thousands
}
/// precision getter
pub fn precision(&self) -> Option<usize> {
self.precision
}
/// set precision to None, used for formatting int, float, etc
pub fn set_precision(&mut self, precision: Option<usize>) {
self.precision = precision;
}
/// sign getter
pub fn sign(&self) -> Sign {
self.sign.clone()
}
/// sign plus getter
/// here because it is in fmt::Formatter
pub fn sign_plus(&self) -> bool {
self.sign == Sign::Plus
}
/// sign minus getter
/// here because it is in fmt::Formatter
pub fn sign_minus(&self) -> bool {
self.sign == Sign::Minus
}
/// alternate getter
pub fn alternate(&self) -> bool {
self.alternate
}
// sign_aware_zero_pad // Not supported
/// type getter
pub fn ty(&self) -> Option<char> {
self.ty
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting integers
pub fn is_int_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'b' | 'o' | 'x' | 'X' => true,
_ => false,
}
}
}
/// UNSTABLE: in the future, this may return true if all validity
/// checks for a float return true
/// return true if ty is valid for formatting floats
pub fn is_float_type(&self) -> bool {
match self.ty {
None => true,
Some(c) => match c {
'f' | 'e' | 'E' => true,
_ => false,
}
}
}
}
impl<'a, 'b> fmt::Write for Formatter<'a, 'b> {
fn write_str(&mut self, s: &str) -> fmt::Result {
self.buff.write_str(s)
}
}
|
from_str
|
identifier_name
|
join.rs
|
use std::fmt;
use std::str;
use column::Column;
use condition::ConditionExpression;
use nom::branch::alt;
use nom::bytes::complete::tag_no_case;
use nom::combinator::map;
use nom::IResult;
use select::{JoinClause, SelectStatement};
use table::Table;
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum
|
{
/// A single table.
Table(Table),
/// A comma-separated (and implicitly joined) sequence of tables.
Tables(Vec<Table>),
/// A nested selection, represented as (query, alias).
NestedSelect(Box<SelectStatement>, Option<String>),
/// A nested join clause.
NestedJoin(Box<JoinClause>),
}
impl fmt::Display for JoinRightSide {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinRightSide::Table(ref t) => write!(f, "{}", t)?,
JoinRightSide::NestedSelect(ref q, ref a) => {
write!(f, "({})", q)?;
if a.is_some() {
write!(f, " AS {}", a.as_ref().unwrap())?;
}
}
JoinRightSide::NestedJoin(ref jc) => write!(f, "({})", jc)?,
_ => unimplemented!(),
}
Ok(())
}
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinOperator {
Join,
LeftJoin,
LeftOuterJoin,
RightJoin,
InnerJoin,
CrossJoin,
StraightJoin,
}
impl fmt::Display for JoinOperator {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinOperator::Join => write!(f, "JOIN")?,
JoinOperator::LeftJoin => write!(f, "LEFT JOIN")?,
JoinOperator::LeftOuterJoin => write!(f, "LEFT OUTER JOIN")?,
JoinOperator::RightJoin => write!(f, "RIGHT JOIN")?,
JoinOperator::InnerJoin => write!(f, "INNER JOIN")?,
JoinOperator::CrossJoin => write!(f, "CROSS JOIN")?,
JoinOperator::StraightJoin => write!(f, "STRAIGHT JOIN")?,
}
Ok(())
}
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinConstraint {
On(ConditionExpression),
Using(Vec<Column>),
}
impl fmt::Display for JoinConstraint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinConstraint::On(ref ce) => write!(f, "ON {}", ce)?,
JoinConstraint::Using(ref columns) => write!(
f,
"USING ({})",
columns
.iter()
.map(|c| format!("{}", c))
.collect::<Vec<_>>()
.join(", ")
)?,
}
Ok(())
}
}
// Parse join operators
pub fn join_operator(i: &[u8]) -> IResult<&[u8], JoinOperator> {
alt((
map(tag_no_case("join"), |_| JoinOperator::Join),
map(tag_no_case("left join"), |_| JoinOperator::LeftJoin),
map(tag_no_case("left outer join"), |_| {
JoinOperator::LeftOuterJoin
}),
map(tag_no_case("right join"), |_| JoinOperator::RightJoin),
map(tag_no_case("inner join"), |_| JoinOperator::InnerJoin),
map(tag_no_case("cross join"), |_| JoinOperator::CrossJoin),
map(tag_no_case("straight_join"), |_| JoinOperator::StraightJoin),
))(i)
}
#[cfg(test)]
mod tests {
use super::*;
use common::{FieldDefinitionExpression, Operator};
use condition::ConditionBase::*;
use condition::ConditionExpression::{self, *};
use condition::ConditionTree;
use select::{selection, JoinClause, SelectStatement};
#[test]
fn inner_join() {
let qstring = "SELECT tags.* FROM tags \
INNER JOIN taggings ON tags.id = taggings.tag_id";
let res = selection(qstring.as_bytes());
let ct = ConditionTree {
left: Box::new(Base(Field(Column::from("tags.id")))),
right: Box::new(Base(Field(Column::from("taggings.tag_id")))),
operator: Operator::Equal,
};
let join_cond = ConditionExpression::ComparisonOp(ct);
let expected_stmt = SelectStatement {
tables: vec![Table::from("tags")],
fields: vec![FieldDefinitionExpression::AllInTable("tags".into())],
join: vec![JoinClause {
operator: JoinOperator::InnerJoin,
right: JoinRightSide::Table(Table::from("taggings")),
constraint: JoinConstraint::On(join_cond),
}],
..Default::default()
};
let q = res.unwrap().1;
assert_eq!(q, expected_stmt);
assert_eq!(qstring, format!("{}", q));
}
}
|
JoinRightSide
|
identifier_name
|
join.rs
|
use std::fmt;
use std::str;
use column::Column;
use condition::ConditionExpression;
use nom::branch::alt;
use nom::bytes::complete::tag_no_case;
use nom::combinator::map;
use nom::IResult;
use select::{JoinClause, SelectStatement};
use table::Table;
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinRightSide {
/// A single table.
Table(Table),
/// A comma-separated (and implicitly joined) sequence of tables.
Tables(Vec<Table>),
/// A nested selection, represented as (query, alias).
NestedSelect(Box<SelectStatement>, Option<String>),
/// A nested join clause.
NestedJoin(Box<JoinClause>),
}
impl fmt::Display for JoinRightSide {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinRightSide::Table(ref t) => write!(f, "{}", t)?,
JoinRightSide::NestedSelect(ref q, ref a) => {
write!(f, "({})", q)?;
if a.is_some() {
write!(f, " AS {}", a.as_ref().unwrap())?;
}
}
JoinRightSide::NestedJoin(ref jc) => write!(f, "({})", jc)?,
_ => unimplemented!(),
}
Ok(())
}
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinOperator {
Join,
LeftJoin,
LeftOuterJoin,
RightJoin,
InnerJoin,
CrossJoin,
StraightJoin,
}
impl fmt::Display for JoinOperator {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinOperator::Join => write!(f, "JOIN")?,
JoinOperator::LeftJoin => write!(f, "LEFT JOIN")?,
JoinOperator::LeftOuterJoin => write!(f, "LEFT OUTER JOIN")?,
JoinOperator::RightJoin => write!(f, "RIGHT JOIN")?,
JoinOperator::InnerJoin => write!(f, "INNER JOIN")?,
JoinOperator::CrossJoin => write!(f, "CROSS JOIN")?,
JoinOperator::StraightJoin => write!(f, "STRAIGHT JOIN")?,
}
Ok(())
}
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinConstraint {
On(ConditionExpression),
Using(Vec<Column>),
}
impl fmt::Display for JoinConstraint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinConstraint::On(ref ce) => write!(f, "ON {}", ce)?,
JoinConstraint::Using(ref columns) => write!(
f,
"USING ({})",
columns
.iter()
.map(|c| format!("{}", c))
.collect::<Vec<_>>()
.join(", ")
)?,
}
Ok(())
}
}
// Parse join operators
pub fn join_operator(i: &[u8]) -> IResult<&[u8], JoinOperator> {
alt((
map(tag_no_case("join"), |_| JoinOperator::Join),
map(tag_no_case("left join"), |_| JoinOperator::LeftJoin),
map(tag_no_case("left outer join"), |_| {
JoinOperator::LeftOuterJoin
}),
map(tag_no_case("right join"), |_| JoinOperator::RightJoin),
map(tag_no_case("inner join"), |_| JoinOperator::InnerJoin),
map(tag_no_case("cross join"), |_| JoinOperator::CrossJoin),
map(tag_no_case("straight_join"), |_| JoinOperator::StraightJoin),
))(i)
}
#[cfg(test)]
mod tests {
use super::*;
use common::{FieldDefinitionExpression, Operator};
use condition::ConditionBase::*;
use condition::ConditionExpression::{self, *};
use condition::ConditionTree;
use select::{selection, JoinClause, SelectStatement};
#[test]
fn inner_join()
|
..Default::default()
};
let q = res.unwrap().1;
assert_eq!(q, expected_stmt);
assert_eq!(qstring, format!("{}", q));
}
}
|
{
let qstring = "SELECT tags.* FROM tags \
INNER JOIN taggings ON tags.id = taggings.tag_id";
let res = selection(qstring.as_bytes());
let ct = ConditionTree {
left: Box::new(Base(Field(Column::from("tags.id")))),
right: Box::new(Base(Field(Column::from("taggings.tag_id")))),
operator: Operator::Equal,
};
let join_cond = ConditionExpression::ComparisonOp(ct);
let expected_stmt = SelectStatement {
tables: vec![Table::from("tags")],
fields: vec![FieldDefinitionExpression::AllInTable("tags".into())],
join: vec![JoinClause {
operator: JoinOperator::InnerJoin,
right: JoinRightSide::Table(Table::from("taggings")),
constraint: JoinConstraint::On(join_cond),
}],
|
identifier_body
|
join.rs
|
use std::fmt;
use std::str;
use column::Column;
use condition::ConditionExpression;
use nom::branch::alt;
use nom::bytes::complete::tag_no_case;
use nom::combinator::map;
use nom::IResult;
use select::{JoinClause, SelectStatement};
use table::Table;
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinRightSide {
/// A single table.
Table(Table),
/// A comma-separated (and implicitly joined) sequence of tables.
Tables(Vec<Table>),
/// A nested selection, represented as (query, alias).
NestedSelect(Box<SelectStatement>, Option<String>),
/// A nested join clause.
NestedJoin(Box<JoinClause>),
}
impl fmt::Display for JoinRightSide {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinRightSide::Table(ref t) => write!(f, "{}", t)?,
JoinRightSide::NestedSelect(ref q, ref a) => {
write!(f, "({})", q)?;
if a.is_some() {
write!(f, " AS {}", a.as_ref().unwrap())?;
}
}
JoinRightSide::NestedJoin(ref jc) => write!(f, "({})", jc)?,
_ => unimplemented!(),
}
Ok(())
}
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinOperator {
Join,
LeftJoin,
LeftOuterJoin,
RightJoin,
InnerJoin,
CrossJoin,
StraightJoin,
}
impl fmt::Display for JoinOperator {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinOperator::Join => write!(f, "JOIN")?,
JoinOperator::LeftJoin => write!(f, "LEFT JOIN")?,
JoinOperator::LeftOuterJoin => write!(f, "LEFT OUTER JOIN")?,
JoinOperator::RightJoin => write!(f, "RIGHT JOIN")?,
JoinOperator::InnerJoin => write!(f, "INNER JOIN")?,
JoinOperator::CrossJoin => write!(f, "CROSS JOIN")?,
JoinOperator::StraightJoin => write!(f, "STRAIGHT JOIN")?,
}
Ok(())
}
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum JoinConstraint {
On(ConditionExpression),
Using(Vec<Column>),
}
impl fmt::Display for JoinConstraint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
JoinConstraint::On(ref ce) => write!(f, "ON {}", ce)?,
JoinConstraint::Using(ref columns) => write!(
f,
"USING ({})",
columns
.iter()
.map(|c| format!("{}", c))
.collect::<Vec<_>>()
.join(", ")
)?,
}
Ok(())
}
}
// Parse join operators
pub fn join_operator(i: &[u8]) -> IResult<&[u8], JoinOperator> {
alt((
map(tag_no_case("join"), |_| JoinOperator::Join),
map(tag_no_case("left join"), |_| JoinOperator::LeftJoin),
map(tag_no_case("left outer join"), |_| {
JoinOperator::LeftOuterJoin
}),
map(tag_no_case("right join"), |_| JoinOperator::RightJoin),
map(tag_no_case("inner join"), |_| JoinOperator::InnerJoin),
map(tag_no_case("cross join"), |_| JoinOperator::CrossJoin),
map(tag_no_case("straight_join"), |_| JoinOperator::StraightJoin),
))(i)
}
#[cfg(test)]
mod tests {
use super::*;
use common::{FieldDefinitionExpression, Operator};
use condition::ConditionBase::*;
use condition::ConditionExpression::{self, *};
use condition::ConditionTree;
use select::{selection, JoinClause, SelectStatement};
#[test]
fn inner_join() {
let qstring = "SELECT tags.* FROM tags \
INNER JOIN taggings ON tags.id = taggings.tag_id";
let res = selection(qstring.as_bytes());
let ct = ConditionTree {
left: Box::new(Base(Field(Column::from("tags.id")))),
right: Box::new(Base(Field(Column::from("taggings.tag_id")))),
operator: Operator::Equal,
};
let join_cond = ConditionExpression::ComparisonOp(ct);
let expected_stmt = SelectStatement {
tables: vec![Table::from("tags")],
fields: vec![FieldDefinitionExpression::AllInTable("tags".into())],
join: vec![JoinClause {
operator: JoinOperator::InnerJoin,
right: JoinRightSide::Table(Table::from("taggings")),
constraint: JoinConstraint::On(join_cond),
}],
..Default::default()
};
|
}
}
|
let q = res.unwrap().1;
assert_eq!(q, expected_stmt);
assert_eq!(qstring, format!("{}", q));
|
random_line_split
|
ecs.rs
|
use std::collections::HashMap;
use std::iter;
use world;
use entity::{Entity};
use components;
use component_ref::{ComponentRef, ComponentRefMut};
use stats;
use world::{WorldState};
/// Entity component system.
#[derive(RustcDecodable, RustcEncodable)]
pub struct Ecs {
next_idx: usize,
reusable_idxs: Vec<usize>,
// Could use Bitv for active, but I can't bother to write the serializer...
active: Vec<bool>,
parent: HashMap<usize, usize>,
}
impl Ecs {
pub fn new() -> Ecs {
Ecs {
next_idx: 0,
reusable_idxs: vec![],
active: vec![],
parent: HashMap::new(),
}
}
pub fn new_entity(&mut self, parent: Option<Entity>) -> Entity {
// Get the entity idx, reuse old ones to keep the indexing compact.
let idx = match self.reusable_idxs.pop() {
None => {
let ret = self.next_idx;
self.next_idx += 1;
ret
}
Some(idx) => idx
};
if let Some(Entity(p_idx)) = parent {
assert!(self.active[p_idx]);
self.parent.insert(idx, p_idx);
}
if self.active.len() <= idx {
let padding = idx + 1 - self.active.len();
self.active.extend(iter::repeat(false).take(padding));
assert!(self.active.len() == idx + 1);
}
assert!(!self.active[idx]);
self.active[idx] = true;
Entity(idx)
}
/// Delete an entity from the entity component system.
///
/// XXX: The user is currently responsible for never using an entity
/// handle again after delete_entity has been called on it. Using an
/// entity handle after deletion may return another entity's contents.
pub fn delete(&mut self, Entity(idx): Entity) {
assert!(self.active[idx]);
self.parent.remove(&idx);
self.reusable_idxs.push(idx);
self.active[idx] = false;
}
/// Return an iterator for the entities. The iterator will not be
/// invalidated if entities are added or removed during iteration. The
/// iterator also won't maintain a lock on the world singleton outside
/// calling next.
///
/// XXX: It is currently unspecified whether entities added during
/// iteration will show up in the iteration or not.
pub fn iter(&self) -> EntityIter {
EntityIter(0)
}
/// Return the optional parent entity of an entity.
pub fn parent(&self, Entity(idx): Entity) -> Option<Entity> {
self.parent.get(&idx).map(|&idx| Entity(idx))
}
/// Change the parent of a live entity
pub fn reparent(&mut self, Entity(idx): Entity, Entity(new_parent_idx): Entity) {
self.parent.insert(idx, new_parent_idx);
}
}
pub struct EntityIter(usize);
impl Iterator for EntityIter {
type Item = Entity;
fn next(&mut self) -> Option<Entity> {
world::with(|w| {
let &mut EntityIter(ref mut idx) = self;
loop {
if *idx >= w.ecs.active.len() { return None; }
let ret = Entity(*idx);
*idx += 1;
if !w.ecs.active[*idx - 1] { continue; }
return Some(ret);
}
})
}
}
////////////////////////////////////////////////////////////////////////
// The one big macro for defining the full set of available entity components
// in one place.
macro_rules! components {
{
// Declare the list of types which are included as components in the
// game's entity component system. Also declare the non-mutable and
// mutable accessor names for them. Example
//
// ```notrust
// [Mesh, meshes, meshes_mut],
// ```
$([$comp:ty, $access:ident, $access_mut:ident],)+
} => {
// The master container for all the components.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Comps {
$($access: HashMap<usize, Option<$comp>>,)+
}
/// Container for all regular entity components.
impl Comps {
pub fn new() -> Comps {
Comps {
$($access: HashMap::new(),)+
}
}
/// Remove the given entity from all the contained components.
pub fn remove(&mut self, Entity(idx): Entity) {
$(self.$access.remove(&idx);)+
}
}
// Implement the Component trait for the type; this provides a uniform
// syntax for adding component values to entities used by the entity
// factory.
$(
|
// XXX: Figure out how to move self into the closure to
// get rid of the .clone().
fn add_to(self, e: Entity) { world::with_mut(|w| w.$access_mut().insert(e, self.clone())) }
}
)+
// Implement the trait for accessing all the components that
// WorldState will implement
pub trait ComponentAccess<'a> {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp>;
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp>;
)+
}
impl<'a> ComponentAccess<'a> for WorldState {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp> {
ComponentRef::new(&self.ecs, &self.comps.$access)
}
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp> {
ComponentRefMut::new(&mut self.ecs, &mut self.comps.$access)
}
)+
}
}
}
pub trait Component {
/// Create a uniform syntax for attaching components to entities to allow
/// a fluent API for constructing prototypes.
fn add_to(self, e: Entity);
}
// Component loadout for the game.
components! {
[components::IsPrototype, prototypes, prototypes_mut],
[components::Desc, descs, descs_mut],
[components::MapMemory, map_memories, map_memories_mut],
[stats::Stats, stats, stats_mut],
[components::Spawn, spawns, spawns_mut],
[components::Health, healths, healths_mut],
[components::Brain, brains, brains_mut],
[components::Item, items, items_mut],
[components::StatsCache, stats_caches, stats_caches_mut],
[components::Colonist, colonists, colonists_mut],
}
|
impl Component for $comp {
|
random_line_split
|
ecs.rs
|
use std::collections::HashMap;
use std::iter;
use world;
use entity::{Entity};
use components;
use component_ref::{ComponentRef, ComponentRefMut};
use stats;
use world::{WorldState};
/// Entity component system.
#[derive(RustcDecodable, RustcEncodable)]
pub struct Ecs {
next_idx: usize,
reusable_idxs: Vec<usize>,
// Could use Bitv for active, but I can't bother to write the serializer...
active: Vec<bool>,
parent: HashMap<usize, usize>,
}
impl Ecs {
pub fn new() -> Ecs {
Ecs {
next_idx: 0,
reusable_idxs: vec![],
active: vec![],
parent: HashMap::new(),
}
}
pub fn new_entity(&mut self, parent: Option<Entity>) -> Entity {
// Get the entity idx, reuse old ones to keep the indexing compact.
let idx = match self.reusable_idxs.pop() {
None => {
let ret = self.next_idx;
self.next_idx += 1;
ret
}
Some(idx) => idx
};
if let Some(Entity(p_idx)) = parent {
assert!(self.active[p_idx]);
self.parent.insert(idx, p_idx);
}
if self.active.len() <= idx {
let padding = idx + 1 - self.active.len();
self.active.extend(iter::repeat(false).take(padding));
assert!(self.active.len() == idx + 1);
}
assert!(!self.active[idx]);
self.active[idx] = true;
Entity(idx)
}
/// Delete an entity from the entity component system.
///
/// XXX: The user is currently responsible for never using an entity
/// handle again after delete_entity has been called on it. Using an
/// entity handle after deletion may return another entity's contents.
pub fn delete(&mut self, Entity(idx): Entity) {
assert!(self.active[idx]);
self.parent.remove(&idx);
self.reusable_idxs.push(idx);
self.active[idx] = false;
}
/// Return an iterator for the entities. The iterator will not be
/// invalidated if entities are added or removed during iteration. The
/// iterator also won't maintain a lock on the world singleton outside
/// calling next.
///
/// XXX: It is currently unspecified whether entities added during
/// iteration will show up in the iteration or not.
pub fn iter(&self) -> EntityIter {
EntityIter(0)
}
/// Return the optional parent entity of an entity.
pub fn parent(&self, Entity(idx): Entity) -> Option<Entity> {
self.parent.get(&idx).map(|&idx| Entity(idx))
}
/// Change the parent of a live entity
pub fn reparent(&mut self, Entity(idx): Entity, Entity(new_parent_idx): Entity) {
self.parent.insert(idx, new_parent_idx);
}
}
pub struct EntityIter(usize);
impl Iterator for EntityIter {
type Item = Entity;
fn next(&mut self) -> Option<Entity> {
world::with(|w| {
let &mut EntityIter(ref mut idx) = self;
loop {
if *idx >= w.ecs.active.len()
|
let ret = Entity(*idx);
*idx += 1;
if !w.ecs.active[*idx - 1] { continue; }
return Some(ret);
}
})
}
}
////////////////////////////////////////////////////////////////////////
// The one big macro for defining the full set of available entity components
// in one place.
macro_rules! components {
{
// Declare the list of types which are included as components in the
// game's entity component system. Also declare the non-mutable and
// mutable accessor names for them. Example
//
// ```notrust
// [Mesh, meshes, meshes_mut],
// ```
$([$comp:ty, $access:ident, $access_mut:ident],)+
} => {
// The master container for all the components.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Comps {
$($access: HashMap<usize, Option<$comp>>,)+
}
/// Container for all regular entity components.
impl Comps {
pub fn new() -> Comps {
Comps {
$($access: HashMap::new(),)+
}
}
/// Remove the given entity from all the contained components.
pub fn remove(&mut self, Entity(idx): Entity) {
$(self.$access.remove(&idx);)+
}
}
// Implement the Component trait for the type; this provides a uniform
// syntax for adding component values to entities used by the entity
// factory.
$(
impl Component for $comp {
// XXX: Figure out how to move self into the closure to
// get rid of the .clone().
fn add_to(self, e: Entity) { world::with_mut(|w| w.$access_mut().insert(e, self.clone())) }
}
)+
// Implement the trait for accessing all the components that
// WorldState will implement
pub trait ComponentAccess<'a> {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp>;
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp>;
)+
}
impl<'a> ComponentAccess<'a> for WorldState {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp> {
ComponentRef::new(&self.ecs, &self.comps.$access)
}
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp> {
ComponentRefMut::new(&mut self.ecs, &mut self.comps.$access)
}
)+
}
}
}
pub trait Component {
/// Create a uniform syntax for attaching components to entities to allow
/// a fluent API for constructing prototypes.
fn add_to(self, e: Entity);
}
// Component loadout for the game.
components! {
[components::IsPrototype, prototypes, prototypes_mut],
[components::Desc, descs, descs_mut],
[components::MapMemory, map_memories, map_memories_mut],
[stats::Stats, stats, stats_mut],
[components::Spawn, spawns, spawns_mut],
[components::Health, healths, healths_mut],
[components::Brain, brains, brains_mut],
[components::Item, items, items_mut],
[components::StatsCache, stats_caches, stats_caches_mut],
[components::Colonist, colonists, colonists_mut],
}
|
{ return None; }
|
conditional_block
|
ecs.rs
|
use std::collections::HashMap;
use std::iter;
use world;
use entity::{Entity};
use components;
use component_ref::{ComponentRef, ComponentRefMut};
use stats;
use world::{WorldState};
/// Entity component system.
#[derive(RustcDecodable, RustcEncodable)]
pub struct Ecs {
next_idx: usize,
reusable_idxs: Vec<usize>,
// Could use Bitv for active, but I can't bother to write the serializer...
active: Vec<bool>,
parent: HashMap<usize, usize>,
}
impl Ecs {
pub fn new() -> Ecs {
Ecs {
next_idx: 0,
reusable_idxs: vec![],
active: vec![],
parent: HashMap::new(),
}
}
pub fn new_entity(&mut self, parent: Option<Entity>) -> Entity {
// Get the entity idx, reuse old ones to keep the indexing compact.
let idx = match self.reusable_idxs.pop() {
None => {
let ret = self.next_idx;
self.next_idx += 1;
ret
}
Some(idx) => idx
};
if let Some(Entity(p_idx)) = parent {
assert!(self.active[p_idx]);
self.parent.insert(idx, p_idx);
}
if self.active.len() <= idx {
let padding = idx + 1 - self.active.len();
self.active.extend(iter::repeat(false).take(padding));
assert!(self.active.len() == idx + 1);
}
assert!(!self.active[idx]);
self.active[idx] = true;
Entity(idx)
}
/// Delete an entity from the entity component system.
///
/// XXX: The user is currently responsible for never using an entity
/// handle again after delete_entity has been called on it. Using an
/// entity handle after deletion may return another entity's contents.
pub fn delete(&mut self, Entity(idx): Entity) {
assert!(self.active[idx]);
self.parent.remove(&idx);
self.reusable_idxs.push(idx);
self.active[idx] = false;
}
/// Return an iterator for the entities. The iterator will not be
/// invalidated if entities are added or removed during iteration. The
/// iterator also won't maintain a lock on the world singleton outside
/// calling next.
///
/// XXX: It is currently unspecified whether entities added during
/// iteration will show up in the iteration or not.
pub fn iter(&self) -> EntityIter {
EntityIter(0)
}
/// Return the optional parent entity of an entity.
pub fn parent(&self, Entity(idx): Entity) -> Option<Entity> {
self.parent.get(&idx).map(|&idx| Entity(idx))
}
/// Change the parent of a live entity
pub fn reparent(&mut self, Entity(idx): Entity, Entity(new_parent_idx): Entity) {
self.parent.insert(idx, new_parent_idx);
}
}
pub struct EntityIter(usize);
impl Iterator for EntityIter {
type Item = Entity;
fn
|
(&mut self) -> Option<Entity> {
world::with(|w| {
let &mut EntityIter(ref mut idx) = self;
loop {
if *idx >= w.ecs.active.len() { return None; }
let ret = Entity(*idx);
*idx += 1;
if !w.ecs.active[*idx - 1] { continue; }
return Some(ret);
}
})
}
}
////////////////////////////////////////////////////////////////////////
// The one big macro for defining the full set of available entity components
// in one place.
macro_rules! components {
{
// Declare the list of types which are included as components in the
// game's entity component system. Also declare the non-mutable and
// mutable accessor names for them. Example
//
// ```notrust
// [Mesh, meshes, meshes_mut],
// ```
$([$comp:ty, $access:ident, $access_mut:ident],)+
} => {
// The master container for all the components.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Comps {
$($access: HashMap<usize, Option<$comp>>,)+
}
/// Container for all regular entity components.
impl Comps {
pub fn new() -> Comps {
Comps {
$($access: HashMap::new(),)+
}
}
/// Remove the given entity from all the contained components.
pub fn remove(&mut self, Entity(idx): Entity) {
$(self.$access.remove(&idx);)+
}
}
// Implement the Component trait for the type; this provides a uniform
// syntax for adding component values to entities, used by the entity
// factory.
$(
impl Component for $comp {
// XXX: Figure out how to move self into the closure to
// get rid of the .clone.
fn add_to(self, e: Entity) { world::with_mut(|w| w.$access_mut().insert(e, self.clone())) }
}
)+
// Implement the trait for accessing all the components that
// WorldState will implement
pub trait ComponentAccess<'a> {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp>;
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp>;
)+
}
impl<'a> ComponentAccess<'a> for WorldState {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp> {
ComponentRef::new(&self.ecs, &self.comps.$access)
}
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp> {
ComponentRefMut::new(&mut self.ecs, &mut self.comps.$access)
}
)+
}
}
}
pub trait Component {
/// Create a uniform syntax for attaching components to entities to allow
/// a fluent API for constructing prototypes.
fn add_to(self, e: Entity);
}
// Component loadout for the game.
components! {
[components::IsPrototype, prototypes, prototypes_mut],
[components::Desc, descs, descs_mut],
[components::MapMemory, map_memories, map_memories_mut],
[stats::Stats, stats, stats_mut],
[components::Spawn, spawns, spawns_mut],
[components::Health, healths, healths_mut],
[components::Brain, brains, brains_mut],
[components::Item, items, items_mut],
[components::StatsCache, stats_caches, stats_caches_mut],
[components::Colonist, colonists, colonists_mut],
}
|
next
|
identifier_name
|
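For a single `[Desc, descs, descs_mut]` entry, the `components!` macro generates roughly the following shape. This is a hand-written sketch, not the actual expansion: `ComponentRef`, the `world` singleton, and the derives are left out so the example stands alone, and the `Desc` type here is a made-up placeholder.

```rust
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
struct Desc(String);

/// What the master container looks like for one component entry.
struct Comps {
    descs: HashMap<usize, Option<Desc>>,
}

impl Comps {
    fn new() -> Comps {
        Comps { descs: HashMap::new() }
    }

    /// Drop every component belonging to the given entity index.
    fn remove(&mut self, idx: usize) {
        self.descs.remove(&idx);
    }
}

fn main() {
    let mut comps = Comps::new();
    comps.descs.insert(7, Some(Desc("door".to_string())));
    assert_eq!(comps.descs.get(&7), Some(&Some(Desc("door".to_string()))));
    comps.remove(7);
    assert!(comps.descs.get(&7).is_none());
}
```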
ecs.rs
|
use std::collections::HashMap;
use std::iter;
use world;
use entity::{Entity};
use components;
use component_ref::{ComponentRef, ComponentRefMut};
use stats;
use world::{WorldState};
/// Entity component system.
#[derive(RustcDecodable, RustcEncodable)]
pub struct Ecs {
next_idx: usize,
reusable_idxs: Vec<usize>,
// Could use Bitv for active, but I can't be bothered to write the serializer...
active: Vec<bool>,
parent: HashMap<usize, usize>,
}
impl Ecs {
pub fn new() -> Ecs {
Ecs {
next_idx: 0,
reusable_idxs: vec![],
active: vec![],
parent: HashMap::new(),
}
}
pub fn new_entity(&mut self, parent: Option<Entity>) -> Entity {
// Get the entity idx, reuse old ones to keep the indexing compact.
let idx = match self.reusable_idxs.pop() {
None => {
let ret = self.next_idx;
self.next_idx += 1;
ret
}
Some(idx) => idx
};
if let Some(Entity(p_idx)) = parent {
assert!(self.active[p_idx]);
self.parent.insert(idx, p_idx);
}
if self.active.len() <= idx {
let padding = idx + 1 - self.active.len();
self.active.extend(iter::repeat(false).take(padding));
assert!(self.active.len() == idx + 1);
}
assert!(!self.active[idx]);
self.active[idx] = true;
Entity(idx)
}
/// Delete an entity from the entity component system.
///
/// XXX: The user is currently responsible for never using an entity
/// handle again after delete_entity has been called on it. Using an
/// entity handle after deletion may return another entity's contents.
pub fn delete(&mut self, Entity(idx): Entity) {
assert!(self.active[idx]);
self.parent.remove(&idx);
self.reusable_idxs.push(idx);
self.active[idx] = false;
}
/// Return an iterator for the entities. The iterator will not be
/// invalidated if entities are added or removed during iteration. The
/// iterator also won't maintain a lock on the world singleton outside
/// calling next.
///
/// XXX: It is currently unspecified whether entities added during
/// iteration will show up in the iteration or not.
pub fn iter(&self) -> EntityIter {
EntityIter(0)
}
/// Return the optional parent entity of an entity.
pub fn parent(&self, Entity(idx): Entity) -> Option<Entity> {
self.parent.get(&idx).map(|&idx| Entity(idx))
}
/// Change the parent of a live entity
pub fn reparent(&mut self, Entity(idx): Entity, Entity(new_parent_idx): Entity)
|
}
pub struct EntityIter(usize);
impl Iterator for EntityIter {
type Item = Entity;
fn next(&mut self) -> Option<Entity> {
world::with(|w| {
let &mut EntityIter(ref mut idx) = self;
loop {
if *idx >= w.ecs.active.len() { return None; }
let ret = Entity(*idx);
*idx += 1;
if !w.ecs.active[*idx - 1] { continue; }
return Some(ret);
}
})
}
}
////////////////////////////////////////////////////////////////////////
// The one big macro for defining the full set of available entity components
// in one place.
macro_rules! components {
{
// Declare the list of types which are included as components in the
// game's entity component system. Also declare the non-mutable and
// mutable accessor names for them. Example
//
// ```notrust
// [Mesh, meshes, meshes_mut],
// ```
$([$comp:ty, $access:ident, $access_mut:ident],)+
} => {
// The master container for all the components.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Comps {
$($access: HashMap<usize, Option<$comp>>,)+
}
/// Container for all regular entity components.
impl Comps {
pub fn new() -> Comps {
Comps {
$($access: HashMap::new(),)+
}
}
/// Remove the given entity from all the contained components.
pub fn remove(&mut self, Entity(idx): Entity) {
$(self.$access.remove(&idx);)+
}
}
// Implement the Component trait for the type; this provides a uniform
// syntax for adding component values to entities, used by the entity
// factory.
$(
impl Component for $comp {
// XXX: Figure out how to move self into the closure to
// get rid of the .clone.
fn add_to(self, e: Entity) { world::with_mut(|w| w.$access_mut().insert(e, self.clone())) }
}
)+
// Implement the trait for accessing all the components that
// WorldState will implement
pub trait ComponentAccess<'a> {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp>;
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp>;
)+
}
impl<'a> ComponentAccess<'a> for WorldState {
$(
fn $access(&'a self) -> ComponentRef<'a, $comp> {
ComponentRef::new(&self.ecs, &self.comps.$access)
}
fn $access_mut(&'a mut self) -> ComponentRefMut<'a, $comp> {
ComponentRefMut::new(&mut self.ecs, &mut self.comps.$access)
}
)+
}
}
}
pub trait Component {
/// Create a uniform syntax for attaching components to entities to allow
/// a fluent API for constructing prototypes.
fn add_to(self, e: Entity);
}
// Component loadout for the game.
components! {
[components::IsPrototype, prototypes, prototypes_mut],
[components::Desc, descs, descs_mut],
[components::MapMemory, map_memories, map_memories_mut],
[stats::Stats, stats, stats_mut],
[components::Spawn, spawns, spawns_mut],
[components::Health, healths, healths_mut],
[components::Brain, brains, brains_mut],
[components::Item, items, items_mut],
[components::StatsCache, stats_caches, stats_caches_mut],
[components::Colonist, colonists, colonists_mut],
}
|
{
self.parent.insert(idx, new_parent_idx);
}
|
identifier_body
|
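Entity allocation in `new_entity`/`delete` is a free list over indices: deleted slots go onto `reusable_idxs` and are handed out again before `next_idx` grows. A stripped-down sketch of just that behaviour (no parent map, no world state):

```rust
/// Minimal index allocator mirroring Ecs::new_entity and Ecs::delete.
struct Slots {
    next_idx: usize,
    reusable_idxs: Vec<usize>,
    active: Vec<bool>,
}

impl Slots {
    fn new() -> Slots {
        Slots { next_idx: 0, reusable_idxs: vec![], active: vec![] }
    }

    fn alloc(&mut self) -> usize {
        // Reuse a freed slot if possible to keep the indexing compact.
        let idx = match self.reusable_idxs.pop() {
            Some(idx) => idx,
            None => {
                let ret = self.next_idx;
                self.next_idx += 1;
                ret
            }
        };
        if self.active.len() <= idx {
            self.active.resize(idx + 1, false);
        }
        assert!(!self.active[idx]);
        self.active[idx] = true;
        idx
    }

    fn free(&mut self, idx: usize) {
        assert!(self.active[idx]);
        self.active[idx] = false;
        self.reusable_idxs.push(idx);
    }
}

fn main() {
    let mut slots = Slots::new();
    let a = slots.alloc();
    let b = slots.alloc();
    slots.free(a);
    let c = slots.alloc(); // takes the slot `a` just gave back
    assert_eq!((a, b, c), (0, 1, 0));
}
```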
message_log.rs
|
use std::{env, process::exit, time::Duration};
use assign::assign;
use ruma::{
api::client::{filter::FilterDefinition, sync::sync_events},
events::{
room::message::{MessageType, RoomMessageEventContent, TextMessageEventContent},
AnySyncMessageLikeEvent, AnySyncRoomEvent, SyncMessageLikeEvent,
},
presence::PresenceState,
};
use tokio_stream::StreamExt as _;
type HttpClient = ruma::client::http_client::HyperNativeTls;
async fn log_messages(
homeserver_url: String,
username: &str,
password: &str,
) -> anyhow::Result<()>
|
while let Some(res) = sync_stream.try_next().await? {
// Only look at rooms the user hasn't left yet
for (room_id, room) in res.rooms.join {
for event in room.timeline.events.into_iter().flat_map(|r| r.deserialize()) {
// Filter out the text messages
if let AnySyncRoomEvent::MessageLike(AnySyncMessageLikeEvent::RoomMessage(
SyncMessageLikeEvent {
content:
RoomMessageEventContent {
msgtype:
MessageType::Text(TextMessageEventContent {
body: msg_body, ..
}),
..
},
sender,
..
},
)) = event
{
println!("{:?} in {:?}: {}", sender, room_id, msg_body);
}
}
}
}
Ok(())
}
#[tokio::main(flavor = "current_thread")]
async fn main() -> anyhow::Result<()> {
let (homeserver_url, username, password) =
match (env::args().nth(1), env::args().nth(2), env::args().nth(3)) {
(Some(a), Some(b), Some(c)) => (a, b, c),
_ => {
eprintln!(
"Usage: {} <homeserver_url> <username> <password>",
env::args().next().unwrap()
);
exit(1)
}
};
log_messages(homeserver_url, &username, &password).await
}
|
{
let client =
ruma::Client::builder().homeserver_url(homeserver_url).build::<HttpClient>().await?;
client.log_in(username, password, None, None).await?;
let filter = FilterDefinition::ignore_all().into();
let initial_sync_response = client
.send_request(assign!(sync_events::v3::Request::new(), {
filter: Some(&filter),
}))
.await?;
let mut sync_stream = Box::pin(client.sync(
None,
initial_sync_response.next_batch,
&PresenceState::Online,
Some(Duration::from_secs(30)),
));
|
identifier_body
|
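The deep `if let` above pulls the sender and message body out of a nested event structure; that is ordinary nested pattern matching. A self-contained sketch of the same shape with stand-in types (these are not the ruma types, just placeholders):

```rust
// Stand-in event types; the real ruma enums are much richer.
enum MessageType {
    Text { body: String },
    Image,
}

struct RoomMessage {
    sender: String,
    msgtype: MessageType,
}

enum RoomEvent {
    Message(RoomMessage),
    Redaction,
}

fn main() {
    let events = vec![
        RoomEvent::Message(RoomMessage {
            sender: "@alice:example.org".to_string(),
            msgtype: MessageType::Text { body: "hello".to_string() },
        }),
        RoomEvent::Redaction,
        RoomEvent::Message(RoomMessage {
            sender: "@bob:example.org".to_string(),
            msgtype: MessageType::Image,
        }),
    ];
    for event in events {
        // Only text messages survive the nested pattern; everything else is skipped.
        if let RoomEvent::Message(RoomMessage {
            sender,
            msgtype: MessageType::Text { body },
        }) = event
        {
            println!("{}: {}", sender, body);
        }
    }
}
```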
message_log.rs
|
use std::{env, process::exit, time::Duration};
use assign::assign;
use ruma::{
api::client::{filter::FilterDefinition, sync::sync_events},
events::{
room::message::{MessageType, RoomMessageEventContent, TextMessageEventContent},
AnySyncMessageLikeEvent, AnySyncRoomEvent, SyncMessageLikeEvent,
},
presence::PresenceState,
};
use tokio_stream::StreamExt as _;
type HttpClient = ruma::client::http_client::HyperNativeTls;
async fn log_messages(
homeserver_url: String,
username: &str,
password: &str,
) -> anyhow::Result<()> {
let client =
ruma::Client::builder().homeserver_url(homeserver_url).build::<HttpClient>().await?;
client.log_in(username, password, None, None).await?;
let filter = FilterDefinition::ignore_all().into();
let initial_sync_response = client
.send_request(assign!(sync_events::v3::Request::new(), {
filter: Some(&filter),
}))
|
&PresenceState::Online,
Some(Duration::from_secs(30)),
));
while let Some(res) = sync_stream.try_next().await? {
// Only look at rooms the user hasn't left yet
for (room_id, room) in res.rooms.join {
for event in room.timeline.events.into_iter().flat_map(|r| r.deserialize()) {
// Filter out the text messages
if let AnySyncRoomEvent::MessageLike(AnySyncMessageLikeEvent::RoomMessage(
SyncMessageLikeEvent {
content:
RoomMessageEventContent {
msgtype:
MessageType::Text(TextMessageEventContent {
body: msg_body, ..
}),
..
},
sender,
..
},
)) = event
{
println!("{:?} in {:?}: {}", sender, room_id, msg_body);
}
}
}
}
Ok(())
}
#[tokio::main(flavor = "current_thread")]
async fn main() -> anyhow::Result<()> {
let (homeserver_url, username, password) =
match (env::args().nth(1), env::args().nth(2), env::args().nth(3)) {
(Some(a), Some(b), Some(c)) => (a, b, c),
_ => {
eprintln!(
"Usage: {} <homeserver_url> <username> <password>",
env::args().next().unwrap()
);
exit(1)
}
};
log_messages(homeserver_url, &username, &password).await
}
|
.await?;
let mut sync_stream = Box::pin(client.sync(
None,
initial_sync_response.next_batch,
|
random_line_split
|
message_log.rs
|
use std::{env, process::exit, time::Duration};
use assign::assign;
use ruma::{
api::client::{filter::FilterDefinition, sync::sync_events},
events::{
room::message::{MessageType, RoomMessageEventContent, TextMessageEventContent},
AnySyncMessageLikeEvent, AnySyncRoomEvent, SyncMessageLikeEvent,
},
presence::PresenceState,
};
use tokio_stream::StreamExt as _;
type HttpClient = ruma::client::http_client::HyperNativeTls;
async fn
|
(
homeserver_url: String,
username: &str,
password: &str,
) -> anyhow::Result<()> {
let client =
ruma::Client::builder().homeserver_url(homeserver_url).build::<HttpClient>().await?;
client.log_in(username, password, None, None).await?;
let filter = FilterDefinition::ignore_all().into();
let initial_sync_response = client
.send_request(assign!(sync_events::v3::Request::new(), {
filter: Some(&filter),
}))
.await?;
let mut sync_stream = Box::pin(client.sync(
None,
initial_sync_response.next_batch,
&PresenceState::Online,
Some(Duration::from_secs(30)),
));
while let Some(res) = sync_stream.try_next().await? {
// Only look at rooms the user hasn't left yet
for (room_id, room) in res.rooms.join {
for event in room.timeline.events.into_iter().flat_map(|r| r.deserialize()) {
// Filter out the text messages
if let AnySyncRoomEvent::MessageLike(AnySyncMessageLikeEvent::RoomMessage(
SyncMessageLikeEvent {
content:
RoomMessageEventContent {
msgtype:
MessageType::Text(TextMessageEventContent {
body: msg_body, ..
}),
..
},
sender,
..
},
)) = event
{
println!("{:?} in {:?}: {}", sender, room_id, msg_body);
}
}
}
}
Ok(())
}
#[tokio::main(flavor = "current_thread")]
async fn main() -> anyhow::Result<()> {
let (homeserver_url, username, password) =
match (env::args().nth(1), env::args().nth(2), env::args().nth(3)) {
(Some(a), Some(b), Some(c)) => (a, b, c),
_ => {
eprintln!(
"Usage: {} <homeserver_url> <username> <password>",
env::args().next().unwrap()
);
exit(1)
}
};
log_messages(homeserver_url, &username, &password).await
}
|
log_messages
|
identifier_name
|
patcher.rs
|
use std::io::prelude::*;
use std::fs;
use std::cell::RefCell;
use std::fs::File;
use std::collections::HashMap;
use rustc_serialize::json;
use rustc_serialize::hex::ToHex;
use parser;
use parser::usbr;
use parser::{Request, Source};
#[derive(RustcDecodable)]
struct PatchMetadata {
p_type: String,
vendor_id: u16,
product_id: u16,
request: u8,
requesttype: u8,
patch_id: u32, // id of this patch
min_matches: u16, // min number of matches before patch_id is considered a match
}
#[derive(RustcDecodable)]
struct Patch {
meta: PatchMetadata,
data: String, // hex-encoded
}
pub struct Patcher {
patches: Vec<Patch>,
counts: RefCell<HashMap<u32, u16>>,
}
impl PatchMetadata {
fn matches_descriptor(&self, header: &usbr::ControlPacketHeader) -> bool {
self.request == header.request && self.requesttype == header.requesttype
}
}
impl Patcher {
pub fn new(dir_path: &str) -> Patcher {
let mut patcher = Patcher { patches: vec![], counts: RefCell::new(HashMap::new()) };
for entry in fs::read_dir(dir_path).unwrap() {
let mut file = match File::open(&entry.unwrap().path()) {
Ok(file) => file,
Err(e) => panic!("[E000-Patcher] Could not open file {}", e),
};
// read file
let mut json_line = String::new();
file.read_to_string(&mut json_line).unwrap();
// decode file
let patch: Patch = json::decode(&json_line).unwrap();
// Insert count entry
if !patcher.counts.borrow().contains_key(&patch.meta.patch_id) {
patcher.counts.borrow_mut().insert(patch.meta.patch_id, patch.meta.min_matches);
}
// Insert patch into our list
patcher.patches.push(patch);
}
patcher
}
fn check_control_packet(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ControlPacketHeader;
let h: &usbr::ControlPacketHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "control" {
continue;
}
if patch.meta.matches_descriptor(h) {
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E001-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
}
true
}
fn check_bulk_packet(&self, req: &Request) -> bool
|
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
true
}
fn check_connect(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ConnectHeader;
let h: &usbr::ConnectHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "connect" {
continue;
}
if patch.meta.vendor_id == h.vendor_id && patch.meta.product_id == h.product_id {
error!("[E003-Patcher] malicious device found {:x}:{:x}",
h.vendor_id,
h.product_id);
return false;
}
}
true
}
}
impl parser::HasHandlers for Patcher {
fn handle_control_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_control_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
fn handle_bulk_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_bulk_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
fn handle_connect(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_connect(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
// TODO: Implement below
//
// fn handle_int_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_iso_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_buffered_bulk_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
}
|
{
for patch in &self.patches {
if patch.meta.p_type != "bulk" {
continue;
}
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E002-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
|
identifier_body
|
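The `min_matches` bookkeeping in the `check_*` methods above is a per-patch countdown held in a `RefCell<HashMap<u32, u16>>`: each signature hit decrements the counter, and reaching zero flags the traffic. A reduced sketch of just that counter logic, with a hypothetical patch id and no USB parsing:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

struct Counter {
    counts: RefCell<HashMap<u32, u16>>,
}

impl Counter {
    /// Record one signature hit for `patch_id`.
    /// Returns false once min_matches hits have been seen, mirroring the
    /// early `return false` in the patcher's check_* methods.
    fn hit(&self, patch_id: u32) -> bool {
        let mut count = *self.counts.borrow().get(&patch_id).unwrap();
        count -= 1;
        if count == 0 {
            return false; // enough signatures matched
        }
        self.counts.borrow_mut().insert(patch_id, count);
        true
    }
}

fn main() {
    let counter = Counter {
        // patch 42 needs 3 matches before it is considered a hit
        counts: RefCell::new(HashMap::from([(42u32, 3u16)])),
    };
    assert!(counter.hit(42));
    assert!(counter.hit(42));
    assert!(!counter.hit(42)); // third match trips the detector
}
```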
patcher.rs
|
use std::io::prelude::*;
use std::fs;
use std::cell::RefCell;
use std::fs::File;
use std::collections::HashMap;
use rustc_serialize::json;
use rustc_serialize::hex::ToHex;
use parser;
use parser::usbr;
use parser::{Request, Source};
#[derive(RustcDecodable)]
struct PatchMetadata {
p_type: String,
vendor_id: u16,
product_id: u16,
request: u8,
requesttype: u8,
patch_id: u32, // id of this patch
min_matches: u16, // min number of matches before patch_id is considered a match
}
#[derive(RustcDecodable)]
struct Patch {
meta: PatchMetadata,
data: String, // hex-encoded
}
pub struct Patcher {
patches: Vec<Patch>,
counts: RefCell<HashMap<u32, u16>>,
}
impl PatchMetadata {
fn matches_descriptor(&self, header: &usbr::ControlPacketHeader) -> bool {
self.request == header.request && self.requesttype == header.requesttype
}
}
impl Patcher {
pub fn new(dir_path: &str) -> Patcher {
let mut patcher = Patcher { patches: vec![], counts: RefCell::new(HashMap::new()) };
for entry in fs::read_dir(dir_path).unwrap() {
let mut file = match File::open(&entry.unwrap().path()) {
Ok(file) => file,
Err(e) => panic!("[E000-Patcher] Could not open file {}", e),
};
// read file
let mut json_line = String::new();
file.read_to_string(&mut json_line).unwrap();
// decode file
let patch: Patch = json::decode(&json_line).unwrap();
// Insert count entry
if !patcher.counts.borrow().contains_key(&patch.meta.patch_id) {
patcher.counts.borrow_mut().insert(patch.meta.patch_id, patch.meta.min_matches);
}
// Insert patch into our list
patcher.patches.push(patch);
}
patcher
}
fn check_control_packet(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ControlPacketHeader;
let h: &usbr::ControlPacketHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "control" {
continue;
}
if patch.meta.matches_descriptor(h) {
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E001-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
}
true
}
fn check_bulk_packet(&self, req: &Request) -> bool {
for patch in &self.patches {
if patch.meta.p_type != "bulk"
|
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E002-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
true
}
fn check_connect(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ConnectHeader;
let h: &usbr::ConnectHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "connect" {
continue;
}
if patch.meta.vendor_id == h.vendor_id && patch.meta.product_id == h.product_id {
error!("[E003-Patcher] malicious device found {:x}:{:x}",
h.vendor_id,
h.product_id);
return false;
}
}
true
}
}
impl parser::HasHandlers for Patcher {
fn handle_control_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_control_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
fn handle_bulk_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_bulk_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
fn handle_connect(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_connect(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
// TODO: Implement below
//
// fn handle_int_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_iso_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_buffered_bulk_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
}
|
{
continue;
}
|
conditional_block
|
patcher.rs
|
use std::io::prelude::*;
use std::fs;
use std::cell::RefCell;
use std::fs::File;
use std::collections::HashMap;
use rustc_serialize::json;
use rustc_serialize::hex::ToHex;
use parser;
use parser::usbr;
use parser::{Request, Source};
#[derive(RustcDecodable)]
struct PatchMetadata {
p_type: String,
vendor_id: u16,
product_id: u16,
request: u8,
requesttype: u8,
patch_id: u32, // id of this patch
min_matches: u16, // min number of matches before patch_id is considered a match
}
#[derive(RustcDecodable)]
struct Patch {
meta: PatchMetadata,
data: String, // hex-encoded
}
pub struct Patcher {
patches: Vec<Patch>,
counts: RefCell<HashMap<u32, u16>>,
}
impl PatchMetadata {
fn matches_descriptor(&self, header: &usbr::ControlPacketHeader) -> bool {
self.request == header.request && self.requesttype == header.requesttype
}
}
impl Patcher {
pub fn new(dir_path: &str) -> Patcher {
let mut patcher = Patcher { patches: vec![], counts: RefCell::new(HashMap::new()) };
for entry in fs::read_dir(dir_path).unwrap() {
let mut file = match File::open(&entry.unwrap().path()) {
Ok(file) => file,
Err(e) => panic!("[E000-Patcher] Could not open file {}", e),
};
// read file
let mut json_line = String::new();
file.read_to_string(&mut json_line).unwrap();
// decode file
let patch: Patch = json::decode(&json_line).unwrap();
// Insert count entry
if !patcher.counts.borrow().contains_key(&patch.meta.patch_id) {
patcher.counts.borrow_mut().insert(patch.meta.patch_id, patch.meta.min_matches);
}
// Insert patch into our list
patcher.patches.push(patch);
}
patcher
}
fn check_control_packet(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ControlPacketHeader;
let h: &usbr::ControlPacketHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "control" {
continue;
}
if patch.meta.matches_descriptor(h) {
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E001-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
}
true
}
fn check_bulk_packet(&self, req: &Request) -> bool {
for patch in &self.patches {
if patch.meta.p_type != "bulk" {
continue;
}
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E002-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
true
}
fn check_connect(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ConnectHeader;
let h: &usbr::ConnectHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "connect" {
continue;
}
if patch.meta.vendor_id == h.vendor_id && patch.meta.product_id == h.product_id {
error!("[E003-Patcher] malicious device found {:x}:{:x}",
h.vendor_id,
h.product_id);
return false;
}
}
true
}
}
impl parser::HasHandlers for Patcher {
fn handle_control_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_control_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
|
fn handle_connect(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_connect(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
// TODO: Implement below
//
// fn handle_int_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_iso_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_buffered_bulk_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
}
|
fn handle_bulk_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_bulk_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
|
random_line_split
|
patcher.rs
|
use std::io::prelude::*;
use std::fs;
use std::cell::RefCell;
use std::fs::File;
use std::collections::HashMap;
use rustc_serialize::json;
use rustc_serialize::hex::ToHex;
use parser;
use parser::usbr;
use parser::{Request, Source};
#[derive(RustcDecodable)]
struct PatchMetadata {
p_type: String,
vendor_id: u16,
product_id: u16,
request: u8,
requesttype: u8,
patch_id: u32, // id of this patch
min_matches: u16, // min number of matches before patch_id is considered a match
}
#[derive(RustcDecodable)]
struct Patch {
meta: PatchMetadata,
data: String, // hex-encoded
}
pub struct Patcher {
patches: Vec<Patch>,
counts: RefCell<HashMap<u32, u16>>,
}
impl PatchMetadata {
fn matches_descriptor(&self, header: &usbr::ControlPacketHeader) -> bool {
self.request == header.request && self.requesttype == header.requesttype
}
}
impl Patcher {
pub fn new(dir_path: &str) -> Patcher {
let mut patcher = Patcher { patches: vec![], counts: RefCell::new(HashMap::new()) };
for entry in fs::read_dir(dir_path).unwrap() {
let mut file = match File::open(&entry.unwrap().path()) {
Ok(file) => file,
Err(e) => panic!("[E000-Patcher] Could not open file {}", e),
};
// read file
let mut json_line = String::new();
file.read_to_string(&mut json_line).unwrap();
// decode file
let patch: Patch = json::decode(&json_line).unwrap();
// Insert count entry
if !patcher.counts.borrow().contains_key(&patch.meta.patch_id) {
patcher.counts.borrow_mut().insert(patch.meta.patch_id, patch.meta.min_matches);
}
// Insert patch into our list
patcher.patches.push(patch);
}
patcher
}
fn check_control_packet(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ControlPacketHeader;
let h: &usbr::ControlPacketHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "control" {
continue;
}
if patch.meta.matches_descriptor(h) {
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E001-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
}
true
}
fn
|
(&self, req: &Request) -> bool {
for patch in &self.patches {
if patch.meta.p_type != "bulk" {
continue;
}
// The metadata matches, time to compare the data
let data_hex: String = req.data[..].to_hex();
if data_hex.len() >= patch.data.len() && data_hex.contains(&patch.data) {
let mut count: u16 = *self.counts.borrow().get(&patch.meta.patch_id).unwrap();
count -= 1;
if count == 0 {
error!("[E002-Patcher] matched at least {} signatures",
patch.meta.min_matches);
return false;
}
self.counts.borrow_mut().insert(patch.meta.patch_id, count);
}
}
true
}
fn check_connect(&self, req: &Request) -> bool {
let h_ptr = req.type_header.as_ptr() as *const usbr::ConnectHeader;
let h: &usbr::ConnectHeader = unsafe { &*h_ptr };
for patch in &self.patches {
if patch.meta.p_type != "connect" {
continue;
}
if patch.meta.vendor_id == h.vendor_id && patch.meta.product_id == h.product_id {
error!("[E003-Patcher] malicious device found {:x}:{:x}",
h.vendor_id,
h.product_id);
return false;
}
}
true
}
}
impl parser::HasHandlers for Patcher {
fn handle_control_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_control_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
fn handle_bulk_packet(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_bulk_packet(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
fn handle_connect(&self, _: Source, req: Request) -> (u8, Vec<Request>) {
if self.check_connect(&req) { (0, vec![req]) } else { (1, vec![req]) }
}
// TODO: Implement below
//
// fn handle_int_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_iso_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
// fn handle_buffered_bulk_packet(&self, source: Source, req: Request) -> (u8, Vec<Request>) {
// (0, vec![req])
// }
//
}
|
check_bulk_packet
|
identifier_name
|
log.rs
|
//! Logger implementation
use std::fs::File;
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf;
use std::collections::HashMap;
use std::sync::{Mutex, MutexGuard};
use std::borrow::Borrow;
use std;
/// Logger category. Logger can be configured to save
/// messages of each category to a separate file.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LoggerCategory {
Status,
Error,
DebugGeneral,
DebugMoveFiles,
DebugTemplateInstantiation,
DebugInheritance,
DebugParserSkips,
DebugParser,
DebugFfiSkips,
DebugSignals,
DebugAllocationPlace,
DebugRustSkips,
DebugQtDoc,
DebugQtDocDeclarations,
DebugQtHeaderNames,
}
pub use self::LoggerCategory::*;
/// Specifies where the logging messages should be sent.
#[derive(Debug)]
pub struct LoggerSettings {
/// Write messages to specified file path. If `None`,
/// logging to file is disabled.
pub file_path: Option<PathBuf>,
/// Write messages to stderr.
pub write_to_stderr: bool,
}
impl Default for LoggerSettings {
fn default() -> LoggerSettings {
LoggerSettings {
file_path: None,
write_to_stderr: true,
}
}
}
impl LoggerSettings {
/// Returns false if messages are ignored. This function
/// can be used to skip expensive construction of messages.
fn is_on(&self) -> bool {
self.write_to_stderr || self.file_path.is_some()
}
}
/// Logger object. One logger manages messages of all categories.
/// It's possible to use multiple loggers independently.
/// Use `default_logger()` to get global `Logger` instance.
/// Note that the instance is mutex-guarded.
#[derive(Default)]
pub struct Logger {
default_settings: LoggerSettings,
category_settings: HashMap<LoggerCategory, LoggerSettings>,
files: HashMap<LoggerCategory, File>,
}
impl Logger {
/// Creates a new logger.
pub fn new() -> Logger {
Logger::default()
}
/// Set settings for all categories that don't have specific category settings.
pub fn set_default_settings(&mut self, value: LoggerSettings) {
self.default_settings = value;
self.files.clear();
}
/// Set settings for `category`.
pub fn set_category_settings(&mut self, category: LoggerCategory, value: LoggerSettings) {
self.category_settings.insert(category, value);
self.files.remove(&category);
}
/// Set all specific category settings. Old category settings are removed.
pub fn set_all_category_settings(&mut self, value: HashMap<LoggerCategory, LoggerSettings>)
|
/// Returns false if messages of `category` are ignored. This function
/// can be used to skip expensive construction of messages.
pub fn is_on(&self, category: LoggerCategory) -> bool {
self.settings(category).is_on()
}
/// Lazy-log. If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(&mut self, category: LoggerCategory, f: F) {
let settings = if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
};
if !settings.is_on() {
return;
}
let text = f();
if settings.write_to_stderr {
std::io::stderr()
.write(text.borrow().as_bytes())
.unwrap();
std::io::stderr().write(b"\n").unwrap();
}
if let Some(ref path) = settings.file_path {
if !self.files.contains_key(&category) {
let file =
OpenOptions::new()
.write(true)
.create(true)
.append(true)
.open(path)
.unwrap_or_else(|err| panic!("failed to open log file '{}': {}", path.display(), err));
self.files.insert(category, file);
}
let mut file = self.files.get_mut(&category).unwrap();
file.write(text.borrow().as_bytes()).unwrap();
file.write(b"\n").unwrap();
}
}
/// Log a message `text` to `category`.
pub fn log<T: Borrow<str>>(&mut self, category: LoggerCategory, text: T) {
self.llog(category, move || text);
}
/// Returns settings for `category`.
fn settings(&self, category: LoggerCategory) -> &LoggerSettings {
if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
}
}
}
lazy_static! {
pub static ref DEFAULT_LOGGER: Mutex<Logger> = Mutex::new(Logger::new());
}
/// Returns global instance of `Logger`.
pub fn default_logger() -> MutexGuard<'static, Logger> {
DEFAULT_LOGGER.lock().unwrap()
}
/// Convenience method to log status messages to the default logger.
pub fn status<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Status, text);
}
/// Convenience method to log error messages to the default logger.
pub fn error<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Error, text);
}
/// Convenience method to log messages to the default logger and specified `category`.
pub fn log<T: Borrow<str>>(category: LoggerCategory, text: T) {
default_logger().log(category, text);
}
/// Convenience method to lazy-log messages to the default logger and specified `category`.
/// If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(category: LoggerCategory, f: F) {
default_logger().llog(category, f);
}
/// Convenience method to check if `category` is enabled in the default logger.
pub fn is_on(category: LoggerCategory) -> bool {
default_logger().is_on(category)
}
|
{
self.category_settings = value;
self.files.clear();
}
|
identifier_body
|
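The point of `llog` above is that the message closure only runs when the category is actually enabled, so expensive formatting is skipped otherwise. A small self-contained illustration of that contract, with a plain boolean switch standing in for the per-category settings:

```rust
use std::borrow::Borrow;

/// Minimal lazy logger: the closure is only invoked when `enabled` is true.
struct MiniLogger {
    enabled: bool,
}

impl MiniLogger {
    fn llog<T: Borrow<str>, F: FnOnce() -> T>(&self, f: F) {
        if !self.enabled {
            return; // message construction is skipped entirely
        }
        eprintln!("{}", f().borrow());
    }
}

fn main() {
    let off = MiniLogger { enabled: false };
    let on = MiniLogger { enabled: true };

    // This closure never runs, so the expensive join is never paid for.
    off.llog(|| vec!["a"; 1_000_000].join(","));

    // This one runs and prints.
    on.llog(|| format!("{} things processed", 42));
}
```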
log.rs
|
//! Logger implementation
use std::fs::File;
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf;
use std::collections::HashMap;
use std::sync::{Mutex, MutexGuard};
use std::borrow::Borrow;
use std;
/// Logger category. Logger can be configured to save
/// messages of each category to a separate file.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LoggerCategory {
Status,
Error,
DebugGeneral,
DebugMoveFiles,
DebugTemplateInstantiation,
DebugInheritance,
DebugParserSkips,
DebugParser,
DebugFfiSkips,
DebugSignals,
DebugAllocationPlace,
DebugRustSkips,
DebugQtDoc,
DebugQtDocDeclarations,
DebugQtHeaderNames,
}
pub use self::LoggerCategory::*;
/// Specifies where the logging messages should be sent.
#[derive(Debug)]
pub struct LoggerSettings {
/// Write messages to specified file path. If `None`,
/// logging to file is disabled.
pub file_path: Option<PathBuf>,
/// Write messages to stderr.
pub write_to_stderr: bool,
}
impl Default for LoggerSettings {
fn default() -> LoggerSettings {
LoggerSettings {
file_path: None,
write_to_stderr: true,
}
}
}
impl LoggerSettings {
/// Returns false if messages are ignored. This function
/// can be used to skip expensive construction of messages.
fn is_on(&self) -> bool {
self.write_to_stderr || self.file_path.is_some()
}
}
/// Logger object. One logger manages messages of all categories.
/// It's possible to use multiple loggers independently.
/// Use `default_logger()` to get global `Logger` instance.
/// Note that the instance is mutex-guarded.
#[derive(Default)]
pub struct Logger {
default_settings: LoggerSettings,
category_settings: HashMap<LoggerCategory, LoggerSettings>,
files: HashMap<LoggerCategory, File>,
}
impl Logger {
/// Creates a new logger.
pub fn new() -> Logger {
Logger::default()
}
/// Set settings for all categories that don't have specific category settings.
pub fn set_default_settings(&mut self, value: LoggerSettings) {
self.default_settings = value;
self.files.clear();
}
/// Set settings for `category`.
pub fn set_category_settings(&mut self, category: LoggerCategory, value: LoggerSettings) {
self.category_settings.insert(category, value);
self.files.remove(&category);
}
/// Set all specific category settings. Old category settings are removed.
pub fn set_all_category_settings(&mut self, value: HashMap<LoggerCategory, LoggerSettings>) {
self.category_settings = value;
self.files.clear();
}
/// Returns false if messages of `category` are ignored. This function
/// can be used to skip expensive construction of messages.
pub fn is_on(&self, category: LoggerCategory) -> bool {
self.settings(category).is_on()
}
/// Lazy-log. If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(&mut self, category: LoggerCategory, f: F) {
let settings = if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
};
if !settings.is_on() {
return;
}
let text = f();
if settings.write_to_stderr {
std::io::stderr()
.write(text.borrow().as_bytes())
.unwrap();
std::io::stderr().write(b"\n").unwrap();
}
if let Some(ref path) = settings.file_path
|
}
/// Log a message `text` to `category`.
pub fn log<T: Borrow<str>>(&mut self, category: LoggerCategory, text: T) {
self.llog(category, move || text);
}
/// Returns settings for `category`.
fn settings(&self, category: LoggerCategory) -> &LoggerSettings {
if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
}
}
}
lazy_static! {
pub static ref DEFAULT_LOGGER: Mutex<Logger> = Mutex::new(Logger::new());
}
/// Returns global instance of `Logger`.
pub fn default_logger() -> MutexGuard<'static, Logger> {
DEFAULT_LOGGER.lock().unwrap()
}
/// Convenience method to log status messages to the default logger.
pub fn status<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Status, text);
}
/// Convenience method to log error messages to the default logger.
pub fn error<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Error, text);
}
/// Convenience method to log messages to the default logger and specified `category`.
pub fn log<T: Borrow<str>>(category: LoggerCategory, text: T) {
default_logger().log(category, text);
}
/// Convenience method to lazy-log messages to the default logger and specified `category`.
/// If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(category: LoggerCategory, f: F) {
default_logger().llog(category, f);
}
/// Convenience method to check if `category` is enabled in the default logger.
pub fn is_on(category: LoggerCategory) -> bool {
default_logger().is_on(category)
}
|
{
if !self.files.contains_key(&category) {
let file =
OpenOptions::new()
.write(true)
.create(true)
.append(true)
.open(path)
.unwrap_or_else(|err| panic!("failed to open log file '{}': {}", path.display(), err));
self.files.insert(category, file);
}
let mut file = self.files.get_mut(&category).unwrap();
file.write(text.borrow().as_bytes()).unwrap();
file.write(b"\n").unwrap();
}
|
conditional_block
|
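Inside the conditional block shown above, the logger opens each category's file once and caches the handle in `files`, so later messages reuse it. The same open-once-then-cache pattern, reduced to a standalone sketch; the path `example.log` is made up for the example:

```rust
use std::collections::HashMap;
use std::fs::{File, OpenOptions};
use std::io::Write;

/// Open once, append forever: the handle-caching shape used by Logger.
struct FileCache {
    files: HashMap<String, File>,
}

impl FileCache {
    fn write_line(&mut self, path: &str, text: &str) {
        if !self.files.contains_key(path) {
            let file = OpenOptions::new()
                .create(true)
                .append(true)
                .open(path)
                .unwrap_or_else(|err| panic!("failed to open log file '{}': {}", path, err));
            self.files.insert(path.to_string(), file);
        }
        let file = self.files.get_mut(path).unwrap();
        writeln!(file, "{}", text).unwrap();
    }
}

fn main() {
    let mut cache = FileCache { files: HashMap::new() };
    cache.write_line("example.log", "first message");
    cache.write_line("example.log", "second message"); // reuses the cached handle
}
```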
log.rs
|
//! Logger implementation
use std::fs::File;
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf;
use std::collections::HashMap;
use std::sync::{Mutex, MutexGuard};
use std::borrow::Borrow;
use std;
/// Logger category. Logger can be configured to save
/// messages of each category to a separate file.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LoggerCategory {
Status,
Error,
DebugGeneral,
DebugMoveFiles,
DebugTemplateInstantiation,
DebugInheritance,
DebugParserSkips,
DebugParser,
DebugFfiSkips,
DebugSignals,
DebugAllocationPlace,
DebugRustSkips,
DebugQtDoc,
DebugQtDocDeclarations,
DebugQtHeaderNames,
}
pub use self::LoggerCategory::*;
/// Specifies where the logging messages should be sent.
#[derive(Debug)]
pub struct LoggerSettings {
/// Write messages to specified file path. If `None`,
/// logging to file is disabled.
pub file_path: Option<PathBuf>,
/// Write messages to stderr.
pub write_to_stderr: bool,
}
impl Default for LoggerSettings {
fn default() -> LoggerSettings {
LoggerSettings {
file_path: None,
write_to_stderr: true,
}
}
}
impl LoggerSettings {
/// Returns false if messages are ignored. This function
/// can be used to skip expensive construction of messages.
fn is_on(&self) -> bool {
self.write_to_stderr || self.file_path.is_some()
}
}
/// Logger object. One logger manages messages of all categories.
/// It's possible to use multiple loggers independently.
/// Use `default_logger()` to get global `Logger` instance.
/// Note that the instance is mutex-guarded.
#[derive(Default)]
pub struct Logger {
default_settings: LoggerSettings,
category_settings: HashMap<LoggerCategory, LoggerSettings>,
files: HashMap<LoggerCategory, File>,
}
impl Logger {
/// Creates a new logger.
pub fn new() -> Logger {
Logger::default()
}
/// Set settings for all categories that don't have specific category settings.
pub fn set_default_settings(&mut self, value: LoggerSettings) {
self.default_settings = value;
self.files.clear();
}
/// Set settings for `category`.
pub fn set_category_settings(&mut self, category: LoggerCategory, value: LoggerSettings) {
self.category_settings.insert(category, value);
self.files.remove(&category);
}
/// Set all specific category settings. Old category settings are removed.
pub fn set_all_category_settings(&mut self, value: HashMap<LoggerCategory, LoggerSettings>) {
self.category_settings = value;
self.files.clear();
}
/// Returns false if messages of `category` are ignored. This function
/// can be used to skip expensive construction of messages.
pub fn is_on(&self, category: LoggerCategory) -> bool {
self.settings(category).is_on()
}
/// Lazy-log. If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(&mut self, category: LoggerCategory, f: F) {
let settings = if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
};
if !settings.is_on() {
return;
}
let text = f();
if settings.write_to_stderr {
std::io::stderr()
.write(text.borrow().as_bytes())
.unwrap();
std::io::stderr().write(b"\n").unwrap();
}
if let Some(ref path) = settings.file_path {
if !self.files.contains_key(&category) {
let file =
OpenOptions::new()
.write(true)
.create(true)
.append(true)
.open(path)
.unwrap_or_else(|err| panic!("failed to open log file '{}': {}", path.display(), err));
self.files.insert(category, file);
}
let mut file = self.files.get_mut(&category).unwrap();
file.write(text.borrow().as_bytes()).unwrap();
file.write(b"\n").unwrap();
}
}
/// Log a message `text` to `category`.
pub fn log<T: Borrow<str>>(&mut self, category: LoggerCategory, text: T) {
self.llog(category, move || text);
}
/// Returns settings for `category`.
fn settings(&self, category: LoggerCategory) -> &LoggerSettings {
if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
}
}
}
lazy_static! {
pub static ref DEFAULT_LOGGER: Mutex<Logger> = Mutex::new(Logger::new());
}
/// Returns global instance of `Logger`.
pub fn default_logger() -> MutexGuard<'static, Logger> {
DEFAULT_LOGGER.lock().unwrap()
}
/// Convenience method to log status messages to the default logger.
pub fn status<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Status, text);
}
/// Convenience method to log error messages to the default logger.
pub fn error<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Error, text);
}
/// Convenience method to log messages to the default logger and specified `category`.
pub fn log<T: Borrow<str>>(category: LoggerCategory, text: T) {
default_logger().log(category, text);
}
/// Convenience method to lazy-log messages to the default logger and specified `category`.
/// If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
|
default_logger().llog(category, f);
}
/// Convenience method to check if `category` is enabled in the default logger.
pub fn is_on(category: LoggerCategory) -> bool {
default_logger().is_on(category)
}
|
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(category: LoggerCategory, f: F) {
|
random_line_split
|
log.rs
|
//! Logger implementation
use std::fs::File;
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf;
use std::collections::HashMap;
use std::sync::{Mutex, MutexGuard};
use std::borrow::Borrow;
use std;
/// Logger category. Logger can be configured to save
/// messages of each category to a separate file.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LoggerCategory {
Status,
Error,
DebugGeneral,
DebugMoveFiles,
DebugTemplateInstantiation,
DebugInheritance,
DebugParserSkips,
DebugParser,
DebugFfiSkips,
DebugSignals,
DebugAllocationPlace,
DebugRustSkips,
DebugQtDoc,
DebugQtDocDeclarations,
DebugQtHeaderNames,
}
pub use self::LoggerCategory::*;
/// Specifies where the logging messages should be sent.
#[derive(Debug)]
pub struct LoggerSettings {
/// Write messages to specified file path. If `None`,
/// logging to file is disabled.
pub file_path: Option<PathBuf>,
/// Write messages to stderr.
pub write_to_stderr: bool,
}
impl Default for LoggerSettings {
fn default() -> LoggerSettings {
LoggerSettings {
file_path: None,
write_to_stderr: true,
}
}
}
impl LoggerSettings {
/// Returns false if messages are ignored. This function
/// can be used to skip expensive construction of messages.
fn is_on(&self) -> bool {
self.write_to_stderr || self.file_path.is_some()
}
}
/// Logger object. One logger manages messages of all categories.
/// It's possible to use multiple loggers independently.
/// Use `default_logger()` to get global `Logger` instance.
/// Note that the instance is mutex-guarded.
#[derive(Default)]
pub struct Logger {
default_settings: LoggerSettings,
category_settings: HashMap<LoggerCategory, LoggerSettings>,
files: HashMap<LoggerCategory, File>,
}
impl Logger {
/// Creates a new logger.
pub fn new() -> Logger {
Logger::default()
}
/// Set settings for all categories that don't have specific category settings.
pub fn set_default_settings(&mut self, value: LoggerSettings) {
self.default_settings = value;
self.files.clear();
}
/// Set settings for `category`.
pub fn set_category_settings(&mut self, category: LoggerCategory, value: LoggerSettings) {
self.category_settings.insert(category, value);
self.files.remove(&category);
}
/// Set all specific category settings. Old category settings are removed.
pub fn set_all_category_settings(&mut self, value: HashMap<LoggerCategory, LoggerSettings>) {
self.category_settings = value;
self.files.clear();
}
/// Returns false if messages of `category` are ignored. This function
/// can be used to skip expensive construction of messages.
pub fn is_on(&self, category: LoggerCategory) -> bool {
self.settings(category).is_on()
}
/// Lazy-log. If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(&mut self, category: LoggerCategory, f: F) {
let settings = if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
};
if !settings.is_on() {
return;
}
let text = f();
if settings.write_to_stderr {
std::io::stderr()
.write(text.borrow().as_bytes())
.unwrap();
std::io::stderr().write(b"\n").unwrap();
}
if let Some(ref path) = settings.file_path {
if !self.files.contains_key(&category) {
let file =
OpenOptions::new()
.write(true)
.create(true)
.append(true)
.open(path)
.unwrap_or_else(|err| panic!("failed to open log file '{}': {}", path.display(), err));
self.files.insert(category, file);
}
let mut file = self.files.get_mut(&category).unwrap();
file.write(text.borrow().as_bytes()).unwrap();
file.write(b"\n").unwrap();
}
}
/// Log a message `text` to `category`.
pub fn log<T: Borrow<str>>(&mut self, category: LoggerCategory, text: T) {
self.llog(category, move || text);
}
/// Returns settings for `category`.
fn settings(&self, category: LoggerCategory) -> &LoggerSettings {
if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
}
}
}
lazy_static! {
pub static ref DEFAULT_LOGGER: Mutex<Logger> = Mutex::new(Logger::new());
}
/// Returns global instance of `Logger`.
pub fn default_logger() -> MutexGuard<'static, Logger> {
DEFAULT_LOGGER.lock().unwrap()
}
/// Convenience method to log status messages to the default logger.
pub fn status<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Status, text);
}
/// Convenience method to log error messages to the default logger.
pub fn error<T: Borrow<str>>(text: T) {
default_logger().log(LoggerCategory::Error, text);
}
/// Convenience method to log messages to the default logger and specified `category`.
pub fn
|
<T: Borrow<str>>(category: LoggerCategory, text: T) {
default_logger().log(category, text);
}
/// Convenience method to lazy-log messages to the default logger and specified `category`.
/// If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(category: LoggerCategory, f: F) {
default_logger().llog(category, f);
}
/// Convenience method to check if `category` is enabled in the default logger.
pub fn is_on(category: LoggerCategory) -> bool {
default_logger().is_on(category)
}
|
log
|
identifier_name
|
htmldatalistelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLDataListElementBinding;
use dom::bindings::codegen::Bindings::HTMLDataListElementBinding::HTMLDataListElementMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::root::DomRoot;
use dom::document::Document;
use dom::element::Element;
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmloptionelement::HTMLOptionElement;
use dom::node::{Node, window_from_node};
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDataListElement {
htmlelement: HTMLElement,
}
impl HTMLDataListElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDataListElement {
HTMLDataListElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDataListElement>
|
}
impl HTMLDataListElementMethods for HTMLDataListElement {
// https://html.spec.whatwg.org/multipage/#dom-datalist-options
fn Options(&self) -> DomRoot<HTMLCollection> {
#[derive(JSTraceable, MallocSizeOf)]
struct HTMLDataListOptionsFilter;
impl CollectionFilter for HTMLDataListOptionsFilter {
fn filter(&self, elem: &Element, _root: &Node) -> bool {
elem.is::<HTMLOptionElement>()
}
}
let filter = Box::new(HTMLDataListOptionsFilter);
let window = window_from_node(self);
HTMLCollection::create(&window, self.upcast(), filter)
}
}
|
{
Node::reflect_node(
Box::new(HTMLDataListElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLDataListElementBinding::Wrap,
)
}
|
identifier_body
|
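`Options` above builds a live `HTMLCollection` by handing it a boxed filter that keeps only `<option>` descendants. The filter-object pattern itself can be shown standalone with plain stand-in types (these are not Servo's DOM types):

```rust
/// Stand-in for a DOM element: just a tag name.
struct Element {
    tag: &'static str,
}

/// Same shape as Servo's CollectionFilter: a boxed predicate over elements.
trait CollectionFilter {
    fn filter(&self, elem: &Element) -> bool;
}

struct OptionsFilter;

impl CollectionFilter for OptionsFilter {
    fn filter(&self, elem: &Element) -> bool {
        elem.tag == "option"
    }
}

fn main() {
    let children = vec![
        Element { tag: "option" },
        Element { tag: "div" },
        Element { tag: "option" },
    ];
    let options_filter: Box<dyn CollectionFilter> = Box::new(OptionsFilter);
    let options: Vec<&Element> = children.iter().filter(|e| options_filter.filter(e)).collect();
    assert_eq!(options.len(), 2);
}
```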
htmldatalistelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
|
use dom::bindings::codegen::Bindings::HTMLDataListElementBinding::HTMLDataListElementMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::root::DomRoot;
use dom::document::Document;
use dom::element::Element;
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmloptionelement::HTMLOptionElement;
use dom::node::{Node, window_from_node};
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDataListElement {
htmlelement: HTMLElement,
}
impl HTMLDataListElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDataListElement {
HTMLDataListElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDataListElement> {
Node::reflect_node(
Box::new(HTMLDataListElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLDataListElementBinding::Wrap,
)
}
}
impl HTMLDataListElementMethods for HTMLDataListElement {
// https://html.spec.whatwg.org/multipage/#dom-datalist-options
fn Options(&self) -> DomRoot<HTMLCollection> {
#[derive(JSTraceable, MallocSizeOf)]
struct HTMLDataListOptionsFilter;
impl CollectionFilter for HTMLDataListOptionsFilter {
fn filter(&self, elem: &Element, _root: &Node) -> bool {
elem.is::<HTMLOptionElement>()
}
}
let filter = Box::new(HTMLDataListOptionsFilter);
let window = window_from_node(self);
HTMLCollection::create(&window, self.upcast(), filter)
}
}
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLDataListElementBinding;
|
random_line_split
|
htmldatalistelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLDataListElementBinding;
use dom::bindings::codegen::Bindings::HTMLDataListElementBinding::HTMLDataListElementMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::root::DomRoot;
use dom::document::Document;
use dom::element::Element;
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmloptionelement::HTMLOptionElement;
use dom::node::{Node, window_from_node};
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDataListElement {
htmlelement: HTMLElement,
}
impl HTMLDataListElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDataListElement {
HTMLDataListElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDataListElement> {
Node::reflect_node(
Box::new(HTMLDataListElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLDataListElementBinding::Wrap,
)
}
}
impl HTMLDataListElementMethods for HTMLDataListElement {
// https://html.spec.whatwg.org/multipage/#dom-datalist-options
fn Options(&self) -> DomRoot<HTMLCollection> {
#[derive(JSTraceable, MallocSizeOf)]
struct HTMLDataListOptionsFilter;
impl CollectionFilter for HTMLDataListOptionsFilter {
fn
|
(&self, elem: &Element, _root: &Node) -> bool {
elem.is::<HTMLOptionElement>()
}
}
let filter = Box::new(HTMLDataListOptionsFilter);
let window = window_from_node(self);
HTMLCollection::create(&window, self.upcast(), filter)
}
}
|
filter
|
identifier_name
|
hr.rs
|
use regex::Regex;
use parser::Block;
use parser::Block::Hr;
pub fn parse_hr(lines: &[&str]) -> Option<(Block, usize)> {
let HORIZONTAL_RULE = Regex::new(r"^(===+)$|^(---+)$").unwrap();
    if HORIZONTAL_RULE.is_match(lines[0]) {
return Some((Hr, 1));
}
return None;
}
#[cfg(test)]
mod test {
use super::parse_hr;
use parser::Block::Hr;
#[test]
fn finds_hr() {
assert_eq!(parse_hr(&vec!["-------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["---"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["----------------------------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["-------", "abc"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["======="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["==="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["============================"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["=======", "abc"]).unwrap(), (Hr, 1));
}
#[test]
fn no_false_positives()
|
}
|
{
assert_eq!(parse_hr(&vec!["a-------"]), None);
assert_eq!(parse_hr(&vec!["--- a"]), None);
assert_eq!(parse_hr(&vec!["--a-"]), None);
assert_eq!(parse_hr(&vec!["-------====--------------"]), None);
assert_eq!(parse_hr(&vec!["a======"]), None);
assert_eq!(parse_hr(&vec!["=== a"]), None);
assert_eq!(parse_hr(&vec!["==a="]), None);
assert_eq!(parse_hr(&vec!["=======---================="]), None);
}
|
identifier_body
|
hr.rs
|
use regex::Regex;
use parser::Block;
use parser::Block::Hr;
pub fn parse_hr(lines: &[&str]) -> Option<(Block, usize)> {
let HORIZONTAL_RULE = Regex::new(r"^(===+)$|^(---+)$").unwrap();
|
return Some((Hr, 1));
}
return None;
}
#[cfg(test)]
mod test {
use super::parse_hr;
use parser::Block::Hr;
#[test]
fn finds_hr() {
assert_eq!(parse_hr(&vec!["-------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["---"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["----------------------------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["-------", "abc"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["======="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["==="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["============================"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["=======", "abc"]).unwrap(), (Hr, 1));
}
#[test]
fn no_false_positives() {
assert_eq!(parse_hr(&vec!["a-------"]), None);
assert_eq!(parse_hr(&vec!["--- a"]), None);
assert_eq!(parse_hr(&vec!["--a-"]), None);
assert_eq!(parse_hr(&vec!["-------====--------------"]), None);
assert_eq!(parse_hr(&vec!["a======"]), None);
assert_eq!(parse_hr(&vec!["=== a"]), None);
assert_eq!(parse_hr(&vec!["==a="]), None);
assert_eq!(parse_hr(&vec!["=======---================="]), None);
}
}
|
    if HORIZONTAL_RULE.is_match(lines[0]) {
|
random_line_split
|
hr.rs
|
use regex::Regex;
use parser::Block;
use parser::Block::Hr;
pub fn parse_hr(lines: &[&str]) -> Option<(Block, usize)> {
let HORIZONTAL_RULE = Regex::new(r"^(===+)$|^(---+)$").unwrap();
    if HORIZONTAL_RULE.is_match(lines[0]) {
return Some((Hr, 1));
}
return None;
}
#[cfg(test)]
mod test {
use super::parse_hr;
use parser::Block::Hr;
#[test]
fn finds_hr() {
assert_eq!(parse_hr(&vec!["-------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["---"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["----------------------------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["-------", "abc"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["======="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["==="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["============================"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["=======", "abc"]).unwrap(), (Hr, 1));
}
#[test]
fn
|
() {
assert_eq!(parse_hr(&vec!["a-------"]), None);
assert_eq!(parse_hr(&vec!["--- a"]), None);
assert_eq!(parse_hr(&vec!["--a-"]), None);
assert_eq!(parse_hr(&vec!["-------====--------------"]), None);
assert_eq!(parse_hr(&vec!["a======"]), None);
assert_eq!(parse_hr(&vec!["=== a"]), None);
assert_eq!(parse_hr(&vec!["==a="]), None);
assert_eq!(parse_hr(&vec!["=======---================="]), None);
}
}
|
no_false_positives
|
identifier_name
|
hr.rs
|
use regex::Regex;
use parser::Block;
use parser::Block::Hr;
pub fn parse_hr(lines: &[&str]) -> Option<(Block, usize)> {
let HORIZONTAL_RULE = Regex::new(r"^(===+)$|^(---+)$").unwrap();
if HORIZONTAL_RULE.is_match(lines[0])
|
return None;
}
#[cfg(test)]
mod test {
use super::parse_hr;
use parser::Block::Hr;
#[test]
fn finds_hr() {
assert_eq!(parse_hr(&vec!["-------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["---"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["----------------------------"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["-------", "abc"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["======="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["==="]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["============================"]).unwrap(), (Hr, 1));
assert_eq!(parse_hr(&vec!["=======", "abc"]).unwrap(), (Hr, 1));
}
#[test]
fn no_false_positives() {
assert_eq!(parse_hr(&vec!["a-------"]), None);
assert_eq!(parse_hr(&vec!["--- a"]), None);
assert_eq!(parse_hr(&vec!["--a-"]), None);
assert_eq!(parse_hr(&vec!["-------====--------------"]), None);
assert_eq!(parse_hr(&vec!["a======"]), None);
assert_eq!(parse_hr(&vec!["=== a"]), None);
assert_eq!(parse_hr(&vec!["==a="]), None);
assert_eq!(parse_hr(&vec!["=======---================="]), None);
}
}
|
{
return Some((Hr, 1));
}
|
conditional_block
|
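parse_hr above recompiles its Regex on every call and binds it to an upper-case local, which also trips the non_snake_case lint. A common alternative, sketched here under the assumption that the once_cell crate is available alongside regex, compiles the pattern once into a static:
// Sketch: compile the horizontal-rule pattern once; names mirror hr.rs above.
use once_cell::sync::Lazy;
use regex::Regex;
static HORIZONTAL_RULE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^(===+)$|^(---+)$").unwrap());
fn is_hr(line: &str) -> bool {
    HORIZONTAL_RULE.is_match(line)
}
fn main() {
    assert!(is_hr("-------"));
    assert!(is_hr("==="));
    assert!(!is_hr("--- a"));
}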
mod.rs
|
use core::creature::Creature;
use core::renderer::{Renderable, RGB};
use core::time::Time;
use core::world::dungeon::map::{self, Pos, Tile};
#[derive(Clone)]
pub enum Money {
Copper,
Silver,
Electrum,
Gold,
Quartz,
Platinum,
Mithril,
Scale,
Onyx,
Tourmaline,
Emerald,
Ruby,
Sapphire,
Topaz,
Diamond,
}
pub fn money_value(money: &Money) -> f32 {
match money {
Money::Copper => 0.01,
Money::Silver => 0.1,
Money::Electrum => 0.5,
Money::Gold | Money::Quartz => 1.0,
Money::Platinum => 2.0,
Money::Mithril | Money::Scale => 5.0,
Money::Onyx | Money::Tourmaline => 7.0,
Money::Emerald | Money::Ruby | Money::Sapphire | Money::Topaz => 10.0,
Money::Diamond => 100.0
}
}
#[derive(Clone)]
pub enum ItemProperty {
// Money is an interesting property because it should go right to a player's wallet
Money(Money)
}
///
/// Item struct
///
#[derive(Clone)]
pub struct Item {
name: &'static str,
glyph: char,
// Items can potentially be in something's inventory
pub pos: Pos,
fg: RGB,
bg: RGB,
// Items could have quantity like stacks of arrows, portions of food, liters of water etc
pub quantity: isize,
// Item property
pub property: ItemProperty
}
impl Item {
///
/// Return a new `Item`
///
#[inline]
pub fn new(name: &'static str, glyph: char, pos: Pos, fg: RGB, bg: RGB, quantity: isize, property: ItemProperty) -> Self
|
}
///
/// Implement the `Renderable` trait for `Item`, mostly just getters and setters
///
impl Renderable for Item {
#[inline]
fn get_bg(&self) -> RGB {
self.bg
}
#[inline]
fn get_fg(&self) -> RGB {
self.fg
}
#[inline]
fn get_glyph(&self) -> char {
self.glyph
}
#[inline]
fn get_id(&self) -> &'static str {
    self.name
}
#[inline]
fn set_bg(&mut self, bg: RGB) {
self.bg = bg;
}
#[inline]
fn set_fg(&mut self, fg: RGB) {
self.fg = fg;
}
#[inline]
fn set_glyph(&mut self, glyph: char) {
self.glyph = glyph
}
#[inline]
fn set_id(&mut self, name: &'static str) {
self.name = name;
}
}
impl Time for Item {
fn take_turn(&mut self, _map: &map::Grid<Tile>, _player: &Creature) {
}
}
|
{
Item {
name,
glyph,
pos,
fg,
bg,
quantity,
property
}
}
|
identifier_body
|
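money_value above assigns each Money variant a relative worth, so a wallet's total is a plain sum over its coins. A short usage sketch, assuming the Money enum and money_value from this module are in scope; wallet_value and example are hypothetical helpers added for illustration.
// Usage sketch: total worth of a small pouch of coins.
fn wallet_value(coins: &[Money]) -> f32 {
    coins.iter().map(money_value).sum()
}
fn example() {
    let pouch = vec![Money::Gold, Money::Silver, Money::Copper];
    // 1.0 + 0.1 + 0.01
    assert!((wallet_value(&pouch) - 1.11).abs() < 1e-6);
}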
mod.rs
|
use core::creature::Creature;
use core::renderer::{Renderable, RGB};
use core::time::Time;
use core::world::dungeon::map::{self, Pos, Tile};
#[derive(Clone)]
pub enum Money {
Copper,
Silver,
Electrum,
Gold,
Quartz,
Platinum,
Mithril,
Scale,
Onyx,
Tourmaline,
Emerald,
Ruby,
Sapphire,
Topaz,
Diamond,
}
pub fn money_value(money: &Money) -> f32 {
match money {
Money::Copper => 0.01,
Money::Silver => 0.1,
Money::Electrum => 0.5,
Money::Gold | Money::Quartz => 1.0,
Money::Platinum => 2.0,
Money::Mithril | Money::Scale => 5.0,
Money::Onyx | Money::Tourmaline => 7.0,
Money::Emerald | Money::Ruby | Money::Sapphire | Money::Topaz => 10.0,
Money::Diamond => 100.0
}
}
#[derive(Clone)]
pub enum ItemProperty {
// Money is an interesting property because it should go right to a player's wallet
Money(Money)
}
///
/// Item struct
///
#[derive(Clone)]
pub struct Item {
name: &'static str,
glyph: char,
// Items can potentially be in something's inventory
pub pos: Pos,
fg: RGB,
bg: RGB,
// Items could have quantity like stacks of arrows, portions of food, liters of water etc
pub quantity: isize,
// Item property
pub property: ItemProperty
}
impl Item {
///
/// Return a new `Item`
///
#[inline]
pub fn new(name: &'static str, glyph: char, pos: Pos, fg: RGB, bg: RGB, quantity: isize, property: ItemProperty) -> Self {
Item {
name,
glyph,
pos,
fg,
bg,
quantity,
property
}
}
}
///
/// Implement the `Renderable` trait for `Item`, mostly just getters and setters
///
impl Renderable for Item {
#[inline]
fn get_bg(&self) -> RGB {
self.bg
}
#[inline]
fn get_fg(&self) -> RGB {
self.fg
}
#[inline]
fn get_glyph(&self) -> char {
self.glyph
}
#[inline]
fn get_id(&self) -> &'static str {
    self.name
}
#[inline]
fn set_bg(&mut self, bg: RGB) {
self.bg = bg;
}
#[inline]
fn set_fg(&mut self, fg: RGB) {
self.fg = fg;
}
|
self.glyph = glyph
}
#[inline]
fn set_id(&mut self, name: &'static str) {
self.name = name;
}
}
impl Time for Item {
fn take_turn(&mut self, _map: &map::Grid<Tile>, _player: &Creature) {
}
}
|
#[inline]
fn set_glyph(&mut self, glyph: char) {
|
random_line_split
|
mod.rs
|
use core::creature::Creature;
use core::renderer::{Renderable, RGB};
use core::time::Time;
use core::world::dungeon::map::{self, Pos, Tile};
#[derive(Clone)]
pub enum
|
{
Copper,
Silver,
Electrum,
Gold,
Quartz,
Platinum,
Mithril,
Scale,
Onyx,
Tourmaline,
Emerald,
Ruby,
Sapphire,
Topaz,
Diamond,
}
pub fn money_value(money: &Money) -> f32 {
match money {
Money::Copper => 0.01,
Money::Silver => 0.1,
Money::Electrum => 0.5,
Money::Gold | Money::Quartz => 1.0,
Money::Platinum => 2.0,
Money::Mithril | Money::Scale => 5.0,
Money::Onyx | Money::Tourmaline => 7.0,
Money::Emerald | Money::Ruby | Money::Sapphire | Money::Topaz => 10.0,
Money::Diamond => 100.0
}
}
#[derive(Clone)]
pub enum ItemProperty {
// Money is an interesting property because it should go right to a player's wallet
Money(Money)
}
///
/// Item struct
///
#[derive(Clone)]
pub struct Item {
name: &'static str,
glyph: char,
// Items can potentially be in something's inventory
pub pos: Pos,
fg: RGB,
bg: RGB,
// Items could have quantity like stacks of arrows, portions of food, liters of water etc
pub quantity: isize,
// Item property
pub property: ItemProperty
}
impl Item {
///
/// Return a new `Item`
///
#[inline]
pub fn new(name: &'static str, glyph: char, pos: Pos, fg: RGB, bg: RGB, quantity: isize, property: ItemProperty) -> Self {
Item {
name,
glyph,
pos,
fg,
bg,
quantity,
property
}
}
}
///
/// Implement the `Renderable` trait for `Item`, mostly just getters and setters
///
impl Renderable for Item {
#[inline]
fn get_bg(&self) -> RGB {
self.bg
}
#[inline]
fn get_fg(&self) -> RGB {
self.fg
}
#[inline]
fn get_glyph(&self) -> char {
self.glyph
}
#[inline]
fn get_id(&self) -> &'static str {
    self.name
}
#[inline]
fn set_bg(&mut self, bg: RGB) {
self.bg = bg;
}
#[inline]
fn set_fg(&mut self, fg: RGB) {
self.fg = fg;
}
#[inline]
fn set_glyph(&mut self, glyph: char) {
self.glyph = glyph
}
#[inline]
fn set_id(&mut self, name: &'static str) {
self.name = name;
}
}
impl Time for Item {
fn take_turn(&mut self, _map: &map::Grid<Tile>, _player: &Creature) {
}
}
|
Money
|
identifier_name
|
propagate-approximated-shorter-to-static-no-bound.rs
|
// Test a case where we are trying to prove `'x: 'y` and are forced to
// approximate the shorter end-point (`'y`) with `'static`. This is
// because `'y` is higher-ranked but we know of no relations to other
// regions. Note that `'static` shows up in the stderr output as `'0`.
// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
use std::cell::Cell;
// Callee knows that:
//
// 'x: 'a
//
// so the only way we can ensure that `'x: 'y` is to show that
// `'a: 'static`.
|
where
F: for<'x, 'y> FnMut(
&Cell<&'a &'x u32>, // shows that 'x: 'a
&Cell<&'x u32>,
&Cell<&'y u32>,
),
{
}
fn demand_y<'x, 'y>(_cell_x: &Cell<&'x u32>, _cell_y: &Cell<&'y u32>, _y: &'y u32) {}
#[rustc_regions]
fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
//~^ ERROR borrowed data escapes outside of function
// Only works if 'x: 'y:
demand_y(x, y, x.get())
});
}
fn main() {}
|
fn establish_relationships<'a, 'b, F>(_cell_a: &Cell<&'a u32>, _cell_b: &Cell<&'b u32>, _closure: F)
|
random_line_split
|
propagate-approximated-shorter-to-static-no-bound.rs
|
// Test a case where we are trying to prove `'x: 'y` and are forced to
// approximate the shorter end-point (`'y`) with `'static`. This is
// because `'y` is higher-ranked but we know of no relations to other
// regions. Note that `'static` shows up in the stderr output as `'0`.
// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
use std::cell::Cell;
// Callee knows that:
//
// 'x: 'a
//
// so the only way we can ensure that `'x: 'y` is to show that
// `'a: 'static`.
fn establish_relationships<'a, 'b, F>(_cell_a: &Cell<&'a u32>, _cell_b: &Cell<&'b u32>, _closure: F)
where
F: for<'x, 'y> FnMut(
&Cell<&'a &'x u32>, // shows that 'x: 'a
&Cell<&'x u32>,
&Cell<&'y u32>,
),
{
}
fn demand_y<'x, 'y>(_cell_x: &Cell<&'x u32>, _cell_y: &Cell<&'y u32>, _y: &'y u32) {}
#[rustc_regions]
fn
|
<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
//~^ ERROR borrowed data escapes outside of function
// Only works if 'x: 'y:
demand_y(x, y, x.get())
});
}
fn main() {}
|
supply
|
identifier_name
|
propagate-approximated-shorter-to-static-no-bound.rs
|
// Test a case where we are trying to prove `'x: 'y` and are forced to
// approximate the shorter end-point (`'y`) with `'static`. This is
// because `'y` is higher-ranked but we know of no relations to other
// regions. Note that `'static` shows up in the stderr output as `'0`.
// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
use std::cell::Cell;
// Callee knows that:
//
// 'x: 'a
//
// so the only way we can ensure that `'x: 'y` is to show that
// `'a: 'static`.
fn establish_relationships<'a, 'b, F>(_cell_a: &Cell<&'a u32>, _cell_b: &Cell<&'b u32>, _closure: F)
where
F: for<'x, 'y> FnMut(
&Cell<&'a &'x u32>, // shows that 'x: 'a
&Cell<&'x u32>,
&Cell<&'y u32>,
),
{
}
fn demand_y<'x, 'y>(_cell_x: &Cell<&'x u32>, _cell_y: &Cell<&'y u32>, _y: &'y u32)
|
#[rustc_regions]
fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
//~^ ERROR borrowed data escapes outside of function
// Only works if 'x: 'y:
demand_y(x, y, x.get())
});
}
fn main() {}
|
{}
|
identifier_body
|
unique-send-2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::sync::mpsc::{channel, Sender};
use std::thread::Thread;
fn child(tx: &Sender<Box<uint>>, i: uint) {
tx.send(box i).unwrap();
}
pub fn main()
|
{
let (tx, rx) = channel();
let n = 100u;
let mut expected = 0u;
let _t = range(0u, n).map(|i| {
expected += i;
let tx = tx.clone();
Thread::scoped(move|| {
child(&tx, i)
})
}).collect::<Vec<_>>();
let mut actual = 0u;
for _ in range(0u, n) {
let j = rx.recv().unwrap();
actual += *j;
}
assert_eq!(expected, actual);
}
|
identifier_body
|
|
unique-send-2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::sync::mpsc::{channel, Sender};
use std::thread::Thread;
fn
|
(tx: &Sender<Box<uint>>, i: uint) {
tx.send(box i).unwrap();
}
pub fn main() {
let (tx, rx) = channel();
let n = 100u;
let mut expected = 0u;
let _t = range(0u, n).map(|i| {
expected += i;
let tx = tx.clone();
Thread::scoped(move|| {
child(&tx, i)
})
}).collect::<Vec<_>>();
let mut actual = 0u;
for _ in range(0u, n) {
let j = rx.recv().unwrap();
actual += *j;
}
assert_eq!(expected, actual);
}
|
child
|
identifier_name
|
unique-send-2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
use std::thread::Thread;
fn child(tx: &Sender<Box<uint>>, i: uint) {
tx.send(box i).unwrap();
}
pub fn main() {
let (tx, rx) = channel();
let n = 100u;
let mut expected = 0u;
let _t = range(0u, n).map(|i| {
expected += i;
let tx = tx.clone();
Thread::scoped(move|| {
child(&tx, i)
})
}).collect::<Vec<_>>();
let mut actual = 0u;
for _ in range(0u, n) {
let j = rx.recv().unwrap();
actual += *j;
}
assert_eq!(expected, actual);
}
|
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::sync::mpsc::{channel, Sender};
|
random_line_split
|
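unique-send-2.rs is written against pre-1.0 Rust (uint, box syntax, Thread::scoped, range), so it no longer builds as-is. The following is a sketch of the same test translated to current stable Rust, using Box::new, std::thread::spawn, and a 0..n range; it is an illustrative translation, not the upstream test file.
// Modern-Rust sketch of the boxed-send-over-channel test above.
use std::sync::mpsc::{channel, Sender};
use std::thread;
fn child(tx: &Sender<Box<usize>>, i: usize) {
    tx.send(Box::new(i)).unwrap();
}
fn main() {
    let (tx, rx) = channel();
    let n = 100usize;
    let expected: usize = (0..n).sum();
    let handles: Vec<_> = (0..n)
        .map(|i| {
            let tx = tx.clone();
            thread::spawn(move || child(&tx, i))
        })
        .collect();
    let mut actual = 0usize;
    for _ in 0..n {
        actual += *rx.recv().unwrap();
    }
    for h in handles {
        h.join().unwrap();
    }
    assert_eq!(expected, actual);
}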
chordangle.rs
|
/*
Copyright 2015 Google Inc. All rights reserved.
Copyright 2017 Jihyun Yu. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std;
use std::f64::consts::PI;
use crate::consts::*;
use crate::s1::angle::*;
use float_extras::f64::nextafter;
/// ChordAngle represents the angle subtended by a chord (i.e., the straight
/// line segment connecting two points on the sphere). Its representation
/// makes it very efficient for computing and comparing distances, but unlike
/// Angle it is only capable of representing angles between 0 and π radians.
/// Generally, ChordAngle should only be used in loops where many angles need
/// to be calculated and compared. Otherwise it is simpler to use Angle.
///
/// ChordAngle loses some accuracy as the angle approaches π radians.
/// Specifically, the representation of (π - x) radians has an error of about
/// (1e-15 / x), with a maximum error of about 2e-8 radians (about 13cm on the
/// Earth's surface). For comparison, for angles up to π/2 radians (10000km)
/// the worst-case representation error is about 2e-16 radians (1 nanometer),
/// which is about the same as Angle.
///
/// ChordAngles are represented by the squared chord length, which can
/// range from 0 to 4. Positive infinity represents an infinite squared length.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ChordAngle(pub f64);
/// NEGATIVE represents a chord angle smaller than the zero angle.
/// The only valid operations on a NegativeChordAngle are comparisons and
/// Angle conversions.
pub const NEGATIVE: ChordAngle = ChordAngle(-1f64);
/// RIGHT represents a chord angle of 90 degrees (a "right angle").
pub const RIGHT: ChordAngle = ChordAngle(2f64);
/// STRAIGHT represents a chord angle of 180 degrees (a "straight angle").
/// This is the maximum finite chord angle.
pub const STRAIGHT: ChordAngle = ChordAngle(4f64);
// MAXLENGTH2 is the square of the maximum length allowed in a ChordAngle.
pub const MAXLENGTH2: f64 = 4.0;
impl<'a> From<&'a Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: &'a Angle) -> Self {
if a.rad() < 0. {
NEGATIVE
} else if a.is_infinite() {
ChordAngle::inf()
} else {
let l = 2. * (0.5 * a.rad().min(PI)).sin();
ChordAngle(l * l)
}
}
}
impl From<Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: Angle) -> Self {
ChordAngle::from(&a)
}
}
impl<'a> From<&'a Deg> for ChordAngle {
fn from(a: &'a Deg) -> Self {
Angle::from(a).into()
}
}
impl From<Deg> for ChordAngle {
fn from(a: Deg) -> Self {
Angle::from(&a).into()
}
}
impl<'a> From<&'a ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: &'a ChordAngle) -> Self {
if ca.0 < 0. {
Rad(-1.).into()
} else if ca.is_infinite() {
Angle::inf()
} else {
Rad(2. * (0.5 * ca.0.sqrt()).asin()).into()
}
}
}
impl From<ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: ChordAngle) -> Self {
Angle::from(&ca)
}
}
impl<'a, 'b> std::ops::Add<&'a ChordAngle> for &'b ChordAngle {
type Output = ChordAngle;
/// add adds the other ChordAngle to this one and returns the resulting value.
/// This method assumes the ChordAngles are not special.
fn add(self, other: &'a ChordAngle) -> Self::Output {
// Note that this method (and Sub) is much more efficient than converting
// the ChordAngle to an Angle and adding those and converting back. It
// requires only one square root plus a few additions and multiplications.
if other.0 == 0.0 {
// Optimization for the common case where b is an error tolerance
// parameter that happens to be set to zero.
*self
} else if self.0 + other.0 >= 4. {
// Clamp the angle sum to at most 180 degrees.
STRAIGHT
} else {
// Let a and b be the (non-squared) chord lengths, and let c = a+b.
// Let A, B, and C be the corresponding half-angles (a = 2*sin(A), etc).
// Then the formula below can be derived from c = 2 * sin(A+B) and the
// relationships sin(A+B) = sin(A)*cos(B) + sin(B)*cos(A)
// cos(X) = sqrt(1 - sin^2(X))
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(4f64.min(x + y + 2f64 * (x * y).sqrt()))
}
}
}
impl std::ops::Add<ChordAngle> for ChordAngle {
type Output = ChordAngle;
fn add(self, other: ChordAngle) -> Self::Output {
&self + &other
}
}
impl std::ops::Sub<ChordAngle> for ChordAngle {
type Output = ChordAngle;
/// sub subtracts the other ChordAngle from this one and returns the resulting
/// value. This method assumes the ChordAngles are not special.
fn sub(self, other: ChordAngle) -> Self::Output {
if other.0 == 0.0 {
self
} else if self.0 <= other.0 {
ChordAngle(0f64)
} else {
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(0f64.max(x + y - 2. * (x * y).sqrt()))
}
}
}
impl ChordAngle {
/// inf returns a chord angle larger than any finite chord angle.
/// The only valid operations on an InfChordAngle are comparisons and Angle conversions.
pub fn inf() -> Self {
ChordAngle(std::f64::INFINITY)
}
/// is_infinite reports whether this ChordAngle is infinite.
pub fn is_infinite(&self) -> bool {
self.0.is_infinite()
}
/// from_squared_length returns a ChordAngle from the squared chord length.
/// Note that the argument is automatically clamped to a maximum of 4.0 to
/// handle possible roundoff errors. The argument must be non-negative.
pub fn from_squared_length(length2: f64) -> Self {
if length2 > 4. {
STRAIGHT
} else {
ChordAngle(length2)
}
}
/// expanded returns a new ChordAngle that has been adjusted by the given error
/// bound (which can be positive or negative). Error should be the value
/// returned by either MaxPointError or MaxAngleError. For example:
/// let a = ChordAngle::from_points(x, y)
/// let a1 = a.expanded(a.max_point_error())
pub fn expanded(&self, e: f64) -> Self {
// If the angle is special, don't change it. Otherwise clamp it to the valid range.
if self.is_special() {
*self
} else {
ChordAngle(0f64.max(4f64.min(self.0 + e)))
}
}
/// is_special reports whether this ChordAngle is one of the special cases.
pub fn is_special(&self) -> bool {
self.0 < 0. || self.0.is_infinite()
}
/// is_valid reports whether this ChordAngle is valid or not.
pub fn is_valid(&self) -> bool {
self.0 >= 0. && self.0 <= 4. || self.is_special()
}
pub fn max(self, other: Self) -> Self {
if self.0 < other.0 {
return other;
} else {
return self;
}
}
/// max_point_error returns the maximum error size for a ChordAngle constructed
/// from 2 Points x and y, assuming that x and y are normalized to within the
/// bounds guaranteed by s2.Point.Normalize. The error is defined with respect to
/// the true distance after the points are projected to lie exactly on the sphere.
pub fn max_point_error(&self) -> f64 {
// There is a relative error of (2.5*DBL_EPSILON) when computing the squared
// distance, plus an absolute error of (16 * DBL_EPSILON**2) because the
// lengths of the input points may differ from 1 by up to (2*DBL_EPSILON) each.
2.5 * DBL_EPSILON * self.0 + 16. * DBL_EPSILON * DBL_EPSILON
}
/// max_angle_error returns the maximum error for a ChordAngle constructed
/// as an Angle distance.
pub fn max_angle_error(&self) -> f64 {
DBL_EPSILON * self.0
}
/// sin returns the sine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn sin(&self) -> f64 {
self.sin2().sqrt()
}
/// sin2 returns the square of the sine of this chord angle.
/// It is more efficient than Sin.
pub fn sin2(&self) -> f64 {
// Let a be the (non-squared) chord length, and let A be the corresponding
// half-angle (a = 2*sin(A)). The formula below can be derived from:
// sin(2*A) = 2 * sin(A) * cos(A)
// cos^2(A) = 1 - sin^2(A)
// This is much faster than converting to an angle and computing its sine.
self.0 * (1. - 0.25 * self.0)
}
/// cos returns the cosine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn cos(&self) -> f64 {
// cos(2*A) = cos^2(A) - sin^2(A) = 1 - 2*sin^2(A)
1.0 - 0.5 * self.0
}
/// tan returns the tangent of this chord angle.
pub fn tan(&self) -> f64 {
self.sin() / self.cos()
}
pub fn successor(&self) -> Self {
if self.0 >= MAXLENGTH2 {
return ChordAngle::inf();
} else if self.0 < 0. {
return ChordAngle(0.);
} else {
return ChordAngle(nextafter(self.0, 10.));
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn test_chordangle_basics_case(ca1: ChordAngle, ca2: ChordAngle, less_than: bool, equal: bool) {
|
#[test]
fn test_chordangle_basics() {
let zero = ChordAngle::default();
test_chordangle_basics_case(NEGATIVE, NEGATIVE, false, true);
test_chordangle_basics_case(NEGATIVE, zero, true, false);
test_chordangle_basics_case(NEGATIVE, STRAIGHT, true, false);
test_chordangle_basics_case(NEGATIVE, ChordAngle::inf(), true, false);
test_chordangle_basics_case(zero, zero, false, true);
test_chordangle_basics_case(zero, STRAIGHT, true, false);
test_chordangle_basics_case(zero, ChordAngle::inf(), true, false);
test_chordangle_basics_case(STRAIGHT, STRAIGHT, false, true);
test_chordangle_basics_case(STRAIGHT, ChordAngle::inf(), true, false);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
}
fn test_chordangle_is_functions_case(
ca: ChordAngle,
is_neg: bool,
is_zero: bool,
is_inf: bool,
is_special: bool,
) {
assert_eq!(is_neg, ca.0 < 0.);
assert_eq!(is_zero, ca.0 == 0.);
assert_eq!(is_inf, ca.is_infinite());
assert_eq!(is_special, ca.is_special());
}
#[test]
fn test_chordangle_is_functions() {
let zero: ChordAngle = Default::default();
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(NEGATIVE, true, false, false, true);
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(STRAIGHT, false, false, false, false);
test_chordangle_is_functions_case(ChordAngle::inf(), false, false, true, true);
}
#[test]
fn test_chordangle_from_angle() {
let angles = vec![
Angle::from(Rad(0.)),
Angle::from(Rad(1.)),
Angle::from(Rad(-1.)),
Angle::from(Rad(PI)),
];
for angle in angles.into_iter() {
let ca = ChordAngle::from(angle);
let got = Angle::from(ca);
assert_eq!(got, angle);
}
assert_eq!(STRAIGHT, ChordAngle::from(Angle::from(Rad(PI))));
assert_eq!(Angle::inf(), Angle::from(ChordAngle::from(Angle::inf())));
}
fn chordangle_eq(a: ChordAngle, b: ChordAngle) {
assert_f64_eq!(a.0, b.0);
}
#[test]
fn test_chordangle_arithmetic() {
let zero = ChordAngle::default();
let deg_30 = ChordAngle::from(Deg(30.));
let deg_60 = ChordAngle::from(Deg(60.));
let deg_90 = ChordAngle::from(Deg(90.));
let deg_120 = ChordAngle::from(Deg(120.));
let deg_180 = STRAIGHT;
chordangle_eq(zero + zero, zero);
chordangle_eq(deg_60 + zero, deg_60);
chordangle_eq(zero + deg_60, deg_60);
chordangle_eq(deg_30 + deg_60, deg_90);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_180 + zero, deg_180);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_90 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_120, deg_180);
chordangle_eq(deg_30 + deg_180, deg_180);
chordangle_eq(deg_180 + deg_180, deg_180);
chordangle_eq(zero - zero, zero);
chordangle_eq(deg_60 - deg_60, zero);
chordangle_eq(deg_180 - deg_180, zero);
chordangle_eq(zero - deg_60, zero);
chordangle_eq(deg_30 - deg_90, zero);
chordangle_eq(deg_90 - deg_30, deg_60);
chordangle_eq(deg_90 - deg_60, deg_30);
chordangle_eq(deg_180 - zero, deg_180);
}
#[test]
fn test_chordangle_trigonometry() {
let iters = 40usize;
for i in 0..(iters + 1) {
let radians = PI * (i as f64) / (iters as f64);
let chordangle = ChordAngle::from(Angle::from(Rad(radians)));
assert_f64_eq!(radians.sin(), chordangle.sin());
assert_f64_eq!(radians.cos(), chordangle.cos());
// Since tan(x) is unbounded near pi/4, we map the result back to an
// angle before comparing. The assertion is that the result is equal to
// the tangent of a nearby angle.
assert_f64_eq!(radians.tan().atan(), chordangle.tan().atan());
}
let angle_90 = ChordAngle::from_squared_length(2.);
let angle_180 = ChordAngle::from_squared_length(4.);
assert_f64_eq!(1., angle_90.sin());
assert_f64_eq!(0., angle_90.cos());
assert!(angle_90.tan().is_infinite());
assert_f64_eq!(0., angle_180.sin());
assert_f64_eq!(-1., angle_180.cos());
assert_f64_eq!(0., angle_180.tan());
}
#[test]
fn test_chordangle_expanded() {
let zero = ChordAngle::default();
assert_eq!(NEGATIVE.expanded(5.), NEGATIVE.expanded(5.));
assert_eq!(ChordAngle::inf().expanded(-5.), ChordAngle::inf());
assert_eq!(zero.expanded(-5.), zero);
assert_eq!(
ChordAngle::from_squared_length(1.25).expanded(0.25),
ChordAngle::from_squared_length(1.5)
);
assert_eq!(
ChordAngle::from_squared_length(0.75).expanded(0.25),
ChordAngle::from_squared_length(1.)
);
}
}
|
assert_eq!(less_than, ca1 < ca2);
assert_eq!(equal, ca1 == ca2);
}
|
identifier_body
|
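The doc comments above define the representation as the squared chord length (2*sin(theta/2))^2 and derive sin2 and cos from it. Those identities can be spot-checked numerically with nothing but std; a minimal sketch:
// Numeric check: with s = (2*sin(theta/2))^2, s*(1 - s/4) = sin^2(theta)
// and 1 - s/2 = cos(theta), matching ChordAngle::sin2 and ChordAngle::cos.
fn main() {
    let theta: f64 = 0.7; // arbitrary test angle in radians
    let l = 2.0 * (0.5 * theta).sin();
    let s = l * l; // what a ChordAngle stores
    let sin2 = s * (1.0 - 0.25 * s);
    let cos = 1.0 - 0.5 * s;
    assert!((sin2 - theta.sin().powi(2)).abs() < 1e-12);
    assert!((cos - theta.cos()).abs() < 1e-12);
}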
chordangle.rs
|
/*
Copyright 2015 Google Inc. All rights reserved.
Copyright 2017 Jihyun Yu. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std;
use std::f64::consts::PI;
use crate::consts::*;
use crate::s1::angle::*;
use float_extras::f64::nextafter;
/// ChordAngle represents the angle subtended by a chord (i.e., the straight
/// line segment connecting two points on the sphere). Its representation
/// makes it very efficient for computing and comparing distances, but unlike
/// Angle it is only capable of representing angles between 0 and π radians.
/// Generally, ChordAngle should only be used in loops where many angles need
/// to be calculated and compared. Otherwise it is simpler to use Angle.
///
/// ChordAngle loses some accuracy as the angle approaches π radians.
/// Specifically, the representation of (π - x) radians has an error of about
/// (1e-15 / x), with a maximum error of about 2e-8 radians (about 13cm on the
/// Earth's surface). For comparison, for angles up to π/2 radians (10000km)
/// the worst-case representation error is about 2e-16 radians (1 nanometer),
/// which is about the same as Angle.
///
/// ChordAngles are represented by the squared chord length, which can
/// range from 0 to 4. Positive infinity represents an infinite squared length.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ChordAngle(pub f64);
/// NEGATIVE represents a chord angle smaller than the zero angle.
/// The only valid operations on a NegativeChordAngle are comparisons and
/// Angle conversions.
pub const NEGATIVE: ChordAngle = ChordAngle(-1f64);
/// RIGHT represents a chord angle of 90 degrees (a "right angle").
pub const RIGHT: ChordAngle = ChordAngle(2f64);
/// STRAIGHT represents a chord angle of 180 degrees (a "straight angle").
/// This is the maximum finite chord angle.
pub const STRAIGHT: ChordAngle = ChordAngle(4f64);
// MAXLENGTH2 is the square of the maximum length allowed in a ChordAngle.
pub const MAXLENGTH2: f64 = 4.0;
impl<'a> From<&'a Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: &'a Angle) -> Self {
if a.rad() < 0. {
NEGATIVE
} else if a.is_infinite() {
ChordAngle::inf()
} else {
let l = 2. * (0.5 * a.rad().min(PI)).sin();
ChordAngle(l * l)
}
}
}
impl From<Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: Angle) -> Self {
ChordAngle::from(&a)
}
}
impl<'a> From<&'a Deg> for ChordAngle {
fn from(a: &'a Deg) -> Self {
Angle::from(a).into()
}
}
impl From<Deg> for ChordAngle {
fn from
|
Deg) -> Self {
Angle::from(&a).into()
}
}
impl<'a> From<&'a ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: &'a ChordAngle) -> Self {
if ca.0 < 0. {
Rad(-1.).into()
} else if ca.is_infinite() {
Angle::inf()
} else {
Rad(2. * (0.5 * ca.0.sqrt()).asin()).into()
}
}
}
impl From<ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: ChordAngle) -> Self {
Angle::from(&ca)
}
}
impl<'a, 'b> std::ops::Add<&'a ChordAngle> for &'b ChordAngle {
type Output = ChordAngle;
/// add adds the other ChordAngle to this one and returns the resulting value.
/// This method assumes the ChordAngles are not special.
fn add(self, other: &'a ChordAngle) -> Self::Output {
// Note that this method (and Sub) is much more efficient than converting
// the ChordAngle to an Angle and adding those and converting back. It
// requires only one square root plus a few additions and multiplications.
if other.0 == 0.0 {
// Optimization for the common case where b is an error tolerance
// parameter that happens to be set to zero.
*self
} else if self.0 + other.0 >= 4. {
// Clamp the angle sum to at most 180 degrees.
STRAIGHT
} else {
// Let a and b be the (non-squared) chord lengths, and let c = a+b.
// Let A, B, and C be the corresponding half-angles (a = 2*sin(A), etc).
// Then the formula below can be derived from c = 2 * sin(A+B) and the
// relationships sin(A+B) = sin(A)*cos(B) + sin(B)*cos(A)
// cos(X) = sqrt(1 - sin^2(X))
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(4f64.min(x + y + 2f64 * (x * y).sqrt()))
}
}
}
impl std::ops::Add<ChordAngle> for ChordAngle {
type Output = ChordAngle;
fn add(self, other: ChordAngle) -> Self::Output {
&self + &other
}
}
impl std::ops::Sub<ChordAngle> for ChordAngle {
type Output = ChordAngle;
/// sub subtracts the other ChordAngle from this one and returns the resulting
/// value. This method assumes the ChordAngles are not special.
fn sub(self, other: ChordAngle) -> Self::Output {
if other.0 == 0.0 {
self
} else if self.0 <= other.0 {
ChordAngle(0f64)
} else {
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(0f64.max(x + y - 2. * (x * y).sqrt()))
}
}
}
impl ChordAngle {
/// inf returns a chord angle larger than any finite chord angle.
/// The only valid operations on an InfChordAngle are comparisons and Angle conversions.
pub fn inf() -> Self {
ChordAngle(std::f64::INFINITY)
}
/// is_infinite reports whether this ChordAngle is infinite.
pub fn is_infinite(&self) -> bool {
self.0.is_infinite()
}
/// from_squared_length returns a ChordAngle from the squared chord length.
/// Note that the argument is automatically clamped to a maximum of 4.0 to
/// handle possible roundoff errors. The argument must be non-negative.
pub fn from_squared_length(length2: f64) -> Self {
if length2 > 4. {
STRAIGHT
} else {
ChordAngle(length2)
}
}
/// expanded returns a new ChordAngle that has been adjusted by the given error
/// bound (which can be positive or negative). Error should be the value
/// returned by either MaxPointError or MaxAngleError. For example:
/// let a = ChordAngle::from_points(x, y)
/// let a1 = a.expanded(a.max_point_error())
pub fn expanded(&self, e: f64) -> Self {
// If the angle is special, don't change it. Otherwise clamp it to the valid range.
if self.is_special() {
*self
} else {
ChordAngle(0f64.max(4f64.min(self.0 + e)))
}
}
/// is_special reports whether this ChordAngle is one of the special cases.
pub fn is_special(&self) -> bool {
self.0 < 0. || self.0.is_infinite()
}
/// is_valid reports whether this ChordAngle is valid or not.
pub fn is_valid(&self) -> bool {
self.0 >= 0. && self.0 <= 4. || self.is_special()
}
pub fn max(self, other: Self) -> Self {
if self.0 < other.0 {
return other;
} else {
return self;
}
}
/// max_point_error returns the maximum error size for a ChordAngle constructed
/// from 2 Points x and y, assuming that x and y are normalized to within the
/// bounds guaranteed by s2.Point.Normalize. The error is defined with respect to
/// the true distance after the points are projected to lie exactly on the sphere.
pub fn max_point_error(&self) -> f64 {
// There is a relative error of (2.5*DBL_EPSILON) when computing the squared
// distance, plus an absolute error of (16 * DBL_EPSILON**2) because the
// lengths of the input points may differ from 1 by up to (2*DBL_EPSILON) each.
2.5 * DBL_EPSILON * self.0 + 16. * DBL_EPSILON * DBL_EPSILON
}
/// max_angle_error returns the maximum error for a ChordAngle constructed
/// as an Angle distance.
pub fn max_angle_error(&self) -> f64 {
DBL_EPSILON * self.0
}
/// sin returns the sine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn sin(&self) -> f64 {
self.sin2().sqrt()
}
/// sin2 returns the square of the sine of this chord angle.
/// It is more efficient than Sin.
pub fn sin2(&self) -> f64 {
// Let a be the (non-squared) chord length, and let A be the corresponding
// half-angle (a = 2*sin(A)). The formula below can be derived from:
// sin(2*A) = 2 * sin(A) * cos(A)
// cos^2(A) = 1 - sin^2(A)
// This is much faster than converting to an angle and computing its sine.
self.0 * (1. - 0.25 * self.0)
}
/// cos returns the cosine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn cos(&self) -> f64 {
// cos(2*A) = cos^2(A) - sin^2(A) = 1 - 2*sin^2(A)
1.0 - 0.5 * self.0
}
/// tan returns the tangent of this chord angle.
pub fn tan(&self) -> f64 {
self.sin() / self.cos()
}
pub fn successor(&self) -> Self {
if self.0 >= MAXLENGTH2 {
return ChordAngle::inf();
} else if self.0 < 0. {
return ChordAngle(0.);
} else {
return ChordAngle(nextafter(self.0, 10.));
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn test_chordangle_basics_case(ca1: ChordAngle, ca2: ChordAngle, less_than: bool, equal: bool) {
assert_eq!(less_than, ca1 < ca2);
assert_eq!(equal, ca1 == ca2);
}
#[test]
fn test_chordangle_basics() {
let zero = ChordAngle::default();
test_chordangle_basics_case(NEGATIVE, NEGATIVE, false, true);
test_chordangle_basics_case(NEGATIVE, zero, true, false);
test_chordangle_basics_case(NEGATIVE, STRAIGHT, true, false);
test_chordangle_basics_case(NEGATIVE, ChordAngle::inf(), true, false);
test_chordangle_basics_case(zero, zero, false, true);
test_chordangle_basics_case(zero, STRAIGHT, true, false);
test_chordangle_basics_case(zero, ChordAngle::inf(), true, false);
test_chordangle_basics_case(STRAIGHT, STRAIGHT, false, true);
test_chordangle_basics_case(STRAIGHT, ChordAngle::inf(), true, false);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
}
fn test_chordangle_is_functions_case(
ca: ChordAngle,
is_neg: bool,
is_zero: bool,
is_inf: bool,
is_special: bool,
) {
assert_eq!(is_neg, ca.0 < 0.);
assert_eq!(is_zero, ca.0 == 0.);
assert_eq!(is_inf, ca.is_infinite());
assert_eq!(is_special, ca.is_special());
}
#[test]
fn test_chordangle_is_functions() {
let zero: ChordAngle = Default::default();
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(NEGATIVE, true, false, false, true);
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(STRAIGHT, false, false, false, false);
test_chordangle_is_functions_case(ChordAngle::inf(), false, false, true, true);
}
#[test]
fn test_chordangle_from_angle() {
let angles = vec![
Angle::from(Rad(0.)),
Angle::from(Rad(1.)),
Angle::from(Rad(-1.)),
Angle::from(Rad(PI)),
];
for angle in angles.into_iter() {
let ca = ChordAngle::from(angle);
let got = Angle::from(ca);
assert_eq!(got, angle);
}
assert_eq!(STRAIGHT, ChordAngle::from(Angle::from(Rad(PI))));
assert_eq!(Angle::inf(), Angle::from(ChordAngle::from(Angle::inf())));
}
fn chordangle_eq(a: ChordAngle, b: ChordAngle) {
assert_f64_eq!(a.0, b.0);
}
#[test]
fn test_chordangle_arithmetic() {
let zero = ChordAngle::default();
let deg_30 = ChordAngle::from(Deg(30.));
let deg_60 = ChordAngle::from(Deg(60.));
let deg_90 = ChordAngle::from(Deg(90.));
let deg_120 = ChordAngle::from(Deg(120.));
let deg_180 = STRAIGHT;
chordangle_eq(zero + zero, zero);
chordangle_eq(deg_60 + zero, deg_60);
chordangle_eq(zero + deg_60, deg_60);
chordangle_eq(deg_30 + deg_60, deg_90);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_180 + zero, deg_180);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_90 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_120, deg_180);
chordangle_eq(deg_30 + deg_180, deg_180);
chordangle_eq(deg_180 + deg_180, deg_180);
chordangle_eq(zero - zero, zero);
chordangle_eq(deg_60 - deg_60, zero);
chordangle_eq(deg_180 - deg_180, zero);
chordangle_eq(zero - deg_60, zero);
chordangle_eq(deg_30 - deg_90, zero);
chordangle_eq(deg_90 - deg_30, deg_60);
chordangle_eq(deg_90 - deg_60, deg_30);
chordangle_eq(deg_180 - zero, deg_180);
}
#[test]
fn test_chordangle_trigonometry() {
let iters = 40usize;
for i in 0..(iters + 1) {
let radians = PI * (i as f64) / (iters as f64);
let chordangle = ChordAngle::from(Angle::from(Rad(radians)));
assert_f64_eq!(radians.sin(), chordangle.sin());
assert_f64_eq!(radians.cos(), chordangle.cos());
// Since tan(x) is unbounded near pi/4, we map the result back to an
// angle before comparing. The assertion is that the result is equal to
// the tangent of a nearby angle.
assert_f64_eq!(radians.tan().atan(), chordangle.tan().atan());
}
let angle_90 = ChordAngle::from_squared_length(2.);
let angle_180 = ChordAngle::from_squared_length(4.);
assert_f64_eq!(1., angle_90.sin());
assert_f64_eq!(0., angle_90.cos());
assert!(angle_90.tan().is_infinite());
assert_f64_eq!(0., angle_180.sin());
assert_f64_eq!(-1., angle_180.cos());
assert_f64_eq!(0., angle_180.tan());
}
#[test]
fn test_chordangle_expanded() {
let zero = ChordAngle::default();
assert_eq!(NEGATIVE.expanded(5.), NEGATIVE.expanded(5.));
assert_eq!(ChordAngle::inf().expanded(-5.), ChordAngle::inf());
assert_eq!(zero.expanded(-5.), zero);
assert_eq!(
ChordAngle::from_squared_length(1.25).expanded(0.25),
ChordAngle::from_squared_length(1.5)
);
assert_eq!(
ChordAngle::from_squared_length(0.75).expanded(0.25),
ChordAngle::from_squared_length(1.)
);
}
}
|
(a:
|
identifier_name
|
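The comments inside the Add impl above derive the sum formula from c = 2*sin(A+B) and the angle-addition identity. A std-only numeric sketch of that derivation follows; a and b are hypothetical half-angles chosen small enough that the clamp to STRAIGHT is not hit.
// Check: with s = (2*sin(A))^2 and t = (2*sin(B))^2, the expression
// x + y + 2*sqrt(x*y) used in add() equals (2*sin(A+B))^2.
fn main() {
    let (a, b): (f64, f64) = (0.3, 0.4); // half-angles in radians
    let s = (2.0 * a.sin()).powi(2);
    let t = (2.0 * b.sin()).powi(2);
    let x = s * (1.0 - 0.25 * t);
    let y = t * (1.0 - 0.25 * s);
    let sum = x + y + 2.0 * (x * y).sqrt();
    let expected = (2.0 * (a + b).sin()).powi(2);
    assert!((sum - expected).abs() < 1e-12);
}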
chordangle.rs
|
/*
Copyright 2015 Google Inc. All rights reserved.
Copyright 2017 Jihyun Yu. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std;
use std::f64::consts::PI;
use crate::consts::*;
use crate::s1::angle::*;
use float_extras::f64::nextafter;
/// ChordAngle represents the angle subtended by a chord (i.e., the straight
/// line segment connecting two points on the sphere). Its representation
/// makes it very efficient for computing and comparing distances, but unlike
/// Angle it is only capable of representing angles between 0 and π radians.
/// Generally, ChordAngle should only be used in loops where many angles need
/// to be calculated and compared. Otherwise it is simpler to use Angle.
///
/// ChordAngle loses some accuracy as the angle approaches π radians.
/// Specifically, the representation of (π - x) radians has an error of about
/// (1e-15 / x), with a maximum error of about 2e-8 radians (about 13cm on the
/// Earth's surface). For comparison, for angles up to π/2 radians (10000km)
/// the worst-case representation error is about 2e-16 radians (1 nanometer),
/// which is about the same as Angle.
///
/// ChordAngles are represented by the squared chord length, which can
/// range from 0 to 4. Positive infinity represents an infinite squared length.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ChordAngle(pub f64);
/// NEGATIVE represents a chord angle smaller than the zero angle.
/// The only valid operations on a NegativeChordAngle are comparisons and
/// Angle conversions.
pub const NEGATIVE: ChordAngle = ChordAngle(-1f64);
/// RIGHT represents a chord angle of 90 degrees (a "right angle").
pub const RIGHT: ChordAngle = ChordAngle(2f64);
/// STRAIGHT represents a chord angle of 180 degrees (a "straight angle").
/// This is the maximum finite chord angle.
pub const STRAIGHT: ChordAngle = ChordAngle(4f64);
// MAXLENGTH2 is the square of the maximum length allowed in a ChordAngle.
pub const MAXLENGTH2: f64 = 4.0;
impl<'a> From<&'a Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: &'a Angle) -> Self {
if a.rad() < 0. {
NEGATIVE
} else if a.is_infinite() {
ChordAngle::inf()
} else {
let l = 2. * (0.5 * a.rad().min(PI)).sin();
ChordAngle(l * l)
}
}
}
impl From<Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: Angle) -> Self {
ChordAngle::from(&a)
}
}
impl<'a> From<&'a Deg> for ChordAngle {
fn from(a: &'a Deg) -> Self {
Angle::from(a).into()
}
}
impl From<Deg> for ChordAngle {
|
Angle::from(&a).into()
}
}
impl<'a> From<&'a ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: &'a ChordAngle) -> Self {
if ca.0 < 0. {
Rad(-1.).into()
} else if ca.is_infinite() {
Angle::inf()
} else {
Rad(2. * (0.5 * ca.0.sqrt()).asin()).into()
}
}
}
impl From<ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: ChordAngle) -> Self {
Angle::from(&ca)
}
}
impl<'a, 'b> std::ops::Add<&'a ChordAngle> for &'b ChordAngle {
type Output = ChordAngle;
/// add adds the other ChordAngle to this one and returns the resulting value.
/// This method assumes the ChordAngles are not special.
fn add(self, other: &'a ChordAngle) -> Self::Output {
// Note that this method (and Sub) is much more efficient than converting
// the ChordAngle to an Angle and adding those and converting back. It
// requires only one square root plus a few additions and multiplications.
if other.0 == 0.0 {
// Optimization for the common case where b is an error tolerance
// parameter that happens to be set to zero.
*self
} else if self.0 + other.0 >= 4. {
// Clamp the angle sum to at most 180 degrees.
STRAIGHT
} else {
// Let a and b be the (non-squared) chord lengths, and let c = a+b.
// Let A, B, and C be the corresponding half-angles (a = 2*sin(A), etc).
// Then the formula below can be derived from c = 2 * sin(A+B) and the
// relationships sin(A+B) = sin(A)*cos(B) + sin(B)*cos(A)
// cos(X) = sqrt(1 - sin^2(X))
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(4f64.min(x + y + 2f64 * (x * y).sqrt()))
}
}
}
impl std::ops::Add<ChordAngle> for ChordAngle {
type Output = ChordAngle;
fn add(self, other: ChordAngle) -> Self::Output {
&self + &other
}
}
impl std::ops::Sub<ChordAngle> for ChordAngle {
type Output = ChordAngle;
/// sub subtracts the other ChordAngle from this one and returns the resulting
/// value. This method assumes the ChordAngles are not special.
fn sub(self, other: ChordAngle) -> Self::Output {
if other.0 == 0.0 {
self
} else if self.0 <= other.0 {
ChordAngle(0f64)
} else {
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(0f64.max(x + y - 2. * (x * y).sqrt()))
}
}
}
impl ChordAngle {
/// inf returns a chord angle larger than any finite chord angle.
/// The only valid operations on an InfChordAngle are comparisons and Angle conversions.
pub fn inf() -> Self {
ChordAngle(std::f64::INFINITY)
}
/// is_infinite reports whether this ChordAngle is infinite.
pub fn is_infinite(&self) -> bool {
self.0.is_infinite()
}
/// from_squared_length returns a ChordAngle from the squared chord length.
/// Note that the argument is automatically clamped to a maximum of 4.0 to
/// handle possible roundoff errors. The argument must be non-negative.
pub fn from_squared_length(length2: f64) -> Self {
if length2 > 4. {
STRAIGHT
} else {
ChordAngle(length2)
}
}
/// expanded returns a new ChordAngle that has been adjusted by the given error
/// bound (which can be positive or negative). Error should be the value
/// returned by either MaxPointError or MaxAngleError. For example:
/// let a = ChordAngle::from_points(x, y)
/// let a1 = a.expanded(a.max_point_error())
pub fn expanded(&self, e: f64) -> Self {
// If the angle is special, don't change it. Otherwise clamp it to the valid range.
if self.is_special() {
*self
} else {
ChordAngle(0f64.max(4f64.min(self.0 + e)))
}
}
/// is_special reports whether this ChordAngle is one of the special cases.
pub fn is_special(&self) -> bool {
self.0 < 0. || self.0.is_infinite()
}
/// is_valid reports whether this ChordAngle is valid or not.
pub fn is_valid(&self) -> bool {
self.0 >= 0. && self.0 <= 4. || self.is_special()
}
pub fn max(self, other: Self) -> Self {
if self.0 < other.0 {
return other;
} else {
return self;
}
}
/// max_point_error returns the maximum error size for a ChordAngle constructed
/// from 2 Points x and y, assuming that x and y are normalized to within the
/// bounds guaranteed by s2.Point.Normalize. The error is defined with respect to
/// the true distance after the points are projected to lie exactly on the sphere.
pub fn max_point_error(&self) -> f64 {
// There is a relative error of (2.5*DBL_EPSILON) when computing the squared
// distance, plus an absolute error of (16 * DBL_EPSILON**2) because the
// lengths of the input points may differ from 1 by up to (2*DBL_EPSILON) each.
2.5 * DBL_EPSILON * self.0 + 16. * DBL_EPSILON * DBL_EPSILON
}
/// max_angle_error returns the maximum error for a ChordAngle constructed
/// as an Angle distance.
pub fn max_angle_error(&self) -> f64 {
DBL_EPSILON * self.0
}
/// sin returns the sine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn sin(&self) -> f64 {
self.sin2().sqrt()
}
/// sin2 returns the square of the sine of this chord angle.
/// It is more efficient than Sin.
pub fn sin2(&self) -> f64 {
// Let a be the (non-squared) chord length, and let A be the corresponding
// half-angle (a = 2*sin(A)). The formula below can be derived from:
// sin(2*A) = 2 * sin(A) * cos(A)
// cos^2(A) = 1 - sin^2(A)
// This is much faster than converting to an angle and computing its sine.
self.0 * (1. - 0.25 * self.0)
}
/// cos returns the cosine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn cos(&self) -> f64 {
// cos(2*A) = cos^2(A) - sin^2(A) = 1 - 2*sin^2(A)
1.0 - 0.5 * self.0
}
/// tan returns the tangent of this chord angle.
pub fn tan(&self) -> f64 {
self.sin() / self.cos()
}
pub fn successor(&self) -> Self {
if self.0 >= MAXLENGTH2 {
return ChordAngle::inf();
} else if self.0 < 0. {
return ChordAngle(0.);
} else {
return ChordAngle(nextafter(self.0, 10.));
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn test_chordangle_basics_case(ca1: ChordAngle, ca2: ChordAngle, less_than: bool, equal: bool) {
assert_eq!(less_than, ca1 < ca2);
assert_eq!(equal, ca1 == ca2);
}
#[test]
fn test_chordangle_basics() {
let zero = ChordAngle::default();
test_chordangle_basics_case(NEGATIVE, NEGATIVE, false, true);
test_chordangle_basics_case(NEGATIVE, zero, true, false);
test_chordangle_basics_case(NEGATIVE, STRAIGHT, true, false);
test_chordangle_basics_case(NEGATIVE, ChordAngle::inf(), true, false);
test_chordangle_basics_case(zero, zero, false, true);
test_chordangle_basics_case(zero, STRAIGHT, true, false);
test_chordangle_basics_case(zero, ChordAngle::inf(), true, false);
test_chordangle_basics_case(STRAIGHT, STRAIGHT, false, true);
test_chordangle_basics_case(STRAIGHT, ChordAngle::inf(), true, false);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
}
fn test_chordangle_is_functions_case(
ca: ChordAngle,
is_neg: bool,
is_zero: bool,
is_inf: bool,
is_special: bool,
) {
assert_eq!(is_neg, ca.0 < 0.);
assert_eq!(is_zero, ca.0 == 0.);
assert_eq!(is_inf, ca.is_infinite());
assert_eq!(is_special, ca.is_special());
}
#[test]
fn test_chordangle_is_functions() {
let zero: ChordAngle = Default::default();
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(NEGATIVE, true, false, false, true);
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(STRAIGHT, false, false, false, false);
test_chordangle_is_functions_case(ChordAngle::inf(), false, false, true, true);
}
#[test]
fn test_chordangle_from_angle() {
let angles = vec![
Angle::from(Rad(0.)),
Angle::from(Rad(1.)),
Angle::from(Rad(-1.)),
Angle::from(Rad(PI)),
];
for angle in angles.into_iter() {
let ca = ChordAngle::from(angle);
let got = Angle::from(ca);
assert_eq!(got, angle);
}
assert_eq!(STRAIGHT, ChordAngle::from(Angle::from(Rad(PI))));
assert_eq!(Angle::inf(), Angle::from(ChordAngle::from(Angle::inf())));
}
fn chordangle_eq(a: ChordAngle, b: ChordAngle) {
assert_f64_eq!(a.0, b.0);
}
#[test]
fn test_chordangle_arithmetic() {
let zero = ChordAngle::default();
let deg_30 = ChordAngle::from(Deg(30.));
let deg_60 = ChordAngle::from(Deg(60.));
let deg_90 = ChordAngle::from(Deg(90.));
let deg_120 = ChordAngle::from(Deg(120.));
let deg_180 = STRAIGHT;
chordangle_eq(zero + zero, zero);
chordangle_eq(deg_60 + zero, deg_60);
chordangle_eq(zero + deg_60, deg_60);
chordangle_eq(deg_30 + deg_60, deg_90);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_180 + zero, deg_180);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_90 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_120, deg_180);
chordangle_eq(deg_30 + deg_180, deg_180);
chordangle_eq(deg_180 + deg_180, deg_180);
chordangle_eq(zero - zero, zero);
chordangle_eq(deg_60 - deg_60, zero);
chordangle_eq(deg_180 - deg_180, zero);
chordangle_eq(zero - deg_60, zero);
chordangle_eq(deg_30 - deg_90, zero);
chordangle_eq(deg_90 - deg_30, deg_60);
chordangle_eq(deg_90 - deg_60, deg_30);
chordangle_eq(deg_180 - zero, deg_180);
}
#[test]
fn test_chordangle_trigonometry() {
let iters = 40usize;
for i in 0..(iters + 1) {
let radians = PI * (i as f64) / (iters as f64);
let chordangle = ChordAngle::from(Angle::from(Rad(radians)));
assert_f64_eq!(radians.sin(), chordangle.sin());
assert_f64_eq!(radians.cos(), chordangle.cos());
// Since tan(x) is unbounded near pi/2, we map the result back to an
// angle before comparing. The assertion is that the result is equal to
// the tangent of a nearby angle.
assert_f64_eq!(radians.tan().atan(), chordangle.tan().atan());
}
let angle_90 = ChordAngle::from_squared_length(2.);
let angle_180 = ChordAngle::from_squared_length(4.);
assert_f64_eq!(1., angle_90.sin());
assert_f64_eq!(0., angle_90.cos());
assert!(angle_90.tan().is_infinite());
assert_f64_eq!(0., angle_180.sin());
assert_f64_eq!(-1., angle_180.cos());
assert_f64_eq!(0., angle_180.tan());
}
#[test]
fn test_chordangle_expanded() {
let zero = ChordAngle::default();
assert_eq!(NEGATIVE.expanded(5.), NEGATIVE);
assert_eq!(ChordAngle::inf().expanded(-5.), ChordAngle::inf());
assert_eq!(zero.expanded(-5.), zero);
assert_eq!(
ChordAngle::from_squared_length(1.25).expanded(0.25),
ChordAngle::from_squared_length(1.5)
);
assert_eq!(
ChordAngle::from_squared_length(0.75).expanded(0.25),
ChordAngle::from_squared_length(1.)
);
}
}
|
fn from(a: Deg) -> Self {
|
random_line_split
|
chordangle.rs
|
/*
Copyright 2015 Google Inc. All rights reserved.
Copyright 2017 Jihyun Yu. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std;
use std::f64::consts::PI;
use crate::consts::*;
use crate::s1::angle::*;
use float_extras::f64::nextafter;
/// ChordAngle represents the angle subtended by a chord (i.e., the straight
/// line segment connecting two points on the sphere). Its representation
/// makes it very efficient for computing and comparing distances, but unlike
/// Angle it is only capable of representing angles between 0 and π radians.
/// Generally, ChordAngle should only be used in loops where many angles need
/// to be calculated and compared. Otherwise it is simpler to use Angle.
///
/// ChordAngle loses some accuracy as the angle approaches π radians.
/// Specifically, the representation of (π - x) radians has an error of about
/// (1e-15 / x), with a maximum error of about 2e-8 radians (about 13cm on the
/// Earth's surface). For comparison, for angles up to π/2 radians (10000km)
/// the worst-case representation error is about 2e-16 radians (1 nanometer),
/// which is about the same as Angle.
///
/// ChordAngles are represented by the squared chord length, which can
/// range from 0 to 4. Positive infinity represents an infinite squared length.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ChordAngle(pub f64);
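// For intuition (illustrative values only): a 60-degree angle has chord length
// 2*sin(30 deg) = 1 and is stored as 1.0; a 90-degree angle is stored as
// (2*sin(45 deg))^2 = 2.0 (RIGHT below); a 180-degree angle is stored as 4.0 (STRAIGHT).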
/// NEGATIVE represents a chord angle smaller than the zero angle.
/// The only valid operations on a NegativeChordAngle are comparisons and
/// Angle conversions.
pub const NEGATIVE: ChordAngle = ChordAngle(-1f64);
/// RIGHT represents a chord angle of 90 degrees (a "right angle").
pub const RIGHT: ChordAngle = ChordAngle(2f64);
/// STRAIGHT represents a chord angle of 180 degrees (a "straight angle").
/// This is the maximum finite chord angle.
pub const STRAIGHT: ChordAngle = ChordAngle(4f64);
// MAXLENGTH2 is the square of the maximum length allowed in a ChordAngle.
pub const MAXLENGTH2: f64 = 4.0;
impl<'a> From<&'a Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: &'a Angle) -> Self {
if a.rad() < 0. {
|
e if a.is_infinite() {
ChordAngle::inf()
} else {
let l = 2. * (0.5 * a.rad().min(PI)).sin();
ChordAngle(l * l)
}
}
}
impl From<Angle> for ChordAngle {
/// returns a ChordAngle from the given Angle.
fn from(a: Angle) -> Self {
ChordAngle::from(&a)
}
}
impl<'a> From<&'a Deg> for ChordAngle {
fn from(a: &'a Deg) -> Self {
Angle::from(a).into()
}
}
impl From<Deg> for ChordAngle {
fn from(a: Deg) -> Self {
Angle::from(&a).into()
}
}
impl<'a> From<&'a ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: &'a ChordAngle) -> Self {
if ca.0 < 0. {
Rad(-1.).into()
} else if ca.is_infinite() {
Angle::inf()
} else {
Rad(2. * (0.5 * ca.0.sqrt()).asin()).into()
}
}
}
impl From<ChordAngle> for Angle {
/// converts this ChordAngle to an Angle.
fn from(ca: ChordAngle) -> Self {
Angle::from(&ca)
}
}
impl<'a, 'b> std::ops::Add<&'a ChordAngle> for &'b ChordAngle {
type Output = ChordAngle;
/// add adds the other ChordAngle to this one and returns the resulting value.
/// This method assumes the ChordAngles are not special.
fn add(self, other: &'a ChordAngle) -> Self::Output {
// Note that this method (and Sub) is much more efficient than converting
// the ChordAngle to an Angle and adding those and converting back. It
// requires only one square root plus a few additions and multiplications.
if other.0 == 0.0 {
// Optimization for the common case where b is an error tolerance
// parameter that happens to be set to zero.
*self
} else if self.0 + other.0 >= 4. {
// Clamp the angle sum to at most 180 degrees.
STRAIGHT
} else {
// Let a and b be the (non-squared) chord lengths, and let c = a+b.
// Let A, B, and C be the corresponding half-angles (a = 2*sin(A), etc).
// Then the formula below can be derived from c = 2 * sin(A+B) and the
// relationships sin(A+B) = sin(A)*cos(B) + sin(B)*cos(A)
// cos(X) = sqrt(1 - sin^2(X))
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(4f64.min(x + y + 2f64 * (x * y).sqrt()))
}
}
}
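// Quick numeric check of the formula above (illustrative, approximate values):
// adding 60 degrees (squared chord 1.0) and 30 degrees (squared chord ~0.2679)
// gives x ~= 0.9330, y ~= 0.2010, and x + y + 2*sqrt(x*y) ~= 2.0, which is the
// squared chord length of a 90-degree angle, as expected.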
impl std::ops::Add<ChordAngle> for ChordAngle {
type Output = ChordAngle;
fn add(self, other: ChordAngle) -> Self::Output {
&self + &other
}
}
impl std::ops::Sub<ChordAngle> for ChordAngle {
type Output = ChordAngle;
/// sub subtracts the other ChordAngle from this one and returns the resulting
/// value. This method assumes the ChordAngles are not special.
fn sub(self, other: ChordAngle) -> Self::Output {
if other.0 == 0.0 {
self
} else if self.0 <= other.0 {
ChordAngle(0f64)
} else {
let x = self.0 * (1. - 0.25 * other.0);
let y = other.0 * (1. - 0.25 * self.0);
ChordAngle(0f64.max(x + y - 2. * (x * y).sqrt()))
}
}
}
impl ChordAngle {
/// inf returns a chord angle larger than any finite chord angle.
/// The only valid operations on an infinite ChordAngle are comparisons and Angle conversions.
pub fn inf() -> Self {
ChordAngle(std::f64::INFINITY)
}
/// is_infinite reports whether this ChordAngle is infinite.
pub fn is_infinite(&self) -> bool {
self.0.is_infinite()
}
/// from_squared_length returns a ChordAngle from the squared chord length.
/// Note that the argument is automatically clamped to a maximum of 4.0 to
/// handle possible roundoff errors. The argument must be non-negative.
pub fn from_squared_length(length2: f64) -> Self {
if length2 > 4. {
STRAIGHT
} else {
ChordAngle(length2)
}
}
/// expanded returns a new ChordAngle that has been adjusted by the given error
/// bound (which can be positive or negative). Error should be the value
/// returned by either max_point_error or max_angle_error. For example:
/// let a = ChordAngle::from_points(x, y)
/// let a1 = a.expanded(a.max_point_error())
pub fn expanded(&self, e: f64) -> Self {
// If the angle is special, don't change it. Otherwise clamp it to the valid range.
if self.is_special() {
*self
} else {
ChordAngle(0f64.max(4f64.min(self.0 + e)))
}
}
/// is_special reports whether this ChordAngle is one of the special cases.
pub fn is_special(&self) -> bool {
self.0 < 0. || self.0.is_infinite()
}
/// is_valid reports whether this ChordAngle is valid or not.
pub fn is_valid(&self) -> bool {
self.0 >= 0. && self.0 <= 4. || self.is_special()
}
/// max returns the larger of the two ChordAngles.
pub fn max(self, other: Self) -> Self {
if self.0 < other.0 {
other
} else {
self
}
}
/// max_point_error returns the maximum error size for a ChordAngle constructed
/// from 2 Points x and y, assuming that x and y are normalized to within the
/// bounds guaranteed by s2.Point.Normalize. The error is defined with respect to
/// the true distance after the points are projected to lie exactly on the sphere.
pub fn max_point_error(&self) -> f64 {
// There is a relative error of (2.5*DBL_EPSILON) when computing the squared
// distance, plus an absolute error of (16 * DBL_EPSILON**2) because the
// lengths of the input points may differ from 1 by up to (2*DBL_EPSILON) each.
2.5 * DBL_EPSILON * self.0 + 16. * DBL_EPSILON * DBL_EPSILON
}
/// max_angle_error returns the maximum error for a ChordAngle constructed
/// as an Angle distance.
pub fn max_angle_error(&self) -> f64 {
DBL_EPSILON * self.0
}
/// sin returns the sine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn sin(&self) -> f64 {
self.sin2().sqrt()
}
/// sin2 returns the square of the sine of this chord angle.
/// It is more efficient than Sin.
pub fn sin2(&self) -> f64 {
// Let a be the (non-squared) chord length, and let A be the corresponding
// half-angle (a = 2*sin(A)). The formula below can be derived from:
// sin(2*A) = 2 * sin(A) * cos(A)
// cos^2(A) = 1 - sin^2(A)
// This is much faster than converting to an angle and computing its sine.
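// Spelled out (illustrative): with a = 2*sin(A) and self.0 = a^2,
// sin^2(2*A) = 4*sin^2(A)*cos^2(A) = 4*sin^2(A)*(1 - sin^2(A)) = a^2*(1 - a^2/4),
// which is exactly self.0 * (1. - 0.25 * self.0).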
self.0 * (1. - 0.25 * self.0)
}
/// cos returns the cosine of this chord angle. This method is more efficient
/// than converting to Angle and performing the computation.
pub fn cos(&self) -> f64 {
// cos(2*A) = cos^2(A) - sin^2(A) = 1 - 2*sin^2(A)
1.0 - 0.5 * self.0
}
/// tan returns the tangent of this chord angle.
pub fn tan(&self) -> f64 {
self.sin() / self.cos()
}
/// successor returns the smallest representable ChordAngle larger than this one.
/// A negative angle maps to zero, and anything at or above the maximum maps to infinity.
pub fn successor(&self) -> Self {
if self.0 >= MAXLENGTH2 {
ChordAngle::inf()
} else if self.0 < 0. {
ChordAngle(0.)
} else {
ChordAngle(nextafter(self.0, 10.))
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn test_chordangle_basics_case(ca1: ChordAngle, ca2: ChordAngle, less_than: bool, equal: bool) {
assert_eq!(less_than, ca1 < ca2);
assert_eq!(equal, ca1 == ca2);
}
#[test]
fn test_chordangle_basics() {
let zero = ChordAngle::default();
test_chordangle_basics_case(NEGATIVE, NEGATIVE, false, true);
test_chordangle_basics_case(NEGATIVE, zero, true, false);
test_chordangle_basics_case(NEGATIVE, STRAIGHT, true, false);
test_chordangle_basics_case(NEGATIVE, ChordAngle::inf(), true, false);
test_chordangle_basics_case(zero, zero, false, true);
test_chordangle_basics_case(zero, STRAIGHT, true, false);
test_chordangle_basics_case(zero, ChordAngle::inf(), true, false);
test_chordangle_basics_case(STRAIGHT, STRAIGHT, false, true);
test_chordangle_basics_case(STRAIGHT, ChordAngle::inf(), true, false);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
test_chordangle_basics_case(ChordAngle::inf(), ChordAngle::inf(), false, true);
}
fn test_chordangle_is_functions_case(
ca: ChordAngle,
is_neg: bool,
is_zero: bool,
is_inf: bool,
is_special: bool,
) {
assert_eq!(is_neg, ca.0 < 0.);
assert_eq!(is_zero, ca.0 == 0.);
assert_eq!(is_inf, ca.is_infinite());
assert_eq!(is_special, ca.is_special());
}
#[test]
fn test_chordangle_is_functions() {
let zero: ChordAngle = Default::default();
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(NEGATIVE, true, false, false, true);
test_chordangle_is_functions_case(zero, false, true, false, false);
test_chordangle_is_functions_case(STRAIGHT, false, false, false, false);
test_chordangle_is_functions_case(ChordAngle::inf(), false, false, true, true);
}
#[test]
fn test_chordangle_from_angle() {
let angles = vec![
Angle::from(Rad(0.)),
Angle::from(Rad(1.)),
Angle::from(Rad(-1.)),
Angle::from(Rad(PI)),
];
for angle in angles.into_iter() {
let ca = ChordAngle::from(angle);
let got = Angle::from(ca);
assert_eq!(got, angle);
}
assert_eq!(STRAIGHT, ChordAngle::from(Angle::from(Rad(PI))));
assert_eq!(Angle::inf(), Angle::from(ChordAngle::from(Angle::inf())));
}
fn chordangle_eq(a: ChordAngle, b: ChordAngle) {
assert_f64_eq!(a.0, b.0);
}
#[test]
fn test_chordangle_arithmetic() {
let zero = ChordAngle::default();
let deg_30 = ChordAngle::from(Deg(30.));
let deg_60 = ChordAngle::from(Deg(60.));
let deg_90 = ChordAngle::from(Deg(90.));
let deg_120 = ChordAngle::from(Deg(120.));
let deg_180 = STRAIGHT;
chordangle_eq(zero + zero, zero);
chordangle_eq(deg_60 + zero, deg_60);
chordangle_eq(zero + deg_60, deg_60);
chordangle_eq(deg_30 + deg_60, deg_90);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_180 + zero, deg_180);
chordangle_eq(deg_60 + deg_30, deg_90);
chordangle_eq(deg_90 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_90, deg_180);
chordangle_eq(deg_120 + deg_120, deg_180);
chordangle_eq(deg_30 + deg_180, deg_180);
chordangle_eq(deg_180 + deg_180, deg_180);
chordangle_eq(zero - zero, zero);
chordangle_eq(deg_60 - deg_60, zero);
chordangle_eq(deg_180 - deg_180, zero);
chordangle_eq(zero - deg_60, zero);
chordangle_eq(deg_30 - deg_90, zero);
chordangle_eq(deg_90 - deg_30, deg_60);
chordangle_eq(deg_90 - deg_60, deg_30);
chordangle_eq(deg_180 - zero, deg_180);
}
#[test]
fn test_chordangle_trigonometry() {
let iters = 40usize;
for i in 0..(iters + 1) {
let radians = PI * (i as f64) / (iters as f64);
let chordangle = ChordAngle::from(Angle::from(Rad(radians)));
assert_f64_eq!(radians.sin(), chordangle.sin());
assert_f64_eq!(radians.cos(), chordangle.cos());
// Since tan(x) is unbounded near pi/2, we map the result back to an
// angle before comparing. The assertion is that the result is equal to
// the tangent of a nearby angle.
assert_f64_eq!(radians.tan().atan(), chordangle.tan().atan());
}
let angle_90 = ChordAngle::from_squared_length(2.);
let angle_180 = ChordAngle::from_squared_length(4.);
assert_f64_eq!(1., angle_90.sin());
assert_f64_eq!(0., angle_90.cos());
assert!(angle_90.tan().is_infinite());
assert_f64_eq!(0., angle_180.sin());
assert_f64_eq!(-1., angle_180.cos());
assert_f64_eq!(0., angle_180.tan());
}
#[test]
fn test_chordangle_expanded() {
let zero = ChordAngle::default();
assert_eq!(NEGATIVE.expanded(5.), NEGATIVE);
assert_eq!(ChordAngle::inf().expanded(-5.), ChordAngle::inf());
assert_eq!(zero.expanded(-5.), zero);
assert_eq!(
ChordAngle::from_squared_length(1.25).expanded(0.25),
ChordAngle::from_squared_length(1.5)
);
assert_eq!(
ChordAngle::from_squared_length(0.75).expanded(0.25),
ChordAngle::from_squared_length(1.)
);
}
}
|
NEGATIVE
} els
|
conditional_block
|
action_batcher.rs
|
// OpenAOE: An open source reimplementation of Age of Empires (1997)
// Copyright (c) 2016 Kevin Fuller
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use action::Action;
use specs::Index;
use std::collections::HashMap;
use std::mem;
pub struct ActionBatcher {
actions: HashMap<Index, Vec<Action>>,
}
impl ActionBatcher {
pub fn new() -> ActionBatcher {
ActionBatcher { actions: HashMap::new() }
}
pub fn queue_for_entity(&mut self, entity_id: Index, action: Action) {
|
self.actions.insert(entity_id, Vec::new());
}
self.actions.get_mut(&entity_id).unwrap().push(action);
}
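/// consume_actions takes all actions queued so far and leaves the batcher empty.
/// The internal map is swapped out rather than cloned, so this is cheap.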
pub fn consume_actions(&mut self) -> HashMap<Index, Vec<Action>> {
let mut consumed = HashMap::new();
mem::swap(&mut consumed, &mut self.actions);
consumed
}
}
|
if !self.actions.contains_key(&entity_id) {
|
random_line_split
|
action_batcher.rs
|
// OpenAOE: An open source reimplementation of Age of Empires (1997)
// Copyright (c) 2016 Kevin Fuller
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use action::Action;
use specs::Index;
use std::collections::HashMap;
use std::mem;
pub struct
|
{
actions: HashMap<Index, Vec<Action>>,
}
impl ActionBatcher {
pub fn new() -> ActionBatcher {
ActionBatcher { actions: HashMap::new() }
}
pub fn queue_for_entity(&mut self, entity_id: Index, action: Action) {
if !self.actions.contains_key(&entity_id) {
self.actions.insert(entity_id, Vec::new());
}
self.actions.get_mut(&entity_id).unwrap().push(action);
}
pub fn consume_actions(&mut self) -> HashMap<Index, Vec<Action>> {
let mut consumed = HashMap::new();
mem::swap(&mut consumed, &mut self.actions);
consumed
}
}
|
ActionBatcher
|
identifier_name
|
action_batcher.rs
|
// OpenAOE: An open source reimplementation of Age of Empires (1997)
// Copyright (c) 2016 Kevin Fuller
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use action::Action;
use specs::Index;
use std::collections::HashMap;
use std::mem;
pub struct ActionBatcher {
actions: HashMap<Index, Vec<Action>>,
}
impl ActionBatcher {
pub fn new() -> ActionBatcher {
ActionBatcher { actions: HashMap::new() }
}
pub fn queue_for_entity(&mut self, entity_id: Index, action: Action) {
if !self.actions.contains_key(&entity_id)
|
self.actions.get_mut(&entity_id).unwrap().push(action);
}
pub fn consume_actions(&mut self) -> HashMap<Index, Vec<Action>> {
let mut consumed = HashMap::new();
mem::swap(&mut consumed, &mut self.actions);
consumed
}
}
|
{
self.actions.insert(entity_id, Vec::new());
}
|
conditional_block
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "devtools_traits"]
#![crate_type = "rlib"]
#![allow(non_snake_case)]
#![deny(unsafe_code)]
#![feature(custom_derive, plugin)]
#![plugin(serde_macros, plugins)]
#[macro_use]
extern crate bitflags;
extern crate hyper;
extern crate ipc_channel;
extern crate msg;
extern crate rustc_serialize;
extern crate serde;
extern crate time;
extern crate url;
extern crate util;
use hyper::header::Headers;
use hyper::http::RawStatus;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use rustc_serialize::{Decodable, Decoder};
use std::net::TcpStream;
use time::Duration;
use time::Tm;
use url::Url;
// Information attached to NewGlobal, to be received and shown in devtools.
// Extend these fields if we need more information.
#[derive(Deserialize, Serialize)]
pub struct DevtoolsPageInfo {
pub title: String,
pub url: Url
}
/// Messages to instruct the devtools server to update its known actors/state
/// according to changes in the browser.
pub enum DevtoolsControlMsg {
/// Messages from tasks in the chrome process (resource/constellation/devtools)
FromChrome(ChromeToDevtoolsControlMsg),
/// Messages from script tasks
FromScript(ScriptToDevtoolsControlMsg),
}
/// Events that the devtools server must act upon.
pub enum ChromeToDevtoolsControlMsg {
/// A new client has connected to the server.
AddClient(TcpStream),
/// The browser is shutting down.
ServerExitMsg,
/// A network event occurred (request, reply, etc.). The actor with the
/// provided name should be notified.
NetworkEvent(String, NetworkEvent),
}
#[derive(Deserialize, Serialize)]
/// Events that the devtools server must act upon.
pub enum ScriptToDevtoolsControlMsg {
/// A new global object was created, associated with a particular pipeline.
/// The means of communicating directly with it are provided.
NewGlobal((PipelineId, Option<WorkerId>),
IpcSender<DevtoolScriptControlMsg>,
DevtoolsPageInfo),
/// A particular page has invoked the console API.
ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>),
/// An animation frame with the given timestamp was processed in a script task.
/// The actor with the provided name should be notified.
FramerateTick(String, f64),
}
/// Serialized JS return values
/// TODO: generalize this beyond the EvaluateJS message?
#[derive(Deserialize, Serialize)]
pub enum EvaluateJSReply {
VoidValue,
NullValue,
BooleanValue(bool),
NumberValue(f64),
StringValue(String),
ActorValue { class: String, uuid: String },
}
#[derive(Deserialize, Serialize)]
pub struct AttrInfo {
pub namespace: String,
pub name: String,
pub value: String,
}
#[derive(Deserialize, Serialize)]
pub struct NodeInfo {
pub uniqueId: String,
pub baseURI: String,
pub parent: String,
pub nodeType: u16,
pub namespaceURI: String,
pub nodeName: String,
pub numChildren: usize,
pub name: String,
pub publicId: String,
pub systemId: String,
pub attrs: Vec<AttrInfo>,
pub isDocumentElement: bool,
pub shortValue: String,
pub incompleteValue: bool,
}
pub struct StartedTimelineMarker {
name: String,
start_time: PreciseTime,
start_stack: Option<Vec<()>>,
}
#[derive(Deserialize, Serialize)]
pub struct TimelineMarker {
pub name: String,
pub start_time: PreciseTime,
pub start_stack: Option<Vec<()>>,
pub end_time: PreciseTime,
pub end_stack: Option<Vec<()>>,
}
#[derive(PartialEq, Eq, Hash, Clone, Deserialize, Serialize)]
pub enum TimelineMarkerType {
Reflow,
DOMEvent,
}
/// The properties of a DOM node as computed by layout.
#[derive(Deserialize, Serialize)]
pub struct ComputedNodeLayout {
pub width: f32,
pub height: f32,
}
/// Messages to process in a particular script task, as instructed by a devtools client.
#[derive(Deserialize, Serialize)]
pub enum DevtoolScriptControlMsg {
/// Evaluate a JS snippet in the context of the global for the given pipeline.
EvaluateJS(PipelineId, String, IpcSender<EvaluateJSReply>),
/// Retrieve the details of the root node (i.e. the document) for the given pipeline.
GetRootNode(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the document element for the given pipeline.
GetDocumentElement(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the child nodes of the given node in the given pipeline.
GetChildren(PipelineId, String, IpcSender<Vec<NodeInfo>>),
/// Retrieve the computed layout properties of the given node in the given pipeline.
GetLayout(PipelineId, String, IpcSender<ComputedNodeLayout>),
/// Retrieve all stored console messages for the given pipeline.
GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender<Vec<CachedConsoleMessage>>),
/// Update a given node's attributes with a list of modifications.
ModifyAttribute(PipelineId, String, Vec<Modification>),
/// Request live console messages for a given pipeline (true if desired, false otherwise).
WantsLiveNotifications(PipelineId, bool),
/// Request live notifications for a given set of timeline events for a given pipeline.
SetTimelineMarkers(PipelineId, Vec<TimelineMarkerType>, IpcSender<TimelineMarker>),
/// Withdraw request for live timeline notifications for a given pipeline.
DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>),
/// Request a callback directed at the given actor name from the next animation frame
/// executed in the given pipeline.
RequestAnimationFrame(PipelineId, String),
}
#[derive(RustcEncodable, Deserialize, Serialize)]
pub struct Modification {
pub attributeName: String,
pub newValue: Option<String>,
}
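// Manual Decodable impl: "newValue" is optional on the wire, so a missing or
// malformed field decodes to None instead of failing the whole Modification.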
impl Decodable for Modification {
fn decode<D: Decoder>(d: &mut D) -> Result<Modification, D::Error> {
d.read_struct("Modification", 2, |d|
Ok(Modification {
attributeName: try!(d.read_struct_field("attributeName", 0, Decodable::decode)),
newValue: match d.read_struct_field("newValue", 1, Decodable::decode) {
Ok(opt) => opt,
Err(_) => None
}
})
)
}
}
#[derive(Clone, Deserialize, Serialize)]
pub enum LogLevel {
Log,
Debug,
Info,
Warn,
Error,
}
#[derive(Clone, Deserialize, Serialize)]
pub struct ConsoleMessage {
pub message: String,
pub logLevel: LogLevel,
pub filename: String,
pub lineNumber: u32,
pub columnNumber: u32,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags CachedConsoleMessageTypes: u8 {
const PAGE_ERROR = 1 << 0,
const CONSOLE_API = 1 << 1,
}
}
#[derive(RustcEncodable, Deserialize, Serialize)]
pub struct PageError {
pub _type: String,
pub errorMessage: String,
pub sourceName: String,
pub lineText: String,
pub lineNumber: u32,
pub columnNumber: u32,
pub category: String,
pub timeStamp: u64,
pub error: bool,
pub warning: bool,
pub exception: bool,
pub strict: bool,
pub private: bool,
}
#[derive(RustcEncodable, Deserialize, Serialize)]
pub struct ConsoleAPI {
pub _type: String,
pub level: String,
pub filename: String,
pub lineNumber: u32,
pub functionName: String,
pub timeStamp: u64,
pub private: bool,
pub arguments: Vec<String>,
}
#[derive(Deserialize, Serialize)]
pub enum CachedConsoleMessage {
PageError(PageError),
ConsoleAPI(ConsoleAPI),
}
#[derive(Debug, PartialEq)]
pub struct HttpRequest {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
pub startedDateTime: Tm
}
#[derive(Debug, PartialEq)]
pub struct
|
{
pub headers: Option<Headers>,
pub status: Option<RawStatus>,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
}
pub enum NetworkEvent {
HttpRequest(HttpRequest),
HttpResponse(HttpResponse),
}
impl TimelineMarker {
pub fn start(name: String) -> StartedTimelineMarker {
StartedTimelineMarker {
name: name,
start_time: PreciseTime::now(),
start_stack: None,
}
}
}
impl StartedTimelineMarker {
pub fn end(self) -> TimelineMarker {
TimelineMarker {
name: self.name,
start_time: self.start_time,
start_stack: self.start_stack,
end_time: PreciseTime::now(),
end_stack: None,
}
}
}
/// A replacement for `time::PreciseTime` that isn't opaque, so we can serialize it.
///
/// The reason why this doesn't go upstream is that `time` is slated to be part of Rust's standard
/// library, which definitely can't have any dependencies on `serde`. But `serde` can't implement
/// `Deserialize` and `Serialize` itself, because `time::PreciseTime` is opaque! A Catch-22. So I'm
/// duplicating the definition here.
#[derive(Copy, Clone, Deserialize, Serialize)]
pub struct PreciseTime(u64);
impl PreciseTime {
pub fn now() -> PreciseTime {
PreciseTime(time::precise_time_ns())
}
pub fn to(&self, later: PreciseTime) -> Duration {
Duration::nanoseconds((later.0 - self.0) as i64)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct WorkerId(pub u32);
|
HttpResponse
|
identifier_name
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "devtools_traits"]
#![crate_type = "rlib"]
#![allow(non_snake_case)]
#![deny(unsafe_code)]
#![feature(custom_derive, plugin)]
#![plugin(serde_macros, plugins)]
#[macro_use]
extern crate bitflags;
extern crate hyper;
extern crate ipc_channel;
extern crate msg;
extern crate rustc_serialize;
extern crate serde;
extern crate time;
extern crate url;
extern crate util;
use hyper::header::Headers;
use hyper::http::RawStatus;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use rustc_serialize::{Decodable, Decoder};
use std::net::TcpStream;
use time::Duration;
use time::Tm;
use url::Url;
// Information attached to NewGlobal, to be received and shown in devtools.
// Extend these fields if we need more information.
#[derive(Deserialize, Serialize)]
pub struct DevtoolsPageInfo {
pub title: String,
pub url: Url
}
/// Messages to instruct the devtools server to update its known actors/state
/// according to changes in the browser.
pub enum DevtoolsControlMsg {
/// Messages from tasks in the chrome process (resource/constellation/devtools)
FromChrome(ChromeToDevtoolsControlMsg),
/// Messages from script tasks
FromScript(ScriptToDevtoolsControlMsg),
}
/// Events that the devtools server must act upon.
pub enum ChromeToDevtoolsControlMsg {
/// A new client has connected to the server.
AddClient(TcpStream),
/// The browser is shutting down.
ServerExitMsg,
/// A network event occurred (request, reply, etc.). The actor with the
/// provided name should be notified.
NetworkEvent(String, NetworkEvent),
}
#[derive(Deserialize, Serialize)]
/// Events that the devtools server must act upon.
pub enum ScriptToDevtoolsControlMsg {
/// A new global object was created, associated with a particular pipeline.
/// The means of communicating directly with it are provided.
NewGlobal((PipelineId, Option<WorkerId>),
IpcSender<DevtoolScriptControlMsg>,
DevtoolsPageInfo),
/// A particular page has invoked the console API.
ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>),
/// An animation frame with the given timestamp was processed in a script task.
/// The actor with the provided name should be notified.
FramerateTick(String, f64),
}
/// Serialized JS return values
/// TODO: generalize this beyond the EvaluateJS message?
#[derive(Deserialize, Serialize)]
pub enum EvaluateJSReply {
VoidValue,
NullValue,
BooleanValue(bool),
NumberValue(f64),
StringValue(String),
ActorValue { class: String, uuid: String },
}
#[derive(Deserialize, Serialize)]
pub struct AttrInfo {
pub namespace: String,
pub name: String,
pub value: String,
}
#[derive(Deserialize, Serialize)]
pub struct NodeInfo {
pub uniqueId: String,
pub baseURI: String,
pub parent: String,
pub nodeType: u16,
pub namespaceURI: String,
pub nodeName: String,
pub numChildren: usize,
pub name: String,
pub publicId: String,
pub systemId: String,
pub attrs: Vec<AttrInfo>,
pub isDocumentElement: bool,
pub shortValue: String,
pub incompleteValue: bool,
}
pub struct StartedTimelineMarker {
name: String,
start_time: PreciseTime,
start_stack: Option<Vec<()>>,
}
#[derive(Deserialize, Serialize)]
pub struct TimelineMarker {
pub name: String,
pub start_time: PreciseTime,
pub start_stack: Option<Vec<()>>,
pub end_time: PreciseTime,
pub end_stack: Option<Vec<()>>,
}
#[derive(PartialEq, Eq, Hash, Clone, Deserialize, Serialize)]
pub enum TimelineMarkerType {
Reflow,
DOMEvent,
}
/// The properties of a DOM node as computed by layout.
#[derive(Deserialize, Serialize)]
pub struct ComputedNodeLayout {
pub width: f32,
pub height: f32,
}
/// Messages to process in a particular script task, as instructed by a devtools client.
#[derive(Deserialize, Serialize)]
pub enum DevtoolScriptControlMsg {
/// Evaluate a JS snippet in the context of the global for the given pipeline.
EvaluateJS(PipelineId, String, IpcSender<EvaluateJSReply>),
/// Retrieve the details of the root node (i.e. the document) for the given pipeline.
GetRootNode(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the document element for the given pipeline.
|
GetDocumentElement(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the child nodes of the given node in the given pipeline.
GetChildren(PipelineId, String, IpcSender<Vec<NodeInfo>>),
/// Retrieve the computed layout properties of the given node in the given pipeline.
GetLayout(PipelineId, String, IpcSender<ComputedNodeLayout>),
/// Retrieve all stored console messages for the given pipeline.
GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender<Vec<CachedConsoleMessage>>),
/// Update a given node's attributes with a list of modifications.
ModifyAttribute(PipelineId, String, Vec<Modification>),
/// Request live console messages for a given pipeline (true if desired, false otherwise).
WantsLiveNotifications(PipelineId, bool),
/// Request live notifications for a given set of timeline events for a given pipeline.
SetTimelineMarkers(PipelineId, Vec<TimelineMarkerType>, IpcSender<TimelineMarker>),
/// Withdraw request for live timeline notifications for a given pipeline.
DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>),
/// Request a callback directed at the given actor name from the next animation frame
/// executed in the given pipeline.
RequestAnimationFrame(PipelineId, String),
}
#[derive(RustcEncodable, Deserialize, Serialize)]
pub struct Modification {
pub attributeName: String,
pub newValue: Option<String>,
}
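// Manual Decodable impl: "newValue" is optional on the wire, so a missing or
// malformed field decodes to None instead of failing the whole Modification.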
impl Decodable for Modification {
fn decode<D: Decoder>(d: &mut D) -> Result<Modification, D::Error> {
d.read_struct("Modification", 2, |d|
Ok(Modification {
attributeName: try!(d.read_struct_field("attributeName", 0, Decodable::decode)),
newValue: match d.read_struct_field("newValue", 1, Decodable::decode) {
Ok(opt) => opt,
Err(_) => None
}
})
)
}
}
#[derive(Clone, Deserialize, Serialize)]
pub enum LogLevel {
Log,
Debug,
Info,
Warn,
Error,
}
#[derive(Clone, Deserialize, Serialize)]
pub struct ConsoleMessage {
pub message: String,
pub logLevel: LogLevel,
pub filename: String,
pub lineNumber: u32,
pub columnNumber: u32,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags CachedConsoleMessageTypes: u8 {
const PAGE_ERROR = 1 << 0,
const CONSOLE_API = 1 << 1,
}
}
#[derive(RustcEncodable, Deserialize, Serialize)]
pub struct PageError {
pub _type: String,
pub errorMessage: String,
pub sourceName: String,
pub lineText: String,
pub lineNumber: u32,
pub columnNumber: u32,
pub category: String,
pub timeStamp: u64,
pub error: bool,
pub warning: bool,
pub exception: bool,
pub strict: bool,
pub private: bool,
}
#[derive(RustcEncodable, Deserialize, Serialize)]
pub struct ConsoleAPI {
pub _type: String,
pub level: String,
pub filename: String,
pub lineNumber: u32,
pub functionName: String,
pub timeStamp: u64,
pub private: bool,
pub arguments: Vec<String>,
}
#[derive(Deserialize, Serialize)]
pub enum CachedConsoleMessage {
PageError(PageError),
ConsoleAPI(ConsoleAPI),
}
#[derive(Debug, PartialEq)]
pub struct HttpRequest {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
pub startedDateTime: Tm
}
#[derive(Debug, PartialEq)]
pub struct HttpResponse {
pub headers: Option<Headers>,
pub status: Option<RawStatus>,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
}
pub enum NetworkEvent {
HttpRequest(HttpRequest),
HttpResponse(HttpResponse),
}
impl TimelineMarker {
pub fn start(name: String) -> StartedTimelineMarker {
StartedTimelineMarker {
name: name,
start_time: PreciseTime::now(),
start_stack: None,
}
}
}
impl StartedTimelineMarker {
pub fn end(self) -> TimelineMarker {
TimelineMarker {
name: self.name,
start_time: self.start_time,
start_stack: self.start_stack,
end_time: PreciseTime::now(),
end_stack: None,
}
}
}
/// A replacement for `time::PreciseTime` that isn't opaque, so we can serialize it.
///
/// The reason why this doesn't go upstream is that `time` is slated to be part of Rust's standard
/// library, which definitely can't have any dependencies on `serde`. But `serde` can't implement
/// `Deserialize` and `Serialize` itself, because `time::PreciseTime` is opaque! A Catch-22. So I'm
/// duplicating the definition here.
#[derive(Copy, Clone, Deserialize, Serialize)]
pub struct PreciseTime(u64);
impl PreciseTime {
pub fn now() -> PreciseTime {
PreciseTime(time::precise_time_ns())
}
pub fn to(&self, later: PreciseTime) -> Duration {
Duration::nanoseconds((later.0 - self.0) as i64)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct WorkerId(pub u32);
|
random_line_split
|
|
mod.rs
|
use crate::debug_protocol::Expressions;
#[macro_use]
pub mod prelude {
use nom::{
character::complete::{digit1, space0},
error::ParseError,
sequence::delimited,
IResult,
};
pub type Span<'a> = &'a str;
pub fn ws<'a, F: 'a, O, E: ParseError<Span<'a>>>(parser: F) -> impl FnMut(Span<'a>) -> IResult<Span<'a>, O, E>
where
F: Fn(Span<'a>) -> IResult<Span<'a>, O, E>,
{
delimited(space0, parser, space0)
}
pub fn unsigned(input: Span) -> IResult<Span, u32> {
let (rest, s) = digit1(input)?;
Ok((rest, parse_int::parse::<u32>(s).unwrap()))
}
#[cfg(test)]
macro_rules! assert_matches(($e:expr, $p:pat) => { let e = $e; assert!(matches!(e, $p), "{:?}!~ {}", e, stringify!($p)) });
}
mod expression_format;
mod hit_condition;
mod preprocess;
mod qualified_ident;
pub use expression_format::{get_expression_format, FormatSpec};
pub use hit_condition::{parse_hit_condition, HitCondition};
pub use preprocess::{preprocess_python_expr, preprocess_simple_expr};
|
#[derive(Debug)]
pub enum PreparedExpression {
Native(String),
Simple(String),
Python(String),
}
// Parse expression type and preprocess it.
pub fn prepare(expression: &str, default_type: Expressions) -> PreparedExpression {
let (expr, ty) = get_expression_type(expression, default_type);
match ty {
Expressions::Native => PreparedExpression::Native(expr.to_owned()),
Expressions::Simple => PreparedExpression::Simple(preprocess_simple_expr(expr)),
Expressions::Python => PreparedExpression::Python(preprocess_python_expr(expr)),
}
}
// Same as prepare(), but also parses formatting options at the end of expression,
// for example, `value,x` to format value as hex or `ptr,[50]` to interpret `ptr` as an array of 50 elements.
pub fn prepare_with_format(
expression: &str,
default_type: Expressions,
) -> Result<(PreparedExpression, Option<FormatSpec>), String> {
let (expr, ty) = get_expression_type(expression, default_type);
let (expr, format) = get_expression_format(expr)?;
let pp_expr = match ty {
Expressions::Native => PreparedExpression::Native(expr.to_owned()),
Expressions::Simple => PreparedExpression::Simple(preprocess_simple_expr(expr)),
Expressions::Python => PreparedExpression::Python(preprocess_python_expr(expr)),
};
Ok((pp_expr, format))
}
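// Illustrative usage (a sketch; the exact FormatSpec produced depends on
// get_expression_format in expression_format.rs):
//   let (expr, fmt) = prepare_with_format("/py obj.count,x", Expressions::Simple)?;
//   // "/py " selects the Python expression type and the trailing ",x" is
//   // parsed off as a hex format specifier.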
fn get_expression_type<'a>(expr: &'a str, default_type: Expressions) -> (&'a str, Expressions) {
if expr.starts_with("/nat ") {
(&expr[5..], Expressions::Native)
} else if expr.starts_with("/py ") {
(&expr[4..], Expressions::Python)
} else if expr.starts_with("/se ") {
(&expr[4..], Expressions::Simple)
} else {
(expr, default_type)
}
}
|
random_line_split
|
|
mod.rs
|
use crate::debug_protocol::Expressions;
#[macro_use]
pub mod prelude {
use nom::{
character::complete::{digit1, space0},
error::ParseError,
sequence::delimited,
IResult,
};
pub type Span<'a> = &'a str;
pub fn ws<'a, F: 'a, O, E: ParseError<Span<'a>>>(parser: F) -> impl FnMut(Span<'a>) -> IResult<Span<'a>, O, E>
where
F: Fn(Span<'a>) -> IResult<Span<'a>, O, E>,
{
delimited(space0, parser, space0)
}
pub fn unsigned(input: Span) -> IResult<Span, u32> {
let (rest, s) = digit1(input)?;
Ok((rest, parse_int::parse::<u32>(s).unwrap()))
}
#[cfg(test)]
macro_rules! assert_matches(($e:expr, $p:pat) => { let e = $e; assert!(matches!(e, $p), "{:?}!~ {}", e, stringify!($p)) });
}
mod expression_format;
mod hit_condition;
mod preprocess;
mod qualified_ident;
pub use expression_format::{get_expression_format, FormatSpec};
pub use hit_condition::{parse_hit_condition, HitCondition};
pub use preprocess::{preprocess_python_expr, preprocess_simple_expr};
#[derive(Debug)]
pub enum PreparedExpression {
Native(String),
Simple(String),
Python(String),
}
// Parse expression type and preprocess it.
pub fn prepare(expression: &str, default_type: Expressions) -> PreparedExpression {
let (expr, ty) = get_expression_type(expression, default_type);
match ty {
Expressions::Native => PreparedExpression::Native(expr.to_owned()),
Expressions::Simple => PreparedExpression::Simple(preprocess_simple_expr(expr)),
Expressions::Python => PreparedExpression::Python(preprocess_python_expr(expr)),
}
}
// Same as prepare(), but also parses formatting options at the end of expression,
// for example, `value,x` to format value as hex or `ptr,[50]` to interpret `ptr` as an array of 50 elements.
pub fn prepare_with_format(
expression: &str,
default_type: Expressions,
) -> Result<(PreparedExpression, Option<FormatSpec>), String> {
let (expr, ty) = get_expression_type(expression, default_type);
let (expr, format) = get_expression_format(expr)?;
let pp_expr = match ty {
Expressions::Native => PreparedExpression::Native(expr.to_owned()),
Expressions::Simple => PreparedExpression::Simple(preprocess_simple_expr(expr)),
Expressions::Python => PreparedExpression::Python(preprocess_python_expr(expr)),
};
Ok((pp_expr, format))
}
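// Illustrative usage (a sketch; the exact FormatSpec produced depends on
// get_expression_format in expression_format.rs):
//   let (expr, fmt) = prepare_with_format("/py obj.count,x", Expressions::Simple)?;
//   // "/py " selects the Python expression type and the trailing ",x" is
//   // parsed off as a hex format specifier.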
fn
|
<'a>(expr: &'a str, default_type: Expressions) -> (&'a str, Expressions) {
if expr.starts_with("/nat ") {
(&expr[5..], Expressions::Native)
} else if expr.starts_with("/py ") {
(&expr[4..], Expressions::Python)
} else if expr.starts_with("/se ") {
(&expr[4..], Expressions::Simple)
} else {
(expr, default_type)
}
}
|
get_expression_type
|
identifier_name
|
file.rs
|
use std::fs::{File, create_dir_all};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use error::{Error, Result};
pub fn read_file<T: AsRef<Path>>(path: T) -> Result<Vec<u8>> {
let mut file = File::open(path)?;
let file_len = file.metadata()?.len();
let mut buf = Vec::with_capacity(file_len as usize + 1);
file.read_to_end(&mut buf)?;
Ok(buf)
}
pub fn read_all_files<T: AsRef<Path>>(root: T) -> Result<Vec<(PathBuf, Vec<u8>)>> {
let mut result = Vec::new();
for dir_entry in WalkDir::new(&root) {
let entry = dir_entry?;
|
result.push((path.to_owned(), file));
}
}
Ok(result)
}
pub fn write_file(path: &Path, content: &[u8]) -> Result<()> {
create_dir_all(path.to_owned().parent().ok_or(Error::DirNotFound)?)?;
let mut file = File::create(path)?;
file.write_all(content)?;
Ok(())
}
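// Illustrative round trip (a sketch; paths are hypothetical):
//   write_file(Path::new("out/data.bin"), b"hello")?;
//   let files = read_all_files("out")?;
//   // files now holds [(PathBuf::from("data.bin"), b"hello".to_vec())], since
//   // read_all_files strips the root prefix from each collected path.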
|
let path = entry.path();
if path.is_file() {
let file = read_file(path)?;
let path = path.strip_prefix(&root)?;
|
random_line_split
|
file.rs
|
use std::fs::{File, create_dir_all};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use error::{Error, Result};
pub fn
|
<T: AsRef<Path>>(path: T) -> Result<Vec<u8>> {
let mut file = File::open(path)?;
let file_len = file.metadata()?.len();
let mut buf = Vec::with_capacity(file_len as usize + 1);
file.read_to_end(&mut buf)?;
Ok(buf)
}
pub fn read_all_files<T: AsRef<Path>>(root: T) -> Result<Vec<(PathBuf, Vec<u8>)>> {
let mut result = Vec::new();
for dir_entry in WalkDir::new(&root) {
let entry = dir_entry?;
let path = entry.path();
if path.is_file() {
let file = read_file(path)?;
let path = path.strip_prefix(&root)?;
result.push((path.to_owned(), file));
}
}
Ok(result)
}
pub fn write_file(path: &Path, content: &[u8]) -> Result<()> {
create_dir_all(path.to_owned().parent().ok_or(Error::DirNotFound)?)?;
let mut file = File::create(path)?;
file.write_all(content)?;
Ok(())
}
|
read_file
|
identifier_name
|
file.rs
|
use std::fs::{File, create_dir_all};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use error::{Error, Result};
pub fn read_file<T: AsRef<Path>>(path: T) -> Result<Vec<u8>>
|
pub fn read_all_files<T: AsRef<Path>>(root: T) -> Result<Vec<(PathBuf, Vec<u8>)>> {
let mut result = Vec::new();
for dir_entry in WalkDir::new(&root) {
let entry = dir_entry?;
let path = entry.path();
if path.is_file() {
let file = read_file(path)?;
let path = path.strip_prefix(&root)?;
result.push((path.to_owned(), file));
}
}
Ok(result)
}
pub fn write_file(path: &Path, content: &[u8]) -> Result<()> {
create_dir_all(path.to_owned().parent().ok_or(Error::DirNotFound)?)?;
let mut file = File::create(path)?;
file.write_all(content)?;
Ok(())
}
|
{
let mut file = File::open(path)?;
let file_len = file.metadata()?.len();
let mut buf = Vec::with_capacity(file_len as usize + 1);
file.read_to_end(&mut buf)?;
Ok(buf)
}
|
identifier_body
|
file.rs
|
use std::fs::{File, create_dir_all};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use error::{Error, Result};
pub fn read_file<T: AsRef<Path>>(path: T) -> Result<Vec<u8>> {
let mut file = File::open(path)?;
let file_len = file.metadata()?.len();
let mut buf = Vec::with_capacity(file_len as usize + 1);
file.read_to_end(&mut buf)?;
Ok(buf)
}
pub fn read_all_files<T: AsRef<Path>>(root: T) -> Result<Vec<(PathBuf, Vec<u8>)>> {
let mut result = Vec::new();
for dir_entry in WalkDir::new(&root) {
let entry = dir_entry?;
let path = entry.path();
if path.is_file()
|
}
Ok(result)
}
pub fn write_file(path: &Path, content: &[u8]) -> Result<()> {
create_dir_all(path.to_owned().parent().ok_or(Error::DirNotFound)?)?;
let mut file = File::create(path)?;
file.write_all(content)?;
Ok(())
}
|
{
let file = read_file(path)?;
let path = path.strip_prefix(&root)?;
result.push((path.to_owned(), file));
}
|
conditional_block
|
gnss.rs
|
/* vim: set et ts=4 sw=4: */
/* gnss.rs
*
* Copyright (C) 2017 Pelagicore AB.
* Copyright (C) 2017 Zeeshan Ali.
* Copyright (C) 2020 Purism SPC.
*
* GPSShare is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* GPSShare is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along
* with GPSShare; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author: Zeeshan Ali <[email protected]>
*/
use config::Config;
use gps::GPS;
use libudev;
use std::io::BufRead;
use std::io::BufReader;
use std::path::Path;
use std::rc::Rc;
use std::fs::File;
use std::fs;
use std::io;
|
impl GNSS {
pub fn new(config: Rc<Config>) -> io::Result<Self> {
match config.dev_path {
Some(ref path) => GNSS::new_for_path(path.as_path()),
None => GNSS::new_detect(),
}
}
fn new_for_path(path: &Path) -> io::Result<Self> {
let port = File::open(path.as_os_str())?;
Ok(GNSS {
reader: BufReader::new(port),
})
}
fn new_detect() -> io::Result<Self> {
println!("Attempting to autodetect GNSS device...");
let context = libudev::Context::new()?;
let mut enumerator = libudev::Enumerator::new(&context)?;
enumerator.match_subsystem("gnss")?;
let devices = enumerator.scan_devices()?;
for d in devices {
if let Some(p) = d.devnode().and_then(|devnode| devnode.to_str()) {
let path = Path::new(p);
match GNSS::new_for_path(&path) {
Ok(mut gps) => {
if gps.verify() {
println!("Detected {} as a GPS device", p);
return Ok(gps);
}
}
Err(e) => println!("Error openning {}: {}", p, e),
}
}
}
Err(io::Error::new(
io::ErrorKind::NotFound,
"Failed to autodetect GNSS device",
))
}
fn verify(&mut self) -> bool {
let mut buffer = String::new();
for _ in 1..3 {
if let Ok(_) = self.read_line(&mut buffer) {
if buffer.len() >= 15
&& buffer.chars().nth(0) == Some('$')
&& buffer.chars().nth(6) == Some(',')
{
return true;
}
buffer.clear();
} else {
println!("Failed to read from serial port");
}
}
false
}
}
impl GPS for GNSS {
fn read_line(&mut self, buffer: &mut String) -> io::Result<usize> {
self.reader.read_line(buffer)
}
}
|
pub struct GNSS {
reader: BufReader<fs::File>,
}
|
random_line_split
|
gnss.rs
|
/* vim: set et ts=4 sw=4: */
/* gnss.rs
*
* Copyright (C) 2017 Pelagicore AB.
* Copyright (C) 2017 Zeeshan Ali.
* Copyright (C) 2020 Purism SPC.
*
* GPSShare is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* GPSShare is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along
* with GPSShare; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author: Zeeshan Ali <[email protected]>
*/
use config::Config;
use gps::GPS;
use libudev;
use std::io::BufRead;
use std::io::BufReader;
use std::path::Path;
use std::rc::Rc;
use std::fs::File;
use std::fs;
use std::io;
pub struct GNSS {
reader: BufReader<fs::File>,
}
impl GNSS {
pub fn new(config: Rc<Config>) -> io::Result<Self> {
match config.dev_path {
Some(ref path) => GNSS::new_for_path(path.as_path()),
None => GNSS::new_detect(),
}
}
fn new_for_path(path: &Path) -> io::Result<Self> {
let port = File::open(path.as_os_str())?;
Ok(GNSS {
reader: BufReader::new(port),
})
}
fn new_detect() -> io::Result<Self> {
println!("Attempting to autodetect GNSS device...");
let context = libudev::Context::new()?;
let mut enumerator = libudev::Enumerator::new(&context)?;
enumerator.match_subsystem("gnss")?;
let devices = enumerator.scan_devices()?;
for d in devices {
if let Some(p) = d.devnode().and_then(|devnode| devnode.to_str()) {
let path = Path::new(p);
match GNSS::new_for_path(&path) {
Ok(mut gps) => {
if gps.verify() {
println!("Detected {} as a GPS device", p);
return Ok(gps);
}
}
Err(e) => println!("Error openning {}: {}", p, e),
}
}
}
Err(io::Error::new(
io::ErrorKind::NotFound,
"Failed to autodetect GNSS device",
))
}
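/// verify reads up to two lines from the device and returns true as soon as one
/// looks like an NMEA 0183 sentence: at least 15 bytes long, starting with '$',
/// with a five-character talker/sentence identifier followed by a comma at
/// index 6 (e.g. a line beginning with "$GPGGA,").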
fn verify(&mut self) -> bool {
let mut buffer = String::new();
for _ in 1..3 {
if let Ok(_) = self.read_line(&mut buffer) {
if buffer.len() >= 15
&& buffer.chars().nth(0) == Some('$')
&& buffer.chars().nth(6) == Some(',')
{
return true;
}
buffer.clear();
} else {
println!("Failed to read from serial port");
}
}
false
}
}
impl GPS for GNSS {
fn read_line(&mut self, buffer: &mut String) -> io::Result<usize>
|
}
|
{
self.reader.read_line(buffer)
}
|
identifier_body
|
gnss.rs
|
/* vim: set et ts=4 sw=4: */
/* gnss.rs
*
* Copyright (C) 2017 Pelagicore AB.
* Copyright (C) 2017 Zeeshan Ali.
* Copyright (C) 2020 Purism SPC.
*
* GPSShare is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* GPSShare is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along
* with GPSShare; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author: Zeeshan Ali <[email protected]>
*/
use config::Config;
use gps::GPS;
use libudev;
use std::io::BufRead;
use std::io::BufReader;
use std::path::Path;
use std::rc::Rc;
use std::fs::File;
use std::fs;
use std::io;
pub struct GNSS {
reader: BufReader<fs::File>,
}
impl GNSS {
pub fn new(config: Rc<Config>) -> io::Result<Self> {
match config.dev_path {
Some(ref path) => GNSS::new_for_path(path.as_path()),
None => GNSS::new_detect(),
}
}
fn new_for_path(path: &Path) -> io::Result<Self> {
let port = File::open(path.as_os_str())?;
Ok(GNSS {
reader: BufReader::new(port),
})
}
fn new_detect() -> io::Result<Self> {
println!("Attempting to autodetect GNSS device...");
let context = libudev::Context::new()?;
let mut enumerator = libudev::Enumerator::new(&context)?;
enumerator.match_subsystem("gnss")?;
let devices = enumerator.scan_devices()?;
for d in devices {
if let Some(p) = d.devnode().and_then(|devnode| devnode.to_str()) {
let path = Path::new(p);
match GNSS::new_for_path(&path) {
Ok(mut gps) => {
if gps.verify() {
println!("Detected {} as a GPS device", p);
return Ok(gps);
}
}
Err(e) => println!("Error openning {}: {}", p, e),
}
}
}
Err(io::Error::new(
io::ErrorKind::NotFound,
"Failed to autodetect GNSS device",
))
}
fn verify(&mut self) -> bool {
let mut buffer = String::new();
for _ in 1..3 {
if let Ok(_) = self.read_line(&mut buffer) {
if buffer.len() >= 15
&& buffer.chars().nth(0) == Some('$')
&& buffer.chars().nth(6) == Some(',')
{
return true;
}
buffer.clear();
} else
|
}
false
}
}
impl GPS for GNSS {
fn read_line(&mut self, buffer: &mut String) -> io::Result<usize> {
self.reader.read_line(buffer)
}
}
|
{
println!("Failed to read from serial port");
}
|
conditional_block
|
gnss.rs
|
/* vim: set et ts=4 sw=4: */
/* gnss.rs
*
* Copyright (C) 2017 Pelagicore AB.
* Copyright (C) 2017 Zeeshan Ali.
* Copyright (C) 2020 Purism SPC.
*
* GPSShare is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* GPSShare is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along
* with GPSShare; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author: Zeeshan Ali <[email protected]>
*/
use config::Config;
use gps::GPS;
use libudev;
use std::io::BufRead;
use std::io::BufReader;
use std::path::Path;
use std::rc::Rc;
use std::fs::File;
use std::fs;
use std::io;
pub struct GNSS {
reader: BufReader<fs::File>,
}
impl GNSS {
pub fn new(config: Rc<Config>) -> io::Result<Self> {
match config.dev_path {
Some(ref path) => GNSS::new_for_path(path.as_path()),
None => GNSS::new_detect(),
}
}
fn
|
(path: &Path) -> io::Result<Self> {
let port = File::open(path.as_os_str())?;
Ok(GNSS {
reader: BufReader::new(port),
})
}
fn new_detect() -> io::Result<Self> {
println!("Attempting to autodetect GNSS device...");
let context = libudev::Context::new()?;
let mut enumerator = libudev::Enumerator::new(&context)?;
enumerator.match_subsystem("gnss")?;
let devices = enumerator.scan_devices()?;
for d in devices {
if let Some(p) = d.devnode().and_then(|devnode| devnode.to_str()) {
let path = Path::new(p);
match GNSS::new_for_path(&path) {
Ok(mut gps) => {
if gps.verify() {
println!("Detected {} as a GPS device", p);
return Ok(gps);
}
}
Err(e) => println!("Error openning {}: {}", p, e),
}
}
}
Err(io::Error::new(
io::ErrorKind::NotFound,
"Failed to autodetect GNSS device",
))
}
fn verify(&mut self) -> bool {
let mut buffer = String::new();
for _ in 1..3 {
if let Ok(_) = self.read_line(&mut buffer) {
if buffer.len() >= 15
&& buffer.chars().nth(0) == Some('$')
&& buffer.chars().nth(6) == Some(',')
{
return true;
}
buffer.clear();
} else {
println!("Failed to read from serial port");
}
}
false
}
}
impl GPS for GNSS {
fn read_line(&mut self, buffer: &mut String) -> io::Result<usize> {
self.reader.read_line(buffer)
}
}
|
new_for_path
|
identifier_name
|
sleeper_list.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Maintains a shared list of sleeping schedulers. Schedulers
//! use this to wake each other up.
use container::Container;
use vec::OwnedVector;
use option::{Option, Some, None};
use cell::Cell;
use unstable::sync::{Exclusive, exclusive};
use rt::sched::SchedHandle;
use clone::Clone;
pub struct SleeperList {
priv stack: ~Exclusive<~[SchedHandle]>
}
impl SleeperList {
pub fn new() -> SleeperList {
SleeperList {
stack: ~exclusive(~[])
}
}
|
self.stack.with(|s| s.push(handle.take()));
}
}
pub fn pop(&mut self) -> Option<SchedHandle> {
unsafe {
do self.stack.with |s| {
if !s.is_empty() {
Some(s.pop())
} else {
None
}
}
}
}
}
impl Clone for SleeperList {
fn clone(&self) -> SleeperList {
SleeperList {
stack: self.stack.clone()
}
}
}
|
pub fn push(&mut self, handle: SchedHandle) {
let handle = Cell::new(handle);
unsafe {
|
random_line_split
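// Aside (not part of sleeper_list.rs): the code above targets pre-1.0 Rust
// (`~[]`, `Exclusive`, `do` closures), none of which exist in modern Rust.
// Purely as an illustrative analogue of the same idea -- a clonable, shared
// LIFO that schedulers push handles onto and pop handles from -- here is a
// minimal sketch using today's std primitives. `SharedStack` is a hypothetical
// stand-in, not the original SleeperList API.
use std::sync::{Arc, Mutex};

#[derive(Clone)]
struct SharedStack<T> {
    stack: Arc<Mutex<Vec<T>>>,
}

impl<T> SharedStack<T> {
    fn new() -> Self {
        SharedStack { stack: Arc::new(Mutex::new(Vec::new())) }
    }

    // Push a handle for another scheduler to find later.
    fn push(&self, item: T) {
        self.stack.lock().unwrap().push(item);
    }

    // Pop the most recently parked handle, if any.
    fn pop(&self) -> Option<T> {
        self.stack.lock().unwrap().pop()
    }
}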
|
sleeper_list.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Maintains a shared list of sleeping schedulers. Schedulers
//! use this to wake each other up.
use container::Container;
use vec::OwnedVector;
use option::{Option, Some, None};
use cell::Cell;
use unstable::sync::{Exclusive, exclusive};
use rt::sched::SchedHandle;
use clone::Clone;
pub struct SleeperList {
priv stack: ~Exclusive<~[SchedHandle]>
}
impl SleeperList {
pub fn new() -> SleeperList {
SleeperList {
stack: ~exclusive(~[])
}
}
pub fn push(&mut self, handle: SchedHandle) {
let handle = Cell::new(handle);
unsafe {
self.stack.with(|s| s.push(handle.take()));
}
}
pub fn pop(&mut self) -> Option<SchedHandle> {
unsafe {
do self.stack.with |s| {
if !s.is_empty() {
Some(s.pop())
} else
|
}
}
}
}
impl Clone for SleeperList {
fn clone(&self) -> SleeperList {
SleeperList {
stack: self.stack.clone()
}
}
}
|
{
None
}
|
conditional_block
|
sleeper_list.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Maintains a shared list of sleeping schedulers. Schedulers
//! use this to wake each other up.
use container::Container;
use vec::OwnedVector;
use option::{Option, Some, None};
use cell::Cell;
use unstable::sync::{Exclusive, exclusive};
use rt::sched::SchedHandle;
use clone::Clone;
pub struct SleeperList {
priv stack: ~Exclusive<~[SchedHandle]>
}
impl SleeperList {
pub fn new() -> SleeperList {
SleeperList {
stack: ~exclusive(~[])
}
}
pub fn push(&mut self, handle: SchedHandle) {
let handle = Cell::new(handle);
unsafe {
self.stack.with(|s| s.push(handle.take()));
}
}
pub fn pop(&mut self) -> Option<SchedHandle> {
unsafe {
do self.stack.with |s| {
if !s.is_empty() {
Some(s.pop())
} else {
None
}
}
}
}
}
impl Clone for SleeperList {
fn
|
(&self) -> SleeperList {
SleeperList {
stack: self.stack.clone()
}
}
}
|
clone
|
identifier_name
|
sleeper_list.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Maintains a shared list of sleeping schedulers. Schedulers
//! use this to wake each other up.
use container::Container;
use vec::OwnedVector;
use option::{Option, Some, None};
use cell::Cell;
use unstable::sync::{Exclusive, exclusive};
use rt::sched::SchedHandle;
use clone::Clone;
pub struct SleeperList {
priv stack: ~Exclusive<~[SchedHandle]>
}
impl SleeperList {
pub fn new() -> SleeperList {
SleeperList {
stack: ~exclusive(~[])
}
}
pub fn push(&mut self, handle: SchedHandle)
|
pub fn pop(&mut self) -> Option<SchedHandle> {
unsafe {
do self.stack.with |s| {
if !s.is_empty() {
Some(s.pop())
} else {
None
}
}
}
}
}
impl Clone for SleeperList {
fn clone(&self) -> SleeperList {
SleeperList {
stack: self.stack.clone()
}
}
}
|
{
let handle = Cell::new(handle);
unsafe {
self.stack.with(|s| s.push(handle.take()));
}
}
|
identifier_body
|
render_thread.rs
|
use std::sync::mpsc::{Receiver, Sender};
use gfx;
use glutin;
use gfx_window_glutin;
use std::collections::HashMap;
use gfx::{Adapter, CommandQueue, Device, FrameSync,
Surface, Swapchain, SwapchainExt, WindowExt};
use gfx_device_gl;
use image;
use game::ContentId;
use content::load_content::{EContentType, EContentLoadRequst};
use graphics::box_renderer::BoxRenderData;
use graphics::sphere_renderer::{SphereRenderData, SphereRenderer};
use glutin::{VirtualKeyCode};
use frame_timer::FrameTimer;
use graphics::box_renderer::BoxRenderer;
use cgmath::{self};
pub type ColorFormat = gfx::format::Rgba8;
pub type DepthFormat = gfx::format::DepthStencil;
pub struct RenderPackage<'a> {
pub device: &'a mut gfx_device_gl::Device,
pub graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
pub frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
}
impl<'a> RenderPackage<'a> {
pub fn new(device: &'a mut gfx_device_gl::Device,
graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
) -> RenderPackage<'a> {
RenderPackage {
device,
graphics_queue,
frame_semaphore,
draw_semaphore,
frame_fence
}
}
}
#[derive(Clone)]
pub struct RenderFrame {
pub frame_index: u64,
pub boxes: Option<Vec<BoxRenderData>>,
pub spheres: Option<Vec<SphereRenderData>>,
}
impl RenderFrame {
pub fn new(frame_index: u64, boxes: Option<Vec<BoxRenderData>>, spheres: Option<Vec<SphereRenderData>>) -> RenderFrame {
RenderFrame {
frame_index: frame_index,
boxes,
spheres
}
}
}
pub struct RenderThread {
from_game_thread: Receiver<RenderFrame>,
_to_content_manifest: Sender<EContentLoadRequst>,
_from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
_current_frame_index: u64,
textures: HashMap<ContentId,gfx::handle::ShaderResourceView<gfx_device_gl::Resources, [f32;4]>>,
pub use_matrix : [[f32;4];4],
}
impl RenderThread {
pub fn new(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) -> RenderThread {
let o = cgmath::ortho(-2000.0f32, 2000.0f32, -2000.0f32, 2000.0f32, 0.0, 10.0);
|
RenderThread {
_current_frame_index: 0,
from_game_thread: from_game_thread,
_to_content_manifest: to_content_manifest,
_from_content_manifest: from_content_manifest,
to_game_thread_with_input: to_game_thread_with_input,
textures: HashMap::new(),
use_matrix: [
[o.x[0], o.x[1], o.x[2], o.x[3]],
[o.y[0], o.y[1], o.y[2], o.y[3]],
[o.z[0], o.z[1], o.z[2], o.z[3]],
[o.w[0], o.w[1], o.w[2], o.w[3]]
]
}
}
pub fn load_texture<D, R>(factory: &mut D, path: &str) -> gfx::handle::ShaderResourceView<R, [f32; 4]> where D: gfx::Device<R>, R: gfx::Resources
{
let img = image::open(path).unwrap().to_rgba();
let (width, height) = img.dimensions();
let kind = gfx::texture::Kind::D2(width as u16, height as u16, gfx::texture::AaMode::Single);
let (_, view) = factory.create_texture_immutable_u8::<ColorFormat>(kind, &[&img]).unwrap();
view
}
pub fn query_content_manifest_for_sprite(&mut self, _content_id: ContentId) -> bool {
return false;
/*
if self.sprites.contains_key(&content_id) {
true
} else {
let _ = self.to_content_manifest.send(EContentLoadRequst::Image(
content_id,
));
let value = self.from_content_manifest.recv().unwrap();
match value {
EContentType::Image(id, dy_image) => {
/*
let image_dimensions = dy_image.to_rgba().dimensions();
let loaded_image = glium::texture::RawImage2d::from_raw_rgba_reversed(dy_image.to_rgba().into_raw(), image_dimensions);
let tex = Texture2d::new(&self.display, loaded_image).unwrap();
let spr = Sprite::new("Sprite".to_string(), tex, &self.display);
self.sprites.insert(id, spr);
*/
true
}
EContentType::NotLoaded => false,
}
}
*/
}
pub fn thread_loop(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) {
let mut rend =
RenderThread::new(from_game_thread, to_content_manifest, from_content_manifest, to_game_thread_with_input);
rend.render();
}
pub fn render(&mut self) {
let mut frame_timer = FrameTimer::new();
let mut events_loop = glutin::EventsLoop::new();
let builder = glutin::WindowBuilder::new()
.with_title("Square Toy".to_string())
.with_dimensions(800, 800);
let gl_builder = glutin::ContextBuilder::new().with_vsync(true);
let windows = glutin::GlWindow::new(builder, gl_builder, &events_loop).unwrap();
// let context = glutin::ContextBuilder::new().with_vsync(true);
//let test_window = ;
let (mut surface, adapters) = gfx_window_glutin::Window::new(windows).get_surface_and_adapters();
let gfx::Gpu{mut device, mut graphics_queues,..} =
adapters[0].open_with(|family, ty| {
((ty.supports_graphics() && surface.supports_queue(&family)) as u32, gfx::QueueType::Graphics)
});
let mut graphics_queue = graphics_queues.pop().expect("Unable to find a graphics queue");
let config = gfx::SwapchainConfig::new().with_color::<ColorFormat>();
let mut swap_chain = surface.build_swapchain(config, &graphics_queue);
let views : Vec<gfx::handle::RenderTargetView<gfx_device_gl::Resources, (gfx::format::R8_G8_B8_A8, gfx::format::Unorm)>> = swap_chain.create_color_views(&mut device);
let mut box_rend = BoxRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let mut sphere_rend = SphereRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let frame_semaphore = device.create_semaphore();
let draw_semaphore = device.create_semaphore();
let frame_fence = device.create_fence(false);
let mut running = true;
let mut frame;
let mut frame_data;
while running {
let mut render_package = RenderPackage::new(&mut device, &mut graphics_queue, &frame_semaphore, &draw_semaphore, &frame_fence);
frame_timer.frame_start();
//the first thing we do is grab the current frame
events_loop.poll_events(|event| {
if let glutin::Event::WindowEvent { event,.. } = event {
match event {
glutin::WindowEvent::Closed => running = false,
glutin::WindowEvent::KeyboardInput{device_id: _id_of_device, input: input_event} => {
if input_event.virtual_keycode.is_none() == true {
return;
}
if input_event.virtual_keycode.unwrap() == VirtualKeyCode::Escape {
running = false;
}
let _ = self.to_game_thread_with_input.send(input_event.virtual_keycode.unwrap());
},
glutin::WindowEvent::Resized(_width, _height) => {
// TODO
},
_ => (),
}
}
});
frame = swap_chain.acquire_frame(FrameSync::Semaphore(&frame_semaphore));
let frame_view = &views[frame.id()].clone();
frame_data = self.from_game_thread.try_recv();
let frame_data = match frame_data {
Ok(data) => Some(data),
Err(_) => {
None
}
};
if frame_data.is_some() {
let frame_data = frame_data.unwrap();
if frame_data.boxes.is_some() {
// let fake : Vec<BoxRenderData> = vec![BoxRenderData{pos: Vector2::new(0.0f32, 0.0f32), scale: Vector2::new(1.0f32, 1.0f32), z_rotation: 0.0f32, color: [1.0f32, 1.0f32, 1.0f32]}];
box_rend.render_boxes(&frame_data.boxes.unwrap(), &mut render_package, &frame_view, self);
}
if frame_data.spheres.is_some() {
sphere_rend.render_spheres(&frame_data.spheres.unwrap(), &mut render_package, &frame_view, self);
}
}
swap_chain.present(&mut render_package.graphics_queue, &[&draw_semaphore]);
render_package.device.wait_for_fences(&[&frame_fence], gfx::WaitFor::All, 1_000_000);
render_package.graphics_queue.cleanup();
frame_timer.frame_end();
}
}
}
/*
gfx_defines! {
vertex VertexColor {
pos: [f32;2] = "a_Pos",
color: [f32;3] = "a_Color",
}
pipeline pipe_color {
vbuf: gfx::VertexBuffer<VertexColor> = (),
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
constant Transform {
transform: [[f32;4]; 4] = "u_Transform",
scale: [[f32;4]; 4] = "u_Scale",
rotation_z: [[f32;4]; 4] = "u_Rotation_z",
}
pipeline pipe_sin {
vbuf: gfx::VertexBuffer<VertexColor> = (),
transform: gfx::ConstantBuffer<Transform> = "Transform",
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
}
*/
|
random_line_split
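// Aside (not part of render_thread.rs): the `match frame_data { Ok(data) =>
// Some(data), Err(_) => None }` pattern above is exactly what `Result::ok()`
// does, so the non-blocking poll of the game thread can be collapsed to one
// call. A std-only sketch; `poll_latest` and the `u64` payload are placeholders
// standing in for the real channel of `RenderFrame` values.
use std::sync::mpsc;

fn poll_latest(rx: &mpsc::Receiver<u64>) -> Option<u64> {
    // Equivalent to matching Ok into Some and Err(TryRecvError) into None.
    rx.try_recv().ok()
}

fn poll_demo() {
    let (tx, rx) = mpsc::channel();
    tx.send(42u64).unwrap();
    assert_eq!(poll_latest(&rx), Some(42));
    assert_eq!(poll_latest(&rx), None); // nothing pending once the channel is drained
}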
|
|
render_thread.rs
|
use std::sync::mpsc::{Receiver, Sender};
use gfx;
use glutin;
use gfx_window_glutin;
use std::collections::HashMap;
use gfx::{Adapter, CommandQueue, Device, FrameSync,
Surface, Swapchain, SwapchainExt, WindowExt};
use gfx_device_gl;
use image;
use game::ContentId;
use content::load_content::{EContentType, EContentLoadRequst};
use graphics::box_renderer::BoxRenderData;
use graphics::sphere_renderer::{SphereRenderData, SphereRenderer};
use glutin::{VirtualKeyCode};
use frame_timer::FrameTimer;
use graphics::box_renderer::BoxRenderer;
use cgmath::{self};
pub type ColorFormat = gfx::format::Rgba8;
pub type DepthFormat = gfx::format::DepthStencil;
pub struct RenderPackage<'a> {
pub device: &'a mut gfx_device_gl::Device,
pub graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
pub frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
}
impl<'a> RenderPackage<'a> {
pub fn new(device: &'a mut gfx_device_gl::Device,
graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
) -> RenderPackage<'a> {
RenderPackage {
device,
graphics_queue,
frame_semaphore,
draw_semaphore,
frame_fence
}
}
}
#[derive(Clone)]
pub struct RenderFrame {
pub frame_index: u64,
pub boxes: Option<Vec<BoxRenderData>>,
pub spheres: Option<Vec<SphereRenderData>>,
}
impl RenderFrame {
pub fn new(frame_index: u64, boxes: Option<Vec<BoxRenderData>>, spheres: Option<Vec<SphereRenderData>>) -> RenderFrame {
RenderFrame {
frame_index: frame_index,
boxes,
spheres
}
}
}
pub struct RenderThread {
from_game_thread: Receiver<RenderFrame>,
_to_content_manifest: Sender<EContentLoadRequst>,
_from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
_current_frame_index: u64,
textures: HashMap<ContentId,gfx::handle::ShaderResourceView<gfx_device_gl::Resources, [f32;4]>>,
pub use_matrix : [[f32;4];4],
}
impl RenderThread {
pub fn new(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) -> RenderThread {
let o = cgmath::ortho(-2000.0f32, 2000.0f32, -2000.0f32, 2000.0f32, 0.0, 10.0);
RenderThread {
_current_frame_index: 0,
from_game_thread: from_game_thread,
_to_content_manifest: to_content_manifest,
_from_content_manifest: from_content_manifest,
to_game_thread_with_input: to_game_thread_with_input,
textures: HashMap::new(),
use_matrix: [
[o.x[0], o.x[1], o.x[2], o.x[3]],
[o.y[0], o.y[1], o.y[2], o.y[3]],
[o.z[0], o.z[1], o.z[2], o.z[3]],
[o.w[0], o.w[1], o.w[2], o.w[3]]
]
}
}
pub fn load_texture<D, R>(factory: &mut D, path: &str) -> gfx::handle::ShaderResourceView<R, [f32; 4]> where D: gfx::Device<R>, R: gfx::Resources
{
let img = image::open(path).unwrap().to_rgba();
let (width, height) = img.dimensions();
let kind = gfx::texture::Kind::D2(width as u16, height as u16, gfx::texture::AaMode::Single);
let (_, view) = factory.create_texture_immutable_u8::<ColorFormat>(kind, &[&img]).unwrap();
view
}
pub fn query_content_manifest_for_sprite(&mut self, _content_id: ContentId) -> bool {
return false;
/*
if self.sprites.contains_key(&content_id) {
true
} else {
let _ = self.to_content_manifest.send(EContentLoadRequst::Image(
content_id,
));
let value = self.from_content_manifest.recv().unwrap();
match value {
EContentType::Image(id, dy_image) => {
/*
let image_dimensions = dy_image.to_rgba().dimensions();
let loaded_image = glium::texture::RawImage2d::from_raw_rgba_reversed(dy_image.to_rgba().into_raw(), image_dimensions);
let tex = Texture2d::new(&self.display, loaded_image).unwrap();
let spr = Sprite::new("Sprite".to_string(), tex, &self.display);
self.sprites.insert(id, spr);
*/
true
}
EContentType::NotLoaded => false,
}
}
*/
}
pub fn thread_loop(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) {
let mut rend =
RenderThread::new(from_game_thread, to_content_manifest, from_content_manifest, to_game_thread_with_input);
rend.render();
}
pub fn render(&mut self) {
let mut frame_timer = FrameTimer::new();
let mut events_loop = glutin::EventsLoop::new();
let builder = glutin::WindowBuilder::new()
.with_title("Square Toy".to_string())
.with_dimensions(800, 800);
let gl_builder = glutin::ContextBuilder::new().with_vsync(true);
let windows = glutin::GlWindow::new(builder, gl_builder, &events_loop).unwrap();
// let context = glutin::ContextBuilder::new().with_vsync(true);
//let test_window = ;
let (mut surface, adapters) = gfx_window_glutin::Window::new(windows).get_surface_and_adapters();
let gfx::Gpu{mut device, mut graphics_queues,..} =
adapters[0].open_with(|family, ty| {
((ty.supports_graphics() && surface.supports_queue(&family)) as u32, gfx::QueueType::Graphics)
});
let mut graphics_queue = graphics_queues.pop().expect("Unable to find a graphics queue");
let config = gfx::SwapchainConfig::new().with_color::<ColorFormat>();
let mut swap_chain = surface.build_swapchain(config, &graphics_queue);
let views : Vec<gfx::handle::RenderTargetView<gfx_device_gl::Resources, (gfx::format::R8_G8_B8_A8, gfx::format::Unorm)>> = swap_chain.create_color_views(&mut device);
let mut box_rend = BoxRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let mut sphere_rend = SphereRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let frame_semaphore = device.create_semaphore();
let draw_semaphore = device.create_semaphore();
let frame_fence = device.create_fence(false);
let mut running = true;
let mut frame;
let mut frame_data;
while running {
let mut render_package = RenderPackage::new(&mut device, &mut graphics_queue, &frame_semaphore, &draw_semaphore, &frame_fence);
frame_timer.frame_start();
//the first thing we do is grab the current frame
events_loop.poll_events(|event| {
if let glutin::Event::WindowEvent { event,.. } = event {
match event {
glutin::WindowEvent::Closed => running = false,
glutin::WindowEvent::KeyboardInput{device_id: _id_of_device, input: input_event} => {
if input_event.virtual_keycode.is_none() == true {
return;
}
if input_event.virtual_keycode.unwrap() == VirtualKeyCode::Escape {
running = false;
}
let _ = self.to_game_thread_with_input.send(input_event.virtual_keycode.unwrap());
},
glutin::WindowEvent::Resized(_width, _height) =>
|
,
_ => (),
}
}
});
frame = swap_chain.acquire_frame(FrameSync::Semaphore(&frame_semaphore));
let frame_view = &views[frame.id()].clone();
frame_data = self.from_game_thread.try_recv();
let frame_data = match frame_data {
Ok(data) => Some(data),
Err(_) => {
None
}
};
if frame_data.is_some() {
let frame_data = frame_data.unwrap();
if frame_data.boxes.is_some() {
// let fake : Vec<BoxRenderData> = vec![BoxRenderData{pos: Vector2::new(0.0f32, 0.0f32), scale: Vector2::new(1.0f32, 1.0f32), z_rotation: 0.0f32, color: [1.0f32, 1.0f32, 1.0f32]}];
box_rend.render_boxes(&frame_data.boxes.unwrap(), &mut render_package, &frame_view, self);
}
if frame_data.spheres.is_some() {
sphere_rend.render_spheres(&frame_data.spheres.unwrap(), &mut render_package, &frame_view, self);
}
}
swap_chain.present(&mut render_package.graphics_queue, &[&draw_semaphore]);
render_package.device.wait_for_fences(&[&frame_fence], gfx::WaitFor::All, 1_000_000);
render_package.graphics_queue.cleanup();
frame_timer.frame_end();
}
}
}
/*
gfx_defines! {
vertex VertexColor {
pos: [f32;2] = "a_Pos",
color: [f32;3] = "a_Color",
}
pipeline pipe_color {
vbuf: gfx::VertexBuffer<VertexColor> = (),
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
constant Transform {
transform: [[f32;4]; 4] = "u_Transform",
scale: [[f32;4]; 4] = "u_Scale",
rotation_z: [[f32;4]; 4] = "u_Rotation_z",
}
pipeline pipe_sin {
vbuf: gfx::VertexBuffer<VertexColor> = (),
transform: gfx::ConstantBuffer<Transform> = "Transform",
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
}
*/
|
{
// TODO
}
|
conditional_block
|
render_thread.rs
|
use std::sync::mpsc::{Receiver, Sender};
use gfx;
use glutin;
use gfx_window_glutin;
use std::collections::HashMap;
use gfx::{Adapter, CommandQueue, Device, FrameSync,
Surface, Swapchain, SwapchainExt, WindowExt};
use gfx_device_gl;
use image;
use game::ContentId;
use content::load_content::{EContentType, EContentLoadRequst};
use graphics::box_renderer::BoxRenderData;
use graphics::sphere_renderer::{SphereRenderData, SphereRenderer};
use glutin::{VirtualKeyCode};
use frame_timer::FrameTimer;
use graphics::box_renderer::BoxRenderer;
use cgmath::{self};
pub type ColorFormat = gfx::format::Rgba8;
pub type DepthFormat = gfx::format::DepthStencil;
pub struct RenderPackage<'a> {
pub device: &'a mut gfx_device_gl::Device,
pub graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
pub frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
}
impl<'a> RenderPackage<'a> {
pub fn new(device: &'a mut gfx_device_gl::Device,
graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
) -> RenderPackage<'a>
|
}
#[derive(Clone)]
pub struct RenderFrame {
pub frame_index: u64,
pub boxes: Option<Vec<BoxRenderData>>,
pub spheres: Option<Vec<SphereRenderData>>,
}
impl RenderFrame {
pub fn new(frame_index: u64, boxes: Option<Vec<BoxRenderData>>, spheres: Option<Vec<SphereRenderData>>) -> RenderFrame {
RenderFrame {
frame_index: frame_index,
boxes,
spheres
}
}
}
pub struct RenderThread {
from_game_thread: Receiver<RenderFrame>,
_to_content_manifest: Sender<EContentLoadRequst>,
_from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
_current_frame_index: u64,
textures: HashMap<ContentId,gfx::handle::ShaderResourceView<gfx_device_gl::Resources, [f32;4]>>,
pub use_matrix : [[f32;4];4],
}
impl RenderThread {
pub fn new(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) -> RenderThread {
let o = cgmath::ortho(-2000.0f32, 2000.0f32, -2000.0f32, 2000.0f32, 0.0, 10.0);
RenderThread {
_current_frame_index: 0,
from_game_thread: from_game_thread,
_to_content_manifest: to_content_manifest,
_from_content_manifest: from_content_manifest,
to_game_thread_with_input: to_game_thread_with_input,
textures: HashMap::new(),
use_matrix: [
[o.x[0], o.x[1], o.x[2], o.x[3]],
[o.y[0], o.y[1], o.y[2], o.y[3]],
[o.z[0], o.z[1], o.z[2], o.z[3]],
[o.w[0], o.w[1], o.w[2], o.w[3]]
]
}
}
pub fn load_texture<D, R>(factory: &mut D, path: &str) -> gfx::handle::ShaderResourceView<R, [f32; 4]> where D: gfx::Device<R>, R: gfx::Resources
{
let img = image::open(path).unwrap().to_rgba();
let (width, height) = img.dimensions();
let kind = gfx::texture::Kind::D2(width as u16, height as u16, gfx::texture::AaMode::Single);
let (_, view) = factory.create_texture_immutable_u8::<ColorFormat>(kind, &[&img]).unwrap();
view
}
pub fn query_content_manifest_for_sprite(&mut self, _content_id: ContentId) -> bool {
return false;
/*
if self.sprites.contains_key(&content_id) {
true
} else {
let _ = self.to_content_manifest.send(EContentLoadRequst::Image(
content_id,
));
let value = self.from_content_manifest.recv().unwrap();
match value {
EContentType::Image(id, dy_image) => {
/*
let image_dimensions = dy_image.to_rgba().dimensions();
let loaded_image = glium::texture::RawImage2d::from_raw_rgba_reversed(dy_image.to_rgba().into_raw(), image_dimensions);
let tex = Texture2d::new(&self.display, loaded_image).unwrap();
let spr = Sprite::new("Sprite".to_string(), tex, &self.display);
self.sprites.insert(id, spr);
*/
true
}
EContentType::NotLoaded => false,
}
}
*/
}
pub fn thread_loop(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) {
let mut rend =
RenderThread::new(from_game_thread, to_content_manifest, from_content_manifest, to_game_thread_with_input);
rend.render();
}
pub fn render(&mut self) {
let mut frame_timer = FrameTimer::new();
let mut events_loop = glutin::EventsLoop::new();
let builder = glutin::WindowBuilder::new()
.with_title("Square Toy".to_string())
.with_dimensions(800, 800);
let gl_builder = glutin::ContextBuilder::new().with_vsync(true);
let windows = glutin::GlWindow::new(builder, gl_builder, &events_loop).unwrap();
// let context = glutin::ContextBuilder::new().with_vsync(true);
//let test_window = ;
let (mut surface, adapters) = gfx_window_glutin::Window::new(windows).get_surface_and_adapters();
let gfx::Gpu{mut device, mut graphics_queues,..} =
adapters[0].open_with(|family, ty| {
((ty.supports_graphics() && surface.supports_queue(&family)) as u32, gfx::QueueType::Graphics)
});
let mut graphics_queue = graphics_queues.pop().expect("Unable to find a graphics queue");
let config = gfx::SwapchainConfig::new().with_color::<ColorFormat>();
let mut swap_chain = surface.build_swapchain(config, &graphics_queue);
let views : Vec<gfx::handle::RenderTargetView<gfx_device_gl::Resources, (gfx::format::R8_G8_B8_A8, gfx::format::Unorm)>> = swap_chain.create_color_views(&mut device);
let mut box_rend = BoxRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let mut sphere_rend = SphereRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let frame_semaphore = device.create_semaphore();
let draw_semaphore = device.create_semaphore();
let frame_fence = device.create_fence(false);
let mut running = true;
let mut frame;
let mut frame_data;
while running {
let mut render_package = RenderPackage::new(&mut device, &mut graphics_queue, &frame_semaphore, &draw_semaphore, &frame_fence);
frame_timer.frame_start();
//the first thing we do is grab the current frame
events_loop.poll_events(|event| {
if let glutin::Event::WindowEvent { event,.. } = event {
match event {
glutin::WindowEvent::Closed => running = false,
glutin::WindowEvent::KeyboardInput{device_id: _id_of_device, input: input_event} => {
if input_event.virtual_keycode.is_none() == true {
return;
}
if input_event.virtual_keycode.unwrap() == VirtualKeyCode::Escape {
running = false;
}
let _ = self.to_game_thread_with_input.send(input_event.virtual_keycode.unwrap());
},
glutin::WindowEvent::Resized(_width, _height) => {
// TODO
},
_ => (),
}
}
});
frame = swap_chain.acquire_frame(FrameSync::Semaphore(&frame_semaphore));
let frame_view = &views[frame.id()].clone();
frame_data = self.from_game_thread.try_recv();
let frame_data = match frame_data {
Ok(data) => Some(data),
Err(_) => {
None
}
};
if frame_data.is_some() {
let frame_data = frame_data.unwrap();
if frame_data.boxes.is_some() {
// let fake : Vec<BoxRenderData> = vec![BoxRenderData{pos: Vector2::new(0.0f32, 0.0f32), scale: Vector2::new(1.0f32, 1.0f32), z_rotation: 0.0f32, color: [1.0f32, 1.0f32, 1.0f32]}];
box_rend.render_boxes(&frame_data.boxes.unwrap(), &mut render_package, &frame_view, self);
}
if frame_data.spheres.is_some() {
sphere_rend.render_spheres(&frame_data.spheres.unwrap(), &mut render_package, &frame_view, self);
}
}
swap_chain.present(&mut render_package.graphics_queue, &[&draw_semaphore]);
render_package.device.wait_for_fences(&[&frame_fence], gfx::WaitFor::All, 1_000_000);
render_package.graphics_queue.cleanup();
frame_timer.frame_end();
}
}
}
/*
gfx_defines! {
vertex VertexColor {
pos: [f32;2] = "a_Pos",
color: [f32;3] = "a_Color",
}
pipeline pipe_color {
vbuf: gfx::VertexBuffer<VertexColor> = (),
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
constant Transform {
transform: [[f32;4]; 4] = "u_Transform",
scale: [[f32;4]; 4] = "u_Scale",
rotation_z: [[f32;4]; 4] = "u_Rotation_z",
}
pipeline pipe_sin {
vbuf: gfx::VertexBuffer<VertexColor> = (),
transform: gfx::ConstantBuffer<Transform> = "Transform",
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
}
*/
|
{
RenderPackage {
device,
graphics_queue,
frame_semaphore,
draw_semaphore,
frame_fence
}
}
|
identifier_body
|
render_thread.rs
|
use std::sync::mpsc::{Receiver, Sender};
use gfx;
use glutin;
use gfx_window_glutin;
use std::collections::HashMap;
use gfx::{Adapter, CommandQueue, Device, FrameSync,
Surface, Swapchain, SwapchainExt, WindowExt};
use gfx_device_gl;
use image;
use game::ContentId;
use content::load_content::{EContentType, EContentLoadRequst};
use graphics::box_renderer::BoxRenderData;
use graphics::sphere_renderer::{SphereRenderData, SphereRenderer};
use glutin::{VirtualKeyCode};
use frame_timer::FrameTimer;
use graphics::box_renderer::BoxRenderer;
use cgmath::{self};
pub type ColorFormat = gfx::format::Rgba8;
pub type DepthFormat = gfx::format::DepthStencil;
pub struct RenderPackage<'a> {
pub device: &'a mut gfx_device_gl::Device,
pub graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
pub frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
pub frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
}
impl<'a> RenderPackage<'a> {
pub fn new(device: &'a mut gfx_device_gl::Device,
graphics_queue: &'a mut gfx::queue::GraphicsQueue<gfx_device_gl::Backend>,
frame_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
draw_semaphore: &'a gfx::handle::Semaphore<gfx_device_gl::Resources>,
frame_fence: &'a gfx::handle::Fence<gfx_device_gl::Resources>,
) -> RenderPackage<'a> {
RenderPackage {
device,
graphics_queue,
frame_semaphore,
draw_semaphore,
frame_fence
}
}
}
#[derive(Clone)]
pub struct
|
{
pub frame_index: u64,
pub boxes: Option<Vec<BoxRenderData>>,
pub spheres: Option<Vec<SphereRenderData>>,
}
impl RenderFrame {
pub fn new(frame_index: u64, boxes: Option<Vec<BoxRenderData>>, spheres: Option<Vec<SphereRenderData>>) -> RenderFrame {
RenderFrame {
frame_index: frame_index,
boxes,
spheres
}
}
}
pub struct RenderThread {
from_game_thread: Receiver<RenderFrame>,
_to_content_manifest: Sender<EContentLoadRequst>,
_from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
_current_frame_index: u64,
textures: HashMap<ContentId,gfx::handle::ShaderResourceView<gfx_device_gl::Resources, [f32;4]>>,
pub use_matrix : [[f32;4];4],
}
impl RenderThread {
pub fn new(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) -> RenderThread {
let o = cgmath::ortho(-2000.0f32, 2000.0f32, -2000.0f32, 2000.0f32, 0.0, 10.0);
RenderThread {
_current_frame_index: 0,
from_game_thread: from_game_thread,
_to_content_manifest: to_content_manifest,
_from_content_manifest: from_content_manifest,
to_game_thread_with_input: to_game_thread_with_input,
textures: HashMap::new(),
use_matrix: [
[o.x[0], o.x[1], o.x[2], o.x[3]],
[o.y[0], o.y[1], o.y[2], o.y[3]],
[o.z[0], o.z[1], o.z[2], o.z[3]],
[o.w[0], o.w[1], o.w[2], o.w[3]]
]
}
}
pub fn load_texture<D, R>(factory: &mut D, path: &str) -> gfx::handle::ShaderResourceView<R, [f32; 4]> where D: gfx::Device<R>, R: gfx::Resources
{
let img = image::open(path).unwrap().to_rgba();
let (width, height) = img.dimensions();
let kind = gfx::texture::Kind::D2(width as u16, height as u16, gfx::texture::AaMode::Single);
let (_, view) = factory.create_texture_immutable_u8::<ColorFormat>(kind, &[&img]).unwrap();
view
}
pub fn query_content_manifest_for_sprite(&mut self, _content_id: ContentId) -> bool {
return false;
/*
if self.sprites.contains_key(&content_id) {
true
} else {
let _ = self.to_content_manifest.send(EContentLoadRequst::Image(
content_id,
));
let value = self.from_content_manifest.recv().unwrap();
match value {
EContentType::Image(id, dy_image) => {
/*
let image_dimensions = dy_image.to_rgba().dimensions();
let loaded_image = glium::texture::RawImage2d::from_raw_rgba_reversed(dy_image.to_rgba().into_raw(), image_dimensions);
let tex = Texture2d::new(&self.display, loaded_image).unwrap();
let spr = Sprite::new("Sprite".to_string(), tex, &self.display);
self.sprites.insert(id, spr);
*/
true
}
EContentType::NotLoaded => false,
}
}
*/
}
pub fn thread_loop(
from_game_thread: Receiver<RenderFrame>,
to_content_manifest: Sender<EContentLoadRequst>,
from_content_manifest: Receiver<EContentType>,
to_game_thread_with_input: Sender<VirtualKeyCode>,
) {
let mut rend =
RenderThread::new(from_game_thread, to_content_manifest, from_content_manifest, to_game_thread_with_input);
rend.render();
}
pub fn render(&mut self) {
let mut frame_timer = FrameTimer::new();
let mut events_loop = glutin::EventsLoop::new();
let builder = glutin::WindowBuilder::new()
.with_title("Square Toy".to_string())
.with_dimensions(800, 800);
let gl_builder = glutin::ContextBuilder::new().with_vsync(true);
let windows = glutin::GlWindow::new(builder, gl_builder, &events_loop).unwrap();
// let context = glutin::ContextBuilder::new().with_vsync(true);
//let test_window = ;
let (mut surface, adapters) = gfx_window_glutin::Window::new(windows).get_surface_and_adapters();
let gfx::Gpu{mut device, mut graphics_queues,..} =
adapters[0].open_with(|family, ty| {
((ty.supports_graphics() && surface.supports_queue(&family)) as u32, gfx::QueueType::Graphics)
});
let mut graphics_queue = graphics_queues.pop().expect("Unable to find a graphics queue");
let config = gfx::SwapchainConfig::new().with_color::<ColorFormat>();
let mut swap_chain = surface.build_swapchain(config, &graphics_queue);
let views : Vec<gfx::handle::RenderTargetView<gfx_device_gl::Resources, (gfx::format::R8_G8_B8_A8, gfx::format::Unorm)>> = swap_chain.create_color_views(&mut device);
let mut box_rend = BoxRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let mut sphere_rend = SphereRenderer::new(&mut device, graphics_queue.create_graphics_pool(1));
let frame_semaphore = device.create_semaphore();
let draw_semaphore = device.create_semaphore();
let frame_fence = device.create_fence(false);
let mut running = true;
let mut frame;
let mut frame_data;
while running {
let mut render_package = RenderPackage::new(&mut device, &mut graphics_queue, &frame_semaphore, &draw_semaphore, &frame_fence);
frame_timer.frame_start();
//the first thing we do is grab the current frame
events_loop.poll_events(|event| {
if let glutin::Event::WindowEvent { event,.. } = event {
match event {
glutin::WindowEvent::Closed => running = false,
glutin::WindowEvent::KeyboardInput{device_id: _id_of_device, input: input_event} => {
if input_event.virtual_keycode.is_none() == true {
return;
}
if input_event.virtual_keycode.unwrap() == VirtualKeyCode::Escape {
running = false;
}
let _ = self.to_game_thread_with_input.send(input_event.virtual_keycode.unwrap());
},
glutin::WindowEvent::Resized(_width, _height) => {
// TODO
},
_ => (),
}
}
});
frame = swap_chain.acquire_frame(FrameSync::Semaphore(&frame_semaphore));
let frame_view = &views[frame.id()].clone();
frame_data = self.from_game_thread.try_recv();
let frame_data = match frame_data {
Ok(data) => Some(data),
Err(_) => {
None
}
};
if frame_data.is_some() {
let frame_data = frame_data.unwrap();
if frame_data.boxes.is_some() {
// let fake : Vec<BoxRenderData> = vec![BoxRenderData{pos: Vector2::new(0.0f32, 0.0f32), scale: Vector2::new(1.0f32, 1.0f32), z_rotation: 0.0f32, color: [1.0f32, 1.0f32, 1.0f32]}];
box_rend.render_boxes(&frame_data.boxes.unwrap(), &mut render_package, &frame_view, self);
}
if frame_data.spheres.is_some() {
sphere_rend.render_spheres(&frame_data.spheres.unwrap(), &mut render_package, &frame_view, self);
}
}
swap_chain.present(&mut render_package.graphics_queue, &[&draw_semaphore]);
render_package.device.wait_for_fences(&[&frame_fence], gfx::WaitFor::All, 1_000_000);
render_package.graphics_queue.cleanup();
frame_timer.frame_end();
}
}
}
/*
gfx_defines! {
vertex VertexColor {
pos: [f32;2] = "a_Pos",
color: [f32;3] = "a_Color",
}
pipeline pipe_color {
vbuf: gfx::VertexBuffer<VertexColor> = (),
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
constant Transform {
transform: [[f32;4]; 4] = "u_Transform",
scale: [[f32;4]; 4] = "u_Scale",
rotation_z: [[f32;4]; 4] = "u_Rotation_z",
}
pipeline pipe_sin {
vbuf: gfx::VertexBuffer<VertexColor> = (),
transform: gfx::ConstantBuffer<Transform> = "Transform",
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
}
*/
|
RenderFrame
|
identifier_name
|
pointing.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("Pointing", inherited=True, gecko_name="UserInterface") %>
<%helpers:longhand name="cursor" animatable="False">
pub use self::computed_value::T as SpecifiedValue;
use values::NoViewportPercentage;
use values::computed::ComputedValueAsSpecified;
impl ComputedValueAsSpecified for SpecifiedValue {}
impl NoViewportPercentage for SpecifiedValue {}
pub mod computed_value {
use cssparser::ToCss;
use std::fmt;
use style_traits::cursor::Cursor;
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum
|
{
AutoCursor,
SpecifiedCursor(Cursor),
}
impl ToCss for T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
T::AutoCursor => dest.write_str("auto"),
T::SpecifiedCursor(c) => c.to_css(dest),
}
}
}
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T::AutoCursor
}
pub fn parse(_context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
use std::ascii::AsciiExt;
use style_traits::cursor::Cursor;
let ident = try!(input.expect_ident());
if ident.eq_ignore_ascii_case("auto") {
Ok(SpecifiedValue::AutoCursor)
} else {
Cursor::from_css_keyword(&ident)
.map(SpecifiedValue::SpecifiedCursor)
}
}
</%helpers:longhand>
// NB: `pointer-events: auto` (and use of `pointer-events` in anything that isn't SVG, in fact)
// is nonstandard, slated for CSS4-UI.
// TODO(pcwalton): SVG-only values.
${helpers.single_keyword("pointer-events", "auto none", animatable=False)}
${helpers.single_keyword("-moz-user-input", "none enabled disabled",
products="gecko", gecko_ffi_name="mUserInput",
gecko_constant_prefix="NS_STYLE_USER_INPUT",
animatable=False)}
${helpers.single_keyword("-moz-user-modify", "read-only read-write write-only",
products="gecko", gecko_ffi_name="mUserModify",
gecko_constant_prefix="NS_STYLE_USER_MODIFY",
animatable=False)}
${helpers.single_keyword("-moz-user-focus",
"ignore normal select-after select-before select-menu select-same select-all none",
products="gecko", gecko_ffi_name="mUserFocus",
gecko_constant_prefix="NS_STYLE_USER_FOCUS",
animatable=False)}
|
T
|
identifier_name
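// Aside (not part of pointing.mako.rs): a stripped-down, std-only sketch of the
// serialization pattern used by the `cursor` longhand above -- a value is either
// the keyword `auto` or a wrapped cursor keyword, and `to_css` writes one or the
// other. `CursorKeyword`, `CursorValue` and `render` are stand-ins for
// illustration only; the real types are `computed_value::T` and
// `style_traits::cursor::Cursor`.
use std::fmt::{self, Write};

#[derive(Clone, Copy, Debug, PartialEq)]
enum CursorKeyword {
    Pointer,
    Crosshair,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum CursorValue {
    Auto,
    Specified(CursorKeyword),
}

impl CursorValue {
    fn to_css<W: Write>(&self, dest: &mut W) -> fmt::Result {
        match *self {
            CursorValue::Auto => dest.write_str("auto"),
            CursorValue::Specified(CursorKeyword::Pointer) => dest.write_str("pointer"),
            CursorValue::Specified(CursorKeyword::Crosshair) => dest.write_str("crosshair"),
        }
    }
}

fn render(value: CursorValue) -> String {
    let mut out = String::new();
    value.to_css(&mut out).unwrap();
    out
}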
|
pointing.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("Pointing", inherited=True, gecko_name="UserInterface") %>
<%helpers:longhand name="cursor" animatable="False">
pub use self::computed_value::T as SpecifiedValue;
use values::NoViewportPercentage;
use values::computed::ComputedValueAsSpecified;
impl ComputedValueAsSpecified for SpecifiedValue {}
impl NoViewportPercentage for SpecifiedValue {}
pub mod computed_value {
use cssparser::ToCss;
use std::fmt;
use style_traits::cursor::Cursor;
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum T {
AutoCursor,
SpecifiedCursor(Cursor),
}
impl ToCss for T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
T::AutoCursor => dest.write_str("auto"),
T::SpecifiedCursor(c) => c.to_css(dest),
}
}
}
}
|
computed_value::T::AutoCursor
}
pub fn parse(_context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
use std::ascii::AsciiExt;
use style_traits::cursor::Cursor;
let ident = try!(input.expect_ident());
if ident.eq_ignore_ascii_case("auto") {
Ok(SpecifiedValue::AutoCursor)
} else {
Cursor::from_css_keyword(&ident)
.map(SpecifiedValue::SpecifiedCursor)
}
}
</%helpers:longhand>
// NB: `pointer-events: auto` (and use of `pointer-events` in anything that isn't SVG, in fact)
// is nonstandard, slated for CSS4-UI.
// TODO(pcwalton): SVG-only values.
${helpers.single_keyword("pointer-events", "auto none", animatable=False)}
${helpers.single_keyword("-moz-user-input", "none enabled disabled",
products="gecko", gecko_ffi_name="mUserInput",
gecko_constant_prefix="NS_STYLE_USER_INPUT",
animatable=False)}
${helpers.single_keyword("-moz-user-modify", "read-only read-write write-only",
products="gecko", gecko_ffi_name="mUserModify",
gecko_constant_prefix="NS_STYLE_USER_MODIFY",
animatable=False)}
${helpers.single_keyword("-moz-user-focus",
"ignore normal select-after select-before select-menu select-same select-all none",
products="gecko", gecko_ffi_name="mUserFocus",
gecko_constant_prefix="NS_STYLE_USER_FOCUS",
animatable=False)}
|
#[inline]
pub fn get_initial_value() -> computed_value::T {
|
random_line_split
|
pointing.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("Pointing", inherited=True, gecko_name="UserInterface") %>
<%helpers:longhand name="cursor" animatable="False">
pub use self::computed_value::T as SpecifiedValue;
use values::NoViewportPercentage;
use values::computed::ComputedValueAsSpecified;
impl ComputedValueAsSpecified for SpecifiedValue {}
impl NoViewportPercentage for SpecifiedValue {}
pub mod computed_value {
use cssparser::ToCss;
use std::fmt;
use style_traits::cursor::Cursor;
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum T {
AutoCursor,
SpecifiedCursor(Cursor),
}
impl ToCss for T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
T::AutoCursor => dest.write_str("auto"),
T::SpecifiedCursor(c) => c.to_css(dest),
}
}
}
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T::AutoCursor
}
pub fn parse(_context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
use std::ascii::AsciiExt;
use style_traits::cursor::Cursor;
let ident = try!(input.expect_ident());
if ident.eq_ignore_ascii_case("auto") {
Ok(SpecifiedValue::AutoCursor)
} else
|
}
</%helpers:longhand>
// NB: `pointer-events: auto` (and use of `pointer-events` in anything that isn't SVG, in fact)
// is nonstandard, slated for CSS4-UI.
// TODO(pcwalton): SVG-only values.
${helpers.single_keyword("pointer-events", "auto none", animatable=False)}
${helpers.single_keyword("-moz-user-input", "none enabled disabled",
products="gecko", gecko_ffi_name="mUserInput",
gecko_constant_prefix="NS_STYLE_USER_INPUT",
animatable=False)}
${helpers.single_keyword("-moz-user-modify", "read-only read-write write-only",
products="gecko", gecko_ffi_name="mUserModify",
gecko_constant_prefix="NS_STYLE_USER_MODIFY",
animatable=False)}
${helpers.single_keyword("-moz-user-focus",
"ignore normal select-after select-before select-menu select-same select-all none",
products="gecko", gecko_ffi_name="mUserFocus",
gecko_constant_prefix="NS_STYLE_USER_FOCUS",
animatable=False)}
|
{
Cursor::from_css_keyword(&ident)
.map(SpecifiedValue::SpecifiedCursor)
}
|
conditional_block
|
diagnostic.rs
|
call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
#[derive(Copy)]
pub struct ExplicitBug;
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
pub handler: Handler,
pub cm: codemap::CodeMap,
}
impl SpanHandler {
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
panic!(FatalError);
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
panic!(FatalError);
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
self.handler.bump_err_count();
}
pub fn span_warn(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Warning);
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Note);
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FullSpan(sp), msg, Note);
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Help);
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
}
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Bug);
panic!(ExplicitBug);
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg)[]);
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
}
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
pub can_emit_warnings: bool
}
impl Handler {
pub fn fatal(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Fatal);
panic!(FatalError);
}
pub fn err(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Error);
self.bump_err_count();
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
}
self.fatal(&s[]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
}
pub fn note(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Note);
}
pub fn help(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Help);
}
pub fn bug(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Bug);
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg)[]);
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
}
pub fn emit_with_code(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
code: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
}
pub fn custom_emit(&self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
}
}
pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
SpanHandler {
handler: handler,
cm: cm,
}
}
pub fn default_handler(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>,
can_emit_warnings: bool) -> Handler {
mk_handler(can_emit_warnings, box EmitterWriter::stderr(color_config, registry))
}
pub fn mk_handler(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
Handler {
err_count: Cell::new(0),
emit: RefCell::new(e),
can_emit_warnings: can_emit_warnings
}
}
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
Bug,
Fatal,
Error,
Warning,
Note,
Help,
}
impl fmt::Display for Level {
fn
|
(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
fn print_maybe_styled(w: &mut EmitterWriter,
msg: &str,
color: term::attr::Attr) -> old_io::IoResult<()> {
match w.dst {
Terminal(ref mut t) => {
try!(t.attr(color));
// If `msg` ends in a newline, we need to reset the color before
// the newline. We're making the assumption that we end up writing
// to a `LineBufferedWriter`, which means that emitting the reset
// after the newline ends up buffering the reset until we print
// another line or exit. Buffering the reset is a problem if we're
// sharing the terminal with any other programs (e.g. other rustc
// instances via `make -jN`).
//
// Note that if `msg` contains any internal newlines, this will
// result in the `LineBufferedWriter` flushing twice instead of
// once, which still leaves the opportunity for interleaved output
// to be miscolored. We assume this is rare enough that we don't
// have to worry about it.
if msg.ends_with("\n") {
try!(t.write_str(&msg[..msg.len()-1]));
try!(t.reset());
try!(t.write_str("\n"));
} else {
try!(t.write_str(msg));
try!(t.reset());
}
Ok(())
}
Raw(ref mut w) => {
w.write_str(msg)
}
}
}
fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
msg: &str, code: Option<&str>) -> old_io::IoResult<()> {
if !topic.is_empty() {
try!(write!(&mut dst.dst, "{} ", topic));
}
try!(print_maybe_styled(dst,
&format!("{}: ", lvl.to_string())[],
term::attr::ForegroundColor(lvl.color())));
try!(print_maybe_styled(dst,
&format!("{}", msg)[],
term::attr::Bold));
match code {
Some(code) => {
let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
try!(print_maybe_styled(dst, &format!(" [{}]", code.clone())[], style));
}
None => ()
}
try!(dst.dst.write_char('\n'));
Ok(())
}
pub struct EmitterWriter {
dst: Destination,
registry: Option<diagnostics::registry::Registry>
}
enum Destination {
Terminal(Box<term::Terminal<WriterWrapper> + Send>),
Raw(Box<Writer + Send>),
}
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
let stderr = old_io::stderr();
let use_color = match color_config {
Always => true,
Never => false,
Auto => stderr.get_ref().isatty()
};
if use_color {
let dst = match term::stderr() {
Some(t) => Terminal(t),
None => Raw(box stderr),
};
EmitterWriter { dst: dst, registry: registry }
} else {
EmitterWriter { dst: Raw(box stderr), registry: registry }
}
}
pub fn new(dst: Box<Writer + Send>,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
EmitterWriter { dst: Raw(dst), registry: registry }
}
}
impl Writer for Destination {
fn write_all(&mut self, bytes: &[u8]) -> old_io::IoResult<()> {
match *self {
Terminal(ref mut t) => t.write_all(bytes),
Raw(ref mut w) => w.write_all(bytes),
}
}
}
impl Emitter for EmitterWriter {
fn emit(&mut self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level) {
let error = match cmsp {
Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
FileLine(COMMAND_LINE_SP),
msg, code, lvl, false),
Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl, false),
None => print_diagnostic(self, "", lvl, msg, code),
};
match error {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
match emit(self, cm, sp, msg, None, lvl, true) {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
}
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
msg: &str, code: Option<&str>, lvl: Level, custom: bool) -> old_io::IoResult<()> {
let sp = rsp.span();
// We cannot check equality directly with COMMAND_LINE_SP
// since PartialEq is manually implemented to ignore the ExpnId
let ss = if sp.expn_id == COMMAND_LINE_EXPN {
"<command line option>".to_string()
} else {
cm.span_to_string(sp)
};
if custom {
// we want to tell compiletest/runtest to look at the last line of the
// span (since `custom_highlight_lines` displays an arrow to the end of
// the span)
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
let ses = cm.span_to_string(span_end);
try!(print_diagnostic(dst, &ses[], lvl, msg, code));
if rsp.is_full_span() {
try!(custom_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
} else {
try!(print_diagnostic(dst, &ss[], lvl, msg, code));
if rsp.is_full_span() {
try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
}
    if sp != COMMAND_LINE_SP {
try!(print_macro_backtrace(dst, cm, sp));
}
match code {
Some(code) =>
match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
Some(_) => {
try!(print_diagnostic(dst, &ss[], Help,
&format!("pass `--explain {}` to see a detailed \
explanation", code)[], None));
}
None => ()
},
None => (),
}
Ok(())
}
fn highlight_lines(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLines) -> old_io::IoResult<()> {
let fm = &*lines.file;
let mut elided = false;
let mut display_lines = &lines.lines[];
if display_lines.len() > MAX_LINES {
display_lines = &display_lines[0..MAX_LINES];
elided = true;
}
// Print the offending lines
for &line_number in display_lines {
if let Some(line) = fm.get_line(line_number) {
try!(write!(&mut err.dst, "{}:{} {}\n", fm.name,
line_number + 1, line));
}
}
if elided {
let last_line = display_lines[display_lines.len() - 1];
let s = format!("{}:{} ", fm.name, last_line + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0;
let mut num = (lines.lines[0] + 1) / 10;
        // how many digits must we indent past?
while num > 0 { num /= 10; digits += 1; }
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
        // part of the 'filename:line' part of the previous line.
let skip = fm.name.width(false) + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0]) {
let mut col = skip;
            let mut lastc = ' ';
let mut iter = orig.chars().enumerate();
for (pos, ch) in iter.by_ref() {
lastc = ch;
if pos >= lo.col.to_usize() { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => {
col += 8 - col%8;
s.push('\t');
},
c => for _ in 0..c.width(false).unwrap_or(0) {
col += 1;
s.push(' ');
},
}
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from_str("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,
_ => lastc.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
let hi = cm.lookup_char_pos(sp.hi);
            if hi.col != lo.col {
for (pos, ch) in iter {
if pos >= hi.col.to_usize() { break; }
let count = match ch {
'\t' => 8 - col%8,
_ => ch.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
}
}
if s.len() > 1 {
// One extra squiggly is replaced by a "^"
s.pop();
}
try!(print_maybe_styled(err,
&format!("{}\n", s)[],
term::attr::ForegroundColor(lvl.color())));
}
}
Ok(())
}
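// Editor's sketch (not part of the original source). It isolates the column
// bookkeeping used above to line the "^~~~" underline up under the offending
// span: tabs jump to the next 8-column tab stop and are copied into the
// padding verbatim, while every other character is assumed to be one column
// wide (the real code consults `.width(false)` for that instead).
fn sketch_underline(line: &str, start: usize, end: usize) -> String {
    let mut out = String::new();
    let mut col = 0usize;
    for (pos, ch) in line.chars().enumerate() {
        if pos >= start { break; }
        if ch == '\t' {
            out.push('\t');     // keep the tab so the terminal expands it the same way
            col += 8 - col % 8; // advance to the next 8-column tab stop
        } else {
            out.push(' ');
            col += 1;           // sketch assumption: one column per character
        }
    }
    // One '^' on the first column of the span, one '~' on each remaining column.
    out.push('^');
    out.extend(::std::iter::repeat('~').take(end.saturating_sub(start + 1)));
    out
}
// e.g. sketch_underline("let x = foo(bar);", 8, 16) yields eight spaces of
// padding followed by "^~~~~~~~", sitting under "foo(bar)".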
/// Here are the differences between this and the normal `highlight_lines`:
/// `custom_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `custom_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
fn custom_highlight_lines(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLines)
|
fmt
|
identifier_name
|
diagnostic.rs
|
call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
#[derive(Copy)]
pub struct ExplicitBug;
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
pub handler: Handler,
pub cm: codemap::CodeMap,
}
impl SpanHandler {
    pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
panic!(FatalError);
}
    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
panic!(FatalError);
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
self.handler.bump_err_count();
}
pub fn span_warn(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Warning);
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Note);
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FullSpan(sp), msg, Note);
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Help);
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
}
    pub fn span_bug(&self, sp: Span, msg: &str) -> !
|
    pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg)[]);
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
}
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
pub can_emit_warnings: bool
}
impl Handler {
    pub fn fatal(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Fatal);
panic!(FatalError);
}
pub fn err(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Error);
self.bump_err_count();
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
}
self.fatal(&s[]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
}
pub fn note(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Note);
}
pub fn help(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Help);
}
    pub fn bug(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Bug);
panic!(ExplicitBug);
}
    pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg)[]);
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
}
pub fn emit_with_code(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
code: &str,
lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
}
pub fn custom_emit(&self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
}
}
pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
SpanHandler {
handler: handler,
cm: cm,
}
}
pub fn default_handler(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>,
can_emit_warnings: bool) -> Handler {
mk_handler(can_emit_warnings, box EmitterWriter::stderr(color_config, registry))
}
pub fn mk_handler(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
Handler {
err_count: Cell::new(0),
emit: RefCell::new(e),
can_emit_warnings: can_emit_warnings
}
}
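// Editor's sketch (not part of the original source). `Handler` stores its
// error count in a `Cell` and its boxed emitter in a `RefCell` so that methods
// taking only `&self` can still mutate both. A minimal, self-contained version
// of that interior-mutability pattern (the `SketchEmit` trait is an assumption
// of the sketch, standing in for `Emitter`):
trait SketchEmit {
    fn emit(&mut self, msg: &str);
}
struct SketchHandler {
    err_count: ::std::cell::Cell<usize>,
    emit: ::std::cell::RefCell<Box<dyn SketchEmit>>,
}
impl SketchHandler {
    fn err(&self, msg: &str) {
        // `borrow_mut` hands out a temporary mutable borrow of the emitter and
        // `Cell::set` swaps the counter; neither needs `&mut self`.
        self.emit.borrow_mut().emit(msg);
        self.err_count.set(self.err_count.get() + 1);
    }
    fn has_errors(&self) -> bool {
        self.err_count.get() > 0
    }
}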
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
Bug,
Fatal,
Error,
Warning,
Note,
Help,
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
fn print_maybe_styled(w: &mut EmitterWriter,
msg: &str,
color: term::attr::Attr) -> old_io::IoResult<()> {
match w.dst {
Terminal(ref mut t) => {
try!(t.attr(color));
// If `msg` ends in a newline, we need to reset the color before
// the newline. We're making the assumption that we end up writing
// to a `LineBufferedWriter`, which means that emitting the reset
// after the newline ends up buffering the reset until we print
// another line or exit. Buffering the reset is a problem if we're
// sharing the terminal with any other programs (e.g. other rustc
// instances via `make -jN`).
//
// Note that if `msg` contains any internal newlines, this will
// result in the `LineBufferedWriter` flushing twice instead of
// once, which still leaves the opportunity for interleaved output
// to be miscolored. We assume this is rare enough that we don't
// have to worry about it.
if msg.ends_with("\n") {
try!(t.write_str(&msg[..msg.len()-1]));
try!(t.reset());
try!(t.write_str("\n"));
} else {
try!(t.write_str(msg));
try!(t.reset());
}
Ok(())
}
Raw(ref mut w) => {
w.write_str(msg)
}
}
}
fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
msg: &str, code: Option<&str>) -> old_io::IoResult<()> {
    if !topic.is_empty() {
try!(write!(&mut dst.dst, "{} ", topic));
}
try!(print_maybe_styled(dst,
&format!("{}: ", lvl.to_string())[],
term::attr::ForegroundColor(lvl.color())));
try!(print_maybe_styled(dst,
&format!("{}", msg)[],
term::attr::Bold));
match code {
Some(code) => {
let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
try!(print_maybe_styled(dst, &format!(" [{}]", code.clone())[], style));
}
None => ()
}
try!(dst.dst.write_char('\n'));
Ok(())
}
pub struct EmitterWriter {
dst: Destination,
registry: Option<diagnostics::registry::Registry>
}
enum Destination {
Terminal(Box<term::Terminal<WriterWrapper> + Send>),
Raw(Box<Writer + Send>),
}
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
let stderr = old_io::stderr();
let use_color = match color_config {
Always => true,
Never => false,
Auto => stderr.get_ref().isatty()
};
if use_color {
let dst = match term::stderr() {
Some(t) => Terminal(t),
None => Raw(box stderr),
};
EmitterWriter { dst: dst, registry: registry }
} else {
EmitterWriter { dst: Raw(box stderr), registry: registry }
}
}
pub fn new(dst: Box<Writer + Send>,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
EmitterWriter { dst: Raw(dst), registry: registry }
}
}
impl Writer for Destination {
fn write_all(&mut self, bytes: &[u8]) -> old_io::IoResult<()> {
match *self {
Terminal(ref mut t) => t.write_all(bytes),
Raw(ref mut w) => w.write_all(bytes),
}
}
}
impl Emitter for EmitterWriter {
fn emit(&mut self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level) {
let error = match cmsp {
Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
FileLine(COMMAND_LINE_SP),
msg, code, lvl, false),
Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl, false),
None => print_diagnostic(self, "", lvl, msg, code),
};
match error {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
match emit(self, cm, sp, msg, None, lvl, true) {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
}
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
msg: &str, code: Option<&str>, lvl: Level, custom: bool) -> old_io::IoResult<()> {
let sp = rsp.span();
// We cannot check equality directly with COMMAND_LINE_SP
// since PartialEq is manually implemented to ignore the ExpnId
let ss = if sp.expn_id == COMMAND_LINE_EXPN {
"<command line option>".to_string()
} else {
cm.span_to_string(sp)
};
if custom {
// we want to tell compiletest/runtest to look at the last line of the
// span (since `custom_highlight_lines` displays an arrow to the end of
// the span)
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
let ses = cm.span_to_string(span_end);
try!(print_diagnostic(dst, &ses[], lvl, msg, code));
if rsp.is_full_span() {
try!(custom_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
} else {
try!(print_diagnostic(dst, &ss[], lvl, msg, code));
if rsp.is_full_span() {
try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
}
    if sp != COMMAND_LINE_SP {
try!(print_macro_backtrace(dst, cm, sp));
}
match code {
Some(code) =>
match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
Some(_) => {
try!(print_diagnostic(dst, &ss[], Help,
&format!("pass `--explain {}` to see a detailed \
explanation", code)[], None));
}
None => ()
},
None => (),
}
Ok(())
}
fn highlight_lines(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLines) -> old_io::IoResult<()> {
let fm = &*lines.file;
let mut elided = false;
let mut display_lines = &lines.lines[];
if display_lines.len() > MAX_LINES {
display_lines = &display_lines[0..MAX_LINES];
elided = true;
}
// Print the offending lines
for &line_number in display_lines {
if let Some(line) = fm.get_line(line_number) {
try!(write!(&mut err.dst, "{}:{} {}\n", fm.name,
line_number + 1, line));
}
}
if elided {
let last_line = display_lines[display_lines.len() - 1];
let s = format!("{}:{} ", fm.name, last_line + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0;
let mut num = (lines.lines[0] + 1) / 10;
        // how many digits must we indent past?
while num > 0 { num /= 10; digits += 1; }
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
        // part of the 'filename:line' part of the previous line.
let skip = fm.name.width(false) + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0]) {
let mut col = skip;
            let mut lastc = ' ';
let mut iter = orig.chars().enumerate();
for (pos, ch) in iter.by_ref() {
lastc = ch;
if pos >= lo.col.to_usize() { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => {
col += 8 - col%8;
s.push('\t');
},
c => for _ in 0..c.width(false).unwrap_or(0) {
col += 1;
s.push(' ');
},
}
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from_str("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,
_ => lastc.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
let hi = cm.lookup_char_pos(sp.hi);
            if hi.col != lo.col {
for (pos, ch) in iter {
if pos >= hi.col.to_usize() { break; }
let count = match ch {
'\t' => 8 - col%8,
_ => ch.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
}
}
if s.len() > 1 {
// One extra squiggly is replaced by a "^"
s.pop();
}
try!(print_maybe_styled(err,
&format!("{}\n", s)[],
term::attr::ForegroundColor(lvl.color())));
}
}
Ok(())
}
/// Here are the differences between this and the normal `highlight_lines`:
/// `custom_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `custom_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
fn custom_highlight_lines(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLines)
|
{
self.handler.emit(Some((&self.cm, sp)), msg, Bug);
panic!(ExplicitBug);
}
|
identifier_body
|
diagnostic.rs
|
explicit call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
#[derive(Copy)]
pub struct ExplicitBug;
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
pub handler: Handler,
pub cm: codemap::CodeMap,
}
impl SpanHandler {
    pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
panic!(FatalError);
}
    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
panic!(FatalError);
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
self.handler.bump_err_count();
}
pub fn span_warn(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Warning);
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Note);
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FullSpan(sp), msg, Note);
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Help);
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
}
    pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Bug);
panic!(ExplicitBug);
}
    pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg)[]);
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
}
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
pub can_emit_warnings: bool
}
impl Handler {
    pub fn fatal(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Fatal);
panic!(FatalError);
}
pub fn err(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Error);
self.bump_err_count();
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
}
self.fatal(&s[]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
}
pub fn note(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Note);
}
pub fn help(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Help);
}
    pub fn bug(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Bug);
panic!(ExplicitBug);
}
    pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg)[]);
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
}
pub fn emit_with_code(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
code: &str,
lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
}
pub fn custom_emit(&self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
}
}
pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
SpanHandler {
handler: handler,
cm: cm,
}
}
pub fn default_handler(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>,
can_emit_warnings: bool) -> Handler {
mk_handler(can_emit_warnings, box EmitterWriter::stderr(color_config, registry))
}
pub fn mk_handler(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
Handler {
err_count: Cell::new(0),
emit: RefCell::new(e),
can_emit_warnings: can_emit_warnings
}
}
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
Bug,
Fatal,
Error,
Warning,
Note,
Help,
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
fn print_maybe_styled(w: &mut EmitterWriter,
msg: &str,
color: term::attr::Attr) -> old_io::IoResult<()> {
match w.dst {
Terminal(ref mut t) => {
|
// to a `LineBufferedWriter`, which means that emitting the reset
// after the newline ends up buffering the reset until we print
// another line or exit. Buffering the reset is a problem if we're
// sharing the terminal with any other programs (e.g. other rustc
// instances via `make -jN`).
//
// Note that if `msg` contains any internal newlines, this will
// result in the `LineBufferedWriter` flushing twice instead of
// once, which still leaves the opportunity for interleaved output
// to be miscolored. We assume this is rare enough that we don't
// have to worry about it.
if msg.ends_with("\n") {
try!(t.write_str(&msg[..msg.len()-1]));
try!(t.reset());
try!(t.write_str("\n"));
} else {
try!(t.write_str(msg));
try!(t.reset());
}
Ok(())
}
Raw(ref mut w) => {
w.write_str(msg)
}
}
}
fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
msg: &str, code: Option<&str>) -> old_io::IoResult<()> {
    if !topic.is_empty() {
try!(write!(&mut dst.dst, "{} ", topic));
}
try!(print_maybe_styled(dst,
&format!("{}: ", lvl.to_string())[],
term::attr::ForegroundColor(lvl.color())));
try!(print_maybe_styled(dst,
&format!("{}", msg)[],
term::attr::Bold));
match code {
Some(code) => {
let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
try!(print_maybe_styled(dst, &format!(" [{}]", code.clone())[], style));
}
None => ()
}
try!(dst.dst.write_char('\n'));
Ok(())
}
pub struct EmitterWriter {
dst: Destination,
registry: Option<diagnostics::registry::Registry>
}
enum Destination {
Terminal(Box<term::Terminal<WriterWrapper> + Send>),
Raw(Box<Writer + Send>),
}
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
let stderr = old_io::stderr();
let use_color = match color_config {
Always => true,
Never => false,
Auto => stderr.get_ref().isatty()
};
if use_color {
let dst = match term::stderr() {
Some(t) => Terminal(t),
None => Raw(box stderr),
};
EmitterWriter { dst: dst, registry: registry }
} else {
EmitterWriter { dst: Raw(box stderr), registry: registry }
}
}
pub fn new(dst: Box<Writer + Send>,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
EmitterWriter { dst: Raw(dst), registry: registry }
}
}
impl Writer for Destination {
fn write_all(&mut self, bytes: &[u8]) -> old_io::IoResult<()> {
match *self {
Terminal(ref mut t) => t.write_all(bytes),
Raw(ref mut w) => w.write_all(bytes),
}
}
}
impl Emitter for EmitterWriter {
fn emit(&mut self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level) {
let error = match cmsp {
Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
FileLine(COMMAND_LINE_SP),
msg, code, lvl, false),
Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl, false),
None => print_diagnostic(self, "", lvl, msg, code),
};
match error {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
match emit(self, cm, sp, msg, None, lvl, true) {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
}
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
msg: &str, code: Option<&str>, lvl: Level, custom: bool) -> old_io::IoResult<()> {
let sp = rsp.span();
// We cannot check equality directly with COMMAND_LINE_SP
// since PartialEq is manually implemented to ignore the ExpnId
let ss = if sp.expn_id == COMMAND_LINE_EXPN {
"<command line option>".to_string()
} else {
cm.span_to_string(sp)
};
if custom {
// we want to tell compiletest/runtest to look at the last line of the
// span (since `custom_highlight_lines` displays an arrow to the end of
// the span)
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
let ses = cm.span_to_string(span_end);
try!(print_diagnostic(dst, &ses[], lvl, msg, code));
if rsp.is_full_span() {
try!(custom_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
} else {
try!(print_diagnostic(dst, &ss[], lvl, msg, code));
if rsp.is_full_span() {
try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
}
    if sp != COMMAND_LINE_SP {
try!(print_macro_backtrace(dst, cm, sp));
}
match code {
Some(code) =>
match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
Some(_) => {
try!(print_diagnostic(dst, &ss[], Help,
&format!("pass `--explain {}` to see a detailed \
explanation", code)[], None));
}
None => ()
},
None => (),
}
Ok(())
}
fn highlight_lines(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLines) -> old_io::IoResult<()> {
let fm = &*lines.file;
let mut elided = false;
let mut display_lines = &lines.lines[];
if display_lines.len() > MAX_LINES {
display_lines = &display_lines[0..MAX_LINES];
elided = true;
}
// Print the offending lines
for &line_number in display_lines {
if let Some(line) = fm.get_line(line_number) {
try!(write!(&mut err.dst, "{}:{} {}\n", fm.name,
line_number + 1, line));
}
}
if elided {
let last_line = display_lines[display_lines.len() - 1];
let s = format!("{}:{} ", fm.name, last_line + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0;
let mut num = (lines.lines[0] + 1) / 10;
        // how many digits must we indent past?
while num > 0 { num /= 10; digits += 1; }
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
        // part of the 'filename:line' part of the previous line.
let skip = fm.name.width(false) + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0]) {
let mut col = skip;
            let mut lastc = ' ';
let mut iter = orig.chars().enumerate();
for (pos, ch) in iter.by_ref() {
lastc = ch;
if pos >= lo.col.to_usize() { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => {
col += 8 - col%8;
s.push('\t');
},
c => for _ in 0..c.width(false).unwrap_or(0) {
col += 1;
s.push(' ');
},
}
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from_str("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,
_ => lastc.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
let hi = cm.lookup_char_pos(sp.hi);
            if hi.col != lo.col {
for (pos, ch) in iter {
if pos >= hi.col.to_usize() { break; }
let count = match ch {
'\t' => 8 - col%8,
_ => ch.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
}
}
if s.len() > 1 {
// One extra squiggly is replaced by a "^"
s.pop();
}
try!(print_maybe_styled(err,
&format!("{}\n", s)[],
term::attr::ForegroundColor(lvl.color())));
}
}
Ok(())
}
/// Here are the differences between this and the normal `highlight_lines`:
/// `custom_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `custom_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
fn custom_highlight_lines(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLines)
|
try!(t.attr(color));
// If `msg` ends in a newline, we need to reset the color before
// the newline. We're making the assumption that we end up writing
|
random_line_split
|
any.rs
|
use AsLua;
use AsMutLua;
use Push;
use PushGuard;
use LuaRead;
/// Represents any value that can be stored by Lua
#[derive(Clone, Debug, PartialEq)]
pub enum AnyLuaValue {
LuaString(String),
LuaNumber(f64),
LuaBoolean(bool),
LuaArray(Vec<(AnyLuaValue, AnyLuaValue)>),
/// The "Other" element is (hopefully) temporary and will be replaced by "Function" and "Userdata".
    /// A panic! will trigger if you try to push an Other.
LuaOther
}
impl<L> Push<L> for AnyLuaValue where L: AsMutLua {
fn push_to_lua(self, lua: L) -> PushGuard<L> {
match self {
AnyLuaValue::LuaString(val) => val.push_to_lua(lua),
AnyLuaValue::LuaNumber(val) => val.push_to_lua(lua),
AnyLuaValue::LuaBoolean(val) => val.push_to_lua(lua),
AnyLuaValue::LuaArray(_val) => unimplemented!(),//val.push_to_lua(lua), // FIXME: reached recursion limit during monomorphization
            AnyLuaValue::LuaOther => panic!("can't push an AnyLuaValue of type Other")
}
}
}
impl<L> LuaRead<L> for AnyLuaValue where L: AsLua {
fn lua_read_at_position(lua: L, index: i32) -> Result<AnyLuaValue, L> {
let lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaNumber(v)),
Err(lua) => lua
};
let lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaBoolean(v)),
Err(lua) => lua
};
|
let _lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaString(v)),
Err(lua) => lua
};
/*let _lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaArray(v)),
Err(lua) => lua
};*/
Ok(AnyLuaValue::LuaOther)
}
}
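// Editor's sketch (not part of the original source). The LuaRead impl above
// tries one concrete type after another, getting the guard value back out of
// the Err branch each time so the next attempt can reuse it. The same
// "try in order, fall through on failure" shape, reduced to plain &str parsing
// (the names below are assumptions of the sketch):
#[derive(Debug, PartialEq)]
enum SketchValue {
    Number(f64),
    Boolean(bool),
    Text(String),
}
fn sketch_read(raw: &str) -> SketchValue {
    // Try the most specific representations first, just as the impl above
    // attempts LuaNumber before LuaBoolean before LuaString.
    if let Ok(n) = raw.parse::<f64>() {
        return SketchValue::Number(n);
    }
    if let Ok(b) = raw.parse::<bool>() {
        return SketchValue::Boolean(b);
    }
    SketchValue::Text(raw.to_owned())
}
// e.g. sketch_read("1.5") => Number(1.5), sketch_read("true") => Boolean(true),
// sketch_read("hello") => Text("hello").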
|
random_line_split
|
|
any.rs
|
use AsLua;
use AsMutLua;
use Push;
use PushGuard;
use LuaRead;
/// Represents any value that can be stored by Lua
#[derive(Clone, Debug, PartialEq)]
pub enum AnyLuaValue {
LuaString(String),
LuaNumber(f64),
LuaBoolean(bool),
LuaArray(Vec<(AnyLuaValue, AnyLuaValue)>),
/// The "Other" element is (hopefully) temporary and will be replaced by "Function" and "Userdata".
    /// A panic! will trigger if you try to push an Other.
LuaOther
}
impl<L> Push<L> for AnyLuaValue where L: AsMutLua {
fn push_to_lua(self, lua: L) -> PushGuard<L> {
match self {
AnyLuaValue::LuaString(val) => val.push_to_lua(lua),
AnyLuaValue::LuaNumber(val) => val.push_to_lua(lua),
AnyLuaValue::LuaBoolean(val) => val.push_to_lua(lua),
AnyLuaValue::LuaArray(_val) => unimplemented!(),//val.push_to_lua(lua), // FIXME: reached recursion limit during monomorphization
            AnyLuaValue::LuaOther => panic!("can't push an AnyLuaValue of type Other")
}
}
}
impl<L> LuaRead<L> for AnyLuaValue where L: AsLua {
fn
|
(lua: L, index: i32) -> Result<AnyLuaValue, L> {
let lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaNumber(v)),
Err(lua) => lua
};
let lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaBoolean(v)),
Err(lua) => lua
};
let _lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaString(v)),
Err(lua) => lua
};
/*let _lua = match LuaRead::lua_read_at_position(&lua, index) {
Ok(v) => return Ok(AnyLuaValue::LuaArray(v)),
Err(lua) => lua
};*/
Ok(AnyLuaValue::LuaOther)
}
}
|
lua_read_at_position
|
identifier_name
|
disk.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::{fs, io};
use std::path::{PathBuf, Path};
use std::collections::HashMap;
use time;
use {json, SafeAccount, Error};
use json::Uuid;
use super::{KeyDirectory, VaultKeyDirectory, VaultKeyDirectoryProvider, VaultKey};
use super::vault::{VAULT_FILE_NAME, VaultDiskDirectory};
const IGNORED_FILES: &'static [&'static str] = &[
"thumbs.db",
"address_book.json",
"dapps_policy.json",
"dapps_accounts.json",
"dapps_history.json",
"vault.json",
];
#[cfg(not(windows))]
fn restrict_permissions_to_owner(file_path: &Path) -> Result<(), i32> {
use std::ffi;
use libc;
let cstr = ffi::CString::new(&*file_path.to_string_lossy())
.map_err(|_| -1)?;
match unsafe { libc::chmod(cstr.as_ptr(), libc::S_IWUSR | libc::S_IRUSR) } {
0 => Ok(()),
x => Err(x),
}
}
#[cfg(windows)]
fn restrict_permissions_to_owner(_file_path: &Path) -> Result<(), i32> {
Ok(())
}
/// Root keys directory implementation
pub type RootDiskDirectory = DiskDirectory<DiskKeyFileManager>;
/// Disk directory key file manager
pub trait KeyFileManager: Send + Sync {
/// Read `SafeAccount` from given key file stream
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read;
/// Write `SafeAccount` to given key file stream
fn write<T>(&self, account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write;
}
/// Disk-based keys directory implementation
pub struct DiskDirectory<T> where T: KeyFileManager {
path: PathBuf,
key_manager: T,
}
/// Keys file manager for root keys directory
pub struct DiskKeyFileManager;
impl RootDiskDirectory {
pub fn create<P>(path: P) -> Result<Self, Error> where P: AsRef<Path> {
fs::create_dir_all(&path)?;
Ok(Self::at(path))
}
pub fn at<P>(path: P) -> Self where P: AsRef<Path> {
DiskDirectory::new(path, DiskKeyFileManager)
}
}
impl<T> DiskDirectory<T> where T: KeyFileManager {
/// Create new disk directory instance
pub fn new<P>(path: P, key_manager: T) -> Self where P: AsRef<Path> {
DiskDirectory {
path: path.as_ref().to_path_buf(),
key_manager: key_manager,
}
}
fn files(&self) -> Result<Vec<PathBuf>, Error> {
Ok(fs::read_dir(&self.path)?
.flat_map(Result::ok)
.filter(|entry| {
let metadata = entry.metadata().ok();
let file_name = entry.file_name();
let name = file_name.to_string_lossy();
// filter directories
                metadata.map_or(false, |m| !m.is_dir()) &&
// hidden files
!name.starts_with(".") &&
// other ignored files
!IGNORED_FILES.contains(&&*name)
})
.map(|entry| entry.path())
.collect::<Vec<PathBuf>>()
)
}
pub fn files_hash(&self) -> Result<u64, Error> {
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
let mut hasher = DefaultHasher::new();
let files = self.files()?;
for file in files {
hasher.write(file.to_str().unwrap_or("").as_bytes())
}
Ok(hasher.finish())
}
/// all accounts found in keys directory
fn files_content(&self) -> Result<HashMap<PathBuf, SafeAccount>, Error> {
        // it's not done using one iterator because
        // there is an issue with rustc and it takes too much time to compile
let paths = self.files()?;
Ok(paths
.into_iter()
.filter_map(|path| {
let filename = Some(path.file_name().and_then(|n| n.to_str()).expect("Keys have valid UTF8 names only.").to_owned());
fs::File::open(path.clone())
.map_err(Into::into)
.and_then(|file| self.key_manager.read(filename, file))
.map_err(|err| {
warn!("Invalid key file: {:?} ({})", path, err);
err
})
.map(|account| (path, account))
.ok()
})
.collect()
)
}
/// insert account with given file name
pub fn insert_with_filename(&self, account: SafeAccount, filename: String) -> Result<SafeAccount, Error> {
// update account filename
let original_account = account.clone();
let mut account = account;
account.filename = Some(filename.clone());
{
// Path to keyfile
let mut keyfile_path = self.path.clone();
keyfile_path.push(filename.as_str());
// save the file
let mut file = fs::File::create(&keyfile_path)?;
if let Err(err) = self.key_manager.write(original_account, &mut file).map_err(|e| Error::Custom(format!("{:?}", e))) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(err);
}
if let Err(_) = restrict_permissions_to_owner(keyfile_path.as_path()) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(Error::Io(io::Error::last_os_error()));
}
}
Ok(account)
}
    /// Get key file manager reference
pub fn key_manager(&self) -> &T {
&self.key_manager
}
}
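// Editor's sketch (not part of the original source). `files_hash` above folds
// every key-file path into a single u64 so `unique_repr` can cheaply detect
// that the directory's contents changed. The same idea, reduced to hashing an
// arbitrary list of path strings with the standard DefaultHasher:
fn sketch_paths_hash(paths: &[&str]) -> u64 {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::Hasher;
    let mut hasher = DefaultHasher::new();
    for path in paths {
        // Feed each path's bytes into the hasher; order matters, which is fine
        // because the directory listing is hashed the same way every time.
        hasher.write(path.as_bytes());
    }
    hasher.finish()
}
// e.g. sketch_paths_hash(&["a.json", "b.json"]) differs from
// sketch_paths_hash(&["a.json"]), which is all the change detection needs.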
impl<T> KeyDirectory for DiskDirectory<T> where T: KeyFileManager {
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
let accounts = self.files_content()?
.into_iter()
.map(|(_, account)| account)
.collect();
Ok(accounts)
}
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
// Disk store handles updates correctly iff filename is the same
self.insert(account)
}
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
// build file path
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
});
self.insert_with_filename(account, filename)
}
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
// enumerate all entries in keystore
// and find entry with given address
let to_remove = self.files_content()?
.into_iter()
.find(|&(_, ref acc)| acc.id == account.id && acc.address == account.address);
// remove it
match to_remove {
None => Err(Error::InvalidAccount),
Some((path, _)) => fs::remove_file(path).map_err(From::from)
}
}
fn path(&self) -> Option<&PathBuf> { Some(&self.path) }
fn as_vault_provider(&self) -> Option<&VaultKeyDirectoryProvider> {
Some(self)
}
fn unique_repr(&self) -> Result<u64, Error> {
self.files_hash()
}
}
impl<T> VaultKeyDirectoryProvider for DiskDirectory<T> where T: KeyFileManager {
|
fn create(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::create(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn open(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::at(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn list_vaults(&self) -> Result<Vec<String>, Error> {
Ok(fs::read_dir(&self.path)?
.filter_map(|e| e.ok().map(|e| e.path()))
.filter_map(|path| {
let mut vault_file_path = path.clone();
vault_file_path.push(VAULT_FILE_NAME);
if vault_file_path.is_file() {
path.file_name().and_then(|f| f.to_str()).map(|f| f.to_owned())
} else {
None
}
})
.collect())
}
fn vault_meta(&self, name: &str) -> Result<String, Error> {
VaultDiskDirectory::meta_at(&self.path, name)
}
}
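// Editor's sketch (not part of the original source). `list_vaults` above
// treats any child directory containing the vault marker file as a vault and
// returns that directory's name. A reduced std-only version of the same scan
// (the hard-coded "vault.json" marker mirrors VAULT_FILE_NAME):
fn sketch_list_vaults(root: &::std::path::Path) -> ::std::io::Result<Vec<String>> {
    let mut vaults = Vec::new();
    for entry in ::std::fs::read_dir(root)? {
        let path = entry?.path();
        // A directory counts as a vault only if the marker file sits inside it.
        if path.join("vault.json").is_file() {
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                vaults.push(name.to_owned());
            }
        }
    }
    Ok(vaults)
}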
impl KeyFileManager for DiskKeyFileManager {
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read {
let key_file = json::KeyFile::load(reader).map_err(|e| Error::Custom(format!("{:?}", e)))?;
Ok(SafeAccount::from_file(key_file, filename))
}
fn write<T>(&self, mut account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write {
// when account is moved back to root directory from vault
// => remove vault field from meta
account.meta = json::remove_vault_name_from_json_meta(&account.meta)
.map_err(|err| Error::Custom(format!("{:?}", err)))?;
let key_file: json::KeyFile = account.into();
key_file.write(writer).map_err(|e| Error::Custom(format!("{:?}", e)))
}
}
#[cfg(test)]
mod test {
extern crate tempdir;
use std::{env, fs};
use super::RootDiskDirectory;
use dir::{KeyDirectory, VaultKey};
use account::SafeAccount;
use ethkey::{Random, Generator};
use self::tempdir::TempDir;
#[test]
fn should_create_new_account() {
// given
let mut dir = env::temp_dir();
dir.push("ethstore_should_create_new_account");
let keypair = Random.generate().unwrap();
let password = "hello world";
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
// when
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
let res = directory.insert(account);
// then
        assert!(res.is_ok(), "Should save account successfully.");
assert!(res.unwrap().filename.is_some(), "Filename has been assigned.");
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_manage_vaults() {
// given
let mut dir = env::temp_dir();
dir.push("should_create_new_vault");
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
let vault_name = "vault";
let password = "password";
// then
assert!(directory.as_vault_provider().is_some());
// and when
let before_root_items_count = fs::read_dir(&dir).unwrap().count();
let vault = directory.as_vault_provider().unwrap().create(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count > before_root_items_count);
// and when
let vault = directory.as_vault_provider().unwrap().open(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count2 = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count == after_root_items_count2);
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_list_vaults() {
// given
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let vault_provider = directory.as_vault_provider().unwrap();
vault_provider.create("vault1", VaultKey::new("password1", 1)).unwrap();
vault_provider.create("vault2", VaultKey::new("password2", 1)).unwrap();
// then
let vaults = vault_provider.list_vaults().unwrap();
assert_eq!(vaults.len(), 2);
assert!(vaults.iter().any(|v| &*v == "vault1"));
assert!(vaults.iter().any(|v| &*v == "vault2"));
}
#[test]
fn hash_of_files() {
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let hash = directory.files_hash().expect("Files hash should be calculated ok");
assert_eq!(
hash,
15130871412783076140
);
let keypair = Random.generate().unwrap();
let password = "test pass";
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
directory.insert(account).expect("Account should be inserted ok");
let new_hash = directory.files_hash().expect("New files hash should be calculated ok");
        assert!(new_hash != hash, "hash of the file list should change once directory content changed");
}
}
|
random_line_split
|
|
disk.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::{fs, io};
use std::path::{PathBuf, Path};
use std::collections::HashMap;
use time;
use {json, SafeAccount, Error};
use json::Uuid;
use super::{KeyDirectory, VaultKeyDirectory, VaultKeyDirectoryProvider, VaultKey};
use super::vault::{VAULT_FILE_NAME, VaultDiskDirectory};
const IGNORED_FILES: &'static [&'static str] = &[
"thumbs.db",
"address_book.json",
"dapps_policy.json",
"dapps_accounts.json",
"dapps_history.json",
"vault.json",
];
#[cfg(not(windows))]
fn restrict_permissions_to_owner(file_path: &Path) -> Result<(), i32> {
use std::ffi;
use libc;
let cstr = ffi::CString::new(&*file_path.to_string_lossy())
.map_err(|_| -1)?;
match unsafe { libc::chmod(cstr.as_ptr(), libc::S_IWUSR | libc::S_IRUSR) } {
0 => Ok(()),
x => Err(x),
}
}
#[cfg(windows)]
fn restrict_permissions_to_owner(_file_path: &Path) -> Result<(), i32> {
Ok(())
}
/// Root keys directory implementation
pub type RootDiskDirectory = DiskDirectory<DiskKeyFileManager>;
/// Disk directory key file manager
pub trait KeyFileManager: Send + Sync {
/// Read `SafeAccount` from given key file stream
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read;
/// Write `SafeAccount` to given key file stream
fn write<T>(&self, account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write;
}
/// Disk-based keys directory implementation
pub struct DiskDirectory<T> where T: KeyFileManager {
path: PathBuf,
key_manager: T,
}
/// Keys file manager for root keys directory
pub struct DiskKeyFileManager;
impl RootDiskDirectory {
pub fn create<P>(path: P) -> Result<Self, Error> where P: AsRef<Path> {
fs::create_dir_all(&path)?;
Ok(Self::at(path))
}
pub fn at<P>(path: P) -> Self where P: AsRef<Path> {
DiskDirectory::new(path, DiskKeyFileManager)
}
}
impl<T> DiskDirectory<T> where T: KeyFileManager {
/// Create new disk directory instance
pub fn new<P>(path: P, key_manager: T) -> Self where P: AsRef<Path> {
DiskDirectory {
path: path.as_ref().to_path_buf(),
key_manager: key_manager,
}
}
fn files(&self) -> Result<Vec<PathBuf>, Error> {
Ok(fs::read_dir(&self.path)?
.flat_map(Result::ok)
.filter(|entry| {
let metadata = entry.metadata().ok();
let file_name = entry.file_name();
let name = file_name.to_string_lossy();
// filter directories
                metadata.map_or(false, |m| !m.is_dir()) &&
// hidden files
!name.starts_with(".") &&
// other ignored files
!IGNORED_FILES.contains(&&*name)
})
.map(|entry| entry.path())
.collect::<Vec<PathBuf>>()
)
}
pub fn files_hash(&self) -> Result<u64, Error> {
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
let mut hasher = DefaultHasher::new();
let files = self.files()?;
for file in files {
hasher.write(file.to_str().unwrap_or("").as_bytes())
}
Ok(hasher.finish())
}
/// all accounts found in keys directory
fn files_content(&self) -> Result<HashMap<PathBuf, SafeAccount>, Error> {
        // it's not done using one iterator because
        // there is an issue with rustc and it takes too much time to compile
let paths = self.files()?;
Ok(paths
.into_iter()
.filter_map(|path| {
let filename = Some(path.file_name().and_then(|n| n.to_str()).expect("Keys have valid UTF8 names only.").to_owned());
fs::File::open(path.clone())
.map_err(Into::into)
.and_then(|file| self.key_manager.read(filename, file))
.map_err(|err| {
warn!("Invalid key file: {:?} ({})", path, err);
err
})
.map(|account| (path, account))
.ok()
})
.collect()
)
}
/// insert account with given file name
pub fn insert_with_filename(&self, account: SafeAccount, filename: String) -> Result<SafeAccount, Error> {
// update account filename
let original_account = account.clone();
let mut account = account;
account.filename = Some(filename.clone());
{
// Path to keyfile
let mut keyfile_path = self.path.clone();
keyfile_path.push(filename.as_str());
// save the file
let mut file = fs::File::create(&keyfile_path)?;
if let Err(err) = self.key_manager.write(original_account, &mut file).map_err(|e| Error::Custom(format!("{:?}", e))) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(err);
}
if let Err(_) = restrict_permissions_to_owner(keyfile_path.as_path()) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(Error::Io(io::Error::last_os_error()));
}
}
Ok(account)
}
    /// Get key file manager reference
pub fn key_manager(&self) -> &T {
&self.key_manager
}
}
impl<T> KeyDirectory for DiskDirectory<T> where T: KeyFileManager {
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
let accounts = self.files_content()?
.into_iter()
.map(|(_, account)| account)
.collect();
Ok(accounts)
}
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
// Disk store handles updates correctly iff filename is the same
self.insert(account)
}
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
// build file path
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
});
self.insert_with_filename(account, filename)
}
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
// enumerate all entries in keystore
// and find entry with given address
let to_remove = self.files_content()?
.into_iter()
.find(|&(_, ref acc)| acc.id == account.id && acc.address == account.address);
// remove it
match to_remove {
None => Err(Error::InvalidAccount),
Some((path, _)) => fs::remove_file(path).map_err(From::from)
}
}
fn path(&self) -> Option<&PathBuf> { Some(&self.path) }
fn as_vault_provider(&self) -> Option<&VaultKeyDirectoryProvider> {
Some(self)
}
fn unique_repr(&self) -> Result<u64, Error> {
self.files_hash()
}
}
impl<T> VaultKeyDirectoryProvider for DiskDirectory<T> where T: KeyFileManager {
fn create(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::create(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn open(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::at(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn list_vaults(&self) -> Result<Vec<String>, Error> {
Ok(fs::read_dir(&self.path)?
.filter_map(|e| e.ok().map(|e| e.path()))
.filter_map(|path| {
let mut vault_file_path = path.clone();
vault_file_path.push(VAULT_FILE_NAME);
if vault_file_path.is_file() {
path.file_name().and_then(|f| f.to_str()).map(|f| f.to_owned())
} else
|
})
.collect())
}
fn vault_meta(&self, name: &str) -> Result<String, Error> {
VaultDiskDirectory::meta_at(&self.path, name)
}
}
impl KeyFileManager for DiskKeyFileManager {
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read {
let key_file = json::KeyFile::load(reader).map_err(|e| Error::Custom(format!("{:?}", e)))?;
Ok(SafeAccount::from_file(key_file, filename))
}
fn write<T>(&self, mut account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write {
// when account is moved back to root directory from vault
// => remove vault field from meta
account.meta = json::remove_vault_name_from_json_meta(&account.meta)
.map_err(|err| Error::Custom(format!("{:?}", err)))?;
let key_file: json::KeyFile = account.into();
key_file.write(writer).map_err(|e| Error::Custom(format!("{:?}", e)))
}
}
#[cfg(test)]
mod test {
extern crate tempdir;
use std::{env, fs};
use super::RootDiskDirectory;
use dir::{KeyDirectory, VaultKey};
use account::SafeAccount;
use ethkey::{Random, Generator};
use self::tempdir::TempDir;
#[test]
fn should_create_new_account() {
// given
let mut dir = env::temp_dir();
dir.push("ethstore_should_create_new_account");
let keypair = Random.generate().unwrap();
let password = "hello world";
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
// when
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
let res = directory.insert(account);
// then
        assert!(res.is_ok(), "Should save account successfully.");
assert!(res.unwrap().filename.is_some(), "Filename has been assigned.");
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_manage_vaults() {
// given
let mut dir = env::temp_dir();
dir.push("should_create_new_vault");
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
let vault_name = "vault";
let password = "password";
// then
assert!(directory.as_vault_provider().is_some());
// and when
let before_root_items_count = fs::read_dir(&dir).unwrap().count();
let vault = directory.as_vault_provider().unwrap().create(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count > before_root_items_count);
// and when
let vault = directory.as_vault_provider().unwrap().open(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count2 = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count == after_root_items_count2);
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_list_vaults() {
// given
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let vault_provider = directory.as_vault_provider().unwrap();
vault_provider.create("vault1", VaultKey::new("password1", 1)).unwrap();
vault_provider.create("vault2", VaultKey::new("password2", 1)).unwrap();
// then
let vaults = vault_provider.list_vaults().unwrap();
assert_eq!(vaults.len(), 2);
assert!(vaults.iter().any(|v| &*v == "vault1"));
assert!(vaults.iter().any(|v| &*v == "vault2"));
}
#[test]
fn hash_of_files() {
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let hash = directory.files_hash().expect("Files hash should be calculated ok");
assert_eq!(
hash,
15130871412783076140
);
let keypair = Random.generate().unwrap();
let password = "test pass";
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
directory.insert(account).expect("Account should be inserted ok");
let new_hash = directory.files_hash().expect("New files hash should be calculated ok");
        assert!(new_hash != hash, "hash of the file list should change once directory content changed");
}
}
|
{
None
}
|
conditional_block
|
disk.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::{fs, io};
use std::path::{PathBuf, Path};
use std::collections::HashMap;
use time;
use {json, SafeAccount, Error};
use json::Uuid;
use super::{KeyDirectory, VaultKeyDirectory, VaultKeyDirectoryProvider, VaultKey};
use super::vault::{VAULT_FILE_NAME, VaultDiskDirectory};
const IGNORED_FILES: &'static [&'static str] = &[
"thumbs.db",
"address_book.json",
"dapps_policy.json",
"dapps_accounts.json",
"dapps_history.json",
"vault.json",
];
#[cfg(not(windows))]
fn restrict_permissions_to_owner(file_path: &Path) -> Result<(), i32> {
use std::ffi;
use libc;
let cstr = ffi::CString::new(&*file_path.to_string_lossy())
.map_err(|_| -1)?;
match unsafe { libc::chmod(cstr.as_ptr(), libc::S_IWUSR | libc::S_IRUSR) } {
0 => Ok(()),
x => Err(x),
}
}
#[cfg(windows)]
fn restrict_permissions_to_owner(_file_path: &Path) -> Result<(), i32> {
Ok(())
}
/// Root keys directory implementation
pub type RootDiskDirectory = DiskDirectory<DiskKeyFileManager>;
/// Disk directory key file manager
pub trait KeyFileManager: Send + Sync {
/// Read `SafeAccount` from given key file stream
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read;
/// Write `SafeAccount` to given key file stream
fn write<T>(&self, account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write;
}
/// Disk-based keys directory implementation
pub struct DiskDirectory<T> where T: KeyFileManager {
path: PathBuf,
key_manager: T,
}
/// Keys file manager for root keys directory
pub struct DiskKeyFileManager;
impl RootDiskDirectory {
pub fn create<P>(path: P) -> Result<Self, Error> where P: AsRef<Path> {
fs::create_dir_all(&path)?;
Ok(Self::at(path))
}
pub fn at<P>(path: P) -> Self where P: AsRef<Path> {
DiskDirectory::new(path, DiskKeyFileManager)
}
}
impl<T> DiskDirectory<T> where T: KeyFileManager {
/// Create new disk directory instance
pub fn new<P>(path: P, key_manager: T) -> Self where P: AsRef<Path> {
DiskDirectory {
path: path.as_ref().to_path_buf(),
key_manager: key_manager,
}
}
fn files(&self) -> Result<Vec<PathBuf>, Error> {
Ok(fs::read_dir(&self.path)?
.flat_map(Result::ok)
.filter(|entry| {
let metadata = entry.metadata().ok();
let file_name = entry.file_name();
let name = file_name.to_string_lossy();
// filter directories
metadata.map_or(false, |m| !m.is_dir()) &&
// hidden files
!name.starts_with(".") &&
// other ignored files
!IGNORED_FILES.contains(&&*name)
})
.map(|entry| entry.path())
.collect::<Vec<PathBuf>>()
)
}
pub fn files_hash(&self) -> Result<u64, Error> {
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
let mut hasher = DefaultHasher::new();
let files = self.files()?;
for file in files {
hasher.write(file.to_str().unwrap_or("").as_bytes())
}
Ok(hasher.finish())
}
/// all accounts found in keys directory
fn files_content(&self) -> Result<HashMap<PathBuf, SafeAccount>, Error> {
// it's not done using one iterator because
// there is an issue with rustc and it takes too much time to compile
let paths = self.files()?;
Ok(paths
.into_iter()
.filter_map(|path| {
let filename = Some(path.file_name().and_then(|n| n.to_str()).expect("Keys have valid UTF8 names only.").to_owned());
fs::File::open(path.clone())
.map_err(Into::into)
.and_then(|file| self.key_manager.read(filename, file))
.map_err(|err| {
warn!("Invalid key file: {:?} ({})", path, err);
err
})
.map(|account| (path, account))
.ok()
})
.collect()
)
}
/// insert account with given file name
pub fn insert_with_filename(&self, account: SafeAccount, filename: String) -> Result<SafeAccount, Error> {
// update account filename
let original_account = account.clone();
let mut account = account;
account.filename = Some(filename.clone());
{
// Path to keyfile
let mut keyfile_path = self.path.clone();
keyfile_path.push(filename.as_str());
// save the file
let mut file = fs::File::create(&keyfile_path)?;
if let Err(err) = self.key_manager.write(original_account, &mut file).map_err(|e| Error::Custom(format!("{:?}", e))) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(err);
}
if let Err(_) = restrict_permissions_to_owner(keyfile_path.as_path()) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(Error::Io(io::Error::last_os_error()));
}
}
Ok(account)
}
/// Get key file manager reference
pub fn key_manager(&self) -> &T {
&self.key_manager
}
}
impl<T> KeyDirectory for DiskDirectory<T> where T: KeyFileManager {
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
let accounts = self.files_content()?
.into_iter()
.map(|(_, account)| account)
.collect();
Ok(accounts)
}
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error>
|
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
// build file path
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
});
self.insert_with_filename(account, filename)
}
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
// enumerate all entries in keystore
// and find entry with given address
let to_remove = self.files_content()?
.into_iter()
.find(|&(_, ref acc)| acc.id == account.id && acc.address == account.address);
// remove it
match to_remove {
None => Err(Error::InvalidAccount),
Some((path, _)) => fs::remove_file(path).map_err(From::from)
}
}
fn path(&self) -> Option<&PathBuf> { Some(&self.path) }
fn as_vault_provider(&self) -> Option<&VaultKeyDirectoryProvider> {
Some(self)
}
fn unique_repr(&self) -> Result<u64, Error> {
self.files_hash()
}
}
impl<T> VaultKeyDirectoryProvider for DiskDirectory<T> where T: KeyFileManager {
fn create(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::create(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn open(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::at(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn list_vaults(&self) -> Result<Vec<String>, Error> {
Ok(fs::read_dir(&self.path)?
.filter_map(|e| e.ok().map(|e| e.path()))
.filter_map(|path| {
let mut vault_file_path = path.clone();
vault_file_path.push(VAULT_FILE_NAME);
if vault_file_path.is_file() {
path.file_name().and_then(|f| f.to_str()).map(|f| f.to_owned())
} else {
None
}
})
.collect())
}
fn vault_meta(&self, name: &str) -> Result<String, Error> {
VaultDiskDirectory::meta_at(&self.path, name)
}
}
impl KeyFileManager for DiskKeyFileManager {
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read {
let key_file = json::KeyFile::load(reader).map_err(|e| Error::Custom(format!("{:?}", e)))?;
Ok(SafeAccount::from_file(key_file, filename))
}
fn write<T>(&self, mut account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write {
// when account is moved back to root directory from vault
// => remove vault field from meta
account.meta = json::remove_vault_name_from_json_meta(&account.meta)
.map_err(|err| Error::Custom(format!("{:?}", err)))?;
let key_file: json::KeyFile = account.into();
key_file.write(writer).map_err(|e| Error::Custom(format!("{:?}", e)))
}
}
#[cfg(test)]
mod test {
extern crate tempdir;
use std::{env, fs};
use super::RootDiskDirectory;
use dir::{KeyDirectory, VaultKey};
use account::SafeAccount;
use ethkey::{Random, Generator};
use self::tempdir::TempDir;
#[test]
fn should_create_new_account() {
// given
let mut dir = env::temp_dir();
dir.push("ethstore_should_create_new_account");
let keypair = Random.generate().unwrap();
let password = "hello world";
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
// when
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
let res = directory.insert(account);
// then
assert!(res.is_ok(), "Should save account successfully.");
assert!(res.unwrap().filename.is_some(), "Filename has been assigned.");
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_manage_vaults() {
// given
let mut dir = env::temp_dir();
dir.push("should_create_new_vault");
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
let vault_name = "vault";
let password = "password";
// then
assert!(directory.as_vault_provider().is_some());
// and when
let before_root_items_count = fs::read_dir(&dir).unwrap().count();
let vault = directory.as_vault_provider().unwrap().create(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count > before_root_items_count);
// and when
let vault = directory.as_vault_provider().unwrap().open(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count2 = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count == after_root_items_count2);
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_list_vaults() {
// given
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let vault_provider = directory.as_vault_provider().unwrap();
vault_provider.create("vault1", VaultKey::new("password1", 1)).unwrap();
vault_provider.create("vault2", VaultKey::new("password2", 1)).unwrap();
// then
let vaults = vault_provider.list_vaults().unwrap();
assert_eq!(vaults.len(), 2);
assert!(vaults.iter().any(|v| &*v == "vault1"));
assert!(vaults.iter().any(|v| &*v == "vault2"));
}
#[test]
fn hash_of_files() {
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let hash = directory.files_hash().expect("Files hash should be calculated ok");
assert_eq!(
hash,
15130871412783076140
);
let keypair = Random.generate().unwrap();
let password = "test pass";
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
directory.insert(account).expect("Account should be inserted ok");
let new_hash = directory.files_hash().expect("New files hash should be calculated ok");
assert!(new_hash != hash, "hash of the file list should change once directory content changed");
}
}
|
{
// Disk store handles updates correctly iff filename is the same
self.insert(account)
}
|
identifier_body
|
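The disk.rs listing above makes KeyFileManager the extension point of DiskDirectory<T>. As a minimal illustrative sketch (not part of the original source, and assuming the crate's SafeAccount, Error, io, and DiskKeyFileManager exactly as shown), a wrapper manager can delegate to DiskKeyFileManager while adding behaviour such as logging:

// Hypothetical wrapper, shown only to illustrate the trait's extension point.
// The real crate logs through the `log` macros; eprintln! is used here to keep
// the sketch free of extra dependencies.
struct LoggingKeyFileManager(DiskKeyFileManager);

impl KeyFileManager for LoggingKeyFileManager {
    fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read {
        eprintln!("reading key file {:?}", filename);
        self.0.read(filename, reader)
    }

    fn write<T>(&self, account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write {
        self.0.write(account, writer)
    }
}

// Usage sketch: a disk directory backed by the wrapper instead of the default manager.
// let dir = DiskDirectory::new("/tmp/keys", LoggingKeyFileManager(DiskKeyFileManager));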
disk.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::{fs, io};
use std::path::{PathBuf, Path};
use std::collections::HashMap;
use time;
use {json, SafeAccount, Error};
use json::Uuid;
use super::{KeyDirectory, VaultKeyDirectory, VaultKeyDirectoryProvider, VaultKey};
use super::vault::{VAULT_FILE_NAME, VaultDiskDirectory};
const IGNORED_FILES: &'static [&'static str] = &[
"thumbs.db",
"address_book.json",
"dapps_policy.json",
"dapps_accounts.json",
"dapps_history.json",
"vault.json",
];
#[cfg(not(windows))]
fn restrict_permissions_to_owner(file_path: &Path) -> Result<(), i32> {
use std::ffi;
use libc;
let cstr = ffi::CString::new(&*file_path.to_string_lossy())
.map_err(|_| -1)?;
match unsafe { libc::chmod(cstr.as_ptr(), libc::S_IWUSR | libc::S_IRUSR) } {
0 => Ok(()),
x => Err(x),
}
}
#[cfg(windows)]
fn restrict_permissions_to_owner(_file_path: &Path) -> Result<(), i32> {
Ok(())
}
/// Root keys directory implementation
pub type RootDiskDirectory = DiskDirectory<DiskKeyFileManager>;
/// Disk directory key file manager
pub trait KeyFileManager: Send + Sync {
/// Read `SafeAccount` from given key file stream
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read;
/// Write `SafeAccount` to given key file stream
fn write<T>(&self, account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write;
}
/// Disk-based keys directory implementation
pub struct DiskDirectory<T> where T: KeyFileManager {
path: PathBuf,
key_manager: T,
}
/// Keys file manager for root keys directory
pub struct DiskKeyFileManager;
impl RootDiskDirectory {
pub fn create<P>(path: P) -> Result<Self, Error> where P: AsRef<Path> {
fs::create_dir_all(&path)?;
Ok(Self::at(path))
}
pub fn at<P>(path: P) -> Self where P: AsRef<Path> {
DiskDirectory::new(path, DiskKeyFileManager)
}
}
impl<T> DiskDirectory<T> where T: KeyFileManager {
/// Create new disk directory instance
pub fn new<P>(path: P, key_manager: T) -> Self where P: AsRef<Path> {
DiskDirectory {
path: path.as_ref().to_path_buf(),
key_manager: key_manager,
}
}
fn files(&self) -> Result<Vec<PathBuf>, Error> {
Ok(fs::read_dir(&self.path)?
.flat_map(Result::ok)
.filter(|entry| {
let metadata = entry.metadata().ok();
let file_name = entry.file_name();
let name = file_name.to_string_lossy();
// filter directories
metadata.map_or(false, |m| !m.is_dir()) &&
// hidden files
!name.starts_with(".") &&
// other ignored files
!IGNORED_FILES.contains(&&*name)
})
.map(|entry| entry.path())
.collect::<Vec<PathBuf>>()
)
}
pub fn files_hash(&self) -> Result<u64, Error> {
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
let mut hasher = DefaultHasher::new();
let files = self.files()?;
for file in files {
hasher.write(file.to_str().unwrap_or("").as_bytes())
}
Ok(hasher.finish())
}
/// all accounts found in keys directory
fn files_content(&self) -> Result<HashMap<PathBuf, SafeAccount>, Error> {
// it's not done using one iterator because
// there is an issue with rustc and it takes too much time to compile
let paths = self.files()?;
Ok(paths
.into_iter()
.filter_map(|path| {
let filename = Some(path.file_name().and_then(|n| n.to_str()).expect("Keys have valid UTF8 names only.").to_owned());
fs::File::open(path.clone())
.map_err(Into::into)
.and_then(|file| self.key_manager.read(filename, file))
.map_err(|err| {
warn!("Invalid key file: {:?} ({})", path, err);
err
})
.map(|account| (path, account))
.ok()
})
.collect()
)
}
/// insert account with given file name
pub fn insert_with_filename(&self, account: SafeAccount, filename: String) -> Result<SafeAccount, Error> {
// update account filename
let original_account = account.clone();
let mut account = account;
account.filename = Some(filename.clone());
{
// Path to keyfile
let mut keyfile_path = self.path.clone();
keyfile_path.push(filename.as_str());
// save the file
let mut file = fs::File::create(&keyfile_path)?;
if let Err(err) = self.key_manager.write(original_account, &mut file).map_err(|e| Error::Custom(format!("{:?}", e))) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(err);
}
if let Err(_) = restrict_permissions_to_owner(keyfile_path.as_path()) {
drop(file);
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
return Err(Error::Io(io::Error::last_os_error()));
}
}
Ok(account)
}
/// Get key file manager reference
pub fn key_manager(&self) -> &T {
&self.key_manager
}
}
impl<T> KeyDirectory for DiskDirectory<T> where T: KeyFileManager {
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
let accounts = self.files_content()?
.into_iter()
.map(|(_, account)| account)
.collect();
Ok(accounts)
}
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
// Disk store handles updates correctly iff filename is the same
self.insert(account)
}
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
// build file path
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
});
self.insert_with_filename(account, filename)
}
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
// enumerate all entries in keystore
// and find entry with given address
let to_remove = self.files_content()?
.into_iter()
.find(|&(_, ref acc)| acc.id == account.id && acc.address == account.address);
// remove it
match to_remove {
None => Err(Error::InvalidAccount),
Some((path, _)) => fs::remove_file(path).map_err(From::from)
}
}
fn path(&self) -> Option<&PathBuf> { Some(&self.path) }
fn as_vault_provider(&self) -> Option<&VaultKeyDirectoryProvider> {
Some(self)
}
fn unique_repr(&self) -> Result<u64, Error> {
self.files_hash()
}
}
impl<T> VaultKeyDirectoryProvider for DiskDirectory<T> where T: KeyFileManager {
fn create(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::create(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn open(&self, name: &str, key: VaultKey) -> Result<Box<VaultKeyDirectory>, Error> {
let vault_dir = VaultDiskDirectory::at(&self.path, name, key)?;
Ok(Box::new(vault_dir))
}
fn list_vaults(&self) -> Result<Vec<String>, Error> {
Ok(fs::read_dir(&self.path)?
.filter_map(|e| e.ok().map(|e| e.path()))
.filter_map(|path| {
let mut vault_file_path = path.clone();
vault_file_path.push(VAULT_FILE_NAME);
if vault_file_path.is_file() {
path.file_name().and_then(|f| f.to_str()).map(|f| f.to_owned())
} else {
None
}
})
.collect())
}
fn vault_meta(&self, name: &str) -> Result<String, Error> {
VaultDiskDirectory::meta_at(&self.path, name)
}
}
impl KeyFileManager for DiskKeyFileManager {
fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error> where T: io::Read {
let key_file = json::KeyFile::load(reader).map_err(|e| Error::Custom(format!("{:?}", e)))?;
Ok(SafeAccount::from_file(key_file, filename))
}
fn write<T>(&self, mut account: SafeAccount, writer: &mut T) -> Result<(), Error> where T: io::Write {
// when account is moved back to root directory from vault
// => remove vault field from meta
account.meta = json::remove_vault_name_from_json_meta(&account.meta)
.map_err(|err| Error::Custom(format!("{:?}", err)))?;
let key_file: json::KeyFile = account.into();
key_file.write(writer).map_err(|e| Error::Custom(format!("{:?}", e)))
}
}
#[cfg(test)]
mod test {
extern crate tempdir;
use std::{env, fs};
use super::RootDiskDirectory;
use dir::{KeyDirectory, VaultKey};
use account::SafeAccount;
use ethkey::{Random, Generator};
use self::tempdir::TempDir;
#[test]
fn
|
() {
// given
let mut dir = env::temp_dir();
dir.push("ethstore_should_create_new_account");
let keypair = Random.generate().unwrap();
let password = "hello world";
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
// when
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
let res = directory.insert(account);
// then
assert!(res.is_ok(), "Should save account successfully.");
assert!(res.unwrap().filename.is_some(), "Filename has been assigned.");
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_manage_vaults() {
// given
let mut dir = env::temp_dir();
dir.push("should_create_new_vault");
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
let vault_name = "vault";
let password = "password";
// then
assert!(directory.as_vault_provider().is_some());
// and when
let before_root_items_count = fs::read_dir(&dir).unwrap().count();
let vault = directory.as_vault_provider().unwrap().create(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count > before_root_items_count);
// and when
let vault = directory.as_vault_provider().unwrap().open(vault_name, VaultKey::new(password, 1024));
// then
assert!(vault.is_ok());
let after_root_items_count2 = fs::read_dir(&dir).unwrap().count();
assert!(after_root_items_count == after_root_items_count2);
// cleanup
let _ = fs::remove_dir_all(dir);
}
#[test]
fn should_list_vaults() {
// given
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let vault_provider = directory.as_vault_provider().unwrap();
vault_provider.create("vault1", VaultKey::new("password1", 1)).unwrap();
vault_provider.create("vault2", VaultKey::new("password2", 1)).unwrap();
// then
let vaults = vault_provider.list_vaults().unwrap();
assert_eq!(vaults.len(), 2);
assert!(vaults.iter().any(|v| &*v == "vault1"));
assert!(vaults.iter().any(|v| &*v == "vault2"));
}
#[test]
fn hash_of_files() {
let temp_path = TempDir::new("").unwrap();
let directory = RootDiskDirectory::create(&temp_path).unwrap();
let hash = directory.files_hash().expect("Files hash should be calculated ok");
assert_eq!(
hash,
15130871412783076140
);
let keypair = Random.generate().unwrap();
let password = "test pass";
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
directory.insert(account).expect("Account should be inserted ok");
let new_hash = directory.files_hash().expect("New files hash should be calculated ok");
assert!(new_hash != hash, "hash of the file list should change once directory content changed");
}
}
|
should_create_new_account
|
identifier_name
|
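files_hash above (exposed through unique_repr) fingerprints the key directory by feeding the listed paths, in order, into a DefaultHasher, so callers can cheaply detect that the directory changed on disk. A standalone, std-only sketch of the same idea; the path strings are made up for illustration:

use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

// Hash an ordered list of path strings into a single u64 fingerprint.
fn listing_fingerprint(paths: &[&str]) -> u64 {
    let mut hasher = DefaultHasher::new();
    for path in paths {
        hasher.write(path.as_bytes());
    }
    hasher.finish()
}

fn main() {
    let before = listing_fingerprint(&["UTC--2017--key1"]);
    let after = listing_fingerprint(&["UTC--2017--key1", "UTC--2017--key2"]);
    // Adding a file changes the fingerprint, which is what hash_of_files asserts above.
    assert_ne!(before, after);
}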
main.rs
|
extern crate clap;
extern crate colored;
extern crate glob;
extern crate libc;
extern crate regex;
//#[macro_use]
extern crate itertools;
mod replacer;
mod operation_mode;
mod interactor;
mod arguments;
mod fs_walker;
use std::io::{BufReader, BufWriter};
use std::io::prelude::*;
use std::fs::{remove_file, rename, File};
use libc::getpid;
use colored::*;
use interactor::{ask_user, InteractionResult};
pub struct TemporaryPrepFile {
pub writer: BufWriter<File>,
filename: String,
}
impl TemporaryPrepFile {
fn new() -> TemporaryPrepFile {
let filename = Self::generate_filename();
let wf = File::create(&filename).expect("Could not create temporary file");
TemporaryPrepFile {
writer: BufWriter::new(wf),
filename: filename,
}
}
fn filename(&self) -> &str {
return &self.filename;
}
fn generate_filename() -> String
|
}
pub fn main() {
let args = arguments::parse();
let mode = if args.regex_enabled {
operation_mode::OperationMode::new_regex(&args.search_pattern).expect("Invalid regex")
} else {
operation_mode::OperationMode::new_raw(&args.search_pattern)
};
colored::control::set_override(!args.colorless);
// return;
let replacer = replacer::Replacer::new(mode.clone(), &args.replace_pattern);
if !args.ignore_stdin {
let stdin = ::std::io::stdin();
let stdin = stdin.lock();
for l in stdin.lines() {
println!("{}", replacer.replace_all(l.as_ref().unwrap()));
}
}
let walker = fs_walker::FsWalker::new(args.file_patterns, args.files);
for file in walker.iter() {
let f = File::open(file.clone());
if let Ok(f) = f {
let f = BufReader::new(f);
let mut tmp = TemporaryPrepFile::new();
let mut line_iterator = f.lines();
let mut curr = line_iterator.next();
let mut next = line_iterator.next();
let mut did_at_least_one_replacement = false;
while curr.is_some() {
let line_end = if next.is_some() { "\n" } else { "" };
let mut line = curr.unwrap().expect("Failed to read out a line?").clone();
let mut pos = 0usize;
loop {
line = if let Some(result) = replacer.replace(&line, pos) {
pos = result.position_of_replacement;
let mut should_do_replacement = args.accept_everything;
if !should_do_replacement {
match ask_user(&format!(
"Should replace:\n{}{}{}\nWith:\n{}{}{}",
result.before,
result.old.green(),
result.after,
result.before,
result.new.red(),
result.after
)) {
InteractionResult::Accept => {
should_do_replacement = true;
}
_ => {}
}
}
if should_do_replacement {
did_at_least_one_replacement = true;
format!("{}{}{}", result.before, result.new, result.after)
} else {
pos = result.before.len() + result.old.len();
format!("{}{}{}", result.before, result.old, result.after)
}
} else {
break;
}
}
write!(tmp.writer, "{}{}", line, line_end)
.expect("Failed to write line to temp file.");
curr = next;
next = line_iterator.next();
}
if did_at_least_one_replacement {
let _ = tmp.writer.flush();
let _ = rename(tmp.filename(), file);
} else {
let _ = remove_file(tmp.filename());
}
}
}
}
|
{
format!("prep_tmp_file_{}", unsafe { getpid() })
}
|
identifier_body
|
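main.rs above streams each input file into a TemporaryPrepFile and only renames it over the original once at least one replacement was accepted and the writer was flushed. A standalone sketch of that write-to-temp-then-rename pattern; the ".prep_tmp" suffix is invented for this example, the real code derives its temporary name from getpid():

use std::fs::{rename, File};
use std::io::{BufWriter, Write};

// Write the transformed contents to a temporary file first, then replace the
// original in a single rename so a failed run never truncates the source file.
fn rewrite_file(path: &str, new_contents: &str) -> std::io::Result<()> {
    let tmp_path = format!("{}.prep_tmp", path); // hypothetical suffix
    {
        let mut writer = BufWriter::new(File::create(&tmp_path)?);
        writer.write_all(new_contents.as_bytes())?;
        writer.flush()?;
    } // writer is dropped (and closed) before the rename
    rename(&tmp_path, path)
}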
main.rs
|
extern crate clap;
extern crate colored;
extern crate glob;
extern crate libc;
extern crate regex;
//#[macro_use]
extern crate itertools;
mod replacer;
mod operation_mode;
mod interactor;
mod arguments;
mod fs_walker;
use std::io::{BufReader, BufWriter};
use std::io::prelude::*;
use std::fs::{remove_file, rename, File};
use libc::getpid;
use colored::*;
use interactor::{ask_user, InteractionResult};
pub struct TemporaryPrepFile {
pub writer: BufWriter<File>,
filename: String,
}
impl TemporaryPrepFile {
fn
|
() -> TemporaryPrepFile {
let filename = Self::generate_filename();
let wf = File::create(&filename).expect("Could not create temporary file");
TemporaryPrepFile {
writer: BufWriter::new(wf),
filename: filename,
}
}
fn filename(&self) -> &str {
return &self.filename;
}
fn generate_filename() -> String {
format!("prep_tmp_file_{}", unsafe { getpid() })
}
}
pub fn main() {
let args = arguments::parse();
let mode = if args.regex_enabled {
operation_mode::OperationMode::new_regex(&args.search_pattern).expect("Invalid regex")
} else {
operation_mode::OperationMode::new_raw(&args.search_pattern)
};
colored::control::set_override(!args.colorless);
// return;
let replacer = replacer::Replacer::new(mode.clone(), &args.replace_pattern);
if !args.ignore_stdin {
let stdin = ::std::io::stdin();
let stdin = stdin.lock();
for l in stdin.lines() {
println!("{}", replacer.replace_all(l.as_ref().unwrap()));
}
}
let walker = fs_walker::FsWalker::new(args.file_patterns, args.files);
for file in walker.iter() {
let f = File::open(file.clone());
if let Ok(f) = f {
let f = BufReader::new(f);
let mut tmp = TemporaryPrepFile::new();
let mut line_iterator = f.lines();
let mut curr = line_iterator.next();
let mut next = line_iterator.next();
let mut did_at_least_one_replacement = false;
while curr.is_some() {
let line_end = if next.is_some() { "\n" } else { "" };
let mut line = curr.unwrap().expect("Failed to read out a line?").clone();
let mut pos = 0usize;
loop {
line = if let Some(result) = replacer.replace(&line, pos) {
pos = result.position_of_replacement;
let mut should_do_replacement = args.accept_everything;
if !should_do_replacement {
match ask_user(&format!(
"Should replace:\n{}{}{}\nWith:\n{}{}{}",
result.before,
result.old.green(),
result.after,
result.before,
result.new.red(),
result.after
)) {
InteractionResult::Accept => {
should_do_replacement = true;
}
_ => {}
}
}
if should_do_replacement {
did_at_least_one_replacement = true;
format!("{}{}{}", result.before, result.new, result.after)
} else {
pos = result.before.len() + result.old.len();
format!("{}{}{}", result.before, result.old, result.after)
}
} else {
break;
}
}
write!(tmp.writer, "{}{}", line, line_end)
.expect("Failed to write line to temp file.");
curr = next;
next = line_iterator.next();
}
if did_at_least_one_replacement {
let _ = tmp.writer.flush();
let _ = rename(tmp.filename(), file);
} else {
let _ = remove_file(tmp.filename());
}
}
}
}
|
new
|
identifier_name
|
main.rs
|
extern crate clap;
extern crate colored;
extern crate glob;
extern crate libc;
extern crate regex;
//#[macro_use]
extern crate itertools;
mod replacer;
mod operation_mode;
mod interactor;
mod arguments;
mod fs_walker;
use std::io::{BufReader, BufWriter};
use std::io::prelude::*;
use std::fs::{remove_file, rename, File};
use libc::getpid;
use colored::*;
use interactor::{ask_user, InteractionResult};
pub struct TemporaryPrepFile {
pub writer: BufWriter<File>,
filename: String,
}
impl TemporaryPrepFile {
fn new() -> TemporaryPrepFile {
let filename = Self::generate_filename();
let wf = File::create(&filename).expect("Could not create temporary file");
TemporaryPrepFile {
writer: BufWriter::new(wf),
filename: filename,
}
}
fn filename(&self) -> &str {
return &self.filename;
}
fn generate_filename() -> String {
format!("prep_tmp_file_{}", unsafe { getpid() })
}
}
pub fn main() {
let args = arguments::parse();
let mode = if args.regex_enabled {
operation_mode::OperationMode::new_regex(&args.search_pattern).expect("Invalid regex")
} else {
operation_mode::OperationMode::new_raw(&args.search_pattern)
};
colored::control::set_override(!args.colorless);
// return;
let replacer = replacer::Replacer::new(mode.clone(), &args.replace_pattern);
if !args.ignore_stdin {
let stdin = ::std::io::stdin();
let stdin = stdin.lock();
for l in stdin.lines() {
println!("{}", replacer.replace_all(l.as_ref().unwrap()));
}
}
let walker = fs_walker::FsWalker::new(args.file_patterns, args.files);
for file in walker.iter() {
let f = File::open(file.clone());
if let Ok(f) = f {
let f = BufReader::new(f);
let mut tmp = TemporaryPrepFile::new();
let mut line_iterator = f.lines();
let mut curr = line_iterator.next();
let mut next = line_iterator.next();
let mut did_at_least_one_replacement = false;
while curr.is_some() {
let line_end = if next.is_some() { "\n" } else { "" };
let mut line = curr.unwrap().expect("Failed to read out a line?").clone();
let mut pos = 0usize;
loop {
line = if let Some(result) = replacer.replace(&line, pos) {
pos = result.position_of_replacement;
let mut should_do_replacement = args.accept_everything;
if !should_do_replacement {
match ask_user(&format!(
"Should replace:\n{}{}{}\nWith:\n{}{}{}",
result.before,
result.old.green(),
result.after,
result.before,
result.new.red(),
result.after
)) {
InteractionResult::Accept => {
should_do_replacement = true;
}
_ => {}
}
}
if should_do_replacement {
did_at_least_one_replacement = true;
format!("{}{}{}", result.before, result.new, result.after)
} else {
pos = result.before.len() + result.old.len();
format!("{}{}{}", result.before, result.old, result.after)
}
} else {
break;
}
}
|
write!(tmp.writer, "{}{}", line, line_end)
.expect("Failed to write line to temp file.");
curr = next;
next = line_iterator.next();
}
if did_at_least_one_replacement {
let _ = tmp.writer.flush();
let _ = rename(tmp.filename(), file);
} else {
let _ = remove_file(tmp.filename());
}
}
}
}
|
random_line_split
|
|
render_all_data.rs
|
use std::collections::BTreeMap;
use time::*;
use error::Error;
use types::{
AllData, CCode, CEnumVariant, CTree, Command, KeyDefs, KmapPath, ModeName,
Modifier, Name, SeqType, ToC,
};
use output::{KmapBuilder, ModeBuilder};
use util::usize_to_u8;
impl AllData {
/// Generate and save the C code containing the keyboard firmware
/// configuration. `file_name_base` should have no extension. `.h` and
/// `.cpp` will be added to it as needed.
pub fn save_as(&self, file_name_base: &str) -> Result<(), Error>
|
/// Used for testing. The message contains a timestamp that would make the
/// same output files look different if they're only generated at different
/// times.
#[cfg(test)]
pub fn save_without_message_as(
&self,
file_name_base: &str,
) -> Result<(), Error> {
self.save_helper(file_name_base, false)
}
fn save_helper(
&self,
file_name_base: &str,
with_message: bool,
) -> Result<(), Error> {
let main_files =
self.render_main(with_message)?.format(file_name_base)?;
let early_name_base = format!("{}_early", file_name_base);
let early_files = self
.render_early_config(with_message)?
.format(&early_name_base)?;
let mut file_names =
main_files.save(&self.output_directory, file_name_base)?;
file_names.extend(
early_files.save(&self.output_directory, &early_name_base)?,
);
let file_name_list = file_names
.into_iter()
.map(|path| format!("{:?}", path))
.collect::<Vec<_>>()
.join(", ");
println!("Saved keyboard configuration to: {}", file_name_list);
Ok(())
}
/// Render C code defining any constants etc. that need to be included
/// before / separately from the main auto_config.h file.
fn render_early_config(&self, with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message {
group.push(CTree::LiteralH(autogen_message()));
}
group.push(CTree::LiteralH("#pragma once\n".to_c()));
let mut namespace = Vec::new();
namespace.extend(self.user_options.render_early());
namespace.push(KmapBuilder::render_limits());
namespace.push(self.huffman_table.render_early());
namespace.push(ModeName::render_c_enum(self.modes.keys()));
// Use the first mode enum variant as the default mode
// TODO what happens if there are no modes, so no variant with value 0,
// and this cast is invalid?
namespace.push(CTree::LiteralH(
"constexpr Mode defaultMode() { return static_cast<Mode>(0); }\n\n"
.to_c(),
));
let all_mods: Vec<_> = self
.modifier_names()
.into_iter()
.map(|name| Modifier::new(name))
.collect();
namespace.push(Modifier::render_c_enum(all_mods.iter()));
namespace.push(CTree::Define {
name: "NUM_MODIFIERS".to_c(),
value: all_mods.len().to_c(),
});
namespace.push(SeqType::render_c_enum(self.sequences.seq_types()));
group.push(CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(namespace)),
});
Ok(CTree::Group(group))
}
fn render_main(&self, with_message: bool) -> Result<CTree, Error> {
Ok(CTree::Group(vec![
intro(with_message)?,
CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(vec![
self.user_options.render(),
self.huffman_table.render()?,
self.render_modifiers()?,
Command::render_c_enum(self.commands.iter()),
self.render_modes()?,
])),
},
make_debug_macros(),
]))
}
fn render_modes(&self) -> Result<CTree, Error> {
let mut g = Vec::new();
g.push(CTree::ConstVar {
name: "MAX_KEYS_IN_SEQUENCE".to_c(),
value: usize_to_u8(self.sequences.max_seq_length())?.to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
let (tree, kmap_struct_names) = self.render_kmaps()?;
g.push(tree);
let mut mode_struct_names = Vec::new();
for (mode, info) in &self.modes {
let m = ModeBuilder {
mode_name: mode,
info,
kmap_struct_names: &kmap_struct_names,
mod_chords: self.modifier_chords(mode),
anagram_mask: self.get_anagram_mask(mode),
chord_spec: self.chord_spec.clone(),
};
let (tree, name) = m.render()?;
g.push(tree);
mode_struct_names.push(name);
}
g.push(CTree::StdArray {
name: "mode_structs".to_c(),
values: CCode::map_prepend("&", &mode_struct_names),
c_type: "const ModeStruct*".to_c(),
is_extern: true,
});
Ok(CTree::Group(g))
}
fn render_kmaps(
&self,
) -> Result<(CTree, BTreeMap<KmapPath, CCode>), Error> {
// Render all keymap structs as CTrees, and return their names
let mut kmap_struct_names = BTreeMap::new();
let mut g = Vec::new();
for (i, (kmap_name, chords)) in self.chords.iter().enumerate() {
let builder = KmapBuilder {
kmap_nickname: format!("kmap{}", i),
chord_map: chords,
seq_maps: &self.sequences,
huffman_table: &self.huffman_table,
chord_spec: self.chord_spec.clone(),
};
let (tree, kmap_struct_name) = builder.render()?;
g.push(tree);
kmap_struct_names.insert(kmap_name.to_owned(), kmap_struct_name);
}
Ok((CTree::Group(g), kmap_struct_names))
}
fn render_modifiers(&self) -> Result<CTree, Error> {
fn to_variants(mod_names: &[Name]) -> Vec<CCode> {
mod_names
.iter()
.map(|name| Modifier::new(name).qualified_enum_variant())
.collect()
}
let mut group = Vec::new();
group.push(CTree::ConstVar {
name: "MAX_ANAGRAM_NUM".to_c(),
value: self.chords.max_anagram_num().to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "word_mods".to_c(),
values: to_variants(&self.word_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mods".to_c(),
values: to_variants(&self.plain_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "anagram_mods".to_c(),
values: to_variants(&self.anagram_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "anagram_mod_numbers".to_c(),
values: self
.get_anagram_mod_numbers()?
.iter()
.map(|num| num.to_c())
.collect(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mod_keys".to_c(),
values: self.get_plain_mod_codes()?,
c_type: "uint8_t".to_c(),
is_extern: true,
});
Ok(CTree::Group(group))
}
fn get_plain_mod_codes(&self) -> Result<Vec<CCode>, Error> {
// TODO this should be easier...
self.plain_mods
.iter()
.map(|name| {
Ok(self
.sequences
.get_seq_of_any_type(name)?
.lone_keypress()?
.format_mods())
})
.collect()
}
}
/// Generate the top of the main auto_config files. If `with_message` is true,
/// add a commented message about when the file was autogenerated.
fn intro(with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message {
let msg = autogen_message();
group.push(CTree::LiteralC(msg.clone()));
group.push(CTree::LiteralH(msg));
}
group.push(CTree::LiteralH("#pragma once\n".to_c()));
group.push(CTree::IncludeSelf);
group.push(CTree::IncludeH {
path: "<Arduino.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "<stdint.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "\"config_types.h\"".to_c(),
});
group.push(CTree::LiteralH(
"typedef void (*voidFuncPtr)(void);\n".to_c(),
));
group.push(render_keycode_definitions());
Ok(CTree::Group(group))
}
fn autogen_message() -> CCode {
const AUTHOR: &str = "pipit-keyboard";
let mut s = format!(
"/**\n * Automatically generated by {} on: {}\n",
AUTHOR,
now().strftime("%c").unwrap()
);
s += " * Do not make changes here, they will be overwritten.\n */\n\n";
s.to_c()
}
fn make_debug_macros() -> CTree {
// TODO clean up debug macros
// TODO use Define variant instead
let mut s = String::new();
s += "\n#if DEBUG_MESSAGES == 0\n";
s += " #define DEBUG1(msg)\n";
s += " #define DEBUG1_LN(msg)\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += "#else\n";
s += " #define ENABLE_SERIAL_DEBUG\n";
s += " #include <Arduino.h>\n";
s += " #define DEBUG1(msg) Serial.print(msg)\n";
s += " #define DEBUG1_LN(msg) Serial.println(msg)\n";
s += " #if DEBUG_MESSAGES == 1\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += " #else\n";
s += " #define DEBUG2(msg) Serial.print(msg)\n";
s += " #define DEBUG2_LN(msg) Serial.println(msg)\n";
s += " #endif\n\n";
s += "#endif\n\n";
CTree::LiteralH(CCode(s))
}
fn render_keycode_definitions() -> CTree {
let keycodes = KeyDefs::scancode_table();
let example = keycodes
.keys()
.nth(0)
.expect("KeyDefs::scancode_table() is empty!")
.to_owned();
let keycode_definitions = CTree::Group(
keycodes
.iter()
.map(|(&name, &value)| CTree::Define {
name: name.to_owned(),
value: value.to_c(),
})
.collect(),
);
CTree::Ifndef {
conditional: example.to_owned(),
contents: Box::new(keycode_definitions),
}
}
|
{
self.save_helper(file_name_base, true)
}
|
identifier_body
|
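render_all_data.rs above assembles the firmware configuration as a tree of CTree nodes (groups, defines, namespaces, arrays) and formats the tree into .h/.cpp text only at the end. The following is an intentionally simplified, standalone illustration of that render pattern; MiniTree is invented for this sketch and is not the crate's CTree:

// A tiny tree of C snippets rendered to header text, mirroring how
// render_early_config and render_main build nodes before formatting.
enum MiniTree {
    Define { name: String, value: String },
    Namespace { name: String, contents: Vec<MiniTree> },
    Literal(String),
}

fn render(tree: &MiniTree) -> String {
    match tree {
        MiniTree::Define { name, value } => format!("#define {} {}\n", name, value),
        MiniTree::Literal(s) => s.clone(),
        MiniTree::Namespace { name, contents } => {
            let body: String = contents.iter().map(render).collect();
            format!("namespace {} {{\n{}}}\n", name, body)
        }
    }
}

fn main() {
    let tree = MiniTree::Namespace {
        name: "conf".to_string(),
        contents: vec![
            MiniTree::Define { name: "NUM_MODIFIERS".to_string(), value: "4".to_string() },
            MiniTree::Literal("constexpr int defaultMode() { return 0; }\n".to_string()),
        ],
    };
    print!("{}", render(&tree));
}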
render_all_data.rs
|
use std::collections::BTreeMap;
use time::*;
use error::Error;
use types::{
AllData, CCode, CEnumVariant, CTree, Command, KeyDefs, KmapPath, ModeName,
Modifier, Name, SeqType, ToC,
};
use output::{KmapBuilder, ModeBuilder};
use util::usize_to_u8;
impl AllData {
/// Generate and save the C code containing the keyboard firmware
/// configuration. `file_name_base` should have no extension. `.h` and
/// `.cpp` will be added to it as needed.
pub fn save_as(&self, file_name_base: &str) -> Result<(), Error> {
self.save_helper(file_name_base, true)
}
/// Used for testing. The message contains a timestamp that would make the
/// same output files look different if they're only generated at different
/// times.
#[cfg(test)]
pub fn save_without_message_as(
&self,
file_name_base: &str,
) -> Result<(), Error> {
self.save_helper(file_name_base, false)
}
fn save_helper(
&self,
file_name_base: &str,
with_message: bool,
) -> Result<(), Error> {
let main_files =
self.render_main(with_message)?.format(file_name_base)?;
let early_name_base = format!("{}_early", file_name_base);
let early_files = self
.render_early_config(with_message)?
.format(&early_name_base)?;
let mut file_names =
main_files.save(&self.output_directory, file_name_base)?;
file_names.extend(
early_files.save(&self.output_directory, &early_name_base)?,
);
let file_name_list = file_names
.into_iter()
.map(|path| format!("{:?}", path))
.collect::<Vec<_>>()
.join(", ");
println!("Saved keyboard configuration to: {}", file_name_list);
Ok(())
}
/// Render C code defining any constants etc. that need to be included
/// before / separately from the main auto_config.h file.
fn render_early_config(&self, with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message {
group.push(CTree::LiteralH(autogen_message()));
}
group.push(CTree::LiteralH("#pragma once\n".to_c()));
let mut namespace = Vec::new();
namespace.extend(self.user_options.render_early());
namespace.push(KmapBuilder::render_limits());
namespace.push(self.huffman_table.render_early());
namespace.push(ModeName::render_c_enum(self.modes.keys()));
// Use the first mode enum variant as the default mode
// TODO what happens if there are no modes, so no variant with value 0,
// and this cast is invalid?
namespace.push(CTree::LiteralH(
"constexpr Mode defaultMode() { return static_cast<Mode>(0); }\n\n"
.to_c(),
));
let all_mods: Vec<_> = self
.modifier_names()
.into_iter()
.map(|name| Modifier::new(name))
.collect();
namespace.push(Modifier::render_c_enum(all_mods.iter()));
namespace.push(CTree::Define {
name: "NUM_MODIFIERS".to_c(),
value: all_mods.len().to_c(),
});
namespace.push(SeqType::render_c_enum(self.sequences.seq_types()));
group.push(CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(namespace)),
});
Ok(CTree::Group(group))
}
fn render_main(&self, with_message: bool) -> Result<CTree, Error> {
Ok(CTree::Group(vec![
intro(with_message)?,
CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(vec![
self.user_options.render(),
self.huffman_table.render()?,
self.render_modifiers()?,
Command::render_c_enum(self.commands.iter()),
self.render_modes()?,
])),
},
make_debug_macros(),
]))
}
fn render_modes(&self) -> Result<CTree, Error> {
let mut g = Vec::new();
g.push(CTree::ConstVar {
name: "MAX_KEYS_IN_SEQUENCE".to_c(),
value: usize_to_u8(self.sequences.max_seq_length())?.to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
let (tree, kmap_struct_names) = self.render_kmaps()?;
g.push(tree);
let mut mode_struct_names = Vec::new();
for (mode, info) in &self.modes {
let m = ModeBuilder {
mode_name: mode,
info,
kmap_struct_names: &kmap_struct_names,
mod_chords: self.modifier_chords(mode),
anagram_mask: self.get_anagram_mask(mode),
chord_spec: self.chord_spec.clone(),
};
let (tree, name) = m.render()?;
g.push(tree);
mode_struct_names.push(name);
}
g.push(CTree::StdArray {
name: "mode_structs".to_c(),
values: CCode::map_prepend("&", &mode_struct_names),
c_type: "const ModeStruct*".to_c(),
is_extern: true,
});
Ok(CTree::Group(g))
}
fn render_kmaps(
&self,
) -> Result<(CTree, BTreeMap<KmapPath, CCode>), Error> {
// Render all keymap structs as CTrees, and return their names
let mut kmap_struct_names = BTreeMap::new();
let mut g = Vec::new();
for (i, (kmap_name, chords)) in self.chords.iter().enumerate() {
let builder = KmapBuilder {
kmap_nickname: format!("kmap{}", i),
chord_map: chords,
seq_maps: &self.sequences,
huffman_table: &self.huffman_table,
chord_spec: self.chord_spec.clone(),
};
let (tree, kmap_struct_name) = builder.render()?;
g.push(tree);
kmap_struct_names.insert(kmap_name.to_owned(), kmap_struct_name);
}
Ok((CTree::Group(g), kmap_struct_names))
}
fn render_modifiers(&self) -> Result<CTree, Error> {
fn to_variants(mod_names: &[Name]) -> Vec<CCode> {
mod_names
.iter()
.map(|name| Modifier::new(name).qualified_enum_variant())
.collect()
}
let mut group = Vec::new();
group.push(CTree::ConstVar {
name: "MAX_ANAGRAM_NUM".to_c(),
value: self.chords.max_anagram_num().to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "word_mods".to_c(),
values: to_variants(&self.word_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mods".to_c(),
values: to_variants(&self.plain_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "anagram_mods".to_c(),
values: to_variants(&self.anagram_mods),
c_type: Modifier::enum_type(),
is_extern: true,
|
values: self
.get_anagram_mod_numbers()?
.iter()
.map(|num| num.to_c())
.collect(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mod_keys".to_c(),
values: self.get_plain_mod_codes()?,
c_type: "uint8_t".to_c(),
is_extern: true,
});
Ok(CTree::Group(group))
}
fn get_plain_mod_codes(&self) -> Result<Vec<CCode>, Error> {
// TODO this should be easier...
self.plain_mods
.iter()
.map(|name| {
Ok(self
.sequences
.get_seq_of_any_type(name)?
.lone_keypress()?
.format_mods())
})
.collect()
}
}
/// Generate the top of the main auto_config files. If `with_message` is true,
/// add a commented message about when the file was autogenerated.
fn intro(with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message {
let msg = autogen_message();
group.push(CTree::LiteralC(msg.clone()));
group.push(CTree::LiteralH(msg));
}
group.push(CTree::LiteralH("#pragma once\n".to_c()));
group.push(CTree::IncludeSelf);
group.push(CTree::IncludeH {
path: "<Arduino.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "<stdint.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "\"config_types.h\"".to_c(),
});
group.push(CTree::LiteralH(
"typedef void (*voidFuncPtr)(void);\n".to_c(),
));
group.push(render_keycode_definitions());
Ok(CTree::Group(group))
}
fn autogen_message() -> CCode {
const AUTHOR: &str = "pipit-keyboard";
let mut s = format!(
"/**\n * Automatically generated by {} on: {}\n",
AUTHOR,
now().strftime("%c").unwrap()
);
s += " * Do not make changes here, they will be overwritten.\n */\n\n";
s.to_c()
}
fn make_debug_macros() -> CTree {
// TODO clean up debug macros
// TODO use Define variant instead
let mut s = String::new();
s += "\n#if DEBUG_MESSAGES == 0\n";
s += " #define DEBUG1(msg)\n";
s += " #define DEBUG1_LN(msg)\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += "#else\n";
s += " #define ENABLE_SERIAL_DEBUG\n";
s += " #include <Arduino.h>\n";
s += " #define DEBUG1(msg) Serial.print(msg)\n";
s += " #define DEBUG1_LN(msg) Serial.println(msg)\n";
s += " #if DEBUG_MESSAGES == 1\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += " #else\n";
s += " #define DEBUG2(msg) Serial.print(msg)\n";
s += " #define DEBUG2_LN(msg) Serial.println(msg)\n";
s += " #endif\n\n";
s += "#endif\n\n";
CTree::LiteralH(CCode(s))
}
fn render_keycode_definitions() -> CTree {
let keycodes = KeyDefs::scancode_table();
let example = keycodes
.keys()
.nth(0)
.expect("KeyDefs::scancode_table() is empty!")
.to_owned();
let keycode_definitions = CTree::Group(
keycodes
.iter()
.map(|(&name, &value)| CTree::Define {
name: name.to_owned(),
value: value.to_c(),
})
.collect(),
);
CTree::Ifndef {
conditional: example.to_owned(),
contents: Box::new(keycode_definitions),
}
}
|
});
group.push(CTree::StdArray {
name: "anagram_mod_numbers".to_c(),
|
random_line_split
|
render_all_data.rs
|
use std::collections::BTreeMap;
use time::*;
use error::Error;
use types::{
AllData, CCode, CEnumVariant, CTree, Command, KeyDefs, KmapPath, ModeName,
Modifier, Name, SeqType, ToC,
};
use output::{KmapBuilder, ModeBuilder};
use util::usize_to_u8;
impl AllData {
/// Generate and save the C code containing the keyboard firmware
/// configuration. `file_name_base` should have no extension. `.h` and
/// `.cpp` will be added to it as needed.
pub fn
|
(&self, file_name_base: &str) -> Result<(), Error> {
self.save_helper(file_name_base, true)
}
/// Used for testing. The message contains a timestamp that would make the
/// same output files look different if they're only generated at different
/// times.
#[cfg(test)]
pub fn save_without_message_as(
&self,
file_name_base: &str,
) -> Result<(), Error> {
self.save_helper(file_name_base, false)
}
fn save_helper(
&self,
file_name_base: &str,
with_message: bool,
) -> Result<(), Error> {
let main_files =
self.render_main(with_message)?.format(file_name_base)?;
let early_name_base = format!("{}_early", file_name_base);
let early_files = self
.render_early_config(with_message)?
.format(&early_name_base)?;
let mut file_names =
main_files.save(&self.output_directory, file_name_base)?;
file_names.extend(
early_files.save(&self.output_directory, &early_name_base)?,
);
let file_name_list = file_names
.into_iter()
.map(|path| format!("{:?}", path))
.collect::<Vec<_>>()
.join(", ");
println!("Saved keyboard configuration to: {}", file_name_list);
Ok(())
}
/// Render C code defining any constants etc. that need to be included
/// before / separately from the main auto_config.h file.
fn render_early_config(&self, with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message {
group.push(CTree::LiteralH(autogen_message()));
}
group.push(CTree::LiteralH("#pragma once\n".to_c()));
let mut namespace = Vec::new();
namespace.extend(self.user_options.render_early());
namespace.push(KmapBuilder::render_limits());
namespace.push(self.huffman_table.render_early());
namespace.push(ModeName::render_c_enum(self.modes.keys()));
// Use the first mode enum variant as the default mode
// TODO what happens if there are no modes, so no variant with value 0,
// and this cast is invalid?
namespace.push(CTree::LiteralH(
"constexpr Mode defaultMode() { return static_cast<Mode>(0); }\n\n"
.to_c(),
));
let all_mods: Vec<_> = self
.modifier_names()
.into_iter()
.map(|name| Modifier::new(name))
.collect();
namespace.push(Modifier::render_c_enum(all_mods.iter()));
namespace.push(CTree::Define {
name: "NUM_MODIFIERS".to_c(),
value: all_mods.len().to_c(),
});
namespace.push(SeqType::render_c_enum(self.sequences.seq_types()));
group.push(CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(namespace)),
});
Ok(CTree::Group(group))
}
fn render_main(&self, with_message: bool) -> Result<CTree, Error> {
Ok(CTree::Group(vec![
intro(with_message)?,
CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(vec![
self.user_options.render(),
self.huffman_table.render()?,
self.render_modifiers()?,
Command::render_c_enum(self.commands.iter()),
self.render_modes()?,
])),
},
make_debug_macros(),
]))
}
fn render_modes(&self) -> Result<CTree, Error> {
let mut g = Vec::new();
g.push(CTree::ConstVar {
name: "MAX_KEYS_IN_SEQUENCE".to_c(),
value: usize_to_u8(self.sequences.max_seq_length())?.to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
let (tree, kmap_struct_names) = self.render_kmaps()?;
g.push(tree);
let mut mode_struct_names = Vec::new();
for (mode, info) in &self.modes {
let m = ModeBuilder {
mode_name: mode,
info,
kmap_struct_names: &kmap_struct_names,
mod_chords: self.modifier_chords(mode),
anagram_mask: self.get_anagram_mask(mode),
chord_spec: self.chord_spec.clone(),
};
let (tree, name) = m.render()?;
g.push(tree);
mode_struct_names.push(name);
}
g.push(CTree::StdArray {
name: "mode_structs".to_c(),
values: CCode::map_prepend("&", &mode_struct_names),
c_type: "const ModeStruct*".to_c(),
is_extern: true,
});
Ok(CTree::Group(g))
}
fn render_kmaps(
&self,
) -> Result<(CTree, BTreeMap<KmapPath, CCode>), Error> {
// Render all keymap structs as CTrees, and return their names
let mut kmap_struct_names = BTreeMap::new();
let mut g = Vec::new();
for (i, (kmap_name, chords)) in self.chords.iter().enumerate() {
let builder = KmapBuilder {
kmap_nickname: format!("kmap{}", i),
chord_map: chords,
seq_maps: &self.sequences,
huffman_table: &self.huffman_table,
chord_spec: self.chord_spec.clone(),
};
let (tree, kmap_struct_name) = builder.render()?;
g.push(tree);
kmap_struct_names.insert(kmap_name.to_owned(), kmap_struct_name);
}
Ok((CTree::Group(g), kmap_struct_names))
}
fn render_modifiers(&self) -> Result<CTree, Error> {
fn to_variants(mod_names: &[Name]) -> Vec<CCode> {
mod_names
.iter()
.map(|name| Modifier::new(name).qualified_enum_variant())
.collect()
}
let mut group = Vec::new();
group.push(CTree::ConstVar {
name: "MAX_ANAGRAM_NUM".to_c(),
value: self.chords.max_anagram_num().to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "word_mods".to_c(),
values: to_variants(&self.word_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mods".to_c(),
values: to_variants(&self.plain_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "anagram_mods".to_c(),
values: to_variants(&self.anagram_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "anagram_mod_numbers".to_c(),
values: self
.get_anagram_mod_numbers()?
.iter()
.map(|num| num.to_c())
.collect(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mod_keys".to_c(),
values: self.get_plain_mod_codes()?,
c_type: "uint8_t".to_c(),
is_extern: true,
});
Ok(CTree::Group(group))
}
fn get_plain_mod_codes(&self) -> Result<Vec<CCode>, Error> {
// TODO this should be easier...
self.plain_mods
.iter()
.map(|name| {
Ok(self
.sequences
.get_seq_of_any_type(name)?
.lone_keypress()?
.format_mods())
})
.collect()
}
}
/// Generate the top of the main auto_config files. If `with_message` is true,
/// add a commented message about when the file was autogenerated.
fn intro(with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message {
let msg = autogen_message();
group.push(CTree::LiteralC(msg.clone()));
group.push(CTree::LiteralH(msg));
}
group.push(CTree::LiteralH("#pragma once\n".to_c()));
group.push(CTree::IncludeSelf);
group.push(CTree::IncludeH {
path: "<Arduino.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "<stdint.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "\"config_types.h\"".to_c(),
});
group.push(CTree::LiteralH(
"typedef void (*voidFuncPtr)(void);\n".to_c(),
));
group.push(render_keycode_definitions());
Ok(CTree::Group(group))
}
fn autogen_message() -> CCode {
const AUTHOR: &str = "pipit-keyboard";
let mut s = format!(
"/**\n * Automatically generated by {} on: {}\n",
AUTHOR,
now().strftime("%c").unwrap()
);
s += " * Do not make changes here, they will be overwritten.\n */\n\n";
s.to_c()
}
fn make_debug_macros() -> CTree {
// TODO clean up debug macros
// TODO use Define variant instead
let mut s = String::new();
s += "\n#if DEBUG_MESSAGES == 0\n";
s += " #define DEBUG1(msg)\n";
s += " #define DEBUG1_LN(msg)\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += "#else\n";
s += " #define ENABLE_SERIAL_DEBUG\n";
s += " #include <Arduino.h>\n";
s += " #define DEBUG1(msg) Serial.print(msg)\n";
s += " #define DEBUG1_LN(msg) Serial.println(msg)\n";
s += " #if DEBUG_MESSAGES == 1\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += " #else\n";
s += " #define DEBUG2(msg) Serial.print(msg)\n";
s += " #define DEBUG2_LN(msg) Serial.println(msg)\n";
s += " #endif\n\n";
s += "#endif\n\n";
CTree::LiteralH(CCode(s))
}
fn render_keycode_definitions() -> CTree {
let keycodes = KeyDefs::scancode_table();
let example = keycodes
.keys()
.nth(0)
.expect("KeyDefs::scancode_table() is empty!")
.to_owned();
let keycode_definitions = CTree::Group(
keycodes
.iter()
.map(|(&name, &value)| CTree::Define {
name: name.to_owned(),
value: value.to_c(),
})
.collect(),
);
CTree::Ifndef {
conditional: example.to_owned(),
contents: Box::new(keycode_definitions),
}
}
|
save_as
|
identifier_name
|
render_all_data.rs
|
use std::collections::BTreeMap;
use time::*;
use error::Error;
use types::{
AllData, CCode, CEnumVariant, CTree, Command, KeyDefs, KmapPath, ModeName,
Modifier, Name, SeqType, ToC,
};
use output::{KmapBuilder, ModeBuilder};
use util::usize_to_u8;
impl AllData {
/// Generate and save the C code containing the keyboard firmware
/// configuration. `file_name_base` should have no extension. `.h` and
/// `.cpp` will be added to it as needed.
pub fn save_as(&self, file_name_base: &str) -> Result<(), Error> {
self.save_helper(file_name_base, true)
}
    /// Used for testing. Skips the autogen message, whose timestamp would make
    /// otherwise-identical output files look different when they're generated
    /// at different times.
#[cfg(test)]
pub fn save_without_message_as(
&self,
file_name_base: &str,
) -> Result<(), Error> {
self.save_helper(file_name_base, false)
}
fn save_helper(
&self,
file_name_base: &str,
with_message: bool,
) -> Result<(), Error> {
let main_files =
self.render_main(with_message)?.format(file_name_base)?;
let early_name_base = format!("{}_early", file_name_base);
let early_files = self
.render_early_config(with_message)?
.format(&early_name_base)?;
let mut file_names =
main_files.save(&self.output_directory, file_name_base)?;
file_names.extend(
early_files.save(&self.output_directory, &early_name_base)?,
);
let file_name_list = file_names
.into_iter()
.map(|path| format!("{:?}", path))
.collect::<Vec<_>>()
.join(", ");
println!("Saved keyboard configuration to: {}", file_name_list);
Ok(())
}
    /// Render C code defining any constants etc. that need to be included
/// before / separately from the main auto_config.h file.
fn render_early_config(&self, with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message {
group.push(CTree::LiteralH(autogen_message()));
}
group.push(CTree::LiteralH("#pragma once\n".to_c()));
let mut namespace = Vec::new();
namespace.extend(self.user_options.render_early());
namespace.push(KmapBuilder::render_limits());
namespace.push(self.huffman_table.render_early());
namespace.push(ModeName::render_c_enum(self.modes.keys()));
// Use the first mode enum variant as the default mode
// TODO what happens if there are no modes, so no variant with value 0,
// and this cast is invalid?
namespace.push(CTree::LiteralH(
"constexpr Mode defaultMode() { return static_cast<Mode>(0); }\n\n"
.to_c(),
));
let all_mods: Vec<_> = self
.modifier_names()
.into_iter()
.map(|name| Modifier::new(name))
.collect();
namespace.push(Modifier::render_c_enum(all_mods.iter()));
namespace.push(CTree::Define {
name: "NUM_MODIFIERS".to_c(),
value: all_mods.len().to_c(),
});
namespace.push(SeqType::render_c_enum(self.sequences.seq_types()));
group.push(CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(namespace)),
});
Ok(CTree::Group(group))
}
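    // A hedged, standalone sketch of the guard the TODO in render_early_config
    // asks about: if there are no modes at all, there is no enum variant with
    // value 0, so the generated `static_cast<Mode>(0)` would be meaningless.
    // This helper is hypothetical and not called anywhere; the real code would
    // likely return its own `Error` type rather than a `String`.
    fn check_default_mode_exists(mode_count: usize) -> Result<(), String> {
        if mode_count == 0 {
            Err("no modes defined, so there is no valid default Mode variant"
                .to_string())
        } else {
            Ok(())
        }
    }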
fn render_main(&self, with_message: bool) -> Result<CTree, Error> {
Ok(CTree::Group(vec![
intro(with_message)?,
CTree::Namespace {
name: "conf".to_c(),
contents: Box::new(CTree::Group(vec![
self.user_options.render(),
self.huffman_table.render()?,
self.render_modifiers()?,
Command::render_c_enum(self.commands.iter()),
self.render_modes()?,
])),
},
make_debug_macros(),
]))
}
fn render_modes(&self) -> Result<CTree, Error> {
let mut g = Vec::new();
g.push(CTree::ConstVar {
name: "MAX_KEYS_IN_SEQUENCE".to_c(),
value: usize_to_u8(self.sequences.max_seq_length())?.to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
let (tree, kmap_struct_names) = self.render_kmaps()?;
g.push(tree);
let mut mode_struct_names = Vec::new();
for (mode, info) in &self.modes {
let m = ModeBuilder {
mode_name: mode,
info,
kmap_struct_names: &kmap_struct_names,
mod_chords: self.modifier_chords(mode),
anagram_mask: self.get_anagram_mask(mode),
chord_spec: self.chord_spec.clone(),
};
let (tree, name) = m.render()?;
g.push(tree);
mode_struct_names.push(name);
}
g.push(CTree::StdArray {
name: "mode_structs".to_c(),
values: CCode::map_prepend("&", &mode_struct_names),
c_type: "const ModeStruct*".to_c(),
is_extern: true,
});
Ok(CTree::Group(g))
}
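    // A hedged sketch of what `CCode::map_prepend("&", ...)` above presumably
    // produces: one "&<struct name>" entry per mode, so the generated array
    // holds pointers to the mode structs. This standalone helper works on
    // plain strings and is illustrative only.
    fn prepend_addresses_sketch(names: &[String]) -> Vec<String> {
        names.iter().map(|name| format!("&{}", name)).collect()
    }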
fn render_kmaps(
&self,
) -> Result<(CTree, BTreeMap<KmapPath, CCode>), Error> {
// Render all keymap structs as CTrees, and return their names
let mut kmap_struct_names = BTreeMap::new();
let mut g = Vec::new();
for (i, (kmap_name, chords)) in self.chords.iter().enumerate() {
let builder = KmapBuilder {
kmap_nickname: format!("kmap{}", i),
chord_map: chords,
seq_maps: &self.sequences,
huffman_table: &self.huffman_table,
chord_spec: self.chord_spec.clone(),
};
let (tree, kmap_struct_name) = builder.render()?;
g.push(tree);
kmap_struct_names.insert(kmap_name.to_owned(), kmap_struct_name);
}
Ok((CTree::Group(g), kmap_struct_names))
}
fn render_modifiers(&self) -> Result<CTree, Error> {
fn to_variants(mod_names: &[Name]) -> Vec<CCode> {
mod_names
.iter()
.map(|name| Modifier::new(name).qualified_enum_variant())
.collect()
}
let mut group = Vec::new();
group.push(CTree::ConstVar {
name: "MAX_ANAGRAM_NUM".to_c(),
value: self.chords.max_anagram_num().to_c(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "word_mods".to_c(),
values: to_variants(&self.word_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mods".to_c(),
values: to_variants(&self.plain_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "anagram_mods".to_c(),
values: to_variants(&self.anagram_mods),
c_type: Modifier::enum_type(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "anagram_mod_numbers".to_c(),
values: self
.get_anagram_mod_numbers()?
.iter()
.map(|num| num.to_c())
.collect(),
c_type: "uint8_t".to_c(),
is_extern: true,
});
group.push(CTree::StdArray {
name: "plain_mod_keys".to_c(),
values: self.get_plain_mod_codes()?,
c_type: "uint8_t".to_c(),
is_extern: true,
});
Ok(CTree::Group(group))
}
fn get_plain_mod_codes(&self) -> Result<Vec<CCode>, Error> {
// TODO this should be easier...
self.plain_mods
.iter()
.map(|name| {
Ok(self
.sequences
.get_seq_of_any_type(name)?
.lone_keypress()?
.format_mods())
})
.collect()
}
}
/// Generate the top of the main auto_config files. If `with_message` is true,
/// add a commented message about when the file was autogenerated.
fn intro(with_message: bool) -> Result<CTree, Error> {
let mut group = Vec::new();
if with_message
|
group.push(CTree::LiteralH("#pragma once\n".to_c()));
group.push(CTree::IncludeSelf);
group.push(CTree::IncludeH {
path: "<Arduino.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "<stdint.h>".to_c(),
});
group.push(CTree::IncludeH {
path: "\"config_types.h\"".to_c(),
});
group.push(CTree::LiteralH(
"typedef void (*voidFuncPtr)(void);\n".to_c(),
));
group.push(render_keycode_definitions());
Ok(CTree::Group(group))
}
fn autogen_message() -> CCode {
const AUTHOR: &str = "pipit-keyboard";
let mut s = format!(
"/**\n * Automatically generated by {} on: {}\n",
AUTHOR,
now().strftime("%c").unwrap()
);
s += " * Do not make changes here, they will be overwritten.\n */\n\n";
s.to_c()
}
fn make_debug_macros() -> CTree {
// TODO clean up debug macros
// TODO use Define variant instead
let mut s = String::new();
s += "\n#if DEBUG_MESSAGES == 0\n";
s += " #define DEBUG1(msg)\n";
s += " #define DEBUG1_LN(msg)\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += "#else\n";
s += " #define ENABLE_SERIAL_DEBUG\n";
s += " #include <Arduino.h>\n";
s += " #define DEBUG1(msg) Serial.print(msg)\n";
s += " #define DEBUG1_LN(msg) Serial.println(msg)\n";
s += " #if DEBUG_MESSAGES == 1\n";
s += " #define DEBUG2(msg)\n";
s += " #define DEBUG2_LN(msg)\n";
s += " #else\n";
s += " #define DEBUG2(msg) Serial.print(msg)\n";
s += " #define DEBUG2_LN(msg) Serial.println(msg)\n";
s += " #endif\n\n";
s += "#endif\n\n";
CTree::LiteralH(CCode(s))
}
fn render_keycode_definitions() -> CTree {
let keycodes = KeyDefs::scancode_table();
let example = keycodes
.keys()
.nth(0)
.expect("KeyDefs::scancode_table() is empty!")
.to_owned();
let keycode_definitions = CTree::Group(
keycodes
.iter()
.map(|(&name, &value)| CTree::Define {
name: name.to_owned(),
value: value.to_c(),
})
.collect(),
);
CTree::Ifndef {
conditional: example.to_owned(),
contents: Box::new(keycode_definitions),
}
}
|
{
let msg = autogen_message();
group.push(CTree::LiteralC(msg.clone()));
group.push(CTree::LiteralH(msg));
}
|
conditional_block
|
issue-9396.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::comm;
use std::io::timer::Timer;
pub fn
|
() {
let (tx, rx) = channel();
spawn(proc (){
let mut timer = Timer::new().unwrap();
timer.sleep(10);
tx.send(());
});
loop {
match rx.try_recv() {
comm::Data(()) => break,
comm::Empty => {}
comm::Disconnected => unreachable!()
}
}
}
|
main
|
identifier_name
|
issue-9396.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
pub fn main() {
let (tx, rx) = channel();
spawn(proc (){
let mut timer = Timer::new().unwrap();
timer.sleep(10);
tx.send(());
});
loop {
match rx.try_recv() {
comm::Data(()) => break,
comm::Empty => {}
comm::Disconnected => unreachable!()
}
}
}
|
// except according to those terms.
use std::comm;
use std::io::timer::Timer;
|
random_line_split
|
issue-9396.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::comm;
use std::io::timer::Timer;
pub fn main()
|
{
let (tx, rx) = channel();
spawn(proc (){
let mut timer = Timer::new().unwrap();
timer.sleep(10);
tx.send(());
});
loop {
match rx.try_recv() {
comm::Data(()) => break,
comm::Empty => {}
comm::Disconnected => unreachable!()
}
}
}
|
identifier_body
|
|
metadata.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::fs::Metadata;
#[cfg(any(target_os = "macos", target_os = "linux"))]
use nix::sys::stat::Mode;
#[cfg(target_os = "linux")]
use std::os::linux::fs::MetadataExt as MetadataLinuxExt;
#[cfg(target_os = "macos")]
use std::os::unix::fs::MetadataExt as MetadataMacosExt;
#[cfg(windows)]
use std::os::windows::fs::MetadataExt as MetadataWindowsExt;
/// Metadata helper methods that map to the equivalent platform-specific
/// accessors, for EdenFS's purposes
pub trait MetadataExt {
/// Returns the ID of the device containing the file
fn eden_dev(&self) -> u64;
/// Returns the file size
fn eden_file_size(&self) -> u64;
|
fn eden_dev(&self) -> u64 {
// Dummy value
0
}
fn eden_file_size(&self) -> u64 {
self.file_size()
}
fn is_setuid_set(&self) -> bool {
// This doesn't exist for windows
false
}
}
#[cfg(target_os = "linux")]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
self.st_dev()
}
fn eden_file_size(&self) -> u64 {
// Use st_blocks as this represents the actual amount of
// disk space allocated by the file, not its apparent
// size.
self.st_blocks() * 512
}
fn is_setuid_set(&self) -> bool {
let isuid = Mode::S_ISUID;
        self.st_mode() & isuid.bits() != 0
}
}
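// A minimal sketch (separate from the trait above) of why eden_file_size uses
// st_blocks * 512 rather than the apparent length: for a sparse file the two
// can differ. Assumes Linux and a writable /tmp; the path below is
// hypothetical.
#[cfg(target_os = "linux")]
fn sparse_file_sizes_sketch() -> std::io::Result<(u64, u64)> {
    use std::io::{Seek, SeekFrom, Write};
    use std::os::linux::fs::MetadataExt;

    let path = "/tmp/sparse_size_demo";
    let mut file = std::fs::File::create(path)?;
    // Seek far past the start and write a single byte, leaving a hole behind it.
    file.seek(SeekFrom::Start(1 << 20))?;
    file.write_all(&[0u8])?;
    let meta = file.metadata()?;
    // Apparent size counts the hole; allocated size usually does not.
    Ok((meta.len(), meta.st_blocks() * 512))
}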
#[cfg(target_os = "macos")]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
self.dev()
}
fn eden_file_size(&self) -> u64 {
self.blocks() * 512
}
fn is_setuid_set(&self) -> bool {
let isuid = Mode::S_ISUID;
        self.mode() & (isuid.bits() as u32) != 0
}
}
|
fn is_setuid_set(&self) -> bool;
}
#[cfg(windows)]
impl MetadataExt for Metadata {
|
random_line_split
|
metadata.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::fs::Metadata;
#[cfg(any(target_os = "macos", target_os = "linux"))]
use nix::sys::stat::Mode;
#[cfg(target_os = "linux")]
use std::os::linux::fs::MetadataExt as MetadataLinuxExt;
#[cfg(target_os = "macos")]
use std::os::unix::fs::MetadataExt as MetadataMacosExt;
#[cfg(windows)]
use std::os::windows::fs::MetadataExt as MetadataWindowsExt;
/// Metadata helper methods that map to the equivalent platform-specific
/// accessors, for EdenFS's purposes
pub trait MetadataExt {
/// Returns the ID of the device containing the file
fn eden_dev(&self) -> u64;
/// Returns the file size
fn eden_file_size(&self) -> u64;
fn is_setuid_set(&self) -> bool;
}
#[cfg(windows)]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
// Dummy value
0
}
fn eden_file_size(&self) -> u64
|
fn is_setuid_set(&self) -> bool {
// This doesn't exist for windows
false
}
}
#[cfg(target_os = "linux")]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
self.st_dev()
}
fn eden_file_size(&self) -> u64 {
// Use st_blocks as this represents the actual amount of
// disk space allocated by the file, not its apparent
// size.
self.st_blocks() * 512
}
fn is_setuid_set(&self) -> bool {
let isuid = Mode::S_ISUID;
        self.st_mode() & isuid.bits() != 0
}
}
#[cfg(target_os = "macos")]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
self.dev()
}
fn eden_file_size(&self) -> u64 {
self.blocks() * 512
}
fn is_setuid_set(&self) -> bool {
let isuid = Mode::S_ISUID;
        self.mode() & (isuid.bits() as u32) != 0
}
}
|
{
self.file_size()
}
|
identifier_body
|
metadata.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::fs::Metadata;
#[cfg(any(target_os = "macos", target_os = "linux"))]
use nix::sys::stat::Mode;
#[cfg(target_os = "linux")]
use std::os::linux::fs::MetadataExt as MetadataLinuxExt;
#[cfg(target_os = "macos")]
use std::os::unix::fs::MetadataExt as MetadataMacosExt;
#[cfg(windows)]
use std::os::windows::fs::MetadataExt as MetadataWindowsExt;
/// Metadata helper methods that map to the equivalent platform-specific
/// accessors, for EdenFS's purposes
pub trait MetadataExt {
/// Returns the ID of the device containing the file
fn eden_dev(&self) -> u64;
/// Returns the file size
fn eden_file_size(&self) -> u64;
fn is_setuid_set(&self) -> bool;
}
#[cfg(windows)]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
// Dummy value
0
}
fn eden_file_size(&self) -> u64 {
self.file_size()
}
fn is_setuid_set(&self) -> bool {
// This doesn't exist for windows
false
}
}
#[cfg(target_os = "linux")]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
self.st_dev()
}
fn
|
(&self) -> u64 {
// Use st_blocks as this represents the actual amount of
// disk space allocated by the file, not its apparent
// size.
self.st_blocks() * 512
}
fn is_setuid_set(&self) -> bool {
let isuid = Mode::S_ISUID;
        self.st_mode() & isuid.bits() != 0
}
}
#[cfg(target_os = "macos")]
impl MetadataExt for Metadata {
fn eden_dev(&self) -> u64 {
self.dev()
}
fn eden_file_size(&self) -> u64 {
self.blocks() * 512
}
fn is_setuid_set(&self) -> bool {
let isuid = Mode::S_ISUID;
        self.mode() & (isuid.bits() as u32) != 0
}
}
|
eden_file_size
|
identifier_name
|
prelude.rs
|
// Copyright (C) 2017 Pietro Albini
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! Prelude for Fisher.
//!
//! This module re-exports useful things used by all the Fisher code, to be
//! easily included.
pub use super::errors::{Error, ErrorKind, Result, ResultExt};
pub use super::traits::*;
|
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
random_line_split
|
move-3.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(managed_boxes)];
struct Triple { x: int, y: int, z: int }
fn test(x: bool, foo: @Triple) -> int {
let bar = foo;
let mut y: @Triple;
if x { y = bar; } else { y = @Triple{x: 4, y: 5, z: 6}; }
return y.y;
}
pub fn
|
() {
let x = @Triple{x: 1, y: 2, z: 3};
for _i in range(0u, 10000u) {
assert_eq!(test(true, x), 2);
}
assert_eq!(test(false, x), 5);
}
|
main
|
identifier_name
|
move-3.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(managed_boxes)];
struct Triple { x: int, y: int, z: int }
|
return y.y;
}
pub fn main() {
let x = @Triple{x: 1, y: 2, z: 3};
for _i in range(0u, 10000u) {
assert_eq!(test(true, x), 2);
}
assert_eq!(test(false, x), 5);
}
|
fn test(x: bool, foo: @Triple) -> int {
let bar = foo;
let mut y: @Triple;
if x { y = bar; } else { y = @Triple{x: 4, y: 5, z: 6}; }
|
random_line_split
|
move-3.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(managed_boxes)];
struct Triple { x: int, y: int, z: int }
fn test(x: bool, foo: @Triple) -> int {
let bar = foo;
let mut y: @Triple;
if x
|
else { y = @Triple{x: 4, y: 5, z: 6}; }
return y.y;
}
pub fn main() {
let x = @Triple{x: 1, y: 2, z: 3};
for _i in range(0u, 10000u) {
assert_eq!(test(true, x), 2);
}
assert_eq!(test(false, x), 5);
}
|
{ y = bar; }
|
conditional_block
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
/// We have a few jobs that maintain some counters for each Mononoke repository. For example,
/// the latest blobimported revision, latest replayed pushrebase etc. Previously these counters were
/// stored in Manifold, but that was inconvenient: they were harder to modify and harder to keep
/// track of. Storing all of them in the same table makes maintenance easier and safer,
/// for example, we can have conditional updates.
use anyhow::Error;
use context::{CoreContext, PerfCounterType};
use futures::future::{FutureExt, TryFutureExt};
use futures_ext::{BoxFuture, FutureExt as _};
use mononoke_types::RepositoryId;
use sql::{queries, Connection, Transaction as SqlTransaction};
use sql_construct::{SqlConstruct, SqlConstructFromMetadataDatabaseConfig};
use sql_ext::{SqlConnections, TransactionResult};
pub trait MutableCounters: Send + Sync + 'static {
/// Get the current value of the counter
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
fn get_maybe_stale_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
    /// Set the current value of the counter. If `prev_value` is not None, then it sets the value
/// conditionally.
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error>;
/// Get the names and values of all the counters for a given repository
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error>;
}
queries! {
write SetCounter(
repo_id: RepositoryId, name: &str, value: i64
) {
none,
mysql(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, {name}, {value})"
)
sqlite(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, CAST({name} AS TEXT), {value})"
)
}
write SetCounterConditionally(
repo_id: RepositoryId, name: &str, value: i64, prev_value: i64
) {
none,
mysql(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = {name} AND value = {prev_value}"
)
sqlite(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = CAST({name} AS TEXT) AND value = {prev_value}"
)
}
read GetCounter(repo_id: RepositoryId, name: &str) -> (i64) {
mysql(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = {name}"
)
sqlite(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = CAST({name} AS TEXT)"
)
}
read GetCountersForRepo(repo_id: RepositoryId) -> (String, i64) {
"SELECT name, value FROM mutable_counters WHERE repo_id = {repo_id} ORDER BY name"
}
}
#[derive(Clone)]
pub struct SqlMutableCounters {
write_connection: Connection,
read_connection: Connection,
read_master_connection: Connection,
}
impl SqlConstruct for SqlMutableCounters {
const LABEL: &'static str = "mutable_counters";
const CREATION_QUERY: &'static str = include_str!("../schemas/sqlite-mutable-counters.sql");
fn from_sql_connections(connections: SqlConnections) -> Self {
Self {
write_connection: connections.write_connection,
read_connection: connections.read_connection,
read_master_connection: connections.read_master_connection,
}
}
}
impl SqlConstructFromMetadataDatabaseConfig for SqlMutableCounters {}
impl MutableCounters for SqlMutableCounters {
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn get_maybe_stale_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsReplica);
let conn = self.read_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error> {
let conn = self.write_connection.clone();
let name = name.to_string();
async move {
let txn = conn.start_transaction().await?;
let txn_result =
Self::set_counter_on_txn(ctx, repoid, &name, value, prev_value, txn).await?;
match txn_result {
TransactionResult::Succeeded(txn) => {
txn.commit().await?;
Ok(true)
}
TransactionResult::Failed => Ok(false),
}
}
.boxed()
.compat()
.boxify()
}
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
async move {
let counters = GetCountersForRepo::query(&conn, &repoid).await?;
Ok(counters.into_iter().collect())
}
.boxed()
.compat()
.boxify()
}
}
impl SqlMutableCounters {
pub async fn set_counter_on_txn(
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
txn: SqlTransaction,
) -> Result<TransactionResult, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlWrites);
let (txn, result) = if let Some(prev_value) = prev_value {
SetCounterConditionally::query_with_transaction(
txn,
&repoid,
&name,
&value,
&prev_value,
)
.await?
} else {
SetCounter::query_with_transaction(txn, &repoid, &name, &value).await?
};
Ok(if result.affected_rows() >= 1
|
else {
TransactionResult::Failed
})
}
}
|
{
TransactionResult::Succeeded(txn)
}
|
conditional_block
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
/// We have a few jobs that maintain some counters for each Mononoke repository. For example,
/// the latest blobimported revision, latest replayed pushrebase etc. Previously these counters were
/// stored in Manifold, but that was inconvenient: they were harder to modify and harder to keep
/// track of. Storing all of them in the same table makes maintenance easier and safer,
/// for example, we can have conditional updates.
use anyhow::Error;
use context::{CoreContext, PerfCounterType};
use futures::future::{FutureExt, TryFutureExt};
use futures_ext::{BoxFuture, FutureExt as _};
use mononoke_types::RepositoryId;
use sql::{queries, Connection, Transaction as SqlTransaction};
use sql_construct::{SqlConstruct, SqlConstructFromMetadataDatabaseConfig};
use sql_ext::{SqlConnections, TransactionResult};
pub trait MutableCounters: Send + Sync + 'static {
/// Get the current value of the counter
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
fn get_maybe_stale_counter(
&self,
|
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
    /// Set the current value of the counter. If `prev_value` is not None, then it sets the value
/// conditionally.
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error>;
/// Get the names and values of all the counters for a given repository
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error>;
}
queries! {
write SetCounter(
repo_id: RepositoryId, name: &str, value: i64
) {
none,
mysql(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, {name}, {value})"
)
sqlite(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, CAST({name} AS TEXT), {value})"
)
}
write SetCounterConditionally(
repo_id: RepositoryId, name: &str, value: i64, prev_value: i64
) {
none,
mysql(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = {name} AND value = {prev_value}"
)
sqlite(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = CAST({name} AS TEXT) AND value = {prev_value}"
)
}
read GetCounter(repo_id: RepositoryId, name: &str) -> (i64) {
mysql(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = {name}"
)
sqlite(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = CAST({name} AS TEXT)"
)
}
read GetCountersForRepo(repo_id: RepositoryId) -> (String, i64) {
"SELECT name, value FROM mutable_counters WHERE repo_id = {repo_id} ORDER BY name"
}
}
#[derive(Clone)]
pub struct SqlMutableCounters {
write_connection: Connection,
read_connection: Connection,
read_master_connection: Connection,
}
impl SqlConstruct for SqlMutableCounters {
const LABEL: &'static str = "mutable_counters";
const CREATION_QUERY: &'static str = include_str!("../schemas/sqlite-mutable-counters.sql");
fn from_sql_connections(connections: SqlConnections) -> Self {
Self {
write_connection: connections.write_connection,
read_connection: connections.read_connection,
read_master_connection: connections.read_master_connection,
}
}
}
impl SqlConstructFromMetadataDatabaseConfig for SqlMutableCounters {}
impl MutableCounters for SqlMutableCounters {
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn get_maybe_stale_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsReplica);
let conn = self.read_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error> {
let conn = self.write_connection.clone();
let name = name.to_string();
async move {
let txn = conn.start_transaction().await?;
let txn_result =
Self::set_counter_on_txn(ctx, repoid, &name, value, prev_value, txn).await?;
match txn_result {
TransactionResult::Succeeded(txn) => {
txn.commit().await?;
Ok(true)
}
TransactionResult::Failed => Ok(false),
}
}
.boxed()
.compat()
.boxify()
}
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
async move {
let counters = GetCountersForRepo::query(&conn, &repoid).await?;
Ok(counters.into_iter().collect())
}
.boxed()
.compat()
.boxify()
}
}
impl SqlMutableCounters {
pub async fn set_counter_on_txn(
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
txn: SqlTransaction,
) -> Result<TransactionResult, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlWrites);
let (txn, result) = if let Some(prev_value) = prev_value {
SetCounterConditionally::query_with_transaction(
txn,
&repoid,
&name,
&value,
&prev_value,
)
.await?
} else {
SetCounter::query_with_transaction(txn, &repoid, &name, &value).await?
};
Ok(if result.affected_rows() >= 1 {
TransactionResult::Succeeded(txn)
} else {
TransactionResult::Failed
})
}
}
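// A minimal in-memory analogue (not the Mononoke implementation) of the
// compare-and-set contract documented on `set_counter`: when `prev_value` is
// Some, the update only succeeds if the stored value still matches it,
// mirroring the conditional UPDATE; when it is None, the set is unconditional,
// mirroring the REPLACE INTO.
fn set_counter_in_memory_sketch(
    counters: &mut std::collections::HashMap<String, i64>,
    name: &str,
    value: i64,
    prev_value: Option<i64>,
) -> bool {
    match (prev_value, counters.get(name).copied()) {
        // Unconditional set.
        (None, _) => {
            counters.insert(name.to_string(), value);
            true
        }
        // Conditional set: only when the current value matches the expected one.
        (Some(expected), Some(current)) if current == expected => {
            counters.insert(name.to_string(), value);
            true
        }
        _ => false,
    }
}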
|
random_line_split
|
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
/// We have a few jobs that maintain some counters for each Mononoke repository. For example,
/// the latest blobimported revision, latest replayed pushrebase etc. Previously these counters were
/// stored in Manifold, but that was inconvenient: they were harder to modify and harder to keep
/// track of. Storing all of them in the same table makes maintenance easier and safer,
/// for example, we can have conditional updates.
use anyhow::Error;
use context::{CoreContext, PerfCounterType};
use futures::future::{FutureExt, TryFutureExt};
use futures_ext::{BoxFuture, FutureExt as _};
use mononoke_types::RepositoryId;
use sql::{queries, Connection, Transaction as SqlTransaction};
use sql_construct::{SqlConstruct, SqlConstructFromMetadataDatabaseConfig};
use sql_ext::{SqlConnections, TransactionResult};
pub trait MutableCounters: Send + Sync + 'static {
/// Get the current value of the counter
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
fn get_maybe_stale_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
    /// Set the current value of the counter. If `prev_value` is not None, then it sets the value
/// conditionally.
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error>;
/// Get the names and values of all the counters for a given repository
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error>;
}
queries! {
write SetCounter(
repo_id: RepositoryId, name: &str, value: i64
) {
none,
mysql(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, {name}, {value})"
)
sqlite(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, CAST({name} AS TEXT), {value})"
)
}
write SetCounterConditionally(
repo_id: RepositoryId, name: &str, value: i64, prev_value: i64
) {
none,
mysql(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = {name} AND value = {prev_value}"
)
sqlite(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = CAST({name} AS TEXT) AND value = {prev_value}"
)
}
read GetCounter(repo_id: RepositoryId, name: &str) -> (i64) {
mysql(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = {name}"
)
sqlite(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = CAST({name} AS TEXT)"
)
}
read GetCountersForRepo(repo_id: RepositoryId) -> (String, i64) {
"SELECT name, value FROM mutable_counters WHERE repo_id = {repo_id} ORDER BY name"
}
}
#[derive(Clone)]
pub struct SqlMutableCounters {
write_connection: Connection,
read_connection: Connection,
read_master_connection: Connection,
}
impl SqlConstruct for SqlMutableCounters {
const LABEL: &'static str = "mutable_counters";
const CREATION_QUERY: &'static str = include_str!("../schemas/sqlite-mutable-counters.sql");
fn from_sql_connections(connections: SqlConnections) -> Self {
Self {
write_connection: connections.write_connection,
read_connection: connections.read_connection,
read_master_connection: connections.read_master_connection,
}
}
}
impl SqlConstructFromMetadataDatabaseConfig for SqlMutableCounters {}
impl MutableCounters for SqlMutableCounters {
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn get_maybe_stale_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsReplica);
let conn = self.read_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error> {
let conn = self.write_connection.clone();
let name = name.to_string();
async move {
let txn = conn.start_transaction().await?;
let txn_result =
Self::set_counter_on_txn(ctx, repoid, &name, value, prev_value, txn).await?;
match txn_result {
TransactionResult::Succeeded(txn) => {
txn.commit().await?;
Ok(true)
}
TransactionResult::Failed => Ok(false),
}
}
.boxed()
.compat()
.boxify()
}
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
async move {
let counters = GetCountersForRepo::query(&conn, &repoid).await?;
Ok(counters.into_iter().collect())
}
.boxed()
.compat()
.boxify()
}
}
impl SqlMutableCounters {
pub async fn set_counter_on_txn(
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
txn: SqlTransaction,
) -> Result<TransactionResult, Error>
|
})
}
}
|
{
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlWrites);
let (txn, result) = if let Some(prev_value) = prev_value {
SetCounterConditionally::query_with_transaction(
txn,
&repoid,
&name,
&value,
&prev_value,
)
.await?
} else {
SetCounter::query_with_transaction(txn, &repoid, &name, &value).await?
};
Ok(if result.affected_rows() >= 1 {
TransactionResult::Succeeded(txn)
} else {
TransactionResult::Failed
|
identifier_body
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
/// We have a few jobs that maintain some counters for each Mononoke repository. For example,
/// the latest blobimported revision, latest replayed pushrebase etc. Previously these counters were
/// stored in Manifold, but that was inconvenient: they were harder to modify and harder to keep
/// track of. Storing all of them in the same table makes maintenance easier and safer,
/// for example, we can have conditional updates.
use anyhow::Error;
use context::{CoreContext, PerfCounterType};
use futures::future::{FutureExt, TryFutureExt};
use futures_ext::{BoxFuture, FutureExt as _};
use mononoke_types::RepositoryId;
use sql::{queries, Connection, Transaction as SqlTransaction};
use sql_construct::{SqlConstruct, SqlConstructFromMetadataDatabaseConfig};
use sql_ext::{SqlConnections, TransactionResult};
pub trait MutableCounters: Send + Sync + 'static {
/// Get the current value of the counter
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
fn get_maybe_stale_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error>;
    /// Set the current value of the counter. If `prev_value` is not None, then it sets the value
/// conditionally.
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error>;
/// Get the names and values of all the counters for a given repository
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error>;
}
queries! {
write SetCounter(
repo_id: RepositoryId, name: &str, value: i64
) {
none,
mysql(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, {name}, {value})"
)
sqlite(
"REPLACE INTO mutable_counters (repo_id, name, value) VALUES ({repo_id}, CAST({name} AS TEXT), {value})"
)
}
write SetCounterConditionally(
repo_id: RepositoryId, name: &str, value: i64, prev_value: i64
) {
none,
mysql(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = {name} AND value = {prev_value}"
)
sqlite(
"UPDATE mutable_counters SET value = {value}
WHERE repo_id = {repo_id} AND name = CAST({name} AS TEXT) AND value = {prev_value}"
)
}
read GetCounter(repo_id: RepositoryId, name: &str) -> (i64) {
mysql(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = {name}"
)
sqlite(
"SELECT value FROM mutable_counters WHERE repo_id = {repo_id} and name = CAST({name} AS TEXT)"
)
}
read GetCountersForRepo(repo_id: RepositoryId) -> (String, i64) {
"SELECT name, value FROM mutable_counters WHERE repo_id = {repo_id} ORDER BY name"
}
}
#[derive(Clone)]
pub struct
|
{
write_connection: Connection,
read_connection: Connection,
read_master_connection: Connection,
}
impl SqlConstruct for SqlMutableCounters {
const LABEL: &'static str = "mutable_counters";
const CREATION_QUERY: &'static str = include_str!("../schemas/sqlite-mutable-counters.sql");
fn from_sql_connections(connections: SqlConnections) -> Self {
Self {
write_connection: connections.write_connection,
read_connection: connections.read_connection,
read_master_connection: connections.read_master_connection,
}
}
}
impl SqlConstructFromMetadataDatabaseConfig for SqlMutableCounters {}
impl MutableCounters for SqlMutableCounters {
fn get_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn get_maybe_stale_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
) -> BoxFuture<Option<i64>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsReplica);
let conn = self.read_connection.clone();
let name = name.to_string();
async move {
let counter = GetCounter::query(&conn, &repoid, &name.as_str()).await?;
Ok(counter.first().map(|entry| entry.0))
}
.boxed()
.compat()
.boxify()
}
fn set_counter(
&self,
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
) -> BoxFuture<bool, Error> {
let conn = self.write_connection.clone();
let name = name.to_string();
async move {
let txn = conn.start_transaction().await?;
let txn_result =
Self::set_counter_on_txn(ctx, repoid, &name, value, prev_value, txn).await?;
match txn_result {
TransactionResult::Succeeded(txn) => {
txn.commit().await?;
Ok(true)
}
TransactionResult::Failed => Ok(false),
}
}
.boxed()
.compat()
.boxify()
}
fn get_all_counters(
&self,
ctx: CoreContext,
repoid: RepositoryId,
) -> BoxFuture<Vec<(String, i64)>, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlReadsMaster);
let conn = self.read_master_connection.clone();
async move {
let counters = GetCountersForRepo::query(&conn, &repoid).await?;
Ok(counters.into_iter().collect())
}
.boxed()
.compat()
.boxify()
}
}
impl SqlMutableCounters {
pub async fn set_counter_on_txn(
ctx: CoreContext,
repoid: RepositoryId,
name: &str,
value: i64,
prev_value: Option<i64>,
txn: SqlTransaction,
) -> Result<TransactionResult, Error> {
ctx.perf_counters()
.increment_counter(PerfCounterType::SqlWrites);
let (txn, result) = if let Some(prev_value) = prev_value {
SetCounterConditionally::query_with_transaction(
txn,
&repoid,
&name,
&value,
&prev_value,
)
.await?
} else {
SetCounter::query_with_transaction(txn, &repoid, &name, &value).await?
};
Ok(if result.affected_rows() >= 1 {
TransactionResult::Succeeded(txn)
} else {
TransactionResult::Failed
})
}
}
|
SqlMutableCounters
|
identifier_name
|
wasm_testsuite.rs
|
use cranelift_codegen::isa;
use cranelift_codegen::print_errors::pretty_verifier_error;
use cranelift_codegen::settings::{self, Flags};
use cranelift_codegen::verifier;
use cranelift_wasm::{translate_module, DummyEnvironment, ReturnMode};
use std::fs;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
use std::str::FromStr;
use target_lexicon::triple;
use wabt::wat2wasm;
#[test]
fn testsuite() {
let mut paths: Vec<_> = fs::read_dir("../wasmtests")
.unwrap()
.map(|r| r.unwrap())
.filter(|p| {
// Ignore files starting with `.`, which could be editor temporary files
if let Some(stem) = p.path().file_stem() {
if let Some(stemstr) = stem.to_str() {
                    return !stemstr.starts_with('.');
}
}
false
})
.collect();
paths.sort_by_key(|dir| dir.path());
let flags = Flags::new(settings::builder());
for path in paths {
let path = path.path();
handle_module(&path, &flags, ReturnMode::NormalReturns);
}
}
#[test]
fn use_fallthrough_return() {
let flags = Flags::new(settings::builder());
handle_module(
Path::new("../wasmtests/use_fallthrough_return.wat"),
&flags,
ReturnMode::FallthroughReturn,
);
}
fn
|
(path: &Path) -> io::Result<Vec<u8>> {
let mut buf: Vec<u8> = Vec::new();
let mut file = File::open(path)?;
file.read_to_end(&mut buf)?;
Ok(buf)
}
fn handle_module(path: &Path, flags: &Flags, return_mode: ReturnMode) {
let data = match path.extension() {
None => {
panic!("the file extension is not wasm or wat");
}
Some(ext) => match ext.to_str() {
Some("wasm") => read_file(path).expect("error reading wasm file"),
Some("wat") => {
let wat = read_file(path).expect("error reading wat file");
match wat2wasm(&wat) {
Ok(wasm) => wasm,
Err(e) => {
panic!("error converting wat to wasm: {:?}", e);
}
}
}
None | Some(&_) => panic!("the file extension for {:?} is not wasm or wat", path),
},
};
let triple = triple!("riscv64");
let isa = isa::lookup(triple).unwrap().finish(flags.clone());
let mut dummy_environ = DummyEnvironment::new(isa.frontend_config(), return_mode);
translate_module(&data, &mut dummy_environ).unwrap();
for func in dummy_environ.info.function_bodies.values() {
verifier::verify_function(func, &*isa)
.map_err(|errors| panic!(pretty_verifier_error(func, Some(&*isa), None, errors)))
.unwrap();
}
}
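// A small hedged sketch of the extension dispatch handle_module performs,
// pulled out as a standalone helper for clarity. It returns a Result instead
// of panicking and is not used by the tests above.
fn needs_wat_conversion_sketch(path: &Path) -> Result<bool, String> {
    match path.extension().and_then(|ext| ext.to_str()) {
        Some("wasm") => Ok(false),
        Some("wat") => Ok(true),
        _ => Err(format!("the file extension for {:?} is not wasm or wat", path)),
    }
}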
|
read_file
|
identifier_name
|
wasm_testsuite.rs
|
use cranelift_codegen::isa;
use cranelift_codegen::print_errors::pretty_verifier_error;
use cranelift_codegen::settings::{self, Flags};
use cranelift_codegen::verifier;
use cranelift_wasm::{translate_module, DummyEnvironment, ReturnMode};
use std::fs;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
use std::str::FromStr;
use target_lexicon::triple;
use wabt::wat2wasm;
#[test]
fn testsuite() {
let mut paths: Vec<_> = fs::read_dir("../wasmtests")
.unwrap()
.map(|r| r.unwrap())
.filter(|p| {
// Ignore files starting with `.`, which could be editor temporary files
if let Some(stem) = p.path().file_stem() {
if let Some(stemstr) = stem.to_str() {
                    return !stemstr.starts_with('.');
}
}
false
})
.collect();
paths.sort_by_key(|dir| dir.path());
let flags = Flags::new(settings::builder());
for path in paths {
let path = path.path();
handle_module(&path, &flags, ReturnMode::NormalReturns);
}
}
#[test]
fn use_fallthrough_return()
|
fn read_file(path: &Path) -> io::Result<Vec<u8>> {
let mut buf: Vec<u8> = Vec::new();
let mut file = File::open(path)?;
file.read_to_end(&mut buf)?;
Ok(buf)
}
fn handle_module(path: &Path, flags: &Flags, return_mode: ReturnMode) {
let data = match path.extension() {
None => {
panic!("the file extension is not wasm or wat");
}
Some(ext) => match ext.to_str() {
Some("wasm") => read_file(path).expect("error reading wasm file"),
Some("wat") => {
let wat = read_file(path).expect("error reading wat file");
match wat2wasm(&wat) {
Ok(wasm) => wasm,
Err(e) => {
panic!("error converting wat to wasm: {:?}", e);
}
}
}
None | Some(&_) => panic!("the file extension for {:?} is not wasm or wat", path),
},
};
let triple = triple!("riscv64");
let isa = isa::lookup(triple).unwrap().finish(flags.clone());
let mut dummy_environ = DummyEnvironment::new(isa.frontend_config(), return_mode);
translate_module(&data, &mut dummy_environ).unwrap();
for func in dummy_environ.info.function_bodies.values() {
verifier::verify_function(func, &*isa)
.map_err(|errors| panic!(pretty_verifier_error(func, Some(&*isa), None, errors)))
.unwrap();
}
}
|
{
let flags = Flags::new(settings::builder());
handle_module(
Path::new("../wasmtests/use_fallthrough_return.wat"),
&flags,
ReturnMode::FallthroughReturn,
);
}
|
identifier_body
|