file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
cfixed_string.rs | use std::borrow::{Borrow, Cow};
use std::ffi::{CStr, CString};
use std::fmt;
use std::mem;
use std::ops;
use std::os::raw::c_char;
use std::ptr;
const STRING_SIZE: usize = 512;
/// This is a C String abstractions that presents a CStr like
/// interface for interop purposes but tries to be little nicer
/// by avoiding heap allocations if the string is within the
/// generous bounds (512 bytes) of the statically sized buffer.
/// Strings over this limit will be heap allocated, but the
/// interface outside of this abstraction remains the same.
pub enum CFixedString {
// Inline variant: the bytes live in a fixed STRING_SIZE (512-byte)
// buffer; `len` is the string length in bytes, excluding the trailing
// nul terminator.
Local {
s: [c_char; STRING_SIZE],
len: usize,
},
// Spilled variant: the string was too long for the inline buffer and
// lives in a heap-allocated `CString`; `len` mirrors the byte length
// for parity with `Local`.
Heap {
s: CString,
len: usize,
},
}
impl CFixedString {
/// Creates an empty CFixedString, this is intended to be
/// used with write! or the `fmt::Write` trait
pub fn new() -> Self {
unsafe {
CFixedString::Local {
s: mem::uninitialized(),
len: 0,
}
}
}
pub fn from_str<S: AsRef<str>>(s: S) -> Self {
Self::from(s.as_ref())
}
pub fn as_ptr(&self) -> *const c_char {
match *self {
CFixedString::Local { ref s,.. } => s.as_ptr(),
CFixedString::Heap { ref s,.. } => s.as_ptr(),
}
}
/// Returns true if the string has been heap allocated
pub fn is_allocated(&self) -> bool {
match *self {
CFixedString::Local {.. } => false,
_ => true,
}
}
/// Converts a `CFixedString` into a `Cow<str>`.
///
/// This function will calculate the length of this string (which normally
/// requires a linear amount of work to be done) and then return the
/// resulting slice as a `Cow<str>`, replacing any invalid UTF-8 sequences
/// with `U+FFFD REPLACEMENT CHARACTER`. If there are no invalid UTF-8
/// sequences, this will merely return a borrowed slice.
pub fn to_string(&self) -> Cow<str> {
String::from_utf8_lossy(&self.to_bytes())
}
pub unsafe fn as_str(&self) -> &str {
use std::slice;
use std::str;
match *self {
CFixedString::Local { ref s, len } => {
str::from_utf8_unchecked(slice::from_raw_parts(s.as_ptr() as *const u8, len))
}
CFixedString::Heap { ref s, len } => {
str::from_utf8_unchecked(slice::from_raw_parts(s.as_ptr() as *const u8, len))
}
}
}
}
impl<'a> From<&'a str> for CFixedString {
    /// Builds a `CFixedString` by streaming the source slice through the
    /// `fmt::Write` implementation, which chooses inline vs heap storage.
    fn from(src: &'a str) -> Self {
        use std::fmt::Write;
        let mut out = CFixedString::new();
        out.write_str(src).unwrap();
        out
    }
}
impl fmt::Write for CFixedString {
// Appends `s` to the string. While the combined length (plus the
// trailing nul) still fits in the STRING_SIZE inline buffer, the bytes
// are copied in place; otherwise the whole contents are rebuilt into a
// heap-allocated CString and `self` becomes the `Heap` variant.
fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> {
// use std::fmt::Write;
unsafe {
let cur_len = self.as_str().len();
match cur_len + s.len() {
// One byte of the buffer is reserved for the nul terminator,
// hence `STRING_SIZE - 1`. Once spilled to the heap,
// `cur_len` is already >= STRING_SIZE, so this guard can
// never match again and the `unreachable!()` below is sound.
len if len <= STRING_SIZE - 1 => {
match *self {
CFixedString::Local { s: ref mut ls, len: ref mut lslen } => {
let ptr = ls.as_mut_ptr() as *mut u8;
// Append after the current contents, re-terminate,
// and record the new length.
// NOTE(review): a `\0` inside `s` would be embedded
// verbatim here, truncating the C view of the string
// — confirm callers never write interior nuls.
ptr::copy(s.as_ptr(), ptr.offset(cur_len as isize), s.len());
*ptr.offset(len as isize) = 0;
*lslen = len;
}
_ => unreachable!(),
}
}
// Too big for the inline buffer: rebuild old + new contents
// in a String and spill to the heap.
len => {
let mut heapstring = String::with_capacity(len + 1);
heapstring.write_str(self.as_str()).unwrap();
heapstring.write_str(s).unwrap();
// NOTE(review): `CString::new` panics on interior nul
// bytes — same caveat as the inline path.
*self = CFixedString::Heap {
s: CString::new(heapstring).unwrap(),
len: len,
};
}
}
}
// Yah....we should do error handling
Ok(())
}
}
impl From<CFixedString> for String {
    /// Converts into an owned `String`, replacing any invalid UTF-8
    /// sequences with U+FFFD — the same lossy semantics as `to_string`.
    fn from(s: CFixedString) -> Self {
        s.to_string().into_owned()
    }
}
impl ops::Deref for CFixedString {
type Target = CStr;
fn | (&self) -> &CStr {
use std::slice;
match *self {
CFixedString::Local { ref s, len } => unsafe {
mem::transmute(slice::from_raw_parts(s.as_ptr(), len + 1))
},
CFixedString::Heap { ref s,.. } => s,
}
}
}
// `CFixedString` derefs to `CStr`, so the `CStr` borrow/as-ref
// conversions below are trivial deref coercions of `self`.
impl Borrow<CStr> for CFixedString {
fn borrow(&self) -> &CStr {
self
}
}
impl AsRef<CStr> for CFixedString {
fn as_ref(&self) -> &CStr {
self
}
}
// The `str` views call the unsafe `as_str`, which skips UTF-8
// validation; this is sound for strings built through the `fmt::Write`
// path, which only ever copies `&str` data.
impl Borrow<str> for CFixedString {
fn borrow(&self) -> &str {
unsafe { self.as_str() }
}
}
impl AsRef<str> for CFixedString {
fn as_ref(&self) -> &str {
unsafe { self.as_str() }
}
}
// `format_c!` mirrors `format!` but produces a `CFixedString`, so short
// formatted strings avoid a heap allocation entirely. Note it requires
// at least one format argument (see the commented-out arm below).
macro_rules! format_c {
// This does not work on stable, to change the * to a + and
// have this arm be used when there are no arguments :(
// ($fmt:expr) => {
// use std::fmt::Write;
// let mut fixed = CFixedString::new();
// write!(&mut fixed, $fmt).unwrap();
// fixed
// }
($fmt:expr, $($args:tt)*) => ({
use std::fmt::Write;
let mut fixed = CFixedString::new();
write!(&mut fixed, $fmt, $($args)*).unwrap();
fixed
})
}
#[cfg(test)]
mod tests {
use super::*;
use std::fmt::Write;
// Builds a deterministic test string of exactly `len` bytes: whole
// 16-byte chunks of a fixed pattern, then `len % 16` capital letters.
fn gen_string(len: usize) -> String {
let mut out = String::with_capacity(len);
for _ in 0..len / 16 {
out.write_str("zyxvutabcdef9876").unwrap();
}
for i in 0..len % 16 {
out.write_char((i as u8 + 'A' as u8) as char).unwrap();
}
assert_eq!(out.len(), len);
out
}
// Short strings (including empty) must stay in the inline buffer.
#[test]
fn test_empty_handler() {
let short_string = "";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_short_1() {
let short_string = "test_local";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_short_2() {
let short_string = "test_local stoheusthsotheost";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
// Boundary tests: 511 bytes is the largest string that fits inline
// (one byte is reserved for the trailing nul); 512 and 513 must spill
// to the heap.
#[test]
fn test_511() {
// this string (width 511) buffer should just fit
let test_511_string = gen_string(511);
let t = CFixedString::from_str(&test_511_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), &test_511_string);
}
#[test]
fn test_512() {
// this string (width 512) buffer should not fit
let test_512_string = gen_string(512);
let t = CFixedString::from_str(&test_512_string);
assert!(t.is_allocated());
assert_eq!(&t.to_string(), &test_512_string);
}
#[test]
fn test_513() {
// this string (width 513) buffer should not fit
let test_513_string = gen_string(513);
let t = CFixedString::from_str(&test_513_string);
assert!(t.is_allocated());
assert_eq!(&t.to_string(), &test_513_string);
}
// `String::from` consumes the CFixedString for both storage variants.
#[test]
fn test_to_owned() {
let short = "this is an amazing string";
let t = CFixedString::from_str(short);
assert!(!t.is_allocated());
assert_eq!(&String::from(t), short);
let long = gen_string(1025);
let t = CFixedString::from_str(&long);
assert!(t.is_allocated());
assert_eq!(&String::from(t), &long);
}
// Repeated `write!` calls append in place and stay inline while the
// running total fits.
#[test]
fn test_short_format() {
let mut fixed = CFixedString::new();
write!(&mut fixed, "one_{}", 1).unwrap();
write!(&mut fixed, "_two_{}", "two").unwrap();
write!(&mut fixed,
"_three_{}-{}-{:.3}",
23,
"some string data",
56.789)
.unwrap();
assert!(!fixed.is_allocated());
assert_eq!(&fixed.to_string(),
"one_1_two_two_three_23-some string data-56.789");
}
// Enough appends must eventually spill to the heap and keep matching a
// plain String built with the same writes.
#[test]
fn test_long_format() {
let mut fixed = CFixedString::new();
let mut string = String::new();
for i in 1..30 {
let genned = gen_string(i * i);
write!(&mut fixed, "{}_{}", i, genned).unwrap();
write!(&mut string, "{}_{}", i, genned).unwrap();
}
assert!(fixed.is_allocated());
assert_eq!(&fixed.to_string(), &string);
}
// TODO: Reenable this test once the empty match arm is allowed
// by the compiler
// #[test]
// fn test_empty_fmt_macro() {
// let empty = format_c!("");
// let no_args = format_c!("there are no format args");
//
// assert!(!empty.is_allocated());
// assert_eq!(&empty.to_string(), "");
//
// assert!(!no_args.is_allocated());
// assert_eq!(&no_args.to_string(), "there are no format args");
// }
// The macro must agree byte-for-byte with `format!`, inline or heaped.
#[test]
fn test_short_fmt_macro() {
let first = 23;
let second = "#@!*()&^%_-+={}[]|\\/?><,.:;~`";
let third = u32::max_value();
let fourth = gen_string(512 - 45);
let fixed = format_c!("{}_{}_0x{:x}_{}", first, second, third, fourth);
let heaped = format!("{}_{}_0x{:x}_{}", first, second, third, fourth);
assert!(!fixed.is_allocated());
assert_eq!(&fixed.to_string(), &heaped);
}
#[test]
fn test_long_fmt_macro() {
let first = "";
let second = gen_string(510);
let third = 3;
let fourth = gen_string(513 * 8);
let fixed = format_c!("{}_{}{}{}", first, second, third, fourth);
let heaped = format!("{}_{}{}{}", first, second, third, fourth);
assert!(fixed.is_allocated());
assert_eq!(&fixed.to_string(), &heaped);
}
}
| deref | identifier_name |
cfixed_string.rs | use std::borrow::{Borrow, Cow};
use std::ffi::{CStr, CString};
use std::fmt;
use std::mem;
use std::ops;
use std::os::raw::c_char;
use std::ptr;
const STRING_SIZE: usize = 512;
/// This is a C String abstractions that presents a CStr like
/// interface for interop purposes but tries to be little nicer
/// by avoiding heap allocations if the string is within the
/// generous bounds (512 bytes) of the statically sized buffer.
/// Strings over this limit will be heap allocated, but the
/// interface outside of this abstraction remains the same.
pub enum CFixedString {
Local {
s: [c_char; STRING_SIZE],
len: usize,
},
Heap {
s: CString,
len: usize,
},
}
impl CFixedString {
/// Creates an empty CFixedString, this is intended to be
/// used with write! or the `fmt::Write` trait
pub fn new() -> Self {
unsafe {
CFixedString::Local {
s: mem::uninitialized(),
len: 0,
}
}
}
pub fn from_str<S: AsRef<str>>(s: S) -> Self {
Self::from(s.as_ref())
}
pub fn as_ptr(&self) -> *const c_char {
match *self {
CFixedString::Local { ref s,.. } => s.as_ptr(),
CFixedString::Heap { ref s,.. } => s.as_ptr(),
}
}
/// Returns true if the string has been heap allocated
pub fn is_allocated(&self) -> bool |
/// Converts a `CFixedString` into a `Cow<str>`.
///
/// This function will calculate the length of this string (which normally
/// requires a linear amount of work to be done) and then return the
/// resulting slice as a `Cow<str>`, replacing any invalid UTF-8 sequences
/// with `U+FFFD REPLACEMENT CHARACTER`. If there are no invalid UTF-8
/// sequences, this will merely return a borrowed slice.
pub fn to_string(&self) -> Cow<str> {
String::from_utf8_lossy(&self.to_bytes())
}
pub unsafe fn as_str(&self) -> &str {
use std::slice;
use std::str;
match *self {
CFixedString::Local { ref s, len } => {
str::from_utf8_unchecked(slice::from_raw_parts(s.as_ptr() as *const u8, len))
}
CFixedString::Heap { ref s, len } => {
str::from_utf8_unchecked(slice::from_raw_parts(s.as_ptr() as *const u8, len))
}
}
}
}
impl<'a> From<&'a str> for CFixedString {
fn from(s: &'a str) -> Self {
use std::fmt::Write;
let mut string = CFixedString::new();
string.write_str(s).unwrap();
string
}
}
impl fmt::Write for CFixedString {
fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> {
// use std::fmt::Write;
unsafe {
let cur_len = self.as_str().len();
match cur_len + s.len() {
len if len <= STRING_SIZE - 1 => {
match *self {
CFixedString::Local { s: ref mut ls, len: ref mut lslen } => {
let ptr = ls.as_mut_ptr() as *mut u8;
ptr::copy(s.as_ptr(), ptr.offset(cur_len as isize), s.len());
*ptr.offset(len as isize) = 0;
*lslen = len;
}
_ => unreachable!(),
}
}
len => {
let mut heapstring = String::with_capacity(len + 1);
heapstring.write_str(self.as_str()).unwrap();
heapstring.write_str(s).unwrap();
*self = CFixedString::Heap {
s: CString::new(heapstring).unwrap(),
len: len,
};
}
}
}
// Yah....we should do error handling
Ok(())
}
}
impl From<CFixedString> for String {
fn from(s: CFixedString) -> Self {
String::from_utf8_lossy(&s.to_bytes()).into_owned()
}
}
impl ops::Deref for CFixedString {
    type Target = CStr;
    /// Views the string as a `&CStr`, including the trailing nul byte.
    fn deref(&self) -> &CStr {
        use std::slice;
        match *self {
            CFixedString::Local { ref s, len } => unsafe {
                // The buffer is always nul-terminated at `len` by
                // `write_str`, so `len + 1` bytes form a valid C string.
                // `CStr::from_bytes_with_nul_unchecked` replaces the old
                // `mem::transmute` of a fat slice pointer, which relied on
                // `CStr`'s unspecified internal representation.
                let bytes = slice::from_raw_parts(s.as_ptr() as *const u8, len + 1);
                CStr::from_bytes_with_nul_unchecked(bytes)
            },
            CFixedString::Heap { ref s, .. } => s,
        }
    }
}
impl Borrow<CStr> for CFixedString {
fn borrow(&self) -> &CStr {
self
}
}
impl AsRef<CStr> for CFixedString {
fn as_ref(&self) -> &CStr {
self
}
}
impl Borrow<str> for CFixedString {
fn borrow(&self) -> &str {
unsafe { self.as_str() }
}
}
impl AsRef<str> for CFixedString {
fn as_ref(&self) -> &str {
unsafe { self.as_str() }
}
}
macro_rules! format_c {
// This does not work on stable, to change the * to a + and
// have this arm be used when there are no arguments :(
// ($fmt:expr) => {
// use std::fmt::Write;
// let mut fixed = CFixedString::new();
// write!(&mut fixed, $fmt).unwrap();
// fixed
// }
($fmt:expr, $($args:tt)*) => ({
use std::fmt::Write;
let mut fixed = CFixedString::new();
write!(&mut fixed, $fmt, $($args)*).unwrap();
fixed
})
}
#[cfg(test)]
mod tests {
use super::*;
use std::fmt::Write;
fn gen_string(len: usize) -> String {
let mut out = String::with_capacity(len);
for _ in 0..len / 16 {
out.write_str("zyxvutabcdef9876").unwrap();
}
for i in 0..len % 16 {
out.write_char((i as u8 + 'A' as u8) as char).unwrap();
}
assert_eq!(out.len(), len);
out
}
#[test]
fn test_empty_handler() {
let short_string = "";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_short_1() {
let short_string = "test_local";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_short_2() {
let short_string = "test_local stoheusthsotheost";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_511() {
// this string (width 511) buffer should just fit
let test_511_string = gen_string(511);
let t = CFixedString::from_str(&test_511_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), &test_511_string);
}
#[test]
fn test_512() {
// this string (width 512) buffer should not fit
let test_512_string = gen_string(512);
let t = CFixedString::from_str(&test_512_string);
assert!(t.is_allocated());
assert_eq!(&t.to_string(), &test_512_string);
}
#[test]
fn test_513() {
// this string (width 513) buffer should not fit
let test_513_string = gen_string(513);
let t = CFixedString::from_str(&test_513_string);
assert!(t.is_allocated());
assert_eq!(&t.to_string(), &test_513_string);
}
#[test]
fn test_to_owned() {
let short = "this is an amazing string";
let t = CFixedString::from_str(short);
assert!(!t.is_allocated());
assert_eq!(&String::from(t), short);
let long = gen_string(1025);
let t = CFixedString::from_str(&long);
assert!(t.is_allocated());
assert_eq!(&String::from(t), &long);
}
#[test]
fn test_short_format() {
let mut fixed = CFixedString::new();
write!(&mut fixed, "one_{}", 1).unwrap();
write!(&mut fixed, "_two_{}", "two").unwrap();
write!(&mut fixed,
"_three_{}-{}-{:.3}",
23,
"some string data",
56.789)
.unwrap();
assert!(!fixed.is_allocated());
assert_eq!(&fixed.to_string(),
"one_1_two_two_three_23-some string data-56.789");
}
#[test]
fn test_long_format() {
let mut fixed = CFixedString::new();
let mut string = String::new();
for i in 1..30 {
let genned = gen_string(i * i);
write!(&mut fixed, "{}_{}", i, genned).unwrap();
write!(&mut string, "{}_{}", i, genned).unwrap();
}
assert!(fixed.is_allocated());
assert_eq!(&fixed.to_string(), &string);
}
// TODO: Reenable this test once the empty match arm is allowed
// by the compiler
// #[test]
// fn test_empty_fmt_macro() {
// let empty = format_c!("");
// let no_args = format_c!("there are no format args");
//
// assert!(!empty.is_allocated());
// assert_eq!(&empty.to_string(), "");
//
// assert!(!no_args.is_allocated());
// assert_eq!(&no_args.to_string(), "there are no format args");
// }
#[test]
fn test_short_fmt_macro() {
let first = 23;
let second = "#@!*()&^%_-+={}[]|\\/?><,.:;~`";
let third = u32::max_value();
let fourth = gen_string(512 - 45);
let fixed = format_c!("{}_{}_0x{:x}_{}", first, second, third, fourth);
let heaped = format!("{}_{}_0x{:x}_{}", first, second, third, fourth);
assert!(!fixed.is_allocated());
assert_eq!(&fixed.to_string(), &heaped);
}
#[test]
fn test_long_fmt_macro() {
let first = "";
let second = gen_string(510);
let third = 3;
let fourth = gen_string(513 * 8);
let fixed = format_c!("{}_{}{}{}", first, second, third, fourth);
let heaped = format!("{}_{}{}{}", first, second, third, fourth);
assert!(fixed.is_allocated());
assert_eq!(&fixed.to_string(), &heaped);
}
}
| {
match *self {
CFixedString::Local { .. } => false,
_ => true,
}
} | identifier_body |
cfixed_string.rs | use std::borrow::{Borrow, Cow};
use std::ffi::{CStr, CString};
use std::fmt;
use std::mem;
use std::ops;
use std::os::raw::c_char;
use std::ptr;
const STRING_SIZE: usize = 512;
/// This is a C String abstractions that presents a CStr like
/// interface for interop purposes but tries to be little nicer
/// by avoiding heap allocations if the string is within the
/// generous bounds (512 bytes) of the statically sized buffer.
/// Strings over this limit will be heap allocated, but the
/// interface outside of this abstraction remains the same.
pub enum CFixedString {
Local {
s: [c_char; STRING_SIZE],
len: usize,
},
Heap {
s: CString,
len: usize,
}, | /// used with write! or the `fmt::Write` trait
pub fn new() -> Self {
unsafe {
CFixedString::Local {
s: mem::uninitialized(),
len: 0,
}
}
}
pub fn from_str<S: AsRef<str>>(s: S) -> Self {
Self::from(s.as_ref())
}
pub fn as_ptr(&self) -> *const c_char {
match *self {
CFixedString::Local { ref s,.. } => s.as_ptr(),
CFixedString::Heap { ref s,.. } => s.as_ptr(),
}
}
/// Returns true if the string has been heap allocated
pub fn is_allocated(&self) -> bool {
match *self {
CFixedString::Local {.. } => false,
_ => true,
}
}
/// Converts a `CFixedString` into a `Cow<str>`.
///
/// This function will calculate the length of this string (which normally
/// requires a linear amount of work to be done) and then return the
/// resulting slice as a `Cow<str>`, replacing any invalid UTF-8 sequences
/// with `U+FFFD REPLACEMENT CHARACTER`. If there are no invalid UTF-8
/// sequences, this will merely return a borrowed slice.
pub fn to_string(&self) -> Cow<str> {
String::from_utf8_lossy(&self.to_bytes())
}
pub unsafe fn as_str(&self) -> &str {
use std::slice;
use std::str;
match *self {
CFixedString::Local { ref s, len } => {
str::from_utf8_unchecked(slice::from_raw_parts(s.as_ptr() as *const u8, len))
}
CFixedString::Heap { ref s, len } => {
str::from_utf8_unchecked(slice::from_raw_parts(s.as_ptr() as *const u8, len))
}
}
}
}
impl<'a> From<&'a str> for CFixedString {
fn from(s: &'a str) -> Self {
use std::fmt::Write;
let mut string = CFixedString::new();
string.write_str(s).unwrap();
string
}
}
impl fmt::Write for CFixedString {
fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> {
// use std::fmt::Write;
unsafe {
let cur_len = self.as_str().len();
match cur_len + s.len() {
len if len <= STRING_SIZE - 1 => {
match *self {
CFixedString::Local { s: ref mut ls, len: ref mut lslen } => {
let ptr = ls.as_mut_ptr() as *mut u8;
ptr::copy(s.as_ptr(), ptr.offset(cur_len as isize), s.len());
*ptr.offset(len as isize) = 0;
*lslen = len;
}
_ => unreachable!(),
}
}
len => {
let mut heapstring = String::with_capacity(len + 1);
heapstring.write_str(self.as_str()).unwrap();
heapstring.write_str(s).unwrap();
*self = CFixedString::Heap {
s: CString::new(heapstring).unwrap(),
len: len,
};
}
}
}
// Yah....we should do error handling
Ok(())
}
}
impl From<CFixedString> for String {
fn from(s: CFixedString) -> Self {
String::from_utf8_lossy(&s.to_bytes()).into_owned()
}
}
impl ops::Deref for CFixedString {
type Target = CStr;
fn deref(&self) -> &CStr {
use std::slice;
match *self {
CFixedString::Local { ref s, len } => unsafe {
mem::transmute(slice::from_raw_parts(s.as_ptr(), len + 1))
},
CFixedString::Heap { ref s,.. } => s,
}
}
}
impl Borrow<CStr> for CFixedString {
fn borrow(&self) -> &CStr {
self
}
}
impl AsRef<CStr> for CFixedString {
fn as_ref(&self) -> &CStr {
self
}
}
impl Borrow<str> for CFixedString {
fn borrow(&self) -> &str {
unsafe { self.as_str() }
}
}
impl AsRef<str> for CFixedString {
fn as_ref(&self) -> &str {
unsafe { self.as_str() }
}
}
macro_rules! format_c {
// This does not work on stable, to change the * to a + and
// have this arm be used when there are no arguments :(
// ($fmt:expr) => {
// use std::fmt::Write;
// let mut fixed = CFixedString::new();
// write!(&mut fixed, $fmt).unwrap();
// fixed
// }
($fmt:expr, $($args:tt)*) => ({
use std::fmt::Write;
let mut fixed = CFixedString::new();
write!(&mut fixed, $fmt, $($args)*).unwrap();
fixed
})
}
#[cfg(test)]
mod tests {
use super::*;
use std::fmt::Write;
fn gen_string(len: usize) -> String {
let mut out = String::with_capacity(len);
for _ in 0..len / 16 {
out.write_str("zyxvutabcdef9876").unwrap();
}
for i in 0..len % 16 {
out.write_char((i as u8 + 'A' as u8) as char).unwrap();
}
assert_eq!(out.len(), len);
out
}
#[test]
fn test_empty_handler() {
let short_string = "";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_short_1() {
let short_string = "test_local";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_short_2() {
let short_string = "test_local stoheusthsotheost";
let t = CFixedString::from_str(short_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), short_string);
}
#[test]
fn test_511() {
// this string (width 511) buffer should just fit
let test_511_string = gen_string(511);
let t = CFixedString::from_str(&test_511_string);
assert!(!t.is_allocated());
assert_eq!(&t.to_string(), &test_511_string);
}
#[test]
fn test_512() {
// this string (width 512) buffer should not fit
let test_512_string = gen_string(512);
let t = CFixedString::from_str(&test_512_string);
assert!(t.is_allocated());
assert_eq!(&t.to_string(), &test_512_string);
}
#[test]
fn test_513() {
// this string (width 513) buffer should not fit
let test_513_string = gen_string(513);
let t = CFixedString::from_str(&test_513_string);
assert!(t.is_allocated());
assert_eq!(&t.to_string(), &test_513_string);
}
#[test]
fn test_to_owned() {
let short = "this is an amazing string";
let t = CFixedString::from_str(short);
assert!(!t.is_allocated());
assert_eq!(&String::from(t), short);
let long = gen_string(1025);
let t = CFixedString::from_str(&long);
assert!(t.is_allocated());
assert_eq!(&String::from(t), &long);
}
#[test]
fn test_short_format() {
let mut fixed = CFixedString::new();
write!(&mut fixed, "one_{}", 1).unwrap();
write!(&mut fixed, "_two_{}", "two").unwrap();
write!(&mut fixed,
"_three_{}-{}-{:.3}",
23,
"some string data",
56.789)
.unwrap();
assert!(!fixed.is_allocated());
assert_eq!(&fixed.to_string(),
"one_1_two_two_three_23-some string data-56.789");
}
#[test]
fn test_long_format() {
let mut fixed = CFixedString::new();
let mut string = String::new();
for i in 1..30 {
let genned = gen_string(i * i);
write!(&mut fixed, "{}_{}", i, genned).unwrap();
write!(&mut string, "{}_{}", i, genned).unwrap();
}
assert!(fixed.is_allocated());
assert_eq!(&fixed.to_string(), &string);
}
// TODO: Reenable this test once the empty match arm is allowed
// by the compiler
// #[test]
// fn test_empty_fmt_macro() {
// let empty = format_c!("");
// let no_args = format_c!("there are no format args");
//
// assert!(!empty.is_allocated());
// assert_eq!(&empty.to_string(), "");
//
// assert!(!no_args.is_allocated());
// assert_eq!(&no_args.to_string(), "there are no format args");
// }
#[test]
fn test_short_fmt_macro() {
let first = 23;
let second = "#@!*()&^%_-+={}[]|\\/?><,.:;~`";
let third = u32::max_value();
let fourth = gen_string(512 - 45);
let fixed = format_c!("{}_{}_0x{:x}_{}", first, second, third, fourth);
let heaped = format!("{}_{}_0x{:x}_{}", first, second, third, fourth);
assert!(!fixed.is_allocated());
assert_eq!(&fixed.to_string(), &heaped);
}
#[test]
fn test_long_fmt_macro() {
let first = "";
let second = gen_string(510);
let third = 3;
let fourth = gen_string(513 * 8);
let fixed = format_c!("{}_{}{}{}", first, second, third, fourth);
let heaped = format!("{}_{}{}{}", first, second, third, fourth);
assert!(fixed.is_allocated());
assert_eq!(&fixed.to_string(), &heaped);
}
} | }
impl CFixedString {
/// Creates an empty CFixedString, this is intended to be | random_line_split |
generics.rs | #![recursion_limit = "128"]
#[macro_use]
extern crate generic_array;
use generic_array::typenum::consts::U4;
use std::fmt::Debug;
use std::ops::Add;
use generic_array::{GenericArray, ArrayLength};
use generic_array::sequence::*;
use generic_array::functional::*;
/// Example function using generics to pass N-length sequences and map them
pub fn generic_map<S>(s: S)
where
S: FunctionalSequence<i32>, // `.map`
S::Item: Add<i32, Output = i32>, // `x + 1`
S: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
MappedSequence<S, i32, i32>: Debug, // println!
{
// Consumes the sequence, bumps every element by one, and prints the
// mapped result; each `where` bound above corresponds to exactly one
// capability the body uses (annotated in the trailing comments).
let a = s.map(|x| x + 1);
println!("{:?}", a);
}
/// Complex example function using generics to pass N-length sequences, zip them, and then map that result.
///
/// If used with `GenericArray` specifically this isn't necessary
pub fn generic_sequence_zip_sum<A, B>(a: A, b: B) -> i32
where
A: FunctionalSequence<i32>, // `.zip`
B: FunctionalSequence<i32, Length = A::Length>, // `.zip`
A: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
B: MappedGenericSequence<i32, i32, Mapped = MappedSequence<A, i32, i32>>, // `i32` -> `i32`, prove A and B can map to the same output
A::Item: Add<B::Item, Output = i32>, // `l + r`
MappedSequence<A, i32, i32>: MappedGenericSequence<i32, i32> + FunctionalSequence<i32>, // `.map`
SequenceItem<MappedSequence<A, i32, i32>>: Add<i32, Output=i32>, // `x + 1`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: Debug, // `println!`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: FunctionalSequence<i32>, // `.fold`
SequenceItem<MappedSequence<MappedSequence<A, i32, i32>, i32, i32>>: Add<i32, Output=i32> // `x + a`, note the order
{
// Element-wise sum of `a` and `b`, bump each element by one, print the
// intermediate sequence, then total it with `fold`. Every bound above
// maps to one operation in these three lines (see trailing comments).
let c = a.zip(b, |l, r| l + r).map(|x| x + 1);
println!("{:?}", c);
c.fold(0, |a, x| x + a)
}
/// Super-simple fixed-length i32 `GenericArray`s: element-wise sum of the
/// two 4-element arrays, each element bumped by one, then totalled.
pub fn generic_array_plain_zip_sum(a: GenericArray<i32, U4>, b: GenericArray<i32, U4>) -> i32 {
    let pairwise = a.zip(b, |lhs, rhs| lhs + rhs);
    let bumped = pairwise.map(|v| v + 1);
    bumped.fold(0, |acc, v| acc + v)
}
pub fn generic_array_variable_length_zip_sum<N>(a: GenericArray<i32, N>, b: GenericArray<i32, N>) -> i32
where
N: ArrayLength<i32>,
|
// Like the variable-length i32 variant, but the element type `T` is also
// generic; the two `ArrayLength` bounds cover the input arrays and the
// zipped-sum output respectively, and `T: Add<T, Output=i32>` makes the
// final fold well-typed.
pub fn generic_array_same_type_variable_length_zip_sum<T, N>(a: GenericArray<T, N>, b: GenericArray<T, N>) -> i32
where
N: ArrayLength<T> + ArrayLength<<T as Add<T>>::Output>,
T: Add<T, Output=i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
/// Complex example using fully generic `GenericArray`s with the same length.
///
/// It's mostly just the repeated `Add` traits, which would be present in other systems anyway.
pub fn generic_array_zip_sum<A, B, N: ArrayLength<A> + ArrayLength<B>>(a: GenericArray<A, N>, b: GenericArray<B, N>) -> i32
where
A: Add<B>,
N: ArrayLength<<A as Add<B>>::Output> +
ArrayLength<<<A as Add<B>>::Output as Add<i32>>::Output>,
<A as Add<B>>::Output: Add<i32>,
<<A as Add<B>>::Output as Add<i32>>::Output: Add<i32, Output=i32>,
{
// zip -> map -> fold, with one `ArrayLength`/`Add` bound per
// intermediate element type produced along the chain.
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
// Smoke test: every helper, given the same two 4-element arrays, computes
// (1+2+1) + (2+3+1) + (3+4+1) + (4+5+1) = 28.
#[test]
fn test_generics() {
generic_map(arr![i32; 1, 2, 3, 4]);
assert_eq!(generic_sequence_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_plain_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_same_type_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
}
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
} | identifier_body |
generics.rs | #![recursion_limit = "128"]
#[macro_use]
extern crate generic_array;
use generic_array::typenum::consts::U4;
use std::fmt::Debug;
use std::ops::Add;
use generic_array::{GenericArray, ArrayLength};
use generic_array::sequence::*;
use generic_array::functional::*;
/// Example function using generics to pass N-length sequences and map them
pub fn generic_map<S>(s: S)
where
S: FunctionalSequence<i32>, // `.map`
S::Item: Add<i32, Output = i32>, // `x + 1`
S: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
MappedSequence<S, i32, i32>: Debug, // println!
{
let a = s.map(|x| x + 1);
println!("{:?}", a);
}
/// Complex example function using generics to pass N-length sequences, zip them, and then map that result.
///
/// If used with `GenericArray` specifically this isn't necessary
pub fn generic_sequence_zip_sum<A, B>(a: A, b: B) -> i32
where
A: FunctionalSequence<i32>, // `.zip`
B: FunctionalSequence<i32, Length = A::Length>, // `.zip`
A: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
B: MappedGenericSequence<i32, i32, Mapped = MappedSequence<A, i32, i32>>, // `i32` -> `i32`, prove A and B can map to the same output
A::Item: Add<B::Item, Output = i32>, // `l + r`
MappedSequence<A, i32, i32>: MappedGenericSequence<i32, i32> + FunctionalSequence<i32>, // `.map`
SequenceItem<MappedSequence<A, i32, i32>>: Add<i32, Output=i32>, // `x + 1`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: Debug, // `println!`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: FunctionalSequence<i32>, // `.fold`
SequenceItem<MappedSequence<MappedSequence<A, i32, i32>, i32, i32>>: Add<i32, Output=i32> // `x + a`, note the order
{
let c = a.zip(b, |l, r| l + r).map(|x| x + 1);
println!("{:?}", c);
c.fold(0, |a, x| x + a)
}
/// Super-simple fixed-length i32 `GenericArray`s
pub fn generic_array_plain_zip_sum(a: GenericArray<i32, U4>, b: GenericArray<i32, U4>) -> i32 {
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
pub fn generic_array_variable_length_zip_sum<N>(a: GenericArray<i32, N>, b: GenericArray<i32, N>) -> i32
where
N: ArrayLength<i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
pub fn generic_array_same_type_variable_length_zip_sum<T, N>(a: GenericArray<T, N>, b: GenericArray<T, N>) -> i32
where
N: ArrayLength<T> + ArrayLength<<T as Add<T>>::Output>,
T: Add<T, Output=i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
/// Complex example using fully generic `GenericArray`s with the same length.
///
/// It's mostly just the repeated `Add` traits, which would be present in other systems anyway.
pub fn | <A, B, N: ArrayLength<A> + ArrayLength<B>>(a: GenericArray<A, N>, b: GenericArray<B, N>) -> i32
where
A: Add<B>,
N: ArrayLength<<A as Add<B>>::Output> +
ArrayLength<<<A as Add<B>>::Output as Add<i32>>::Output>,
<A as Add<B>>::Output: Add<i32>,
<<A as Add<B>>::Output as Add<i32>>::Output: Add<i32, Output=i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
#[test]
fn test_generics() {
generic_map(arr![i32; 1, 2, 3, 4]);
assert_eq!(generic_sequence_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_plain_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_same_type_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
} | generic_array_zip_sum | identifier_name |
generics.rs | #![recursion_limit = "128"]
#[macro_use]
extern crate generic_array;
use generic_array::typenum::consts::U4;
use std::fmt::Debug;
use std::ops::Add;
use generic_array::{GenericArray, ArrayLength};
use generic_array::sequence::*;
use generic_array::functional::*;
/// Example function using generics to pass N-length sequences and map them
pub fn generic_map<S>(s: S)
where
S: FunctionalSequence<i32>, // `.map`
S::Item: Add<i32, Output = i32>, // `x + 1`
S: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
MappedSequence<S, i32, i32>: Debug, // println!
{
let a = s.map(|x| x + 1);
println!("{:?}", a);
}
/// Complex example function using generics to pass N-length sequences, zip them, and then map that result.
///
/// If used with `GenericArray` specifically this isn't necessary
pub fn generic_sequence_zip_sum<A, B>(a: A, b: B) -> i32
where
A: FunctionalSequence<i32>, // `.zip`
B: FunctionalSequence<i32, Length = A::Length>, // `.zip`
A: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
B: MappedGenericSequence<i32, i32, Mapped = MappedSequence<A, i32, i32>>, // `i32` -> `i32`, prove A and B can map to the same output
A::Item: Add<B::Item, Output = i32>, // `l + r`
MappedSequence<A, i32, i32>: MappedGenericSequence<i32, i32> + FunctionalSequence<i32>, // `.map`
SequenceItem<MappedSequence<A, i32, i32>>: Add<i32, Output=i32>, // `x + 1`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: Debug, // `println!`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: FunctionalSequence<i32>, // `.fold` | println!("{:?}", c);
c.fold(0, |a, x| x + a)
}
/// Super-simple fixed-length i32 `GenericArray`s
pub fn generic_array_plain_zip_sum(a: GenericArray<i32, U4>, b: GenericArray<i32, U4>) -> i32 {
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
pub fn generic_array_variable_length_zip_sum<N>(a: GenericArray<i32, N>, b: GenericArray<i32, N>) -> i32
where
N: ArrayLength<i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
pub fn generic_array_same_type_variable_length_zip_sum<T, N>(a: GenericArray<T, N>, b: GenericArray<T, N>) -> i32
where
N: ArrayLength<T> + ArrayLength<<T as Add<T>>::Output>,
T: Add<T, Output=i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
/// Complex example using fully generic `GenericArray`s with the same length.
///
/// It's mostly just the repeated `Add` traits, which would be present in other systems anyway.
pub fn generic_array_zip_sum<A, B, N: ArrayLength<A> + ArrayLength<B>>(a: GenericArray<A, N>, b: GenericArray<B, N>) -> i32
where
A: Add<B>,
N: ArrayLength<<A as Add<B>>::Output> +
ArrayLength<<<A as Add<B>>::Output as Add<i32>>::Output>,
<A as Add<B>>::Output: Add<i32>,
<<A as Add<B>>::Output as Add<i32>>::Output: Add<i32, Output=i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
#[test]
fn test_generics() {
generic_map(arr![i32; 1, 2, 3, 4]);
assert_eq!(generic_sequence_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_plain_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_same_type_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
} | SequenceItem<MappedSequence<MappedSequence<A, i32, i32>, i32, i32>>: Add<i32, Output=i32> // `x + a`, note the order
{
let c = a.zip(b, |l, r| l + r).map(|x| x + 1);
| random_line_split |
broker.rs | use std::option::Option;
use redis;
use rustc_serialize::json::ToJson;
use uuid::Uuid;
use task::{TaskDef, Task, TaskState};
| broker: &'a RedisBroker
}
impl<'a> Task for RedisTask<'a> {
fn status<'b>(&self) -> Option<&'b ToJson> {
None
}
fn await<'b>(&self) -> Option<&'b ToJson> {
None
}
fn get<'b>(&self) -> Option<&'b ToJson> {
None
}
}
pub struct RedisBroker {
conn: redis::Connection,
key_prefix: &'static str,
poll_interval_ms: u32
}
impl RedisBroker {
pub fn new(conn: redis::Connection) -> RedisBroker {
RedisBroker {conn: conn, key_prefix: "", poll_interval_ms: 5}
}
pub fn execute_task(&self, task_def: &TaskDef, args: &ToJson) -> RedisTask {
let id = Uuid::new_v4().simple().to_string();
RedisTask {
id: id,
state: TaskState::Queued,
broker: self
}
}
} |
pub struct RedisTask<'a> {
id: String,
state: TaskState, | random_line_split |
broker.rs | use std::option::Option;
use redis;
use rustc_serialize::json::ToJson;
use uuid::Uuid;
use task::{TaskDef, Task, TaskState};
pub struct RedisTask<'a> {
id: String,
state: TaskState,
broker: &'a RedisBroker
}
impl<'a> Task for RedisTask<'a> {
fn status<'b>(&self) -> Option<&'b ToJson> {
None
}
fn | <'b>(&self) -> Option<&'b ToJson> {
None
}
fn get<'b>(&self) -> Option<&'b ToJson> {
None
}
}
pub struct RedisBroker {
conn: redis::Connection,
key_prefix: &'static str,
poll_interval_ms: u32
}
impl RedisBroker {
pub fn new(conn: redis::Connection) -> RedisBroker {
RedisBroker {conn: conn, key_prefix: "", poll_interval_ms: 5}
}
pub fn execute_task(&self, task_def: &TaskDef, args: &ToJson) -> RedisTask {
let id = Uuid::new_v4().simple().to_string();
RedisTask {
id: id,
state: TaskState::Queued,
broker: self
}
}
}
| await | identifier_name |
render.rs | use gfx::{self, texture, Factory, Resources, PipelineState, Encoder, CommandBuffer};
use gfx::traits::FactoryExt;
use gfx::handle::{RenderTargetView, DepthStencilView};
use image::RgbaImage;
use resource::atlas::{Texmap, TextureSelection};
use std::collections::HashMap;
use ui::managed::ManagedBuffer;
use ColorFormat;
use DepthFormat;
// TODO: Multitexture-per-PSO
pub struct TexturedPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, textured_pipe::Meta>,
data: textured_pipe::Data<R>,
}
impl<R> TexturedPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, image: &RgbaImage, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_TEX.as_bytes(),
FRAGMENT_SHADER_TEX.as_bytes(),
textured_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let (_, view) = factory.create_texture_immutable_u8::<(gfx::format::R8_G8_B8_A8, gfx::format::Srgb)>(
texture::Kind::D2(image.width() as u16, image.height() as u16, texture::AaMode::Single),
&[&image]
).unwrap();
let sampler = factory.create_sampler(texture::SamplerInfo::new(
texture::FilterMethod::Scale,
texture::WrapMode::Tile,
));
let data = textured_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth,
texture: (view, sampler),
};
TexturedPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn | (&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct SolidPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, solid_pipe::Meta>,
data: solid_pipe::Data<R>
}
impl<R> SolidPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_SOLID.as_bytes(),
FRAGMENT_SHADER_SOLID.as_bytes(),
solid_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let data = solid_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth
};
SolidPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn buffer_mut(&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct Context<R> where R: Resources {
out: RenderTargetView<R, ColorFormat>,
out_depth: DepthStencilView<R, DepthFormat>,
solid: SolidPipe<R>,
textured: Vec<TexturedPipe<R>>,
textures: HashMap<String, (usize, TextureSelection)>
}
impl<R> Context<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
Context {
out: out.clone(),
out_depth: out_depth.clone(),
solid: SolidPipe::create(factory, out.clone(), out_depth.clone()),
textured: Vec::new(),
textures: HashMap::new()
}
}
pub fn new_zone(&mut self) -> usize {
let zone = self.solid.buffer_mut().new_zone();
for pipe in &mut self.textured {
pipe.buffer_mut().new_zone();
}
zone
}
pub fn extend_zone<I>(&mut self, iter: I, texture: Option<&str>) -> bool where I: IntoIterator<Item=Vertex> {
if let Some(texture) = texture {
if let Some(&(index, selection)) = self.textures.get(texture) {
//println!("tex: {}@{}, ", texture, index);
Self::extend_textured(&mut self.textured[index], iter, selection);
true
} else {
false
}
} else {
self.solid.buffer_mut().extend(iter);
true
}
}
fn extend_textured<I>(pipe: &mut TexturedPipe<R>, iter: I, selection: TextureSelection) where I: IntoIterator<Item=Vertex> {
//println!("sel: {:?}", selection);
pipe.buffer_mut().extend(iter.into_iter().map(|v| {/*println!("tx: {:?}", v.tex);*/let v = Vertex {
pos: v.pos,
color: v.color,
tex: [
selection.min[0].to_part(0.0) + v.tex[0] * selection.size[0].to_part(0.0),
selection.min[1].to_part(0.0) + v.tex[1] * selection.size[1].to_part(0.0)
]
}; /*println!("ext {:?}", v)*/; v}))
}
pub fn add_texture<F>(&mut self, factory: &mut F, texmap: &Texmap, texture: &RgbaImage) where F: Factory<R> + FactoryExt<R> {
let index = self.textured.len();
self.textured.push(TexturedPipe::create(factory, texture, self.out.clone(), self.out_depth.clone()));
for (k, v) in texmap.0.iter() {
self.textures.insert(k.clone(), (index, *v));
}
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.solid.render(factory, encoder);
for pipe in &mut self.textured {
pipe.render(factory, encoder);
}
}
}
gfx_defines!{
vertex Vertex {
pos: [f32; 3] = "a_Pos",
color: [f32; 3] = "a_Color",
tex: [f32; 2] = "a_Tex",
}
pipeline textured_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
texture: gfx::TextureSampler<[f32; 4]> = "Texture",
}
pipeline solid_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
}
}
const VERTEX_SHADER_TEX: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
in vec2 a_Tex;
out vec4 v_Color;
out vec2 v_Tex;
void main() {
v_Tex = a_Tex;
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_TEX: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
in vec2 v_Tex;
out vec4 Out;
void main() {
Out = texture(Texture, v_Tex) * v_Color;
}
";
const VERTEX_SHADER_SOLID: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
out vec4 v_Color;
void main() {
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_SOLID: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
out vec4 Out;
void main() {
Out = v_Color;
}
"; | buffer_mut | identifier_name |
render.rs | use gfx::{self, texture, Factory, Resources, PipelineState, Encoder, CommandBuffer};
use gfx::traits::FactoryExt;
use gfx::handle::{RenderTargetView, DepthStencilView};
use image::RgbaImage;
use resource::atlas::{Texmap, TextureSelection};
use std::collections::HashMap;
use ui::managed::ManagedBuffer;
use ColorFormat;
use DepthFormat;
// TODO: Multitexture-per-PSO
pub struct TexturedPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, textured_pipe::Meta>,
data: textured_pipe::Data<R>,
}
impl<R> TexturedPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, image: &RgbaImage, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_TEX.as_bytes(),
FRAGMENT_SHADER_TEX.as_bytes(),
textured_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let (_, view) = factory.create_texture_immutable_u8::<(gfx::format::R8_G8_B8_A8, gfx::format::Srgb)>(
texture::Kind::D2(image.width() as u16, image.height() as u16, texture::AaMode::Single),
&[&image]
).unwrap();
let sampler = factory.create_sampler(texture::SamplerInfo::new(
texture::FilterMethod::Scale,
texture::WrapMode::Tile,
));
let data = textured_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth,
texture: (view, sampler),
};
TexturedPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn buffer_mut(&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct SolidPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, solid_pipe::Meta>,
data: solid_pipe::Data<R>
}
impl<R> SolidPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_SOLID.as_bytes(),
FRAGMENT_SHADER_SOLID.as_bytes(),
solid_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let data = solid_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth
};
SolidPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn buffer_mut(&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct Context<R> where R: Resources {
out: RenderTargetView<R, ColorFormat>,
out_depth: DepthStencilView<R, DepthFormat>,
solid: SolidPipe<R>,
textured: Vec<TexturedPipe<R>>,
textures: HashMap<String, (usize, TextureSelection)>
}
impl<R> Context<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
Context {
out: out.clone(),
out_depth: out_depth.clone(),
solid: SolidPipe::create(factory, out.clone(), out_depth.clone()),
textured: Vec::new(),
textures: HashMap::new()
} | for pipe in &mut self.textured {
pipe.buffer_mut().new_zone();
}
zone
}
pub fn extend_zone<I>(&mut self, iter: I, texture: Option<&str>) -> bool where I: IntoIterator<Item=Vertex> {
if let Some(texture) = texture {
if let Some(&(index, selection)) = self.textures.get(texture) {
//println!("tex: {}@{}, ", texture, index);
Self::extend_textured(&mut self.textured[index], iter, selection);
true
} else {
false
}
} else {
self.solid.buffer_mut().extend(iter);
true
}
}
fn extend_textured<I>(pipe: &mut TexturedPipe<R>, iter: I, selection: TextureSelection) where I: IntoIterator<Item=Vertex> {
//println!("sel: {:?}", selection);
pipe.buffer_mut().extend(iter.into_iter().map(|v| {/*println!("tx: {:?}", v.tex);*/let v = Vertex {
pos: v.pos,
color: v.color,
tex: [
selection.min[0].to_part(0.0) + v.tex[0] * selection.size[0].to_part(0.0),
selection.min[1].to_part(0.0) + v.tex[1] * selection.size[1].to_part(0.0)
]
}; /*println!("ext {:?}", v)*/; v}))
}
pub fn add_texture<F>(&mut self, factory: &mut F, texmap: &Texmap, texture: &RgbaImage) where F: Factory<R> + FactoryExt<R> {
let index = self.textured.len();
self.textured.push(TexturedPipe::create(factory, texture, self.out.clone(), self.out_depth.clone()));
for (k, v) in texmap.0.iter() {
self.textures.insert(k.clone(), (index, *v));
}
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.solid.render(factory, encoder);
for pipe in &mut self.textured {
pipe.render(factory, encoder);
}
}
}
gfx_defines!{
vertex Vertex {
pos: [f32; 3] = "a_Pos",
color: [f32; 3] = "a_Color",
tex: [f32; 2] = "a_Tex",
}
pipeline textured_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
texture: gfx::TextureSampler<[f32; 4]> = "Texture",
}
pipeline solid_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
}
}
const VERTEX_SHADER_TEX: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
in vec2 a_Tex;
out vec4 v_Color;
out vec2 v_Tex;
void main() {
v_Tex = a_Tex;
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_TEX: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
in vec2 v_Tex;
out vec4 Out;
void main() {
Out = texture(Texture, v_Tex) * v_Color;
}
";
const VERTEX_SHADER_SOLID: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
out vec4 v_Color;
void main() {
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_SOLID: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
out vec4 Out;
void main() {
Out = v_Color;
}
"; | }
pub fn new_zone(&mut self) -> usize {
let zone = self.solid.buffer_mut().new_zone();
| random_line_split |
render.rs | use gfx::{self, texture, Factory, Resources, PipelineState, Encoder, CommandBuffer};
use gfx::traits::FactoryExt;
use gfx::handle::{RenderTargetView, DepthStencilView};
use image::RgbaImage;
use resource::atlas::{Texmap, TextureSelection};
use std::collections::HashMap;
use ui::managed::ManagedBuffer;
use ColorFormat;
use DepthFormat;
// TODO: Multitexture-per-PSO
pub struct TexturedPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, textured_pipe::Meta>,
data: textured_pipe::Data<R>,
}
impl<R> TexturedPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, image: &RgbaImage, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_TEX.as_bytes(),
FRAGMENT_SHADER_TEX.as_bytes(),
textured_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let (_, view) = factory.create_texture_immutable_u8::<(gfx::format::R8_G8_B8_A8, gfx::format::Srgb)>(
texture::Kind::D2(image.width() as u16, image.height() as u16, texture::AaMode::Single),
&[&image]
).unwrap();
let sampler = factory.create_sampler(texture::SamplerInfo::new(
texture::FilterMethod::Scale,
texture::WrapMode::Tile,
));
let data = textured_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth,
texture: (view, sampler),
};
TexturedPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn buffer_mut(&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct SolidPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, solid_pipe::Meta>,
data: solid_pipe::Data<R>
}
impl<R> SolidPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_SOLID.as_bytes(),
FRAGMENT_SHADER_SOLID.as_bytes(),
solid_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let data = solid_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth
};
SolidPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn buffer_mut(&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct Context<R> where R: Resources {
out: RenderTargetView<R, ColorFormat>,
out_depth: DepthStencilView<R, DepthFormat>,
solid: SolidPipe<R>,
textured: Vec<TexturedPipe<R>>,
textures: HashMap<String, (usize, TextureSelection)>
}
impl<R> Context<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
Context {
out: out.clone(),
out_depth: out_depth.clone(),
solid: SolidPipe::create(factory, out.clone(), out_depth.clone()),
textured: Vec::new(),
textures: HashMap::new()
}
}
pub fn new_zone(&mut self) -> usize {
let zone = self.solid.buffer_mut().new_zone();
for pipe in &mut self.textured {
pipe.buffer_mut().new_zone();
}
zone
}
pub fn extend_zone<I>(&mut self, iter: I, texture: Option<&str>) -> bool where I: IntoIterator<Item=Vertex> {
if let Some(texture) = texture {
if let Some(&(index, selection)) = self.textures.get(texture) | else {
false
}
} else {
self.solid.buffer_mut().extend(iter);
true
}
}
fn extend_textured<I>(pipe: &mut TexturedPipe<R>, iter: I, selection: TextureSelection) where I: IntoIterator<Item=Vertex> {
//println!("sel: {:?}", selection);
pipe.buffer_mut().extend(iter.into_iter().map(|v| {/*println!("tx: {:?}", v.tex);*/let v = Vertex {
pos: v.pos,
color: v.color,
tex: [
selection.min[0].to_part(0.0) + v.tex[0] * selection.size[0].to_part(0.0),
selection.min[1].to_part(0.0) + v.tex[1] * selection.size[1].to_part(0.0)
]
}; /*println!("ext {:?}", v)*/; v}))
}
pub fn add_texture<F>(&mut self, factory: &mut F, texmap: &Texmap, texture: &RgbaImage) where F: Factory<R> + FactoryExt<R> {
let index = self.textured.len();
self.textured.push(TexturedPipe::create(factory, texture, self.out.clone(), self.out_depth.clone()));
for (k, v) in texmap.0.iter() {
self.textures.insert(k.clone(), (index, *v));
}
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.solid.render(factory, encoder);
for pipe in &mut self.textured {
pipe.render(factory, encoder);
}
}
}
gfx_defines!{
vertex Vertex {
pos: [f32; 3] = "a_Pos",
color: [f32; 3] = "a_Color",
tex: [f32; 2] = "a_Tex",
}
pipeline textured_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
texture: gfx::TextureSampler<[f32; 4]> = "Texture",
}
pipeline solid_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
}
}
const VERTEX_SHADER_TEX: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
in vec2 a_Tex;
out vec4 v_Color;
out vec2 v_Tex;
void main() {
v_Tex = a_Tex;
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_TEX: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
in vec2 v_Tex;
out vec4 Out;
void main() {
Out = texture(Texture, v_Tex) * v_Color;
}
";
const VERTEX_SHADER_SOLID: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
out vec4 v_Color;
void main() {
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_SOLID: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
out vec4 Out;
void main() {
Out = v_Color;
}
"; | {
//println!("tex: {}@{}, ", texture, index);
Self::extend_textured(&mut self.textured[index], iter, selection);
true
} | conditional_block |
render.rs | use gfx::{self, texture, Factory, Resources, PipelineState, Encoder, CommandBuffer};
use gfx::traits::FactoryExt;
use gfx::handle::{RenderTargetView, DepthStencilView};
use image::RgbaImage;
use resource::atlas::{Texmap, TextureSelection};
use std::collections::HashMap;
use ui::managed::ManagedBuffer;
use ColorFormat;
use DepthFormat;
// TODO: Multitexture-per-PSO
pub struct TexturedPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, textured_pipe::Meta>,
data: textured_pipe::Data<R>,
}
impl<R> TexturedPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, image: &RgbaImage, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_TEX.as_bytes(),
FRAGMENT_SHADER_TEX.as_bytes(),
textured_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let (_, view) = factory.create_texture_immutable_u8::<(gfx::format::R8_G8_B8_A8, gfx::format::Srgb)>(
texture::Kind::D2(image.width() as u16, image.height() as u16, texture::AaMode::Single),
&[&image]
).unwrap();
let sampler = factory.create_sampler(texture::SamplerInfo::new(
texture::FilterMethod::Scale,
texture::WrapMode::Tile,
));
let data = textured_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth,
texture: (view, sampler),
};
TexturedPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn buffer_mut(&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct SolidPipe<R> where R: Resources {
buffer: ManagedBuffer<R>,
state: PipelineState<R, solid_pipe::Meta>,
data: solid_pipe::Data<R>
}
impl<R> SolidPipe<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
let state = factory.create_pipeline_simple(
VERTEX_SHADER_SOLID.as_bytes(),
FRAGMENT_SHADER_SOLID.as_bytes(),
solid_pipe::new()
).unwrap();
let buffer = ManagedBuffer::new(factory);
let data = solid_pipe::Data {
buffer: buffer.remote().clone(),
out: out,
out_depth: out_depth
};
SolidPipe {
buffer: buffer,
state: state,
data: data
}
}
pub fn buffer_mut(&mut self) -> &mut ManagedBuffer<R> {
&mut self.buffer
}
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.buffer.update(factory, encoder);
self.data.buffer = self.buffer.remote().clone();
encoder.draw(&self.buffer.slice(), &self.state, &self.data);
}
}
pub struct Context<R> where R: Resources {
out: RenderTargetView<R, ColorFormat>,
out_depth: DepthStencilView<R, DepthFormat>,
solid: SolidPipe<R>,
textured: Vec<TexturedPipe<R>>,
textures: HashMap<String, (usize, TextureSelection)>
}
impl<R> Context<R> where R: Resources {
pub fn create<F>(factory: &mut F, out: RenderTargetView<R, ColorFormat>, out_depth: DepthStencilView<R, DepthFormat>) -> Self where F: Factory<R> + FactoryExt<R> {
Context {
out: out.clone(),
out_depth: out_depth.clone(),
solid: SolidPipe::create(factory, out.clone(), out_depth.clone()),
textured: Vec::new(),
textures: HashMap::new()
}
}
pub fn new_zone(&mut self) -> usize {
let zone = self.solid.buffer_mut().new_zone();
for pipe in &mut self.textured {
pipe.buffer_mut().new_zone();
}
zone
}
pub fn extend_zone<I>(&mut self, iter: I, texture: Option<&str>) -> bool where I: IntoIterator<Item=Vertex> {
if let Some(texture) = texture {
if let Some(&(index, selection)) = self.textures.get(texture) {
//println!("tex: {}@{}, ", texture, index);
Self::extend_textured(&mut self.textured[index], iter, selection);
true
} else {
false
}
} else {
self.solid.buffer_mut().extend(iter);
true
}
}
fn extend_textured<I>(pipe: &mut TexturedPipe<R>, iter: I, selection: TextureSelection) where I: IntoIterator<Item=Vertex> {
//println!("sel: {:?}", selection);
pipe.buffer_mut().extend(iter.into_iter().map(|v| {/*println!("tx: {:?}", v.tex);*/let v = Vertex {
pos: v.pos,
color: v.color,
tex: [
selection.min[0].to_part(0.0) + v.tex[0] * selection.size[0].to_part(0.0),
selection.min[1].to_part(0.0) + v.tex[1] * selection.size[1].to_part(0.0)
]
}; /*println!("ext {:?}", v)*/; v}))
}
pub fn add_texture<F>(&mut self, factory: &mut F, texmap: &Texmap, texture: &RgbaImage) where F: Factory<R> + FactoryExt<R> |
pub fn render<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
self.solid.render(factory, encoder);
for pipe in &mut self.textured {
pipe.render(factory, encoder);
}
}
}
gfx_defines!{
vertex Vertex {
pos: [f32; 3] = "a_Pos",
color: [f32; 3] = "a_Color",
tex: [f32; 2] = "a_Tex",
}
pipeline textured_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
texture: gfx::TextureSampler<[f32; 4]> = "Texture",
}
pipeline solid_pipe {
buffer: gfx::VertexBuffer<Vertex> = (),
out: gfx::BlendTarget<ColorFormat> = ("Out", gfx::state::MASK_ALL, gfx::preset::blend::ALPHA),
out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE,
}
}
const VERTEX_SHADER_TEX: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
in vec2 a_Tex;
out vec4 v_Color;
out vec2 v_Tex;
void main() {
v_Tex = a_Tex;
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_TEX: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
in vec2 v_Tex;
out vec4 Out;
void main() {
Out = texture(Texture, v_Tex) * v_Color;
}
";
const VERTEX_SHADER_SOLID: &str = "
#version 130
in vec3 a_Pos;
in vec3 a_Color;
out vec4 v_Color;
void main() {
v_Color = vec4(a_Color, 1.0);
gl_Position = vec4(a_Pos, 1.0);
}
";
const FRAGMENT_SHADER_SOLID: &str = "
#version 130
uniform sampler2D Texture;
in vec4 v_Color;
out vec4 Out;
void main() {
Out = v_Color;
}
"; | {
let index = self.textured.len();
self.textured.push(TexturedPipe::create(factory, texture, self.out.clone(), self.out_depth.clone()));
for (k, v) in texmap.0.iter() {
self.textures.insert(k.clone(), (index, *v));
}
} | identifier_body |
text.rs | debug!("TextRunScanner: complete.");
InlineFragments {
fragments: new_fragments,
}
}
/// A "clump" is a range of inline flow leaves that can be merged together into a single
/// fragment. Adjacent text with the same style can be merged, and nothing else can.
///
/// The flow keeps track of the fragments contained by all non-leaf DOM nodes. This is necessary
/// for correct painting order. Since we compress several leaf fragments here, the mapping must
/// be adjusted.
fn flush_clump_to_list(&mut self,
font_context: &mut FontContext,
out_fragments: &mut Vec<Fragment>,
paragraph_bytes_processed: &mut usize,
bidi_levels: Option<&[u8]>,
mut last_whitespace: bool)
-> bool {
debug!("TextRunScanner: flushing {} fragments in range", self.clump.len());
debug_assert!(!self.clump.is_empty());
match self.clump.front().unwrap().specific {
SpecificFragmentInfo::UnscannedText(_) => {}
_ => {
debug_assert!(self.clump.len() == 1,
"WAT: can't coalesce non-text nodes in flush_clump_to_list()!");
out_fragments.push(self.clump.pop_front().unwrap());
return false
}
}
// Concatenate all of the transformed strings together, saving the new character indices.
let mut mappings: Vec<RunMapping> = Vec::new();
let runs = {
let fontgroup;
let compression;
let text_transform;
let letter_spacing;
let word_spacing;
let text_rendering;
{
let in_fragment = self.clump.front().unwrap();
let font_style = in_fragment.style().get_font_arc();
let inherited_text_style = in_fragment.style().get_inheritedtext();
fontgroup = font_context.layout_font_group_for_style(font_style);
compression = match in_fragment.white_space() {
white_space::T::normal |
white_space::T::nowrap => CompressionMode::CompressWhitespaceNewline,
white_space::T::pre |
white_space::T::pre_wrap => CompressionMode::CompressNone,
white_space::T::pre_line => CompressionMode::CompressWhitespace,
};
text_transform = inherited_text_style.text_transform;
letter_spacing = inherited_text_style.letter_spacing.0;
word_spacing = inherited_text_style.word_spacing.0.unwrap_or(Au(0));
text_rendering = inherited_text_style.text_rendering;
}
// First, transform/compress text of all the nodes.
let (mut run_info_list, mut run_info) = (Vec::new(), RunInfo::new());
let mut insertion_point = None;
for (fragment_index, in_fragment) in self.clump.iter().enumerate() {
debug!(" flushing {:?}", in_fragment);
let mut mapping = RunMapping::new(&run_info_list[..], fragment_index);
let text;
let selection;
match in_fragment.specific {
SpecificFragmentInfo::UnscannedText(ref text_fragment_info) => {
text = &text_fragment_info.text;
selection = text_fragment_info.selection;
}
_ => panic!("Expected an unscanned text fragment!"),
};
insertion_point = match selection {
Some(range) if range.is_empty() => |
_ => None
};
let (mut start_position, mut end_position) = (0, 0);
for (byte_index, character) in text.char_indices() {
// Search for the first font in this font group that contains a glyph for this
// character.
let font_index = fontgroup.fonts.iter().position(|font| {
font.borrow().glyph_index(character).is_some()
}).unwrap_or(0);
// The following code panics one way or another if this condition isn't met.
assert!(fontgroup.fonts.len() > 0);
let bidi_level = match bidi_levels {
Some(levels) => levels[*paragraph_bytes_processed],
None => 0
};
// Break the run if the new character has a different explicit script than the
// previous characters.
//
// TODO: Special handling of paired punctuation characters.
// http://www.unicode.org/reports/tr24/#Common
let script = get_script(character);
let compatible_script = is_compatible(script, run_info.script);
if compatible_script &&!is_specific(run_info.script) && is_specific(script) {
run_info.script = script;
}
let selected = match selection {
Some(range) => range.contains(ByteIndex(byte_index as isize)),
None => false
};
// Now, if necessary, flush the mapping we were building up.
let flush_run = run_info.font_index!= font_index ||
run_info.bidi_level!= bidi_level ||
!compatible_script;
let flush_mapping = flush_run || mapping.selected!= selected;
if flush_mapping {
mapping.flush(&mut mappings,
&mut run_info,
&**text,
compression,
text_transform,
&mut last_whitespace,
&mut start_position,
end_position);
if run_info.text.len() > 0 {
if flush_run {
run_info.flush(&mut run_info_list, &mut insertion_point);
run_info = RunInfo::new();
}
mapping = RunMapping::new(&run_info_list[..],
fragment_index);
}
run_info.font_index = font_index;
run_info.bidi_level = bidi_level;
run_info.script = script;
mapping.selected = selected;
}
// Consume this character.
end_position += character.len_utf8();
*paragraph_bytes_processed += character.len_utf8();
}
// Flush the last mapping we created for this fragment to the list.
mapping.flush(&mut mappings,
&mut run_info,
&**text,
compression,
text_transform,
&mut last_whitespace,
&mut start_position,
end_position);
}
// Push the final run info.
run_info.flush(&mut run_info_list, &mut insertion_point);
// Per CSS 2.1 § 16.4, "when the resultant space between two characters is not the same
// as the default space, user agents should not use ligatures." This ensures that, for
// example, `finally` with a wide `letter-spacing` renders as `f i n a l l y` and not
// `fi n a l l y`.
let mut flags = ShapingFlags::empty();
match letter_spacing {
Some(Au(0)) | None => {}
Some(_) => flags.insert(IGNORE_LIGATURES_SHAPING_FLAG),
}
if text_rendering == text_rendering::T::optimizespeed {
flags.insert(IGNORE_LIGATURES_SHAPING_FLAG);
flags.insert(DISABLE_KERNING_SHAPING_FLAG)
}
let options = ShapingOptions {
letter_spacing: letter_spacing,
word_spacing: word_spacing,
script: Script::Common,
flags: flags,
};
// FIXME(https://github.com/rust-lang/rust/issues/23338)
run_info_list.into_iter().map(|run_info| {
let mut options = options;
options.script = run_info.script;
if is_rtl(run_info.bidi_level) {
options.flags.insert(RTL_FLAG);
}
let mut font = fontgroup.fonts.get(run_info.font_index).unwrap().borrow_mut();
ScannedTextRun {
run: Arc::new(TextRun::new(&mut *font,
run_info.text,
&options,
run_info.bidi_level)),
insertion_point: run_info.insertion_point,
}
}).collect::<Vec<_>>()
};
// Make new fragments with the runs and adjusted text indices.
debug!("TextRunScanner: pushing {} fragment(s)", self.clump.len());
let mut mappings = mappings.into_iter().peekable();
let mut prev_fragments_to_meld = Vec::new();
for (logical_offset, old_fragment) in
mem::replace(&mut self.clump, LinkedList::new()).into_iter().enumerate() {
let mut is_first_mapping_of_this_old_fragment = true;
loop {
match mappings.peek() {
Some(mapping) if mapping.old_fragment_index == logical_offset => {}
Some(_) | None => {
if is_first_mapping_of_this_old_fragment {
// There were no mappings for this unscanned fragment. Transfer its
// flags to the previous/next sibling elements instead.
if let Some(ref mut last_fragment) = out_fragments.last_mut() {
last_fragment.meld_with_next_inline_fragment(&old_fragment);
}
prev_fragments_to_meld.push(old_fragment);
}
break;
}
};
let mapping = mappings.next().unwrap();
let scanned_run = runs[mapping.text_run_index].clone();
let mut byte_range = Range::new(ByteIndex(mapping.byte_range.begin() as isize),
ByteIndex(mapping.byte_range.length() as isize));
let mut flags = ScannedTextFlags::empty();
let text_size = old_fragment.border_box.size;
let requires_line_break_afterward_if_wrapping_on_newlines =
scanned_run.run.text[mapping.byte_range.begin()..mapping.byte_range.end()]
.ends_with('\n');
if requires_line_break_afterward_if_wrapping_on_newlines {
byte_range.extend_by(ByteIndex(-1)); // Trim the '\n'
flags.insert(REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES);
}
if mapping.selected {
flags.insert(SELECTED);
}
let insertion_point = if mapping.contains_insertion_point(scanned_run.insertion_point) {
scanned_run.insertion_point
} else {
None
};
let mut new_text_fragment_info = box ScannedTextFragmentInfo::new(
scanned_run.run,
byte_range,
text_size,
insertion_point,
flags);
let new_metrics = new_text_fragment_info.run.metrics_for_range(&byte_range);
let writing_mode = old_fragment.style.writing_mode;
let bounding_box_size = bounding_box_for_run_metrics(&new_metrics, writing_mode);
new_text_fragment_info.content_size = bounding_box_size;
let mut new_fragment = old_fragment.transform(
bounding_box_size,
SpecificFragmentInfo::ScannedText(new_text_fragment_info));
let is_last_mapping_of_this_old_fragment = match mappings.peek() {
Some(mapping) if mapping.old_fragment_index == logical_offset => false,
_ => true
};
if let Some(ref mut context) = new_fragment.inline_context {
for node in &mut context.nodes {
if!is_last_mapping_of_this_old_fragment {
node.flags.remove(LAST_FRAGMENT_OF_ELEMENT);
}
if!is_first_mapping_of_this_old_fragment {
node.flags.remove(FIRST_FRAGMENT_OF_ELEMENT);
}
}
}
for prev_fragment in prev_fragments_to_meld.drain(..) {
new_fragment.meld_with_prev_inline_fragment(&prev_fragment);
}
is_first_mapping_of_this_old_fragment = false;
out_fragments.push(new_fragment)
}
}
last_whitespace
}
}
#[inline]
fn bounding_box_for_run_metrics(metrics: &RunMetrics, writing_mode: WritingMode)
-> LogicalSize<Au> {
// This does nothing, but it will fail to build
// when more values are added to the `text-orientation` CSS property.
// This will be a reminder to update the code below.
let dummy: Option<text_orientation::T> = None;
match dummy {
Some(text_orientation::T::sideways_right) |
Some(text_orientation::T::sideways_left) |
Some(text_orientation::T::sideways) |
None => {}
}
// In vertical sideways or horizontal upright text,
// the "width" of text metrics is always inline
// This will need to be updated when other text orientations are supported.
LogicalSize::new(
writing_mode,
metrics.bounding_box.size.width,
metrics.bounding_box.size.height)
}
/// Returns the metrics of the font represented by the given `style_structs::Font`, respectively.
///
/// `#[inline]` because often the caller only needs a few fields from the font metrics.
#[inline]
pub fn font_metrics_for_style(font_context: &mut FontContext, font_style: Arc<style_structs::Font>)
-> FontMetrics {
let fontgroup = font_context.layout_font_group_for_style(font_style);
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let font = fontgroup.fonts[0].borrow();
font.metrics.clone()
}
/// Returns the line block-size needed by the given computed style and font size.
| {
// `range` is the range within the current fragment. To get the range
// within the text run, offset it by the length of the preceding fragments.
Some(range.begin() + ByteIndex(run_info.text.len() as isize))
} | conditional_block |
text.rs | debug!("TextRunScanner: complete.");
InlineFragments {
fragments: new_fragments,
}
}
/// A "clump" is a range of inline flow leaves that can be merged together into a single
/// fragment. Adjacent text with the same style can be merged, and nothing else can.
///
/// The flow keeps track of the fragments contained by all non-leaf DOM nodes. This is necessary
/// for correct painting order. Since we compress several leaf fragments here, the mapping must
/// be adjusted.
fn flush_clump_to_list(&mut self,
font_context: &mut FontContext,
out_fragments: &mut Vec<Fragment>,
paragraph_bytes_processed: &mut usize,
bidi_levels: Option<&[u8]>,
mut last_whitespace: bool)
-> bool {
debug!("TextRunScanner: flushing {} fragments in range", self.clump.len());
debug_assert!(!self.clump.is_empty());
match self.clump.front().unwrap().specific {
SpecificFragmentInfo::UnscannedText(_) => {}
_ => {
debug_assert!(self.clump.len() == 1,
"WAT: can't coalesce non-text nodes in flush_clump_to_list()!");
out_fragments.push(self.clump.pop_front().unwrap());
return false
}
}
// Concatenate all of the transformed strings together, saving the new character indices.
let mut mappings: Vec<RunMapping> = Vec::new();
let runs = {
let fontgroup;
let compression;
let text_transform;
let letter_spacing;
let word_spacing;
let text_rendering;
{
let in_fragment = self.clump.front().unwrap();
let font_style = in_fragment.style().get_font_arc();
let inherited_text_style = in_fragment.style().get_inheritedtext();
fontgroup = font_context.layout_font_group_for_style(font_style);
compression = match in_fragment.white_space() {
white_space::T::normal |
white_space::T::nowrap => CompressionMode::CompressWhitespaceNewline,
white_space::T::pre |
white_space::T::pre_wrap => CompressionMode::CompressNone,
white_space::T::pre_line => CompressionMode::CompressWhitespace,
};
text_transform = inherited_text_style.text_transform;
letter_spacing = inherited_text_style.letter_spacing.0;
word_spacing = inherited_text_style.word_spacing.0.unwrap_or(Au(0));
text_rendering = inherited_text_style.text_rendering;
}
// First, transform/compress text of all the nodes.
let (mut run_info_list, mut run_info) = (Vec::new(), RunInfo::new());
let mut insertion_point = None;
for (fragment_index, in_fragment) in self.clump.iter().enumerate() {
debug!(" flushing {:?}", in_fragment);
let mut mapping = RunMapping::new(&run_info_list[..], fragment_index);
let text;
let selection;
match in_fragment.specific {
SpecificFragmentInfo::UnscannedText(ref text_fragment_info) => {
text = &text_fragment_info.text;
selection = text_fragment_info.selection;
}
_ => panic!("Expected an unscanned text fragment!"),
};
insertion_point = match selection {
Some(range) if range.is_empty() => {
// `range` is the range within the current fragment. To get the range
// within the text run, offset it by the length of the preceding fragments.
Some(range.begin() + ByteIndex(run_info.text.len() as isize))
}
_ => None
};
let (mut start_position, mut end_position) = (0, 0);
for (byte_index, character) in text.char_indices() {
// Search for the first font in this font group that contains a glyph for this
// character.
let font_index = fontgroup.fonts.iter().position(|font| {
font.borrow().glyph_index(character).is_some()
}).unwrap_or(0);
// The following code panics one way or another if this condition isn't met.
assert!(fontgroup.fonts.len() > 0);
let bidi_level = match bidi_levels {
Some(levels) => levels[*paragraph_bytes_processed],
None => 0
};
// Break the run if the new character has a different explicit script than the
// previous characters.
//
// TODO: Special handling of paired punctuation characters.
// http://www.unicode.org/reports/tr24/#Common
let script = get_script(character);
let compatible_script = is_compatible(script, run_info.script);
if compatible_script &&!is_specific(run_info.script) && is_specific(script) {
run_info.script = script;
}
let selected = match selection {
Some(range) => range.contains(ByteIndex(byte_index as isize)),
None => false
};
// Now, if necessary, flush the mapping we were building up.
let flush_run = run_info.font_index!= font_index ||
run_info.bidi_level!= bidi_level ||
!compatible_script;
let flush_mapping = flush_run || mapping.selected!= selected;
if flush_mapping {
mapping.flush(&mut mappings,
&mut run_info,
&**text,
compression,
text_transform,
&mut last_whitespace,
&mut start_position,
end_position);
if run_info.text.len() > 0 {
if flush_run {
run_info.flush(&mut run_info_list, &mut insertion_point);
run_info = RunInfo::new();
}
mapping = RunMapping::new(&run_info_list[..],
fragment_index);
}
run_info.font_index = font_index;
run_info.bidi_level = bidi_level;
run_info.script = script;
mapping.selected = selected;
}
// Consume this character.
end_position += character.len_utf8();
*paragraph_bytes_processed += character.len_utf8();
}
// Flush the last mapping we created for this fragment to the list.
mapping.flush(&mut mappings,
&mut run_info,
&**text,
compression,
text_transform,
&mut last_whitespace,
&mut start_position,
end_position);
}
// Push the final run info.
run_info.flush(&mut run_info_list, &mut insertion_point);
// Per CSS 2.1 § 16.4, "when the resultant space between two characters is not the same
// as the default space, user agents should not use ligatures." This ensures that, for
// example, `finally` with a wide `letter-spacing` renders as `f i n a l l y` and not
// `fi n a l l y`.
let mut flags = ShapingFlags::empty();
match letter_spacing {
Some(Au(0)) | None => {}
Some(_) => flags.insert(IGNORE_LIGATURES_SHAPING_FLAG),
}
if text_rendering == text_rendering::T::optimizespeed {
flags.insert(IGNORE_LIGATURES_SHAPING_FLAG);
flags.insert(DISABLE_KERNING_SHAPING_FLAG)
}
let options = ShapingOptions {
letter_spacing: letter_spacing,
word_spacing: word_spacing,
script: Script::Common,
flags: flags,
};
// FIXME(https://github.com/rust-lang/rust/issues/23338)
run_info_list.into_iter().map(|run_info| {
let mut options = options;
options.script = run_info.script;
if is_rtl(run_info.bidi_level) {
options.flags.insert(RTL_FLAG);
}
let mut font = fontgroup.fonts.get(run_info.font_index).unwrap().borrow_mut();
ScannedTextRun {
run: Arc::new(TextRun::new(&mut *font,
run_info.text,
&options,
run_info.bidi_level)),
insertion_point: run_info.insertion_point, | }).collect::<Vec<_>>()
};
// Make new fragments with the runs and adjusted text indices.
debug!("TextRunScanner: pushing {} fragment(s)", self.clump.len());
let mut mappings = mappings.into_iter().peekable();
let mut prev_fragments_to_meld = Vec::new();
for (logical_offset, old_fragment) in
mem::replace(&mut self.clump, LinkedList::new()).into_iter().enumerate() {
let mut is_first_mapping_of_this_old_fragment = true;
loop {
match mappings.peek() {
Some(mapping) if mapping.old_fragment_index == logical_offset => {}
Some(_) | None => {
if is_first_mapping_of_this_old_fragment {
// There were no mappings for this unscanned fragment. Transfer its
// flags to the previous/next sibling elements instead.
if let Some(ref mut last_fragment) = out_fragments.last_mut() {
last_fragment.meld_with_next_inline_fragment(&old_fragment);
}
prev_fragments_to_meld.push(old_fragment);
}
break;
}
};
let mapping = mappings.next().unwrap();
let scanned_run = runs[mapping.text_run_index].clone();
let mut byte_range = Range::new(ByteIndex(mapping.byte_range.begin() as isize),
ByteIndex(mapping.byte_range.length() as isize));
let mut flags = ScannedTextFlags::empty();
let text_size = old_fragment.border_box.size;
let requires_line_break_afterward_if_wrapping_on_newlines =
scanned_run.run.text[mapping.byte_range.begin()..mapping.byte_range.end()]
.ends_with('\n');
if requires_line_break_afterward_if_wrapping_on_newlines {
byte_range.extend_by(ByteIndex(-1)); // Trim the '\n'
flags.insert(REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES);
}
if mapping.selected {
flags.insert(SELECTED);
}
let insertion_point = if mapping.contains_insertion_point(scanned_run.insertion_point) {
scanned_run.insertion_point
} else {
None
};
let mut new_text_fragment_info = box ScannedTextFragmentInfo::new(
scanned_run.run,
byte_range,
text_size,
insertion_point,
flags);
let new_metrics = new_text_fragment_info.run.metrics_for_range(&byte_range);
let writing_mode = old_fragment.style.writing_mode;
let bounding_box_size = bounding_box_for_run_metrics(&new_metrics, writing_mode);
new_text_fragment_info.content_size = bounding_box_size;
let mut new_fragment = old_fragment.transform(
bounding_box_size,
SpecificFragmentInfo::ScannedText(new_text_fragment_info));
let is_last_mapping_of_this_old_fragment = match mappings.peek() {
Some(mapping) if mapping.old_fragment_index == logical_offset => false,
_ => true
};
if let Some(ref mut context) = new_fragment.inline_context {
for node in &mut context.nodes {
if!is_last_mapping_of_this_old_fragment {
node.flags.remove(LAST_FRAGMENT_OF_ELEMENT);
}
if!is_first_mapping_of_this_old_fragment {
node.flags.remove(FIRST_FRAGMENT_OF_ELEMENT);
}
}
}
for prev_fragment in prev_fragments_to_meld.drain(..) {
new_fragment.meld_with_prev_inline_fragment(&prev_fragment);
}
is_first_mapping_of_this_old_fragment = false;
out_fragments.push(new_fragment)
}
}
last_whitespace
}
}
#[inline]
fn bounding_box_for_run_metrics(metrics: &RunMetrics, writing_mode: WritingMode)
-> LogicalSize<Au> {
// This does nothing, but it will fail to build
// when more values are added to the `text-orientation` CSS property.
// This will be a reminder to update the code below.
let dummy: Option<text_orientation::T> = None;
match dummy {
Some(text_orientation::T::sideways_right) |
Some(text_orientation::T::sideways_left) |
Some(text_orientation::T::sideways) |
None => {}
}
// In vertical sideways or horizontal upright text,
// the "width" of text metrics is always inline
// This will need to be updated when other text orientations are supported.
LogicalSize::new(
writing_mode,
metrics.bounding_box.size.width,
metrics.bounding_box.size.height)
}
/// Returns the metrics of the font represented by the given `style_structs::Font`, respectively.
///
/// `#[inline]` because often the caller only needs a few fields from the font metrics.
#[inline]
pub fn font_metrics_for_style(font_context: &mut FontContext, font_style: Arc<style_structs::Font>)
-> FontMetrics {
let fontgroup = font_context.layout_font_group_for_style(font_style);
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let font = fontgroup.fonts[0].borrow();
font.metrics.clone()
}
/// Returns the line block-size needed by the given computed style and font size.
pub fn | } | random_line_split |
text.rs | /// be adjusted.
fn flush_clump_to_list(&mut self,
font_context: &mut FontContext,
out_fragments: &mut Vec<Fragment>,
paragraph_bytes_processed: &mut usize,
bidi_levels: Option<&[u8]>,
mut last_whitespace: bool)
-> bool {
debug!("TextRunScanner: flushing {} fragments in range", self.clump.len());
debug_assert!(!self.clump.is_empty());
match self.clump.front().unwrap().specific {
SpecificFragmentInfo::UnscannedText(_) => {}
_ => {
debug_assert!(self.clump.len() == 1,
"WAT: can't coalesce non-text nodes in flush_clump_to_list()!");
out_fragments.push(self.clump.pop_front().unwrap());
return false
}
}
// Concatenate all of the transformed strings together, saving the new character indices.
let mut mappings: Vec<RunMapping> = Vec::new();
let runs = {
let fontgroup;
let compression;
let text_transform;
let letter_spacing;
let word_spacing;
let text_rendering;
{
let in_fragment = self.clump.front().unwrap();
let font_style = in_fragment.style().get_font_arc();
let inherited_text_style = in_fragment.style().get_inheritedtext();
fontgroup = font_context.layout_font_group_for_style(font_style);
compression = match in_fragment.white_space() {
white_space::T::normal |
white_space::T::nowrap => CompressionMode::CompressWhitespaceNewline,
white_space::T::pre |
white_space::T::pre_wrap => CompressionMode::CompressNone,
white_space::T::pre_line => CompressionMode::CompressWhitespace,
};
text_transform = inherited_text_style.text_transform;
letter_spacing = inherited_text_style.letter_spacing.0;
word_spacing = inherited_text_style.word_spacing.0.unwrap_or(Au(0));
text_rendering = inherited_text_style.text_rendering;
}
// First, transform/compress text of all the nodes.
let (mut run_info_list, mut run_info) = (Vec::new(), RunInfo::new());
let mut insertion_point = None;
for (fragment_index, in_fragment) in self.clump.iter().enumerate() {
debug!(" flushing {:?}", in_fragment);
let mut mapping = RunMapping::new(&run_info_list[..], fragment_index);
let text;
let selection;
match in_fragment.specific {
SpecificFragmentInfo::UnscannedText(ref text_fragment_info) => {
text = &text_fragment_info.text;
selection = text_fragment_info.selection;
}
_ => panic!("Expected an unscanned text fragment!"),
};
insertion_point = match selection {
Some(range) if range.is_empty() => {
// `range` is the range within the current fragment. To get the range
// within the text run, offset it by the length of the preceding fragments.
Some(range.begin() + ByteIndex(run_info.text.len() as isize))
}
_ => None
};
let (mut start_position, mut end_position) = (0, 0);
for (byte_index, character) in text.char_indices() {
// Search for the first font in this font group that contains a glyph for this
// character.
let font_index = fontgroup.fonts.iter().position(|font| {
font.borrow().glyph_index(character).is_some()
}).unwrap_or(0);
// The following code panics one way or another if this condition isn't met.
assert!(fontgroup.fonts.len() > 0);
let bidi_level = match bidi_levels {
Some(levels) => levels[*paragraph_bytes_processed],
None => 0
};
// Break the run if the new character has a different explicit script than the
// previous characters.
//
// TODO: Special handling of paired punctuation characters.
// http://www.unicode.org/reports/tr24/#Common
let script = get_script(character);
let compatible_script = is_compatible(script, run_info.script);
if compatible_script &&!is_specific(run_info.script) && is_specific(script) {
run_info.script = script;
}
let selected = match selection {
Some(range) => range.contains(ByteIndex(byte_index as isize)),
None => false
};
// Now, if necessary, flush the mapping we were building up.
let flush_run = run_info.font_index!= font_index ||
run_info.bidi_level!= bidi_level ||
!compatible_script;
let flush_mapping = flush_run || mapping.selected!= selected;
if flush_mapping {
mapping.flush(&mut mappings,
&mut run_info,
&**text,
compression,
text_transform,
&mut last_whitespace,
&mut start_position,
end_position);
if run_info.text.len() > 0 {
if flush_run {
run_info.flush(&mut run_info_list, &mut insertion_point);
run_info = RunInfo::new();
}
mapping = RunMapping::new(&run_info_list[..],
fragment_index);
}
run_info.font_index = font_index;
run_info.bidi_level = bidi_level;
run_info.script = script;
mapping.selected = selected;
}
// Consume this character.
end_position += character.len_utf8();
*paragraph_bytes_processed += character.len_utf8();
}
// Flush the last mapping we created for this fragment to the list.
mapping.flush(&mut mappings,
&mut run_info,
&**text,
compression,
text_transform,
&mut last_whitespace,
&mut start_position,
end_position);
}
// Push the final run info.
run_info.flush(&mut run_info_list, &mut insertion_point);
// Per CSS 2.1 § 16.4, "when the resultant space between two characters is not the same
// as the default space, user agents should not use ligatures." This ensures that, for
// example, `finally` with a wide `letter-spacing` renders as `f i n a l l y` and not
// `fi n a l l y`.
let mut flags = ShapingFlags::empty();
match letter_spacing {
Some(Au(0)) | None => {}
Some(_) => flags.insert(IGNORE_LIGATURES_SHAPING_FLAG),
}
if text_rendering == text_rendering::T::optimizespeed {
flags.insert(IGNORE_LIGATURES_SHAPING_FLAG);
flags.insert(DISABLE_KERNING_SHAPING_FLAG)
}
let options = ShapingOptions {
letter_spacing: letter_spacing,
word_spacing: word_spacing,
script: Script::Common,
flags: flags,
};
// FIXME(https://github.com/rust-lang/rust/issues/23338)
run_info_list.into_iter().map(|run_info| {
let mut options = options;
options.script = run_info.script;
if is_rtl(run_info.bidi_level) {
options.flags.insert(RTL_FLAG);
}
let mut font = fontgroup.fonts.get(run_info.font_index).unwrap().borrow_mut();
ScannedTextRun {
run: Arc::new(TextRun::new(&mut *font,
run_info.text,
&options,
run_info.bidi_level)),
insertion_point: run_info.insertion_point,
}
}).collect::<Vec<_>>()
};
// Make new fragments with the runs and adjusted text indices.
debug!("TextRunScanner: pushing {} fragment(s)", self.clump.len());
let mut mappings = mappings.into_iter().peekable();
let mut prev_fragments_to_meld = Vec::new();
for (logical_offset, old_fragment) in
mem::replace(&mut self.clump, LinkedList::new()).into_iter().enumerate() {
let mut is_first_mapping_of_this_old_fragment = true;
loop {
match mappings.peek() {
Some(mapping) if mapping.old_fragment_index == logical_offset => {}
Some(_) | None => {
if is_first_mapping_of_this_old_fragment {
// There were no mappings for this unscanned fragment. Transfer its
// flags to the previous/next sibling elements instead.
if let Some(ref mut last_fragment) = out_fragments.last_mut() {
last_fragment.meld_with_next_inline_fragment(&old_fragment);
}
prev_fragments_to_meld.push(old_fragment);
}
break;
}
};
let mapping = mappings.next().unwrap();
let scanned_run = runs[mapping.text_run_index].clone();
let mut byte_range = Range::new(ByteIndex(mapping.byte_range.begin() as isize),
ByteIndex(mapping.byte_range.length() as isize));
let mut flags = ScannedTextFlags::empty();
let text_size = old_fragment.border_box.size;
let requires_line_break_afterward_if_wrapping_on_newlines =
scanned_run.run.text[mapping.byte_range.begin()..mapping.byte_range.end()]
.ends_with('\n');
if requires_line_break_afterward_if_wrapping_on_newlines {
byte_range.extend_by(ByteIndex(-1)); // Trim the '\n'
flags.insert(REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES);
}
if mapping.selected {
flags.insert(SELECTED);
}
let insertion_point = if mapping.contains_insertion_point(scanned_run.insertion_point) {
scanned_run.insertion_point
} else {
None
};
let mut new_text_fragment_info = box ScannedTextFragmentInfo::new(
scanned_run.run,
byte_range,
text_size,
insertion_point,
flags);
let new_metrics = new_text_fragment_info.run.metrics_for_range(&byte_range);
let writing_mode = old_fragment.style.writing_mode;
let bounding_box_size = bounding_box_for_run_metrics(&new_metrics, writing_mode);
new_text_fragment_info.content_size = bounding_box_size;
let mut new_fragment = old_fragment.transform(
bounding_box_size,
SpecificFragmentInfo::ScannedText(new_text_fragment_info));
let is_last_mapping_of_this_old_fragment = match mappings.peek() {
Some(mapping) if mapping.old_fragment_index == logical_offset => false,
_ => true
};
if let Some(ref mut context) = new_fragment.inline_context {
for node in &mut context.nodes {
if!is_last_mapping_of_this_old_fragment {
node.flags.remove(LAST_FRAGMENT_OF_ELEMENT);
}
if!is_first_mapping_of_this_old_fragment {
node.flags.remove(FIRST_FRAGMENT_OF_ELEMENT);
}
}
}
for prev_fragment in prev_fragments_to_meld.drain(..) {
new_fragment.meld_with_prev_inline_fragment(&prev_fragment);
}
is_first_mapping_of_this_old_fragment = false;
out_fragments.push(new_fragment)
}
}
last_whitespace
}
}
#[inline]
fn bounding_box_for_run_metrics(metrics: &RunMetrics, writing_mode: WritingMode)
-> LogicalSize<Au> {
// This does nothing, but it will fail to build
// when more values are added to the `text-orientation` CSS property.
// This will be a reminder to update the code below.
let dummy: Option<text_orientation::T> = None;
match dummy {
Some(text_orientation::T::sideways_right) |
Some(text_orientation::T::sideways_left) |
Some(text_orientation::T::sideways) |
None => {}
}
// In vertical sideways or horizontal upright text,
// the "width" of text metrics is always inline
// This will need to be updated when other text orientations are supported.
LogicalSize::new(
writing_mode,
metrics.bounding_box.size.width,
metrics.bounding_box.size.height)
}
/// Returns the metrics of the font represented by the given `style_structs::Font`, respectively.
///
/// `#[inline]` because often the caller only needs a few fields from the font metrics.
#[inline]
pub fn font_metrics_for_style(font_context: &mut FontContext, font_style: Arc<style_structs::Font>)
-> FontMetrics {
let fontgroup = font_context.layout_font_group_for_style(font_style);
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let font = fontgroup.fonts[0].borrow();
font.metrics.clone()
}
/// Returns the line block-size needed by the given computed style and font size.
pub fn line_height_from_style(style: &ServoComputedValues, metrics: &FontMetrics) -> Au {
let font_size = style.get_font().font_size;
match style.get_inheritedtext().line_height {
line_height::T::Normal => metrics.line_gap,
line_height::T::Number(l) => font_size.scale_by(l),
line_height::T::Length(l) => l
}
}
fn spl | it_first_fragment_at_newline_if_necessary(fr | identifier_name |
|
borrowck-vec-pattern-loan-from-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(slice_patterns)]
fn a() {
let mut v = vec![1, 2, 3];
let vb: &mut [isize] = &mut v;
match vb {
&mut [_a, ref tail..] => {
v.push(tail[0] + tail[1]); //~ ERROR cannot borrow
}
_ => {}
};
}
fn main() | {} | identifier_body |
|
borrowck-vec-pattern-loan-from-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(slice_patterns)]
fn a() {
let mut v = vec![1, 2, 3];
let vb: &mut [isize] = &mut v;
match vb {
&mut [_a, ref tail..] => {
v.push(tail[0] + tail[1]); //~ ERROR cannot borrow
}
_ => {}
};
} |
fn main() {} | random_line_split |
|
borrowck-vec-pattern-loan-from-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(slice_patterns)]
fn a() {
let mut v = vec![1, 2, 3];
let vb: &mut [isize] = &mut v;
match vb {
&mut [_a, ref tail..] => {
v.push(tail[0] + tail[1]); //~ ERROR cannot borrow
}
_ => {}
};
}
fn | () {}
| main | identifier_name |
mod.rs | //! The NFA we construct for each regex. Since the states are not
//! really of interest, we represent this just as a vector of labeled
//! edges.
use std::fmt::{Debug, Formatter, Error};
use std::usize;
use lexer::re::{Regex, Alternative, Elem, RepeatOp, Test};
#[cfg(test)]
mod interpret;
#[cfg(test)]
mod test;
#[derive(Debug)]
pub struct NFA {
states: Vec<State>,
edges: Edges
}
// An "epsilon" edge -- no input
#[derive(Debug, PartialEq, Eq)]
pub struct Noop;
// An "other" edge -- fallback if no other edges apply
#[derive(Debug, PartialEq, Eq)]
pub struct Other;
/// For each state, we just store the indices of the first char and
/// test edges, or usize::MAX if no such edge. You can then find all
/// edges by enumerating subsequent edges in the vectors until you
/// find one with a different `from` value.
#[derive(Debug)]
struct State {
kind: StateKind,
first_noop_edge: usize,
first_test_edge: usize,
first_other_edge: usize,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum StateKind {
Accept, Reject, Neither
}
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct NFAStateIndex(usize);
/// A set of edges for the state machine. Edges are kept sorted by the
/// type of label they have. Within a vector, all edges with the same
/// `from` are grouped together so they can be enumerated later (for
/// now we just ensure this during construction, but one could easily
/// sort).
#[derive(Debug)]
struct Edges {
noop_edges: Vec<Edge<Noop>>,
// edges where we are testing the character in some way; for any
// given state, there should not be multiple edges with the same
// test
test_edges: Vec<Edge<Test>>,
// fallback rules if no test_edge applies
other_edges: Vec<Edge<Other>>,
}
#[derive(PartialEq, Eq)]
pub struct Edge<L> {
pub from: NFAStateIndex,
pub label: L,
pub to: NFAStateIndex,
}
pub const ACCEPT: NFAStateIndex = NFAStateIndex(0);
pub const REJECT: NFAStateIndex = NFAStateIndex(1);
pub const START: NFAStateIndex = NFAStateIndex(2);
impl NFA {
pub fn from_re(regex: &Regex) -> NFA {
let mut nfa = NFA::new();
let s0 = nfa.regex(regex, ACCEPT, REJECT);
nfa.push_edge(START, Noop, s0);
nfa
}
///////////////////////////////////////////////////////////////////////////
// Public methods for querying an NFA
pub fn edges<L:EdgeLabel>(&self, from: NFAStateIndex) -> EdgeIterator<L> {
let vec = L::vec(&self.edges);
let first = *L::first(&self.states[from.0]);
EdgeIterator { edges: vec, from: from, index: first }
}
pub fn kind(&self, from: NFAStateIndex) -> StateKind {
self.states[from.0].kind
}
pub fn is_accepting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Accept
}
pub fn is_rejecting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Reject
}
///////////////////////////////////////////////////////////////////////////
// Private methods for building an NFA
fn new() -> NFA {
let mut nfa = NFA {
states: vec![],
edges: Edges {
noop_edges: vec![],
test_edges: vec![],
other_edges: vec![],
}
};
// reserve the ACCEPT, REJECT, and START states ahead of time
assert!(nfa.new_state(StateKind::Accept) == ACCEPT);
assert!(nfa.new_state(StateKind::Reject) == REJECT);
assert!(nfa.new_state(StateKind::Neither) == START);
// the ACCEPT state, given another token, becomes a REJECT
nfa.push_edge(ACCEPT, Other, REJECT);
// the REJECT state loops back to itself no matter what
nfa.push_edge(REJECT, Other, REJECT);
nfa
}
fn new_state(&mut self, kind: StateKind) -> NFAStateIndex {
let index = self.states.len();
// these edge indices will be patched later by patch_edges()
self.states.push(State { kind: kind,
first_noop_edge: usize::MAX,
first_test_edge: usize::MAX,
first_other_edge: usize::MAX });
NFAStateIndex(index)
}
// pushes an edge: note that all outgoing edges from a particular
// state should be pushed together, so that the edge vectors are
// suitably sorted
fn push_edge<L:EdgeLabel>(&mut self, from: NFAStateIndex, label: L, to: NFAStateIndex) {
let edge_vec = L::vec_mut(&mut self.edges);
let edge_index = edge_vec.len();
edge_vec.push(Edge { from: from, label: label, to: to });
// if this is the first edge from the `from` state, set the
// index
let first_index = L::first_mut(&mut self.states[from.0]);
if *first_index == usize::MAX {
*first_index = edge_index;
} else{
// otherwise, check that all edges are continuous
assert_eq!(edge_vec[edge_index - 1].from, from);
}
}
fn regex(&mut self, regex: &Regex, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match regex.alternatives.len() {
0 => accept, // matches the empty string
1 => self.alternative(®ex.alternatives[0], accept, reject),
_ => {
// NB -- it is important that we *collect* into a
// vector, because we don't want to intersperse
// compiling each alternative with adding the edges
// below
let alt_starts: Vec<_> =
regex.alternatives.iter()
.map(|alt| self.alternative(alt, accept, reject))
.collect();
let start = self.new_state(StateKind::Neither);
for alt_start in alt_starts {
self.push_edge(start, Noop, alt_start);
}
start
}
}
}
fn alternative(&mut self, alt: &Alternative, accept: NFAStateIndex, reject: NFAStateIndex)
-> NFAStateIndex {
// build our way from the back
let mut p = accept;
for elem in alt.elems.iter().rev() {
p = self.elem(elem, p, reject);
}
p
}
fn | (&mut self, elem: &Elem, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match *elem {
Elem::Any => {
// [s0] -otherwise-> [accept]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Other, accept);
s0
}
Elem::Test(test) => {
// [s0] -----c---> [accept]
// |
// +-otherwise-> [reject]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, test, accept);
self.push_edge(s0, Other, reject);
s0
}
Elem::Group(ref regex) => {
self.regex(regex, accept, reject)
}
Elem::NotGroup(ref regex) => {
self.regex(regex, reject, accept) // NB: swapped accept/reject here :)
}
Elem::Repeat(RepeatOp::Question, ref elem) => {
// [s0] ----> [accept]
// | ^
// v |
// [s1] --...----+
// |
// v
// [reject]
let s1 = self.elem(elem, accept, reject);
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Noop, accept); // they might supply nothing
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Star, ref elem) => {
// [s0] ----> [accept]
// | ^
// | |
// | +----------+
// v |
// [s1] --...----+
// |
// v
// [reject]
let s0 = self.new_state(StateKind::Neither);
let s1 = self.elem(elem, s0, reject);
self.push_edge(s0, Noop, accept);
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Plus, ref elem) => {
// [accept]
// ^
// |
// +----------+
// v |
// [s0] --...--[s1]
// |
// v
// [reject]
let s1 = self.new_state(StateKind::Neither);
let s0 = self.elem(elem, s1, reject);
self.push_edge(s1, Noop, accept);
self.push_edge(s1, Noop, s0);
s0
}
}
}
}
pub trait EdgeLabel {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Self>>;
fn vec(nfa: &Edges) -> &Vec<Edge<Self>>;
fn first_mut(state: &mut State) -> &mut usize;
fn first(state: &State) -> &usize;
}
impl EdgeLabel for Noop {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Noop>> { &mut nfa.noop_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_noop_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Noop>> { &nfa.noop_edges }
fn first(state: &State) -> &usize { &state.first_noop_edge }
}
impl EdgeLabel for Other {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Other>> { &mut nfa.other_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_other_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Other>> { &nfa.other_edges }
fn first(state: &State) -> &usize { &state.first_other_edge }
}
impl EdgeLabel for Test {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Test>> { &mut nfa.test_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_test_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Test>> { &nfa.test_edges }
fn first(state: &State) -> &usize { &state.first_test_edge }
}
pub struct EdgeIterator<'nfa,L:EdgeLabel+'nfa> {
edges: &'nfa [Edge<L>],
from: NFAStateIndex,
index: usize,
}
impl<'nfa,L:EdgeLabel> Iterator for EdgeIterator<'nfa,L> {
type Item = &'nfa Edge<L>;
fn next(&mut self) -> Option<&'nfa Edge<L>> {
let index = self.index;
if index == usize::MAX {
return None;
}
let next_index = index + 1;
if next_index >= self.edges.len() || self.edges[next_index].from!= self.from {
self.index = usize::MAX;
} else {
self.index = next_index;
}
Some(&self.edges[index])
}
}
impl Debug for NFAStateIndex {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "NFA{}", self.0)
}
}
impl<L:Debug> Debug for Edge<L> {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "{:?} -{:?}-> {:?}", self.from, self.label, self.to)
}
}
| elem | identifier_name |
mod.rs | //! The NFA we construct for each regex. Since the states are not
//! really of interest, we represent this just as a vector of labeled
//! edges.
use std::fmt::{Debug, Formatter, Error};
use std::usize;
use lexer::re::{Regex, Alternative, Elem, RepeatOp, Test};
#[cfg(test)]
mod interpret;
#[cfg(test)]
mod test;
#[derive(Debug)]
pub struct NFA {
states: Vec<State>,
edges: Edges
}
// An "epsilon" edge -- no input
#[derive(Debug, PartialEq, Eq)]
pub struct Noop;
// An "other" edge -- fallback if no other edges apply
#[derive(Debug, PartialEq, Eq)]
pub struct Other;
/// For each state, we just store the indices of the first char and
/// test edges, or usize::MAX if no such edge. You can then find all
/// edges by enumerating subsequent edges in the vectors until you
/// find one with a different `from` value.
#[derive(Debug)]
struct State {
kind: StateKind,
first_noop_edge: usize,
first_test_edge: usize,
first_other_edge: usize,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum StateKind {
Accept, Reject, Neither
}
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct NFAStateIndex(usize);
/// A set of edges for the state machine. Edges are kept sorted by the
/// type of label they have. Within a vector, all edges with the same
/// `from` are grouped together so they can be enumerated later (for
/// now we just ensure this during construction, but one could easily
/// sort).
#[derive(Debug)]
struct Edges {
noop_edges: Vec<Edge<Noop>>,
// edges where we are testing the character in some way; for any
// given state, there should not be multiple edges with the same
// test
test_edges: Vec<Edge<Test>>,
// fallback rules if no test_edge applies
other_edges: Vec<Edge<Other>>,
}
#[derive(PartialEq, Eq)]
pub struct Edge<L> {
pub from: NFAStateIndex,
pub label: L,
pub to: NFAStateIndex,
}
pub const ACCEPT: NFAStateIndex = NFAStateIndex(0);
pub const REJECT: NFAStateIndex = NFAStateIndex(1);
pub const START: NFAStateIndex = NFAStateIndex(2);
impl NFA {
pub fn from_re(regex: &Regex) -> NFA {
let mut nfa = NFA::new();
let s0 = nfa.regex(regex, ACCEPT, REJECT);
nfa.push_edge(START, Noop, s0);
nfa
}
///////////////////////////////////////////////////////////////////////////
// Public methods for querying an NFA
pub fn edges<L:EdgeLabel>(&self, from: NFAStateIndex) -> EdgeIterator<L> {
let vec = L::vec(&self.edges);
let first = *L::first(&self.states[from.0]);
EdgeIterator { edges: vec, from: from, index: first }
}
pub fn kind(&self, from: NFAStateIndex) -> StateKind {
self.states[from.0].kind
}
pub fn is_accepting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Accept
}
pub fn is_rejecting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Reject
}
///////////////////////////////////////////////////////////////////////////
// Private methods for building an NFA
fn new() -> NFA {
let mut nfa = NFA {
states: vec![],
edges: Edges {
noop_edges: vec![],
test_edges: vec![],
other_edges: vec![],
}
};
// reserve the ACCEPT, REJECT, and START states ahead of time
assert!(nfa.new_state(StateKind::Accept) == ACCEPT);
assert!(nfa.new_state(StateKind::Reject) == REJECT);
assert!(nfa.new_state(StateKind::Neither) == START);
// the ACCEPT state, given another token, becomes a REJECT
nfa.push_edge(ACCEPT, Other, REJECT);
// the REJECT state loops back to itself no matter what
nfa.push_edge(REJECT, Other, REJECT);
nfa
}
fn new_state(&mut self, kind: StateKind) -> NFAStateIndex {
let index = self.states.len();
// these edge indices will be patched later by patch_edges()
self.states.push(State { kind: kind,
first_noop_edge: usize::MAX,
first_test_edge: usize::MAX,
first_other_edge: usize::MAX });
NFAStateIndex(index)
}
// pushes an edge: note that all outgoing edges from a particular
// state should be pushed together, so that the edge vectors are
// suitably sorted
fn push_edge<L:EdgeLabel>(&mut self, from: NFAStateIndex, label: L, to: NFAStateIndex) {
let edge_vec = L::vec_mut(&mut self.edges);
let edge_index = edge_vec.len();
edge_vec.push(Edge { from: from, label: label, to: to });
// if this is the first edge from the `from` state, set the
// index
let first_index = L::first_mut(&mut self.states[from.0]);
if *first_index == usize::MAX {
*first_index = edge_index;
} else{
// otherwise, check that all edges are continuous
assert_eq!(edge_vec[edge_index - 1].from, from);
}
}
fn regex(&mut self, regex: &Regex, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match regex.alternatives.len() {
0 => accept, // matches the empty string
1 => self.alternative(®ex.alternatives[0], accept, reject),
_ => {
// NB -- it is important that we *collect* into a
// vector, because we don't want to intersperse
// compiling each alternative with adding the edges
// below
let alt_starts: Vec<_> =
regex.alternatives.iter()
.map(|alt| self.alternative(alt, accept, reject))
.collect();
let start = self.new_state(StateKind::Neither);
for alt_start in alt_starts {
self.push_edge(start, Noop, alt_start);
}
start
}
}
}
fn alternative(&mut self, alt: &Alternative, accept: NFAStateIndex, reject: NFAStateIndex)
-> NFAStateIndex {
// build our way from the back
let mut p = accept;
for elem in alt.elems.iter().rev() {
p = self.elem(elem, p, reject);
}
p
}
fn elem(&mut self, elem: &Elem, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match *elem {
Elem::Any => {
// [s0] -otherwise-> [accept]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Other, accept);
s0
}
Elem::Test(test) => {
// [s0] -----c---> [accept]
// |
// +-otherwise-> [reject]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, test, accept);
self.push_edge(s0, Other, reject);
s0
}
Elem::Group(ref regex) => {
self.regex(regex, accept, reject)
}
Elem::NotGroup(ref regex) => {
self.regex(regex, reject, accept) // NB: swapped accept/reject here :)
}
Elem::Repeat(RepeatOp::Question, ref elem) => {
// [s0] ----> [accept]
// | ^
// v |
// [s1] --...----+
// |
// v
// [reject]
let s1 = self.elem(elem, accept, reject);
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Noop, accept); // they might supply nothing
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Star, ref elem) => {
// [s0] ----> [accept]
// | ^
// | |
// | +----------+
// v |
// [s1] --...----+
// |
// v
// [reject]
let s0 = self.new_state(StateKind::Neither);
let s1 = self.elem(elem, s0, reject);
self.push_edge(s0, Noop, accept);
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Plus, ref elem) => {
// [accept]
// ^
// |
// +----------+
// v |
// [s0] --...--[s1]
// |
// v
// [reject]
let s1 = self.new_state(StateKind::Neither);
let s0 = self.elem(elem, s1, reject);
self.push_edge(s1, Noop, accept);
self.push_edge(s1, Noop, s0);
s0
}
}
}
}
pub trait EdgeLabel {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Self>>;
fn vec(nfa: &Edges) -> &Vec<Edge<Self>>;
fn first_mut(state: &mut State) -> &mut usize;
fn first(state: &State) -> &usize;
}
impl EdgeLabel for Noop {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Noop>> { &mut nfa.noop_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_noop_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Noop>> { &nfa.noop_edges }
fn first(state: &State) -> &usize { &state.first_noop_edge }
}
impl EdgeLabel for Other {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Other>> { &mut nfa.other_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_other_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Other>> { &nfa.other_edges }
fn first(state: &State) -> &usize { &state.first_other_edge }
}
impl EdgeLabel for Test {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Test>> { &mut nfa.test_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_test_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Test>> { &nfa.test_edges }
fn first(state: &State) -> &usize { &state.first_test_edge }
}
pub struct EdgeIterator<'nfa,L:EdgeLabel+'nfa> {
edges: &'nfa [Edge<L>],
from: NFAStateIndex,
index: usize,
}
impl<'nfa,L:EdgeLabel> Iterator for EdgeIterator<'nfa,L> {
type Item = &'nfa Edge<L>;
fn next(&mut self) -> Option<&'nfa Edge<L>> {
let index = self.index;
if index == usize::MAX |
let next_index = index + 1;
if next_index >= self.edges.len() || self.edges[next_index].from!= self.from {
self.index = usize::MAX;
} else {
self.index = next_index;
}
Some(&self.edges[index])
}
}
impl Debug for NFAStateIndex {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "NFA{}", self.0)
}
}
impl<L:Debug> Debug for Edge<L> {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "{:?} -{:?}-> {:?}", self.from, self.label, self.to)
}
}
| {
return None;
} | conditional_block |
mod.rs | //! The NFA we construct for each regex. Since the states are not
//! really of interest, we represent this just as a vector of labeled
//! edges.
use std::fmt::{Debug, Formatter, Error};
use std::usize;
use lexer::re::{Regex, Alternative, Elem, RepeatOp, Test};
#[cfg(test)]
mod interpret;
#[cfg(test)]
mod test;
#[derive(Debug)]
pub struct NFA {
states: Vec<State>,
edges: Edges
}
// An "epsilon" edge -- no input
#[derive(Debug, PartialEq, Eq)]
pub struct Noop;
// An "other" edge -- fallback if no other edges apply
#[derive(Debug, PartialEq, Eq)]
pub struct Other;
/// For each state, we just store the indices of the first char and
/// test edges, or usize::MAX if no such edge. You can then find all
/// edges by enumerating subsequent edges in the vectors until you
/// find one with a different `from` value.
#[derive(Debug)]
struct State {
kind: StateKind,
first_noop_edge: usize,
first_test_edge: usize,
first_other_edge: usize,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum StateKind {
Accept, Reject, Neither
}
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct NFAStateIndex(usize);
/// A set of edges for the state machine. Edges are kept sorted by the
/// type of label they have. Within a vector, all edges with the same
/// `from` are grouped together so they can be enumerated later (for
/// now we just ensure this during construction, but one could easily
/// sort).
#[derive(Debug)]
struct Edges {
noop_edges: Vec<Edge<Noop>>,
// edges where we are testing the character in some way; for any
// given state, there should not be multiple edges with the same
// test
test_edges: Vec<Edge<Test>>,
// fallback rules if no test_edge applies
other_edges: Vec<Edge<Other>>,
}
#[derive(PartialEq, Eq)]
pub struct Edge<L> {
pub from: NFAStateIndex,
pub label: L,
pub to: NFAStateIndex,
}
pub const ACCEPT: NFAStateIndex = NFAStateIndex(0);
pub const REJECT: NFAStateIndex = NFAStateIndex(1);
pub const START: NFAStateIndex = NFAStateIndex(2);
impl NFA {
pub fn from_re(regex: &Regex) -> NFA {
let mut nfa = NFA::new();
let s0 = nfa.regex(regex, ACCEPT, REJECT);
nfa.push_edge(START, Noop, s0);
nfa
}
///////////////////////////////////////////////////////////////////////////
// Public methods for querying an NFA
pub fn edges<L:EdgeLabel>(&self, from: NFAStateIndex) -> EdgeIterator<L> {
let vec = L::vec(&self.edges);
let first = *L::first(&self.states[from.0]);
EdgeIterator { edges: vec, from: from, index: first }
}
pub fn kind(&self, from: NFAStateIndex) -> StateKind {
self.states[from.0].kind
}
pub fn is_accepting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Accept
}
pub fn is_rejecting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Reject
}
///////////////////////////////////////////////////////////////////////////
// Private methods for building an NFA
fn new() -> NFA {
let mut nfa = NFA {
states: vec![],
edges: Edges {
noop_edges: vec![],
test_edges: vec![],
other_edges: vec![],
}
};
// reserve the ACCEPT, REJECT, and START states ahead of time
assert!(nfa.new_state(StateKind::Accept) == ACCEPT);
assert!(nfa.new_state(StateKind::Reject) == REJECT);
assert!(nfa.new_state(StateKind::Neither) == START);
// the ACCEPT state, given another token, becomes a REJECT
nfa.push_edge(ACCEPT, Other, REJECT);
// the REJECT state loops back to itself no matter what
nfa.push_edge(REJECT, Other, REJECT);
nfa
}
fn new_state(&mut self, kind: StateKind) -> NFAStateIndex {
let index = self.states.len();
// these edge indices will be patched later by patch_edges()
self.states.push(State { kind: kind,
first_noop_edge: usize::MAX,
first_test_edge: usize::MAX,
first_other_edge: usize::MAX });
NFAStateIndex(index)
}
// pushes an edge: note that all outgoing edges from a particular
// state should be pushed together, so that the edge vectors are
// suitably sorted
fn push_edge<L:EdgeLabel>(&mut self, from: NFAStateIndex, label: L, to: NFAStateIndex) {
let edge_vec = L::vec_mut(&mut self.edges);
let edge_index = edge_vec.len();
edge_vec.push(Edge { from: from, label: label, to: to });
// if this is the first edge from the `from` state, set the
// index
let first_index = L::first_mut(&mut self.states[from.0]);
if *first_index == usize::MAX {
*first_index = edge_index;
} else{
// otherwise, check that all edges are continuous
assert_eq!(edge_vec[edge_index - 1].from, from);
}
}
fn regex(&mut self, regex: &Regex, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match regex.alternatives.len() {
0 => accept, // matches the empty string
1 => self.alternative(®ex.alternatives[0], accept, reject),
_ => {
// NB -- it is important that we *collect* into a
// vector, because we don't want to intersperse
// compiling each alternative with adding the edges
// below
let alt_starts: Vec<_> =
regex.alternatives.iter()
.map(|alt| self.alternative(alt, accept, reject))
.collect();
let start = self.new_state(StateKind::Neither);
for alt_start in alt_starts {
self.push_edge(start, Noop, alt_start);
}
start
}
}
}
fn alternative(&mut self, alt: &Alternative, accept: NFAStateIndex, reject: NFAStateIndex)
-> NFAStateIndex {
// build our way from the back
let mut p = accept;
for elem in alt.elems.iter().rev() {
p = self.elem(elem, p, reject);
}
p
}
fn elem(&mut self, elem: &Elem, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match *elem {
Elem::Any => {
// [s0] -otherwise-> [accept]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Other, accept);
s0
}
Elem::Test(test) => {
// [s0] -----c---> [accept]
// |
// +-otherwise-> [reject]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, test, accept);
self.push_edge(s0, Other, reject);
s0
}
Elem::Group(ref regex) => {
self.regex(regex, accept, reject)
}
Elem::NotGroup(ref regex) => {
self.regex(regex, reject, accept) // NB: swapped accept/reject here :)
}
Elem::Repeat(RepeatOp::Question, ref elem) => {
// [s0] ----> [accept]
// | ^
// v |
// [s1] --...----+
// |
// v
// [reject]
let s1 = self.elem(elem, accept, reject);
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Noop, accept); // they might supply nothing
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Star, ref elem) => {
// [s0] ----> [accept]
// | ^
// | |
// | +----------+
// v |
// [s1] --...----+
// |
// v
// [reject]
let s0 = self.new_state(StateKind::Neither);
let s1 = self.elem(elem, s0, reject);
self.push_edge(s0, Noop, accept);
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Plus, ref elem) => {
// [accept]
// ^
// |
// +----------+
// v |
// [s0] --...--[s1]
// |
// v
// [reject]
let s1 = self.new_state(StateKind::Neither);
let s0 = self.elem(elem, s1, reject);
self.push_edge(s1, Noop, accept);
self.push_edge(s1, Noop, s0);
s0
}
}
}
}
pub trait EdgeLabel {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Self>>;
fn vec(nfa: &Edges) -> &Vec<Edge<Self>>;
fn first_mut(state: &mut State) -> &mut usize;
fn first(state: &State) -> &usize;
}
impl EdgeLabel for Noop {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Noop>> { &mut nfa.noop_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_noop_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Noop>> { &nfa.noop_edges }
fn first(state: &State) -> &usize { &state.first_noop_edge }
}
impl EdgeLabel for Other {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Other>> { &mut nfa.other_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_other_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Other>> |
fn first(state: &State) -> &usize { &state.first_other_edge }
}
impl EdgeLabel for Test {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Test>> { &mut nfa.test_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_test_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Test>> { &nfa.test_edges }
fn first(state: &State) -> &usize { &state.first_test_edge }
}
pub struct EdgeIterator<'nfa,L:EdgeLabel+'nfa> {
edges: &'nfa [Edge<L>],
from: NFAStateIndex,
index: usize,
}
impl<'nfa,L:EdgeLabel> Iterator for EdgeIterator<'nfa,L> {
type Item = &'nfa Edge<L>;
fn next(&mut self) -> Option<&'nfa Edge<L>> {
let index = self.index;
if index == usize::MAX {
return None;
}
let next_index = index + 1;
if next_index >= self.edges.len() || self.edges[next_index].from!= self.from {
self.index = usize::MAX;
} else {
self.index = next_index;
}
Some(&self.edges[index])
}
}
impl Debug for NFAStateIndex {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "NFA{}", self.0)
}
}
impl<L:Debug> Debug for Edge<L> {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "{:?} -{:?}-> {:?}", self.from, self.label, self.to)
}
}
| { &nfa.other_edges } | identifier_body |
mod.rs | //! The NFA we construct for each regex. Since the states are not
//! really of interest, we represent this just as a vector of labeled
//! edges.
use std::fmt::{Debug, Formatter, Error};
use std::usize;
use lexer::re::{Regex, Alternative, Elem, RepeatOp, Test};
#[cfg(test)]
mod interpret;
#[cfg(test)]
mod test;
#[derive(Debug)]
pub struct NFA {
states: Vec<State>,
edges: Edges
}
// An "epsilon" edge -- no input
#[derive(Debug, PartialEq, Eq)]
pub struct Noop;
// An "other" edge -- fallback if no other edges apply
#[derive(Debug, PartialEq, Eq)]
pub struct Other;
/// For each state, we just store the indices of the first char and
/// test edges, or usize::MAX if no such edge. You can then find all
/// edges by enumerating subsequent edges in the vectors until you
/// find one with a different `from` value.
#[derive(Debug)]
struct State {
kind: StateKind,
first_noop_edge: usize,
first_test_edge: usize,
first_other_edge: usize,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum StateKind {
Accept, Reject, Neither
}
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct NFAStateIndex(usize);
/// A set of edges for the state machine. Edges are kept sorted by the
/// type of label they have. Within a vector, all edges with the same
/// `from` are grouped together so they can be enumerated later (for
/// now we just ensure this during construction, but one could easily
/// sort).
#[derive(Debug)]
struct Edges {
noop_edges: Vec<Edge<Noop>>,
// edges where we are testing the character in some way; for any
// given state, there should not be multiple edges with the same
// test
test_edges: Vec<Edge<Test>>,
// fallback rules if no test_edge applies
other_edges: Vec<Edge<Other>>,
}
#[derive(PartialEq, Eq)]
pub struct Edge<L> {
pub from: NFAStateIndex,
pub label: L,
pub to: NFAStateIndex,
}
pub const ACCEPT: NFAStateIndex = NFAStateIndex(0);
pub const REJECT: NFAStateIndex = NFAStateIndex(1);
pub const START: NFAStateIndex = NFAStateIndex(2);
impl NFA { | let s0 = nfa.regex(regex, ACCEPT, REJECT);
nfa.push_edge(START, Noop, s0);
nfa
}
///////////////////////////////////////////////////////////////////////////
// Public methods for querying an NFA
pub fn edges<L:EdgeLabel>(&self, from: NFAStateIndex) -> EdgeIterator<L> {
let vec = L::vec(&self.edges);
let first = *L::first(&self.states[from.0]);
EdgeIterator { edges: vec, from: from, index: first }
}
pub fn kind(&self, from: NFAStateIndex) -> StateKind {
self.states[from.0].kind
}
pub fn is_accepting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Accept
}
pub fn is_rejecting_state(&self, from: NFAStateIndex) -> bool {
self.states[from.0].kind == StateKind::Reject
}
///////////////////////////////////////////////////////////////////////////
// Private methods for building an NFA
fn new() -> NFA {
let mut nfa = NFA {
states: vec![],
edges: Edges {
noop_edges: vec![],
test_edges: vec![],
other_edges: vec![],
}
};
// reserve the ACCEPT, REJECT, and START states ahead of time
assert!(nfa.new_state(StateKind::Accept) == ACCEPT);
assert!(nfa.new_state(StateKind::Reject) == REJECT);
assert!(nfa.new_state(StateKind::Neither) == START);
// the ACCEPT state, given another token, becomes a REJECT
nfa.push_edge(ACCEPT, Other, REJECT);
// the REJECT state loops back to itself no matter what
nfa.push_edge(REJECT, Other, REJECT);
nfa
}
fn new_state(&mut self, kind: StateKind) -> NFAStateIndex {
let index = self.states.len();
// these edge indices will be patched later by patch_edges()
self.states.push(State { kind: kind,
first_noop_edge: usize::MAX,
first_test_edge: usize::MAX,
first_other_edge: usize::MAX });
NFAStateIndex(index)
}
// pushes an edge: note that all outgoing edges from a particular
// state should be pushed together, so that the edge vectors are
// suitably sorted
fn push_edge<L:EdgeLabel>(&mut self, from: NFAStateIndex, label: L, to: NFAStateIndex) {
let edge_vec = L::vec_mut(&mut self.edges);
let edge_index = edge_vec.len();
edge_vec.push(Edge { from: from, label: label, to: to });
// if this is the first edge from the `from` state, set the
// index
let first_index = L::first_mut(&mut self.states[from.0]);
if *first_index == usize::MAX {
*first_index = edge_index;
} else{
// otherwise, check that all edges are continuous
assert_eq!(edge_vec[edge_index - 1].from, from);
}
}
fn regex(&mut self, regex: &Regex, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match regex.alternatives.len() {
0 => accept, // matches the empty string
1 => self.alternative(®ex.alternatives[0], accept, reject),
_ => {
// NB -- it is important that we *collect* into a
// vector, because we don't want to intersperse
// compiling each alternative with adding the edges
// below
let alt_starts: Vec<_> =
regex.alternatives.iter()
.map(|alt| self.alternative(alt, accept, reject))
.collect();
let start = self.new_state(StateKind::Neither);
for alt_start in alt_starts {
self.push_edge(start, Noop, alt_start);
}
start
}
}
}
fn alternative(&mut self, alt: &Alternative, accept: NFAStateIndex, reject: NFAStateIndex)
-> NFAStateIndex {
// build our way from the back
let mut p = accept;
for elem in alt.elems.iter().rev() {
p = self.elem(elem, p, reject);
}
p
}
fn elem(&mut self, elem: &Elem, accept: NFAStateIndex, reject: NFAStateIndex) -> NFAStateIndex {
match *elem {
Elem::Any => {
// [s0] -otherwise-> [accept]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Other, accept);
s0
}
Elem::Test(test) => {
// [s0] -----c---> [accept]
// |
// +-otherwise-> [reject]
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, test, accept);
self.push_edge(s0, Other, reject);
s0
}
Elem::Group(ref regex) => {
self.regex(regex, accept, reject)
}
Elem::NotGroup(ref regex) => {
self.regex(regex, reject, accept) // NB: swapped accept/reject here :)
}
Elem::Repeat(RepeatOp::Question, ref elem) => {
// [s0] ----> [accept]
// | ^
// v |
// [s1] --...----+
// |
// v
// [reject]
let s1 = self.elem(elem, accept, reject);
let s0 = self.new_state(StateKind::Neither);
self.push_edge(s0, Noop, accept); // they might supply nothing
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Star, ref elem) => {
// [s0] ----> [accept]
// | ^
// | |
// | +----------+
// v |
// [s1] --...----+
// |
// v
// [reject]
let s0 = self.new_state(StateKind::Neither);
let s1 = self.elem(elem, s0, reject);
self.push_edge(s0, Noop, accept);
self.push_edge(s0, Noop, s1);
s0
}
Elem::Repeat(RepeatOp::Plus, ref elem) => {
// [accept]
// ^
// |
// +----------+
// v |
// [s0] --...--[s1]
// |
// v
// [reject]
let s1 = self.new_state(StateKind::Neither);
let s0 = self.elem(elem, s1, reject);
self.push_edge(s1, Noop, accept);
self.push_edge(s1, Noop, s0);
s0
}
}
}
}
pub trait EdgeLabel {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Self>>;
fn vec(nfa: &Edges) -> &Vec<Edge<Self>>;
fn first_mut(state: &mut State) -> &mut usize;
fn first(state: &State) -> &usize;
}
impl EdgeLabel for Noop {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Noop>> { &mut nfa.noop_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_noop_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Noop>> { &nfa.noop_edges }
fn first(state: &State) -> &usize { &state.first_noop_edge }
}
impl EdgeLabel for Other {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Other>> { &mut nfa.other_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_other_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Other>> { &nfa.other_edges }
fn first(state: &State) -> &usize { &state.first_other_edge }
}
impl EdgeLabel for Test {
fn vec_mut(nfa: &mut Edges) -> &mut Vec<Edge<Test>> { &mut nfa.test_edges }
fn first_mut(state: &mut State) -> &mut usize { &mut state.first_test_edge }
fn vec(nfa: &Edges) -> &Vec<Edge<Test>> { &nfa.test_edges }
fn first(state: &State) -> &usize { &state.first_test_edge }
}
pub struct EdgeIterator<'nfa,L:EdgeLabel+'nfa> {
edges: &'nfa [Edge<L>],
from: NFAStateIndex,
index: usize,
}
impl<'nfa,L:EdgeLabel> Iterator for EdgeIterator<'nfa,L> {
type Item = &'nfa Edge<L>;
fn next(&mut self) -> Option<&'nfa Edge<L>> {
let index = self.index;
if index == usize::MAX {
return None;
}
let next_index = index + 1;
if next_index >= self.edges.len() || self.edges[next_index].from!= self.from {
self.index = usize::MAX;
} else {
self.index = next_index;
}
Some(&self.edges[index])
}
}
impl Debug for NFAStateIndex {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "NFA{}", self.0)
}
}
impl<L:Debug> Debug for Edge<L> {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "{:?} -{:?}-> {:?}", self.from, self.label, self.to)
}
} | pub fn from_re(regex: &Regex) -> NFA {
let mut nfa = NFA::new(); | random_line_split |
htmlbaseelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLBaseElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLBaseElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLBaseElement {
pub htmlelement: HTMLElement
}
impl HTMLBaseElementDerived for EventTarget {
fn is_htmlbaseelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLBaseElementTypeId))
}
}
impl HTMLBaseElement {
pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(HTMLBaseElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
}
pub trait HTMLBaseElementMethods {
}
impl Reflectable for HTMLBaseElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector() | }
} | random_line_split |
|
htmlbaseelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLBaseElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLBaseElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLBaseElement {
pub htmlelement: HTMLElement
}
impl HTMLBaseElementDerived for EventTarget {
fn is_htmlbaseelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLBaseElementTypeId))
}
}
impl HTMLBaseElement {
pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(HTMLBaseElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLBaseElement> |
}
pub trait HTMLBaseElementMethods {
}
impl Reflectable for HTMLBaseElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}
| {
let element = HTMLBaseElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
} | identifier_body |
htmlbaseelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLBaseElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLBaseElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLBaseElement {
pub htmlelement: HTMLElement
}
impl HTMLBaseElementDerived for EventTarget {
fn is_htmlbaseelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLBaseElementTypeId))
}
}
impl HTMLBaseElement {
pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(HTMLBaseElementTypeId, localName, document)
}
}
pub fn | (localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
}
pub trait HTMLBaseElementMethods {
}
impl Reflectable for HTMLBaseElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}
| new | identifier_name |
canvasgradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasGradientStop, FillOrStrokeStyle, LinearGradientStyle, RadialGradientStyle};
use cssparser::Color as CSSColor;
use cssparser::{Parser, RGBA};
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasGradientBinding;
use dom::bindings::codegen::Bindings::CanvasGradientBinding::CanvasGradientMethods;
use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use util::str::DOMString;
// https://html.spec.whatwg.org/multipage/#canvasgradient
#[dom_struct]
pub struct CanvasGradient {
reflector_: Reflector,
style: CanvasGradientStyle,
stops: DOMRefCell<Vec<CanvasGradientStop>>,
}
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum CanvasGradientStyle {
Linear(LinearGradientStyle),
Radial(RadialGradientStyle),
}
impl CanvasGradient {
fn new_inherited(style: CanvasGradientStyle) -> CanvasGradient {
CanvasGradient {
reflector_: Reflector::new(),
style: style,
stops: DOMRefCell::new(Vec::new()),
}
}
pub fn | (global: GlobalRef, style: CanvasGradientStyle) -> Root<CanvasGradient> {
reflect_dom_object(box CanvasGradient::new_inherited(style),
global,
CanvasGradientBinding::Wrap)
}
}
impl CanvasGradientMethods for CanvasGradient {
// https://html.spec.whatwg.org/multipage/#dom-canvasgradient-addcolorstop
fn AddColorStop(&self, offset: Finite<f64>, color: DOMString) -> ErrorResult {
if *offset < 0f64 || *offset > 1f64 {
return Err(Error::IndexSize);
}
let mut parser = Parser::new(&color);
let color = CSSColor::parse(&mut parser);
let color = if parser.is_exhausted() {
match color {
Ok(CSSColor::RGBA(rgba)) => rgba,
Ok(CSSColor::CurrentColor) => RGBA { red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0 },
_ => return Err(Error::Syntax)
}
} else {
return Err(Error::Syntax)
};
self.stops.borrow_mut().push(CanvasGradientStop {
offset: (*offset) as f64,
color: color,
});
Ok(())
}
}
pub trait ToFillOrStrokeStyle {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle;
}
impl<'a> ToFillOrStrokeStyle for &'a CanvasGradient {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle {
let gradient_stops = self.stops.borrow().clone();
match self.style {
CanvasGradientStyle::Linear(ref gradient) => {
FillOrStrokeStyle::LinearGradient(LinearGradientStyle::new(gradient.x0,
gradient.y0,
gradient.x1,
gradient.y1,
gradient_stops))
}
CanvasGradientStyle::Radial(ref gradient) => {
FillOrStrokeStyle::RadialGradient(RadialGradientStyle::new(gradient.x0,
gradient.y0,
gradient.r0,
gradient.x1,
gradient.y1,
gradient.r1,
gradient_stops))
}
}
}
}
| new | identifier_name |
canvasgradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasGradientStop, FillOrStrokeStyle, LinearGradientStyle, RadialGradientStyle};
use cssparser::Color as CSSColor;
use cssparser::{Parser, RGBA};
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasGradientBinding;
use dom::bindings::codegen::Bindings::CanvasGradientBinding::CanvasGradientMethods;
use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use util::str::DOMString;
// https://html.spec.whatwg.org/multipage/#canvasgradient
#[dom_struct]
pub struct CanvasGradient {
reflector_: Reflector,
style: CanvasGradientStyle,
stops: DOMRefCell<Vec<CanvasGradientStop>>,
}
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum CanvasGradientStyle {
Linear(LinearGradientStyle),
Radial(RadialGradientStyle),
}
impl CanvasGradient {
fn new_inherited(style: CanvasGradientStyle) -> CanvasGradient {
CanvasGradient {
reflector_: Reflector::new(),
style: style,
stops: DOMRefCell::new(Vec::new()),
}
}
pub fn new(global: GlobalRef, style: CanvasGradientStyle) -> Root<CanvasGradient> {
reflect_dom_object(box CanvasGradient::new_inherited(style),
global,
CanvasGradientBinding::Wrap)
}
}
impl CanvasGradientMethods for CanvasGradient {
// https://html.spec.whatwg.org/multipage/#dom-canvasgradient-addcolorstop
fn AddColorStop(&self, offset: Finite<f64>, color: DOMString) -> ErrorResult {
if *offset < 0f64 || *offset > 1f64 {
return Err(Error::IndexSize);
} | match color {
Ok(CSSColor::RGBA(rgba)) => rgba,
Ok(CSSColor::CurrentColor) => RGBA { red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0 },
_ => return Err(Error::Syntax)
}
} else {
return Err(Error::Syntax)
};
self.stops.borrow_mut().push(CanvasGradientStop {
offset: (*offset) as f64,
color: color,
});
Ok(())
}
}
pub trait ToFillOrStrokeStyle {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle;
}
impl<'a> ToFillOrStrokeStyle for &'a CanvasGradient {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle {
let gradient_stops = self.stops.borrow().clone();
match self.style {
CanvasGradientStyle::Linear(ref gradient) => {
FillOrStrokeStyle::LinearGradient(LinearGradientStyle::new(gradient.x0,
gradient.y0,
gradient.x1,
gradient.y1,
gradient_stops))
}
CanvasGradientStyle::Radial(ref gradient) => {
FillOrStrokeStyle::RadialGradient(RadialGradientStyle::new(gradient.x0,
gradient.y0,
gradient.r0,
gradient.x1,
gradient.y1,
gradient.r1,
gradient_stops))
}
}
}
} |
let mut parser = Parser::new(&color);
let color = CSSColor::parse(&mut parser);
let color = if parser.is_exhausted() { | random_line_split |
unboxed-closures-blanket-fn-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that you can supply `&F` where `F: FnMut()`.
// pretty-expanded FIXME #23616
#![feature(lang_items, unboxed_closures)]
fn a<F:FnMut() -> i32>(mut f: F) -> i32 {
f()
}
fn b(f: &mut FnMut() -> i32) -> i32 {
a(f)
}
fn c<F:FnMut() -> i32>(f: &mut F) -> i32 {
a(f)
}
fn main() {
let z: isize = 7;
let x = b(&mut || 22); |
let x = c(&mut || 22);
assert_eq!(x, 22);
} | assert_eq!(x, 22); | random_line_split |
unboxed-closures-blanket-fn-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that you can supply `&F` where `F: FnMut()`.
// pretty-expanded FIXME #23616
#![feature(lang_items, unboxed_closures)]
fn a<F:FnMut() -> i32>(mut f: F) -> i32 {
f()
}
fn b(f: &mut FnMut() -> i32) -> i32 |
fn c<F:FnMut() -> i32>(f: &mut F) -> i32 {
a(f)
}
fn main() {
let z: isize = 7;
let x = b(&mut || 22);
assert_eq!(x, 22);
let x = c(&mut || 22);
assert_eq!(x, 22);
}
| {
a(f)
} | identifier_body |
unboxed-closures-blanket-fn-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that you can supply `&F` where `F: FnMut()`.
// pretty-expanded FIXME #23616
#![feature(lang_items, unboxed_closures)]
fn a<F:FnMut() -> i32>(mut f: F) -> i32 {
f()
}
fn b(f: &mut FnMut() -> i32) -> i32 {
a(f)
}
fn | <F:FnMut() -> i32>(f: &mut F) -> i32 {
a(f)
}
fn main() {
let z: isize = 7;
let x = b(&mut || 22);
assert_eq!(x, 22);
let x = c(&mut || 22);
assert_eq!(x, 22);
}
| c | identifier_name |
tablet_tool.rs | //! TODO Documentation
use std::{cell::Cell, ptr::NonNull, rc::Rc};
use wlroots_sys::{wlr_input_device, wlr_tablet, wlr_tablet_tool_axes};
pub use crate::events::tablet_tool_events as event;
pub use crate::manager::tablet_tool_handler::*;
use crate::{
input::{self, InputState},
utils::{self, HandleErr, HandleResult, Handleable}
};
pub type Handle = utils::Handle<NonNull<wlr_input_device>, wlr_tablet, TabletTool>;
#[derive(Debug)]
pub struct TabletTool {
/// The structure that ensures weak handles to this structure are still
/// alive.
///
/// They contain weak handles, and will safely not use dead memory when this
/// is freed by wlroots.
///
/// If this is `None`, then this is from an upgraded `tablet_tool::Handle`,
/// and the operations are **unchecked**.
/// This is means safe operations might fail, but only if you use the unsafe
/// marked function `upgrade` on a `tablet_tool::Handle`.
liveliness: Rc<Cell<bool>>,
/// The device that refers to this tablet tool.
device: input::Device,
/// Underlying tablet state
tool: NonNull<wlr_tablet>
}
bitflags! {
pub struct Axis: u32 {
const WLR_TABLET_TOOL_AXIS_X =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_X as u32;
const WLR_TABLET_TOOL_AXIS_Y =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_Y as u32;
const WLR_TABLET_TOOL_AXIS_DISTANCE =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_DISTANCE as u32;
const WLR_TABLET_TOOL_AXIS_PRESSURE =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_PRESSURE as u32;
const WLR_TABLET_TOOL_AXIS_TILT_X =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_TILT_X as u32;
const WLR_TABLET_TOOL_AXIS_TILT_Y =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_TILT_Y as u32;
}
}
impl TabletTool {
/// Tries to convert an input device to a TabletTool
///
/// Returns None if it is of a different type of input variant.
///
/// # Safety
/// This creates a totally new TabletTool (e.g with its own reference count)
/// so only do this once per `wlr_input_device`!
pub(crate) unsafe fn new_from_input_device(device: *mut wlr_input_device) -> Option<Self> {
use wlroots_sys::wlr_input_device_type::*;
match (*device).type_ {
WLR_INPUT_DEVICE_TABLET_TOOL => {
let tool = NonNull::new((*device).__bindgen_anon_1.tablet).expect(
"Tablet Tool \
pointer was \
null"
);
let liveliness = Rc::new(Cell::new(false));
let handle = Rc::downgrade(&liveliness);
let state = Box::new(InputState {
handle,
device: input::Device::from_ptr(device)
});
(*tool.as_ptr()).data = Box::into_raw(state) as *mut _;
Some(TabletTool {
liveliness,
device: input::Device::from_ptr(device),
tool
})
},
_ => None
}
}
/// Gets the wlr_input_device associated with this TabletTool.
pub fn input_device(&self) -> &input::Device {
&self.device
}
}
impl Drop for TabletTool {
fn drop(&mut self) {
if Rc::strong_count(&self.liveliness)!= 1 {
return;
}
wlr_log!(WLR_DEBUG, "Dropped TabletTool {:p}", self.tool.as_ptr());
unsafe {
let _ = Box::from_raw((*self.tool.as_ptr()).data as *mut InputState);
}
let weak_count = Rc::weak_count(&self.liveliness);
if weak_count > 0 {
wlr_log!(
WLR_DEBUG,
"Still {} weak pointers to TabletTool {:p}",
weak_count,
self.tool.as_ptr()
);
}
}
}
impl Handleable<NonNull<wlr_input_device>, wlr_tablet> for TabletTool {
#[doc(hidden)]
unsafe fn from_ptr(tool: *mut wlr_tablet) -> Option<Self> {
let tool = NonNull::new(tool)?;
let data = Box::from_raw((*tool.as_ptr()).data as *mut InputState);
let handle = data.handle.clone();
let device = data.device.clone();
(*tool.as_ptr()).data = Box::into_raw(data) as *mut _;
Some(TabletTool {
liveliness: handle.upgrade().unwrap(),
device,
tool
})
}
#[doc(hidden)]
unsafe fn as_ptr(&self) -> *mut wlr_tablet {
self.tool.as_ptr()
}
#[doc(hidden)]
unsafe fn from_handle(handle: &Handle) -> HandleResult<Self> {
let liveliness = handle.handle.upgrade().ok_or(HandleErr::AlreadyDropped)?;
let device = handle.data.ok_or(HandleErr::AlreadyDropped)?;
Ok(TabletTool {
liveliness,
// NOTE Rationale for cloning:
// If we already dropped we don't reach this point.
device: input::Device { device },
tool: handle.as_non_null()
})
}
fn | (&self) -> Handle {
Handle {
ptr: self.tool,
handle: Rc::downgrade(&self.liveliness),
// NOTE Rationale for cloning:
// Since we have a strong reference already,
// the input must still be alive.
data: unsafe { Some(self.device.as_non_null()) },
_marker: std::marker::PhantomData
}
}
}
| weak_reference | identifier_name |
tablet_tool.rs | //! TODO Documentation
use std::{cell::Cell, ptr::NonNull, rc::Rc};
use wlroots_sys::{wlr_input_device, wlr_tablet, wlr_tablet_tool_axes};
pub use crate::events::tablet_tool_events as event;
pub use crate::manager::tablet_tool_handler::*;
use crate::{
input::{self, InputState},
utils::{self, HandleErr, HandleResult, Handleable}
};
pub type Handle = utils::Handle<NonNull<wlr_input_device>, wlr_tablet, TabletTool>;
#[derive(Debug)]
pub struct TabletTool {
/// The structure that ensures weak handles to this structure are still
/// alive.
///
/// They contain weak handles, and will safely not use dead memory when this
/// is freed by wlroots.
///
/// If this is `None`, then this is from an upgraded `tablet_tool::Handle`,
/// and the operations are **unchecked**.
/// This is means safe operations might fail, but only if you use the unsafe
/// marked function `upgrade` on a `tablet_tool::Handle`.
liveliness: Rc<Cell<bool>>,
/// The device that refers to this tablet tool.
device: input::Device,
/// Underlying tablet state
tool: NonNull<wlr_tablet>
}
bitflags! {
pub struct Axis: u32 {
const WLR_TABLET_TOOL_AXIS_X =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_X as u32;
const WLR_TABLET_TOOL_AXIS_Y =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_Y as u32;
const WLR_TABLET_TOOL_AXIS_DISTANCE =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_DISTANCE as u32;
const WLR_TABLET_TOOL_AXIS_PRESSURE =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_PRESSURE as u32;
const WLR_TABLET_TOOL_AXIS_TILT_X =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_TILT_X as u32;
const WLR_TABLET_TOOL_AXIS_TILT_Y =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_TILT_Y as u32;
}
}
impl TabletTool {
/// Tries to convert an input device to a TabletTool
///
/// Returns None if it is of a different type of input variant.
///
/// # Safety
/// This creates a totally new TabletTool (e.g with its own reference count)
/// so only do this once per `wlr_input_device`!
pub(crate) unsafe fn new_from_input_device(device: *mut wlr_input_device) -> Option<Self> {
use wlroots_sys::wlr_input_device_type::*;
match (*device).type_ {
WLR_INPUT_DEVICE_TABLET_TOOL => {
let tool = NonNull::new((*device).__bindgen_anon_1.tablet).expect(
"Tablet Tool \
pointer was \
null"
);
let liveliness = Rc::new(Cell::new(false));
let handle = Rc::downgrade(&liveliness);
let state = Box::new(InputState {
handle,
device: input::Device::from_ptr(device)
});
(*tool.as_ptr()).data = Box::into_raw(state) as *mut _;
Some(TabletTool {
liveliness,
device: input::Device::from_ptr(device),
tool
})
},
_ => None
}
}
/// Gets the wlr_input_device associated with this TabletTool.
pub fn input_device(&self) -> &input::Device {
&self.device
}
}
impl Drop for TabletTool {
fn drop(&mut self) {
if Rc::strong_count(&self.liveliness)!= 1 {
return;
}
wlr_log!(WLR_DEBUG, "Dropped TabletTool {:p}", self.tool.as_ptr());
unsafe {
let _ = Box::from_raw((*self.tool.as_ptr()).data as *mut InputState);
}
let weak_count = Rc::weak_count(&self.liveliness);
if weak_count > 0 {
wlr_log!(
WLR_DEBUG,
"Still {} weak pointers to TabletTool {:p}",
weak_count,
self.tool.as_ptr()
);
}
}
}
impl Handleable<NonNull<wlr_input_device>, wlr_tablet> for TabletTool {
#[doc(hidden)]
unsafe fn from_ptr(tool: *mut wlr_tablet) -> Option<Self> {
let tool = NonNull::new(tool)?;
let data = Box::from_raw((*tool.as_ptr()).data as *mut InputState);
let handle = data.handle.clone();
let device = data.device.clone();
(*tool.as_ptr()).data = Box::into_raw(data) as *mut _;
Some(TabletTool {
liveliness: handle.upgrade().unwrap(),
device,
tool
})
}
#[doc(hidden)]
unsafe fn as_ptr(&self) -> *mut wlr_tablet {
self.tool.as_ptr()
}
#[doc(hidden)]
unsafe fn from_handle(handle: &Handle) -> HandleResult<Self> {
let liveliness = handle.handle.upgrade().ok_or(HandleErr::AlreadyDropped)?;
let device = handle.data.ok_or(HandleErr::AlreadyDropped)?;
Ok(TabletTool {
liveliness,
// NOTE Rationale for cloning:
// If we already dropped we don't reach this point.
device: input::Device { device },
tool: handle.as_non_null()
}) |
fn weak_reference(&self) -> Handle {
Handle {
ptr: self.tool,
handle: Rc::downgrade(&self.liveliness),
// NOTE Rationale for cloning:
// Since we have a strong reference already,
// the input must still be alive.
data: unsafe { Some(self.device.as_non_null()) },
_marker: std::marker::PhantomData
}
}
} | } | random_line_split |
tablet_tool.rs | //! TODO Documentation
use std::{cell::Cell, ptr::NonNull, rc::Rc};
use wlroots_sys::{wlr_input_device, wlr_tablet, wlr_tablet_tool_axes};
pub use crate::events::tablet_tool_events as event;
pub use crate::manager::tablet_tool_handler::*;
use crate::{
input::{self, InputState},
utils::{self, HandleErr, HandleResult, Handleable}
};
pub type Handle = utils::Handle<NonNull<wlr_input_device>, wlr_tablet, TabletTool>;
#[derive(Debug)]
pub struct TabletTool {
/// The structure that ensures weak handles to this structure are still
/// alive.
///
/// They contain weak handles, and will safely not use dead memory when this
/// is freed by wlroots.
///
/// If this is `None`, then this is from an upgraded `tablet_tool::Handle`,
/// and the operations are **unchecked**.
/// This is means safe operations might fail, but only if you use the unsafe
/// marked function `upgrade` on a `tablet_tool::Handle`.
liveliness: Rc<Cell<bool>>,
/// The device that refers to this tablet tool.
device: input::Device,
/// Underlying tablet state
tool: NonNull<wlr_tablet>
}
bitflags! {
pub struct Axis: u32 {
const WLR_TABLET_TOOL_AXIS_X =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_X as u32;
const WLR_TABLET_TOOL_AXIS_Y =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_Y as u32;
const WLR_TABLET_TOOL_AXIS_DISTANCE =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_DISTANCE as u32;
const WLR_TABLET_TOOL_AXIS_PRESSURE =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_PRESSURE as u32;
const WLR_TABLET_TOOL_AXIS_TILT_X =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_TILT_X as u32;
const WLR_TABLET_TOOL_AXIS_TILT_Y =
wlr_tablet_tool_axes::WLR_TABLET_TOOL_AXIS_TILT_Y as u32;
}
}
impl TabletTool {
/// Tries to convert an input device to a TabletTool
///
/// Returns None if it is of a different type of input variant.
///
/// # Safety
/// This creates a totally new TabletTool (e.g with its own reference count)
/// so only do this once per `wlr_input_device`!
pub(crate) unsafe fn new_from_input_device(device: *mut wlr_input_device) -> Option<Self> {
use wlroots_sys::wlr_input_device_type::*;
match (*device).type_ {
WLR_INPUT_DEVICE_TABLET_TOOL => {
let tool = NonNull::new((*device).__bindgen_anon_1.tablet).expect(
"Tablet Tool \
pointer was \
null"
);
let liveliness = Rc::new(Cell::new(false));
let handle = Rc::downgrade(&liveliness);
let state = Box::new(InputState {
handle,
device: input::Device::from_ptr(device)
});
(*tool.as_ptr()).data = Box::into_raw(state) as *mut _;
Some(TabletTool {
liveliness,
device: input::Device::from_ptr(device),
tool
})
},
_ => None
}
}
/// Gets the wlr_input_device associated with this TabletTool.
pub fn input_device(&self) -> &input::Device {
&self.device
}
}
impl Drop for TabletTool {
fn drop(&mut self) {
if Rc::strong_count(&self.liveliness)!= 1 {
return;
}
wlr_log!(WLR_DEBUG, "Dropped TabletTool {:p}", self.tool.as_ptr());
unsafe {
let _ = Box::from_raw((*self.tool.as_ptr()).data as *mut InputState);
}
let weak_count = Rc::weak_count(&self.liveliness);
if weak_count > 0 {
wlr_log!(
WLR_DEBUG,
"Still {} weak pointers to TabletTool {:p}",
weak_count,
self.tool.as_ptr()
);
}
}
}
impl Handleable<NonNull<wlr_input_device>, wlr_tablet> for TabletTool {
#[doc(hidden)]
unsafe fn from_ptr(tool: *mut wlr_tablet) -> Option<Self> {
let tool = NonNull::new(tool)?;
let data = Box::from_raw((*tool.as_ptr()).data as *mut InputState);
let handle = data.handle.clone();
let device = data.device.clone();
(*tool.as_ptr()).data = Box::into_raw(data) as *mut _;
Some(TabletTool {
liveliness: handle.upgrade().unwrap(),
device,
tool
})
}
#[doc(hidden)]
unsafe fn as_ptr(&self) -> *mut wlr_tablet {
self.tool.as_ptr()
}
#[doc(hidden)]
unsafe fn from_handle(handle: &Handle) -> HandleResult<Self> {
let liveliness = handle.handle.upgrade().ok_or(HandleErr::AlreadyDropped)?;
let device = handle.data.ok_or(HandleErr::AlreadyDropped)?;
Ok(TabletTool {
liveliness,
// NOTE Rationale for cloning:
// If we already dropped we don't reach this point.
device: input::Device { device },
tool: handle.as_non_null()
})
}
fn weak_reference(&self) -> Handle |
}
| {
Handle {
ptr: self.tool,
handle: Rc::downgrade(&self.liveliness),
// NOTE Rationale for cloning:
// Since we have a strong reference already,
// the input must still be alive.
data: unsafe { Some(self.device.as_non_null()) },
_marker: std::marker::PhantomData
}
} | identifier_body |
version.rs | // Copyright (c) 2015 Y. T. Chung <[email protected]>
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
//! Memcached version
use std::fmt::{self, Display, Formatter};
use std::str::FromStr;
/// Memcached version
///
/// Version(major, minor, patch)
#[derive(Copy, Debug)]
pub struct Version(u32, u32, u32);
impl Version {
pub fn new(major: u32, minor: u32, patch: u32) -> Version {
Version(major, minor, patch)
}
}
impl Display for Version {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
let &Version(major, minor, patch) = self;
write!(f, "{}:{}:{}", major, minor, patch)
}
}
macro_rules! try_option(
($inp:expr) => (
match $inp {
Some(v) => { v },
None => { return None; },
}
);
);
impl FromStr for Version { | fn from_str(s: &str) -> Option<Version> {
let mut sp = s.split('.');
let major = match sp.next() {
Some(s) => try_option!(s.parse()),
None => return None,
};
let minor = match sp.next() {
Some(s) => try_option!(s.parse()),
None => 0,
};
let patch = match sp.next() {
Some(s) => try_option!(s.parse()),
None => 0,
};
Some(Version::new(major, minor, patch))
}
} | random_line_split |
|
version.rs | // Copyright (c) 2015 Y. T. Chung <[email protected]>
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
//! Memcached version
use std::fmt::{self, Display, Formatter};
use std::str::FromStr;
/// Memcached version
///
/// Version(major, minor, patch)
#[derive(Copy, Debug)]
pub struct Version(u32, u32, u32);
impl Version {
pub fn | (major: u32, minor: u32, patch: u32) -> Version {
Version(major, minor, patch)
}
}
impl Display for Version {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
let &Version(major, minor, patch) = self;
write!(f, "{}:{}:{}", major, minor, patch)
}
}
macro_rules! try_option(
($inp:expr) => (
match $inp {
Some(v) => { v },
None => { return None; },
}
);
);
impl FromStr for Version {
fn from_str(s: &str) -> Option<Version> {
let mut sp = s.split('.');
let major = match sp.next() {
Some(s) => try_option!(s.parse()),
None => return None,
};
let minor = match sp.next() {
Some(s) => try_option!(s.parse()),
None => 0,
};
let patch = match sp.next() {
Some(s) => try_option!(s.parse()),
None => 0,
};
Some(Version::new(major, minor, patch))
}
}
| new | identifier_name |
node_style.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// Style retrieval from DOM elements.
use css::node_util::NodeUtil;
use layout::incremental::RestyleDamage;
use layout::wrapper::ThreadSafeLayoutNode;
use extra::arc::Arc;
use style::ComputedValues;
/// Node mixin providing `style` method that returns a `NodeStyle`
pub trait StyledNode {
fn style<'a>(&'a self) -> &'a Arc<ComputedValues>;
fn restyle_damage(&self) -> RestyleDamage;
}
impl<'ln> StyledNode for ThreadSafeLayoutNode<'ln> {
#[inline]
fn style<'a>(&'a self) -> &'a Arc<ComputedValues> {
self.get_css_select_results()
}
fn | (&self) -> RestyleDamage {
self.get_restyle_damage()
}
}
| restyle_damage | identifier_name |
node_style.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// Style retrieval from DOM elements.
use css::node_util::NodeUtil;
use layout::incremental::RestyleDamage;
use layout::wrapper::ThreadSafeLayoutNode;
use extra::arc::Arc;
use style::ComputedValues;
/// Node mixin providing `style` method that returns a `NodeStyle`
pub trait StyledNode {
fn style<'a>(&'a self) -> &'a Arc<ComputedValues>;
fn restyle_damage(&self) -> RestyleDamage;
}
impl<'ln> StyledNode for ThreadSafeLayoutNode<'ln> {
#[inline]
fn style<'a>(&'a self) -> &'a Arc<ComputedValues> {
self.get_css_select_results()
}
fn restyle_damage(&self) -> RestyleDamage |
}
| {
self.get_restyle_damage()
} | identifier_body |
node_style.rs | /* This Source Code Form is subject to the terms of the Mozilla Public | * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// Style retrieval from DOM elements.
use css::node_util::NodeUtil;
use layout::incremental::RestyleDamage;
use layout::wrapper::ThreadSafeLayoutNode;
use extra::arc::Arc;
use style::ComputedValues;
/// Node mixin providing `style` method that returns a `NodeStyle`
pub trait StyledNode {
fn style<'a>(&'a self) -> &'a Arc<ComputedValues>;
fn restyle_damage(&self) -> RestyleDamage;
}
impl<'ln> StyledNode for ThreadSafeLayoutNode<'ln> {
#[inline]
fn style<'a>(&'a self) -> &'a Arc<ComputedValues> {
self.get_css_select_results()
}
fn restyle_damage(&self) -> RestyleDamage {
self.get_restyle_damage()
}
} | random_line_split |
|
mod.rs | // Copyright 2013 The rust-gobject authors.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use glib;
use std::ptr;
mod native;
pub unsafe fn g_object_ref(object: glib::gpointer) -> glib::gpointer {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_ref(object)
}
pub unsafe fn | (object: glib::gpointer) -> glib::gpointer {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_ref_sink(object)
}
pub unsafe fn g_object_unref(object: glib::gpointer) {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_unref(object)
}
| g_object_ref_sink | identifier_name |
mod.rs | // Copyright 2013 The rust-gobject authors.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use glib;
use std::ptr;
mod native;
pub unsafe fn g_object_ref(object: glib::gpointer) -> glib::gpointer {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_ref(object)
}
pub unsafe fn g_object_ref_sink(object: glib::gpointer) -> glib::gpointer |
pub unsafe fn g_object_unref(object: glib::gpointer) {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_unref(object)
}
| {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_ref_sink(object)
} | identifier_body |
mod.rs | // Copyright 2013 The rust-gobject authors.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use glib;
use std::ptr;
mod native; | pub unsafe fn g_object_ref(object: glib::gpointer) -> glib::gpointer {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_ref(object)
}
pub unsafe fn g_object_ref_sink(object: glib::gpointer) -> glib::gpointer {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_ref_sink(object)
}
pub unsafe fn g_object_unref(object: glib::gpointer) {
#[fixed_stack_segment]; #[inline(never)];
assert!(ptr::is_not_null(object));
native::g_object_unref(object)
} | random_line_split |
|
logging.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::Error;
use consensus_types::common::{Author, Round};
use diem_logger::Schema;
use diem_types::waypoint::Waypoint;
use serde::Serialize;
#[derive(Schema)]
pub struct SafetyLogSchema<'a> {
name: LogEntry,
event: LogEvent,
round: Option<Round>,
preferred_round: Option<u64>,
last_voted_round: Option<u64>,
epoch: Option<u64>,
#[schema(display)]
error: Option<&'a Error>,
waypoint: Option<Waypoint>,
author: Option<Author>,
}
impl<'a> SafetyLogSchema<'a> {
pub fn new(name: LogEntry, event: LogEvent) -> Self {
Self {
name,
event,
round: None,
preferred_round: None,
last_voted_round: None,
epoch: None,
error: None,
waypoint: None,
author: None,
}
}
}
#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum | {
ConsensusState,
ConstructAndSignVote,
Epoch,
Initialize,
KeyReconciliation,
LastVotedRound,
PreferredRound,
SignProposal,
SignTimeout,
State,
Waypoint,
}
impl LogEntry {
pub fn as_str(&self) -> &'static str {
match self {
LogEntry::ConsensusState => "consensus_state",
LogEntry::ConstructAndSignVote => "construct_and_sign_vote",
LogEntry::Epoch => "epoch",
LogEntry::Initialize => "initialize",
LogEntry::LastVotedRound => "last_voted_round",
LogEntry::KeyReconciliation => "key_reconciliation",
LogEntry::PreferredRound => "preferred_round",
LogEntry::SignProposal => "sign_proposal",
LogEntry::SignTimeout => "sign_timeout",
LogEntry::State => "state",
LogEntry::Waypoint => "waypoint",
}
}
}
#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum LogEvent {
Error,
Request,
Success,
Update,
}
| LogEntry | identifier_name |
logging.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::Error;
use consensus_types::common::{Author, Round};
use diem_logger::Schema;
use diem_types::waypoint::Waypoint;
use serde::Serialize;
#[derive(Schema)] | event: LogEvent,
round: Option<Round>,
preferred_round: Option<u64>,
last_voted_round: Option<u64>,
epoch: Option<u64>,
#[schema(display)]
error: Option<&'a Error>,
waypoint: Option<Waypoint>,
author: Option<Author>,
}
impl<'a> SafetyLogSchema<'a> {
pub fn new(name: LogEntry, event: LogEvent) -> Self {
Self {
name,
event,
round: None,
preferred_round: None,
last_voted_round: None,
epoch: None,
error: None,
waypoint: None,
author: None,
}
}
}
#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum LogEntry {
ConsensusState,
ConstructAndSignVote,
Epoch,
Initialize,
KeyReconciliation,
LastVotedRound,
PreferredRound,
SignProposal,
SignTimeout,
State,
Waypoint,
}
impl LogEntry {
pub fn as_str(&self) -> &'static str {
match self {
LogEntry::ConsensusState => "consensus_state",
LogEntry::ConstructAndSignVote => "construct_and_sign_vote",
LogEntry::Epoch => "epoch",
LogEntry::Initialize => "initialize",
LogEntry::LastVotedRound => "last_voted_round",
LogEntry::KeyReconciliation => "key_reconciliation",
LogEntry::PreferredRound => "preferred_round",
LogEntry::SignProposal => "sign_proposal",
LogEntry::SignTimeout => "sign_timeout",
LogEntry::State => "state",
LogEntry::Waypoint => "waypoint",
}
}
}
#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum LogEvent {
Error,
Request,
Success,
Update,
} | pub struct SafetyLogSchema<'a> {
name: LogEntry, | random_line_split |
nonzero.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Exposes the NonZero lang item which provides optimization hints.
#![unstable(feature = "nonzero",
reason = "needs an RFC to flesh out the design",
issue = "27730")]
use marker::Sized;
use ops::{CoerceUnsized, Deref};
/// Unsafe trait to indicate what types are usable with the NonZero struct
pub unsafe trait Zeroable {}
unsafe impl<T:?Sized> Zeroable for *const T {}
unsafe impl<T:?Sized> Zeroable for *mut T {}
unsafe impl Zeroable for isize {}
unsafe impl Zeroable for usize {} | unsafe impl Zeroable for i16 {}
unsafe impl Zeroable for u16 {}
unsafe impl Zeroable for i32 {}
unsafe impl Zeroable for u32 {}
unsafe impl Zeroable for i64 {}
unsafe impl Zeroable for u64 {}
/// A wrapper type for raw pointers and integers that will never be
/// NULL or 0 that might allow certain optimizations.
#[lang = "non_zero"]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
pub struct NonZero<T: Zeroable>(T);
#[cfg(stage0)]
macro_rules! nonzero_new {
() => (
/// Creates an instance of NonZero with the provided value.
/// You must indeed ensure that the value is actually "non-zero".
#[inline(always)]
pub unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
)
}
#[cfg(not(stage0))]
macro_rules! nonzero_new {
() => (
/// Creates an instance of NonZero with the provided value.
/// You must indeed ensure that the value is actually "non-zero".
#[inline(always)]
pub const unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
)
}
impl<T: Zeroable> NonZero<T> {
nonzero_new!{}
}
impl<T: Zeroable> Deref for NonZero<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
let NonZero(ref inner) = *self;
inner
}
}
impl<T: Zeroable+CoerceUnsized<U>, U: Zeroable> CoerceUnsized<NonZero<U>> for NonZero<T> {} | unsafe impl Zeroable for i8 {}
unsafe impl Zeroable for u8 {} | random_line_split |
nonzero.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Exposes the NonZero lang item which provides optimization hints.
#![unstable(feature = "nonzero",
reason = "needs an RFC to flesh out the design",
issue = "27730")]
use marker::Sized;
use ops::{CoerceUnsized, Deref};
/// Unsafe trait to indicate what types are usable with the NonZero struct
pub unsafe trait Zeroable {}
unsafe impl<T:?Sized> Zeroable for *const T {}
unsafe impl<T:?Sized> Zeroable for *mut T {}
unsafe impl Zeroable for isize {}
unsafe impl Zeroable for usize {}
unsafe impl Zeroable for i8 {}
unsafe impl Zeroable for u8 {}
unsafe impl Zeroable for i16 {}
unsafe impl Zeroable for u16 {}
unsafe impl Zeroable for i32 {}
unsafe impl Zeroable for u32 {}
unsafe impl Zeroable for i64 {}
unsafe impl Zeroable for u64 {}
/// A wrapper type for raw pointers and integers that will never be
/// NULL or 0 that might allow certain optimizations.
#[lang = "non_zero"]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
pub struct NonZero<T: Zeroable>(T);
#[cfg(stage0)]
macro_rules! nonzero_new {
() => (
/// Creates an instance of NonZero with the provided value.
/// You must indeed ensure that the value is actually "non-zero".
#[inline(always)]
pub unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
)
}
#[cfg(not(stage0))]
macro_rules! nonzero_new {
() => (
/// Creates an instance of NonZero with the provided value.
/// You must indeed ensure that the value is actually "non-zero".
#[inline(always)]
pub const unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
)
}
impl<T: Zeroable> NonZero<T> {
nonzero_new!{}
}
impl<T: Zeroable> Deref for NonZero<T> {
type Target = T;
#[inline]
fn | (&self) -> &T {
let NonZero(ref inner) = *self;
inner
}
}
impl<T: Zeroable+CoerceUnsized<U>, U: Zeroable> CoerceUnsized<NonZero<U>> for NonZero<T> {}
| deref | identifier_name |
nonzero.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Exposes the NonZero lang item which provides optimization hints.
#![unstable(feature = "nonzero",
reason = "needs an RFC to flesh out the design",
issue = "27730")]
use marker::Sized;
use ops::{CoerceUnsized, Deref};
/// Unsafe trait to indicate what types are usable with the NonZero struct
pub unsafe trait Zeroable {}
unsafe impl<T:?Sized> Zeroable for *const T {}
unsafe impl<T:?Sized> Zeroable for *mut T {}
unsafe impl Zeroable for isize {}
unsafe impl Zeroable for usize {}
unsafe impl Zeroable for i8 {}
unsafe impl Zeroable for u8 {}
unsafe impl Zeroable for i16 {}
unsafe impl Zeroable for u16 {}
unsafe impl Zeroable for i32 {}
unsafe impl Zeroable for u32 {}
unsafe impl Zeroable for i64 {}
unsafe impl Zeroable for u64 {}
/// A wrapper type for raw pointers and integers that will never be
/// NULL or 0 that might allow certain optimizations.
#[lang = "non_zero"]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
pub struct NonZero<T: Zeroable>(T);
#[cfg(stage0)]
macro_rules! nonzero_new {
() => (
/// Creates an instance of NonZero with the provided value.
/// You must indeed ensure that the value is actually "non-zero".
#[inline(always)]
pub unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
)
}
#[cfg(not(stage0))]
macro_rules! nonzero_new {
() => (
/// Creates an instance of NonZero with the provided value.
/// You must indeed ensure that the value is actually "non-zero".
#[inline(always)]
pub const unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
)
}
impl<T: Zeroable> NonZero<T> {
nonzero_new!{}
}
impl<T: Zeroable> Deref for NonZero<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T |
}
impl<T: Zeroable+CoerceUnsized<U>, U: Zeroable> CoerceUnsized<NonZero<U>> for NonZero<T> {}
| {
let NonZero(ref inner) = *self;
inner
} | identifier_body |
lint-unsafe-block.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unused_unsafe)]
#![allow(dead_code)]
#![deny(unsafe_block)]
#![feature(macro_rules)]
unsafe fn allowed() {}
#[allow(unsafe_block)] fn also_allowed() { unsafe {} }
macro_rules! unsafe_in_macro {
() => {
unsafe {} //~ ERROR: usage of an `unsafe` block
}
}
fn | () {
unsafe {} //~ ERROR: usage of an `unsafe` block
unsafe_in_macro!()
}
| main | identifier_name |
lint-unsafe-block.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unused_unsafe)] |
unsafe fn allowed() {}
#[allow(unsafe_block)] fn also_allowed() { unsafe {} }
macro_rules! unsafe_in_macro {
() => {
unsafe {} //~ ERROR: usage of an `unsafe` block
}
}
fn main() {
unsafe {} //~ ERROR: usage of an `unsafe` block
unsafe_in_macro!()
} | #![allow(dead_code)]
#![deny(unsafe_block)]
#![feature(macro_rules)] | random_line_split |
lint-unsafe-block.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unused_unsafe)]
#![allow(dead_code)]
#![deny(unsafe_block)]
#![feature(macro_rules)]
unsafe fn allowed() {}
#[allow(unsafe_block)] fn also_allowed() { unsafe {} }
macro_rules! unsafe_in_macro {
() => {
unsafe {} //~ ERROR: usage of an `unsafe` block
}
}
fn main() | {
unsafe {} //~ ERROR: usage of an `unsafe` block
unsafe_in_macro!()
} | identifier_body |
|
autoderef-full-lval.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate debug;
use std::gc::{Gc, GC};
struct clam {
x: Gc<int>,
y: Gc<int>,
}
struct | {
a: Gc<int>,
}
fn main() {
let a: clam = clam{x: box(GC) 1, y: box(GC) 2};
let b: clam = clam{x: box(GC) 10, y: box(GC) 20};
let z: int = a.x + b.y; //~ ERROR binary operation `+` cannot be applied to type `Gc<int>`
println!("{:?}", z);
assert_eq!(z, 21);
let forty: fish = fish{a: box(GC) 40};
let two: fish = fish{a: box(GC) 2};
let answer: int = forty.a + two.a;
//~^ ERROR binary operation `+` cannot be applied to type `Gc<int>`
println!("{:?}", answer);
assert_eq!(answer, 42);
}
| fish | identifier_name |
autoderef-full-lval.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
| extern crate debug;
use std::gc::{Gc, GC};
struct clam {
x: Gc<int>,
y: Gc<int>,
}
struct fish {
a: Gc<int>,
}
fn main() {
let a: clam = clam{x: box(GC) 1, y: box(GC) 2};
let b: clam = clam{x: box(GC) 10, y: box(GC) 20};
let z: int = a.x + b.y; //~ ERROR binary operation `+` cannot be applied to type `Gc<int>`
println!("{:?}", z);
assert_eq!(z, 21);
let forty: fish = fish{a: box(GC) 40};
let two: fish = fish{a: box(GC) 2};
let answer: int = forty.a + two.a;
//~^ ERROR binary operation `+` cannot be applied to type `Gc<int>`
println!("{:?}", answer);
assert_eq!(answer, 42);
} | random_line_split |
|
distrib.rs | use mopa::Any;
use builder::context::Context;
use builder::commands::alpine;
use builder::error::StepError;
use builder::packages;
/// This returns the same as Distribution::name but is separate trait because
/// static methods makes trait non-object-safe
pub trait Named {
/// Human-readable name of distribution
fn static_name() -> &'static str;
}
pub trait Distribution: Any {
/// Only true if distribution is not known yet (i.e. can be set)
fn is_unknown(&self) -> bool { false }
/// Human-readable name of distribution
///
/// Object-safe variant of the method
fn name(&self) -> &'static str;
/// Downloads initial image of distribution
fn bootstrap(&mut self, &mut Context) -> Result<(), StepError>;
/// Does distro-specific cleanup at the end of the build
fn finish(&mut self, &mut Context) -> Result<(), String> { Ok(()) }
/// Install normal packages
fn install(&mut self, &mut Context, &[String]) -> Result<(), StepError>;
/// Install special predefined packages for specific features
fn ensure_packages(&mut self, ctx: &mut Context,
features: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>;
}
// This is needed for cast to work
mopafy!(Distribution);
pub struct Unknown;
impl Distribution for Unknown {
fn is_unknown(&self) -> bool { true }
fn name(&self) -> &'static str { "unknown" }
fn bootstrap(&mut self, _: &mut Context) -> Result<(), StepError> {
unreachable!();
}
fn install(&mut self, _: &mut Context, _pkgs: &[String])
-> Result<(), StepError>
{
Err(StepError::NoDistro)
}
fn ensure_packages(&mut self, _: &mut Context, _: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>
{
Err(StepError::NoDistro)
}
}
pub trait DistroBox {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError>;
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>;
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError>;
}
impl DistroBox for Box<Distribution> {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError> {
if self.is::<Unknown>() {
*self = Box::new(value);
Ok(())
} else {
return Err(StepError::DistroOverlap(value.name(), self.name()));
}
}
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>,
{
self.downcast_mut::<T>()
.map(f)
.ok_or(StepError::WrongDistro(T::static_name(), self.name()))
.and_then(|x| x)
}
fn | (&mut self, ctx: &mut Context) -> Result<(), StepError> {
if (**self).is::<Unknown>() {
try!(alpine::configure(self, ctx, alpine::LATEST_VERSION));
}
Ok(())
}
}
| npm_configure | identifier_name |
distrib.rs | use mopa::Any;
use builder::context::Context;
use builder::commands::alpine;
use builder::error::StepError;
use builder::packages;
/// This returns the same as Distribution::name but is separate trait because
/// static methods makes trait non-object-safe
pub trait Named {
/// Human-readable name of distribution
fn static_name() -> &'static str;
}
pub trait Distribution: Any {
/// Only true if distribution is not known yet (i.e. can be set)
fn is_unknown(&self) -> bool { false }
/// Human-readable name of distribution
///
/// Object-safe variant of the method
fn name(&self) -> &'static str;
/// Downloads initial image of distribution
fn bootstrap(&mut self, &mut Context) -> Result<(), StepError>;
/// Does distro-specific cleanup at the end of the build
fn finish(&mut self, &mut Context) -> Result<(), String> { Ok(()) }
/// Install normal packages
fn install(&mut self, &mut Context, &[String]) -> Result<(), StepError>;
/// Install special predefined packages for specific features
fn ensure_packages(&mut self, ctx: &mut Context,
features: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>;
}
// This is needed for cast to work
mopafy!(Distribution);
pub struct Unknown;
impl Distribution for Unknown {
fn is_unknown(&self) -> bool { true }
fn name(&self) -> &'static str { "unknown" }
fn bootstrap(&mut self, _: &mut Context) -> Result<(), StepError> {
unreachable!();
}
fn install(&mut self, _: &mut Context, _pkgs: &[String])
-> Result<(), StepError>
{
Err(StepError::NoDistro)
}
fn ensure_packages(&mut self, _: &mut Context, _: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>
|
}
pub trait DistroBox {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError>;
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>;
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError>;
}
impl DistroBox for Box<Distribution> {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError> {
if self.is::<Unknown>() {
*self = Box::new(value);
Ok(())
} else {
return Err(StepError::DistroOverlap(value.name(), self.name()));
}
}
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>,
{
self.downcast_mut::<T>()
.map(f)
.ok_or(StepError::WrongDistro(T::static_name(), self.name()))
.and_then(|x| x)
}
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError> {
if (**self).is::<Unknown>() {
try!(alpine::configure(self, ctx, alpine::LATEST_VERSION));
}
Ok(())
}
}
| {
Err(StepError::NoDistro)
} | identifier_body |
distrib.rs | use mopa::Any;
use builder::context::Context;
use builder::commands::alpine;
use builder::error::StepError;
use builder::packages;
/// This returns the same as Distribution::name but is separate trait because
/// static methods makes trait non-object-safe
pub trait Named {
/// Human-readable name of distribution
fn static_name() -> &'static str;
}
pub trait Distribution: Any {
/// Only true if distribution is not known yet (i.e. can be set)
fn is_unknown(&self) -> bool { false }
/// Human-readable name of distribution
///
/// Object-safe variant of the method
fn name(&self) -> &'static str; |
/// Does distro-specific cleanup at the end of the build
fn finish(&mut self, &mut Context) -> Result<(), String> { Ok(()) }
/// Install normal packages
fn install(&mut self, &mut Context, &[String]) -> Result<(), StepError>;
/// Install special predefined packages for specific features
fn ensure_packages(&mut self, ctx: &mut Context,
features: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>;
}
// This is needed for cast to work
mopafy!(Distribution);
pub struct Unknown;
impl Distribution for Unknown {
fn is_unknown(&self) -> bool { true }
fn name(&self) -> &'static str { "unknown" }
fn bootstrap(&mut self, _: &mut Context) -> Result<(), StepError> {
unreachable!();
}
fn install(&mut self, _: &mut Context, _pkgs: &[String])
-> Result<(), StepError>
{
Err(StepError::NoDistro)
}
fn ensure_packages(&mut self, _: &mut Context, _: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>
{
Err(StepError::NoDistro)
}
}
pub trait DistroBox {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError>;
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>;
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError>;
}
impl DistroBox for Box<Distribution> {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError> {
if self.is::<Unknown>() {
*self = Box::new(value);
Ok(())
} else {
return Err(StepError::DistroOverlap(value.name(), self.name()));
}
}
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>,
{
self.downcast_mut::<T>()
.map(f)
.ok_or(StepError::WrongDistro(T::static_name(), self.name()))
.and_then(|x| x)
}
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError> {
if (**self).is::<Unknown>() {
try!(alpine::configure(self, ctx, alpine::LATEST_VERSION));
}
Ok(())
}
} |
/// Downloads initial image of distribution
fn bootstrap(&mut self, &mut Context) -> Result<(), StepError>; | random_line_split |
distrib.rs | use mopa::Any;
use builder::context::Context;
use builder::commands::alpine;
use builder::error::StepError;
use builder::packages;
/// This returns the same as Distribution::name but is separate trait because
/// static methods makes trait non-object-safe
pub trait Named {
/// Human-readable name of distribution
fn static_name() -> &'static str;
}
pub trait Distribution: Any {
/// Only true if distribution is not known yet (i.e. can be set)
fn is_unknown(&self) -> bool { false }
/// Human-readable name of distribution
///
/// Object-safe variant of the method
fn name(&self) -> &'static str;
/// Downloads initial image of distribution
fn bootstrap(&mut self, &mut Context) -> Result<(), StepError>;
/// Does distro-specific cleanup at the end of the build
fn finish(&mut self, &mut Context) -> Result<(), String> { Ok(()) }
/// Install normal packages
fn install(&mut self, &mut Context, &[String]) -> Result<(), StepError>;
/// Install special predefined packages for specific features
fn ensure_packages(&mut self, ctx: &mut Context,
features: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>;
}
// This is needed for cast to work
mopafy!(Distribution);
pub struct Unknown;
impl Distribution for Unknown {
fn is_unknown(&self) -> bool { true }
fn name(&self) -> &'static str { "unknown" }
fn bootstrap(&mut self, _: &mut Context) -> Result<(), StepError> {
unreachable!();
}
fn install(&mut self, _: &mut Context, _pkgs: &[String])
-> Result<(), StepError>
{
Err(StepError::NoDistro)
}
fn ensure_packages(&mut self, _: &mut Context, _: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>
{
Err(StepError::NoDistro)
}
}
pub trait DistroBox {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError>;
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>;
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError>;
}
impl DistroBox for Box<Distribution> {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError> {
if self.is::<Unknown>() {
*self = Box::new(value);
Ok(())
} else {
return Err(StepError::DistroOverlap(value.name(), self.name()));
}
}
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>,
{
self.downcast_mut::<T>()
.map(f)
.ok_or(StepError::WrongDistro(T::static_name(), self.name()))
.and_then(|x| x)
}
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError> {
if (**self).is::<Unknown>() |
Ok(())
}
}
| {
try!(alpine::configure(self, ctx, alpine::LATEST_VERSION));
} | conditional_block |
main.rs | extern crate sdl;
//use sdl::event::Event;
#[macro_use]
extern crate log;
extern crate env_logger;
#[macro_use]
extern crate clap;
mod chipate;
mod display;
use chipate::Chipate;
fn main() {
env_logger::init().unwrap();
let matches = clap_app!(myapp =>
(version: "1.0")
(author: "Robert J. Lambert III <[email protected]>")
(about: "Chip8 Emulator written in rust")
(@arg program: -p --program +required +takes_value "Program to run")
(@arg clock: -c --clock +required +takes_value "Clock speed in ms")
)
.get_matches(); |
let program = matches.value_of("program").unwrap();
debug!("Value for program: {}", program);
let clock = matches.value_of("clock").unwrap();
debug!("Value for clock: {}", clock);
let mut chip = Chipate::new();
chip.init();
// chip.load_program("PONG");
chip.load_program(program);
chip.set_clock_speed(clock.parse::<u64>().unwrap());
loop {
chip.emulate_cycle();
chip.display.draw_screen();
chip.set_keys();
}
} | random_line_split |
|
main.rs | extern crate sdl;
//use sdl::event::Event;
#[macro_use]
extern crate log;
extern crate env_logger;
#[macro_use]
extern crate clap;
mod chipate;
mod display;
use chipate::Chipate;
fn | () {
env_logger::init().unwrap();
let matches = clap_app!(myapp =>
(version: "1.0")
(author: "Robert J. Lambert III <[email protected]>")
(about: "Chip8 Emulator written in rust")
(@arg program: -p --program +required +takes_value "Program to run")
(@arg clock: -c --clock +required +takes_value "Clock speed in ms")
)
.get_matches();
let program = matches.value_of("program").unwrap();
debug!("Value for program: {}", program);
let clock = matches.value_of("clock").unwrap();
debug!("Value for clock: {}", clock);
let mut chip = Chipate::new();
chip.init();
// chip.load_program("PONG");
chip.load_program(program);
chip.set_clock_speed(clock.parse::<u64>().unwrap());
loop {
chip.emulate_cycle();
chip.display.draw_screen();
chip.set_keys();
}
}
| main | identifier_name |
main.rs | extern crate sdl;
//use sdl::event::Event;
#[macro_use]
extern crate log;
extern crate env_logger;
#[macro_use]
extern crate clap;
mod chipate;
mod display;
use chipate::Chipate;
fn main() | chip.load_program(program);
chip.set_clock_speed(clock.parse::<u64>().unwrap());
loop {
chip.emulate_cycle();
chip.display.draw_screen();
chip.set_keys();
}
}
| {
env_logger::init().unwrap();
let matches = clap_app!(myapp =>
(version: "1.0")
(author: "Robert J. Lambert III <[email protected]>")
(about: "Chip8 Emulator written in rust")
(@arg program: -p --program +required +takes_value "Program to run")
(@arg clock: -c --clock +required +takes_value "Clock speed in ms")
)
.get_matches();
let program = matches.value_of("program").unwrap();
debug!("Value for program: {}", program);
let clock = matches.value_of("clock").unwrap();
debug!("Value for clock: {}", clock);
let mut chip = Chipate::new();
chip.init();
// chip.load_program("PONG"); | identifier_body |
size_hint.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::Peekable;
struct A<T> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
debug_assert!(self.begin <= self.end);
let exact: usize = (self.end - self.begin) as usize;
(exact, Some::<usize>(exact))
}
// fn peekable(self) -> Peekable<Self> where Self: Sized {
// Peekable{iter: self, peeked: None}
// }
}
}
}
type T = i32;
Iterator_impl!(T);
// impl<I: Iterator> Iterator for Peekable<I> {
// type Item = I::Item;
//
// #[inline]
// fn next(&mut self) -> Option<I::Item> {
// match self.peeked {
// Some(_) => self.peeked.take(),
// None => self.iter.next(),
// }
// }
//
// #[inline]
// fn count(self) -> usize {
// (if self.peeked.is_some() { 1 } else { 0 }) + self.iter.count()
// }
//
// #[inline]
// fn nth(&mut self, n: usize) -> Option<I::Item> {
// match self.peeked {
// Some(_) if n == 0 => self.peeked.take(),
// Some(_) => {
// self.peeked = None;
// self.iter.nth(n-1)
// },
// None => self.iter.nth(n)
// }
// }
//
// #[inline]
// fn last(self) -> Option<I::Item> {
// self.iter.last().or(self.peeked)
// }
//
// #[inline]
// fn size_hint(&self) -> (usize, Option<usize>) {
// let (lo, hi) = self.iter.size_hint();
// if self.peeked.is_some() {
// let lo = lo.saturating_add(1);
// let hi = hi.and_then(|x| x.checked_add(1));
// (lo, hi)
// } else {
// (lo, hi)
// }
// }
// }
#[test]
fn size_hint_test1() {
let a: A<T> = A { begin: 0, end: 10 };
let peekable: Peekable<A<T>> = a.peekable();
let (lower, upper): (usize, Option<usize>) = peekable.size_hint();
assert_eq!(lower, 10);
assert_eq!(upper, Some::<usize>(10));
}
#[test]
fn | () {
let a: A<T> = A { begin: 0, end: 10 };
let mut peekable: Peekable<A<T>> = a.peekable();
peekable.next();
let (lower, upper): (usize, Option<usize>) = peekable.size_hint();
assert_eq!(lower, 9);
assert_eq!(upper, Some::<usize>(9));
}
}
| size_hint_test2 | identifier_name |
size_hint.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::Peekable;
struct A<T> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T; |
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
debug_assert!(self.begin <= self.end);
let exact: usize = (self.end - self.begin) as usize;
(exact, Some::<usize>(exact))
}
// fn peekable(self) -> Peekable<Self> where Self: Sized {
// Peekable{iter: self, peeked: None}
// }
}
}
}
type T = i32;
Iterator_impl!(T);
// impl<I: Iterator> Iterator for Peekable<I> {
// type Item = I::Item;
//
// #[inline]
// fn next(&mut self) -> Option<I::Item> {
// match self.peeked {
// Some(_) => self.peeked.take(),
// None => self.iter.next(),
// }
// }
//
// #[inline]
// fn count(self) -> usize {
// (if self.peeked.is_some() { 1 } else { 0 }) + self.iter.count()
// }
//
// #[inline]
// fn nth(&mut self, n: usize) -> Option<I::Item> {
// match self.peeked {
// Some(_) if n == 0 => self.peeked.take(),
// Some(_) => {
// self.peeked = None;
// self.iter.nth(n-1)
// },
// None => self.iter.nth(n)
// }
// }
//
// #[inline]
// fn last(self) -> Option<I::Item> {
// self.iter.last().or(self.peeked)
// }
//
// #[inline]
// fn size_hint(&self) -> (usize, Option<usize>) {
// let (lo, hi) = self.iter.size_hint();
// if self.peeked.is_some() {
// let lo = lo.saturating_add(1);
// let hi = hi.and_then(|x| x.checked_add(1));
// (lo, hi)
// } else {
// (lo, hi)
// }
// }
// }
#[test]
fn size_hint_test1() {
let a: A<T> = A { begin: 0, end: 10 };
let peekable: Peekable<A<T>> = a.peekable();
let (lower, upper): (usize, Option<usize>) = peekable.size_hint();
assert_eq!(lower, 10);
assert_eq!(upper, Some::<usize>(10));
}
#[test]
fn size_hint_test2() {
let a: A<T> = A { begin: 0, end: 10 };
let mut peekable: Peekable<A<T>> = a.peekable();
peekable.next();
let (lower, upper): (usize, Option<usize>) = peekable.size_hint();
assert_eq!(lower, 9);
assert_eq!(upper, Some::<usize>(9));
}
} | random_line_split |
|
size_hint.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::Peekable;
struct A<T> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
debug_assert!(self.begin <= self.end);
let exact: usize = (self.end - self.begin) as usize;
(exact, Some::<usize>(exact))
}
// fn peekable(self) -> Peekable<Self> where Self: Sized {
// Peekable{iter: self, peeked: None}
// }
}
}
}
type T = i32;
Iterator_impl!(T);
// impl<I: Iterator> Iterator for Peekable<I> {
// type Item = I::Item;
//
// #[inline]
// fn next(&mut self) -> Option<I::Item> {
// match self.peeked {
// Some(_) => self.peeked.take(),
// None => self.iter.next(),
// }
// }
//
// #[inline]
// fn count(self) -> usize {
// (if self.peeked.is_some() { 1 } else { 0 }) + self.iter.count()
// }
//
// #[inline]
// fn nth(&mut self, n: usize) -> Option<I::Item> {
// match self.peeked {
// Some(_) if n == 0 => self.peeked.take(),
// Some(_) => {
// self.peeked = None;
// self.iter.nth(n-1)
// },
// None => self.iter.nth(n)
// }
// }
//
// #[inline]
// fn last(self) -> Option<I::Item> {
// self.iter.last().or(self.peeked)
// }
//
// #[inline]
// fn size_hint(&self) -> (usize, Option<usize>) {
// let (lo, hi) = self.iter.size_hint();
// if self.peeked.is_some() {
// let lo = lo.saturating_add(1);
// let hi = hi.and_then(|x| x.checked_add(1));
// (lo, hi)
// } else {
// (lo, hi)
// }
// }
// }
#[test]
fn size_hint_test1() |
#[test]
fn size_hint_test2() {
let a: A<T> = A { begin: 0, end: 10 };
let mut peekable: Peekable<A<T>> = a.peekable();
peekable.next();
let (lower, upper): (usize, Option<usize>) = peekable.size_hint();
assert_eq!(lower, 9);
assert_eq!(upper, Some::<usize>(9));
}
}
| {
let a: A<T> = A { begin: 0, end: 10 };
let peekable: Peekable<A<T>> = a.peekable();
let (lower, upper): (usize, Option<usize>) = peekable.size_hint();
assert_eq!(lower, 10);
assert_eq!(upper, Some::<usize>(10));
} | identifier_body |
chain.rs | use futures_core::ready;
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncBufRead, AsyncRead, IoSliceMut};
use pin_project_lite::pin_project;
use std::fmt;
use std::io;
use std::pin::Pin;
pin_project! {
/// Reader for the [`chain`](super::AsyncReadExt::chain) method.
#[must_use = "readers do nothing unless polled"]
pub struct Chain<T, U> {
#[pin]
first: T,
#[pin]
second: U,
done_first: bool,
}
}
impl<T, U> Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
pub(super) fn new(first: T, second: U) -> Self {
Self { first, second, done_first: false }
}
/// Gets references to the underlying readers in this `Chain`.
pub fn get_ref(&self) -> (&T, &U) {
(&self.first, &self.second)
}
/// Gets mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_mut(&mut self) -> (&mut T, &mut U) { | }
/// Gets pinned mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> (Pin<&mut T>, Pin<&mut U>) {
let this = self.project();
(this.first, this.second)
}
/// Consumes the `Chain`, returning the wrapped readers.
pub fn into_inner(self) -> (T, U) {
(self.first, self.second)
}
}
impl<T, U> fmt::Debug for Chain<T, U>
where
T: fmt::Debug,
U: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Chain")
.field("t", &self.first)
.field("u", &self.second)
.field("done_first", &self.done_first)
.finish()
}
}
impl<T, U> AsyncRead for Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
match ready!(this.first.poll_read(cx, buf)?) {
0 if!buf.is_empty() => *this.done_first = true,
n => return Poll::Ready(Ok(n)),
}
}
this.second.poll_read(cx, buf)
}
fn poll_read_vectored(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
bufs: &mut [IoSliceMut<'_>],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
let n = ready!(this.first.poll_read_vectored(cx, bufs)?);
if n == 0 && bufs.iter().any(|b|!b.is_empty()) {
*this.done_first = true
} else {
return Poll::Ready(Ok(n));
}
}
this.second.poll_read_vectored(cx, bufs)
}
#[cfg(feature = "read-initializer")]
unsafe fn initializer(&self) -> Initializer {
let initializer = self.first.initializer();
if initializer.should_initialize() {
initializer
} else {
self.second.initializer()
}
}
}
impl<T, U> AsyncBufRead for Chain<T, U>
where
T: AsyncBufRead,
U: AsyncBufRead,
{
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
let this = self.project();
if!*this.done_first {
match ready!(this.first.poll_fill_buf(cx)?) {
buf if buf.is_empty() => {
*this.done_first = true;
}
buf => return Poll::Ready(Ok(buf)),
}
}
this.second.poll_fill_buf(cx)
}
fn consume(self: Pin<&mut Self>, amt: usize) {
let this = self.project();
if!*this.done_first {
this.first.consume(amt)
} else {
this.second.consume(amt)
}
}
} | (&mut self.first, &mut self.second) | random_line_split |
chain.rs | use futures_core::ready;
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncBufRead, AsyncRead, IoSliceMut};
use pin_project_lite::pin_project;
use std::fmt;
use std::io;
use std::pin::Pin;
pin_project! {
/// Reader for the [`chain`](super::AsyncReadExt::chain) method.
#[must_use = "readers do nothing unless polled"]
pub struct Chain<T, U> {
#[pin]
first: T,
#[pin]
second: U,
done_first: bool,
}
}
impl<T, U> Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
pub(super) fn new(first: T, second: U) -> Self {
Self { first, second, done_first: false }
}
/// Gets references to the underlying readers in this `Chain`.
pub fn get_ref(&self) -> (&T, &U) {
(&self.first, &self.second)
}
/// Gets mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_mut(&mut self) -> (&mut T, &mut U) {
(&mut self.first, &mut self.second)
}
/// Gets pinned mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> (Pin<&mut T>, Pin<&mut U>) {
let this = self.project();
(this.first, this.second)
}
/// Consumes the `Chain`, returning the wrapped readers.
pub fn into_inner(self) -> (T, U) {
(self.first, self.second)
}
}
impl<T, U> fmt::Debug for Chain<T, U>
where
T: fmt::Debug,
U: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Chain")
.field("t", &self.first)
.field("u", &self.second)
.field("done_first", &self.done_first)
.finish()
}
}
impl<T, U> AsyncRead for Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
match ready!(this.first.poll_read(cx, buf)?) {
0 if!buf.is_empty() => *this.done_first = true,
n => return Poll::Ready(Ok(n)),
}
}
this.second.poll_read(cx, buf)
}
fn poll_read_vectored(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
bufs: &mut [IoSliceMut<'_>],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
let n = ready!(this.first.poll_read_vectored(cx, bufs)?);
if n == 0 && bufs.iter().any(|b|!b.is_empty()) {
*this.done_first = true
} else {
return Poll::Ready(Ok(n));
}
}
this.second.poll_read_vectored(cx, bufs)
}
#[cfg(feature = "read-initializer")]
unsafe fn initializer(&self) -> Initializer {
let initializer = self.first.initializer();
if initializer.should_initialize() {
initializer
} else {
self.second.initializer()
}
}
}
impl<T, U> AsyncBufRead for Chain<T, U>
where
T: AsyncBufRead,
U: AsyncBufRead,
{
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> |
fn consume(self: Pin<&mut Self>, amt: usize) {
let this = self.project();
if!*this.done_first {
this.first.consume(amt)
} else {
this.second.consume(amt)
}
}
}
| {
let this = self.project();
if !*this.done_first {
match ready!(this.first.poll_fill_buf(cx)?) {
buf if buf.is_empty() => {
*this.done_first = true;
}
buf => return Poll::Ready(Ok(buf)),
}
}
this.second.poll_fill_buf(cx)
} | identifier_body |
chain.rs | use futures_core::ready;
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncBufRead, AsyncRead, IoSliceMut};
use pin_project_lite::pin_project;
use std::fmt;
use std::io;
use std::pin::Pin;
pin_project! {
/// Reader for the [`chain`](super::AsyncReadExt::chain) method.
#[must_use = "readers do nothing unless polled"]
pub struct Chain<T, U> {
#[pin]
first: T,
#[pin]
second: U,
done_first: bool,
}
}
impl<T, U> Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
pub(super) fn new(first: T, second: U) -> Self {
Self { first, second, done_first: false }
}
/// Gets references to the underlying readers in this `Chain`.
pub fn get_ref(&self) -> (&T, &U) {
(&self.first, &self.second)
}
/// Gets mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_mut(&mut self) -> (&mut T, &mut U) {
(&mut self.first, &mut self.second)
}
/// Gets pinned mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> (Pin<&mut T>, Pin<&mut U>) {
let this = self.project();
(this.first, this.second)
}
/// Consumes the `Chain`, returning the wrapped readers.
pub fn into_inner(self) -> (T, U) {
(self.first, self.second)
}
}
impl<T, U> fmt::Debug for Chain<T, U>
where
T: fmt::Debug,
U: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Chain")
.field("t", &self.first)
.field("u", &self.second)
.field("done_first", &self.done_first)
.finish()
}
}
impl<T, U> AsyncRead for Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
match ready!(this.first.poll_read(cx, buf)?) {
0 if!buf.is_empty() => *this.done_first = true,
n => return Poll::Ready(Ok(n)),
}
}
this.second.poll_read(cx, buf)
}
fn poll_read_vectored(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
bufs: &mut [IoSliceMut<'_>],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
let n = ready!(this.first.poll_read_vectored(cx, bufs)?);
if n == 0 && bufs.iter().any(|b|!b.is_empty()) {
*this.done_first = true
} else {
return Poll::Ready(Ok(n));
}
}
this.second.poll_read_vectored(cx, bufs)
}
#[cfg(feature = "read-initializer")]
unsafe fn initializer(&self) -> Initializer {
let initializer = self.first.initializer();
if initializer.should_initialize() {
initializer
} else {
self.second.initializer()
}
}
}
impl<T, U> AsyncBufRead for Chain<T, U>
where
T: AsyncBufRead,
U: AsyncBufRead,
{
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
let this = self.project();
if!*this.done_first {
match ready!(this.first.poll_fill_buf(cx)?) {
buf if buf.is_empty() => {
*this.done_first = true;
}
buf => return Poll::Ready(Ok(buf)),
}
}
this.second.poll_fill_buf(cx)
}
fn consume(self: Pin<&mut Self>, amt: usize) {
let this = self.project();
if!*this.done_first {
this.first.consume(amt)
} else |
}
}
| {
this.second.consume(amt)
} | conditional_block |
chain.rs | use futures_core::ready;
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncBufRead, AsyncRead, IoSliceMut};
use pin_project_lite::pin_project;
use std::fmt;
use std::io;
use std::pin::Pin;
pin_project! {
/// Reader for the [`chain`](super::AsyncReadExt::chain) method.
#[must_use = "readers do nothing unless polled"]
pub struct Chain<T, U> {
#[pin]
first: T,
#[pin]
second: U,
done_first: bool,
}
}
impl<T, U> Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
pub(super) fn new(first: T, second: U) -> Self {
Self { first, second, done_first: false }
}
/// Gets references to the underlying readers in this `Chain`.
pub fn get_ref(&self) -> (&T, &U) {
(&self.first, &self.second)
}
/// Gets mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_mut(&mut self) -> (&mut T, &mut U) {
(&mut self.first, &mut self.second)
}
/// Gets pinned mutable references to the underlying readers in this `Chain`.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying readers as doing so may corrupt the internal state of this
/// `Chain`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> (Pin<&mut T>, Pin<&mut U>) {
let this = self.project();
(this.first, this.second)
}
/// Consumes the `Chain`, returning the wrapped readers.
pub fn | (self) -> (T, U) {
(self.first, self.second)
}
}
impl<T, U> fmt::Debug for Chain<T, U>
where
T: fmt::Debug,
U: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Chain")
.field("t", &self.first)
.field("u", &self.second)
.field("done_first", &self.done_first)
.finish()
}
}
impl<T, U> AsyncRead for Chain<T, U>
where
T: AsyncRead,
U: AsyncRead,
{
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
match ready!(this.first.poll_read(cx, buf)?) {
0 if!buf.is_empty() => *this.done_first = true,
n => return Poll::Ready(Ok(n)),
}
}
this.second.poll_read(cx, buf)
}
fn poll_read_vectored(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
bufs: &mut [IoSliceMut<'_>],
) -> Poll<io::Result<usize>> {
let this = self.project();
if!*this.done_first {
let n = ready!(this.first.poll_read_vectored(cx, bufs)?);
if n == 0 && bufs.iter().any(|b|!b.is_empty()) {
*this.done_first = true
} else {
return Poll::Ready(Ok(n));
}
}
this.second.poll_read_vectored(cx, bufs)
}
#[cfg(feature = "read-initializer")]
unsafe fn initializer(&self) -> Initializer {
let initializer = self.first.initializer();
if initializer.should_initialize() {
initializer
} else {
self.second.initializer()
}
}
}
impl<T, U> AsyncBufRead for Chain<T, U>
where
T: AsyncBufRead,
U: AsyncBufRead,
{
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
let this = self.project();
if!*this.done_first {
match ready!(this.first.poll_fill_buf(cx)?) {
buf if buf.is_empty() => {
*this.done_first = true;
}
buf => return Poll::Ready(Ok(buf)),
}
}
this.second.poll_fill_buf(cx)
}
fn consume(self: Pin<&mut Self>, amt: usize) {
let this = self.project();
if!*this.done_first {
this.first.consume(amt)
} else {
this.second.consume(amt)
}
}
}
| into_inner | identifier_name |
main.rs | #![allow(dead_code)]
extern crate mio;
use self::mio::{Ready, PollOpt, Token};
use node::{Node, NET_RECEIVER_CHANNEL_TOKEN};
use network::{ConnectionIdentity, Connection, TcpNetwork, SocketType, TcpHandlerCommand, TcpHandlerCMD};
use helper::{Log, NetHelper};
use event::{Event};
use std::error::Error;
use std::process;
use std::sync::Arc;
use std::collections::btree_map::Entry::{Occupied, Vacant};
pub enum NetworkCMD {
None,
ConnectionClose,
HandleConnection,
HandleEvent
}
pub struct | {
pub cmd: NetworkCMD,
pub token: Vec<String>,
pub value: Vec<u64>,
pub conn_identity: Vec<ConnectionIdentity>,
pub event: Vec<Event>
}
pub trait Networking {
/// Main function to init Networking
fn init_networking(&mut self);
/// Handle Networking channel events as a NetworkCommand
fn notify(&mut self, command: &mut NetworkCommand);
/// Generating handshake information for sending it over networking handshake
fn handshake_info(&self) -> Vec<u8>;
/// main input from event loop to networking
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool;
/// sending event with specific path
fn emit(&mut self, event: Event);
}
impl NetworkCommand {
#[inline(always)]
pub fn new() -> NetworkCommand {
NetworkCommand {
cmd: NetworkCMD::None,
token: vec![],
value: vec![],
conn_identity: vec![],
event: vec![]
}
}
}
impl Networking for Node {
#[inline(always)]
fn notify(&mut self, command: &mut NetworkCommand) {
match command.cmd {
NetworkCMD::HandleConnection => {
// currently supporting only one connection per single command request
if command.token.len()!= 1
|| command.conn_identity.len()!= 1
|| command.value.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let value = command.value.remove(0);
let contained_token = match self.connections.entry(token.clone()) {
Vacant(entry) => {
entry.insert(Connection::new(token.clone(), value, identity));
false
},
Occupied(mut entry) => {
// adding connection identity
let conn = entry.get_mut();
conn.add_identity(identity);
true
},
};
if contained_token {
// handling new connection
self.on_new_connection_channel(&token);
} else {
// if we have API connection
if value == 0 {
self.on_new_api_connection(&token);
} else { // if we have regular Node connection
self.on_new_connection(&token, value);
}
}
}
NetworkCMD::ConnectionClose => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 || command.conn_identity.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let remove_conn = match self.connections.get_mut(&token) {
Some(conn) => {
conn.rm_identity(identity.socket_token, identity.handler_index);
// if identity count is 0, we need to close connection
conn.identity_count() == 0
},
None => return
};
// anyway we need to close channel of this connection
self.on_connection_channel_close(&token);
// if we need to close full connection
// letting node know about it
if remove_conn {
self.on_connection_close(&token);
self.connections.remove(&token);
}
}
NetworkCMD::HandleEvent => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 {
return;
}
// getting token out
let token = command.token.remove(0);
while!command.event.is_empty() {
let event = command.event.remove(0);
// if event processing passing fine
// emitting event based on his path
if self.on_event_data(&token, &event) &&!event.path.is_zero() {
// then trying to send event over available connections
self.emit(event);
}
}
}
NetworkCMD::None => {}
}
}
fn init_networking(&mut self) {
// Registering Networking receiver
match self.poll.register(&self.net_receiver_chan
, NET_RECEIVER_CHANNEL_TOKEN
, Ready::readable()
, PollOpt::level()) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to register networking receiver channel to Node POLL service"
, e.description());
process::exit(1);
}
}
self.register_tcp();
}
#[inline(always)]
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool {
if token == NET_RECEIVER_CHANNEL_TOKEN {
// trying to get commands while there is available data
// if we got error, then data is unavailable
// and breaking receive loop
while let Ok(mut cmd) = self.net_receiver_chan.try_recv() {
self.notify(&mut cmd);
}
return true;
}
self.tcp_ready(token, event_kind)
}
#[inline(always)]
fn handshake_info(&self) -> Vec<u8> {
let token_len = self.token.len();
let total_value_len = token_len + 8;
// adding 4 byte API version
// 4 Bytes token string length
// N bytes for token string
// 8 bytes for Prime Value
let mut buffer = vec![0; (4 + 4 + token_len + 8)];
let mut offset = NetHelper::u32_to_bytes(self.api_version, &mut buffer, 0);
offset += NetHelper::u32_to_bytes(total_value_len as u32, &mut buffer, offset);
buffer[offset..offset + token_len].copy_from_slice(self.token.as_bytes());
offset += token_len;
NetHelper::u64_to_bytes(self.value, &mut buffer, offset);
buffer
}
#[inline(always)]
fn emit(&mut self, event: Event) {
let mut tcp_conns_to_send: Vec<Vec<Token>> = vec![Vec::new(); self.net_tcp_handler_sender_chan.len()];
let mut event = event;
for (_, mut conn) in &mut self.connections {
if conn.value == 0 {
continue;
}
if!event.path.dividable(conn.value) {
continue;
}
// if we trying to send to this connection
// removing it from path
event.path.div(conn.value);
let identity = conn.get_identity();
match identity.socket_type {
SocketType::TCP => {
tcp_conns_to_send[identity.handler_index].push(identity.socket_token);
}
SocketType::NONE => {}
}
}
if tcp_conns_to_send.len() == 0 {
return;
}
let data = Arc::new(match event.to_raw() {
Some(d) => d,
None => return
});
for i in 0..self.net_tcp_handler_sender_chan.len() {
if tcp_conns_to_send[i].len() == 0 {
continue;
}
let mut command = TcpHandlerCommand::new();
command.cmd = TcpHandlerCMD::WriteData;
command.token = tcp_conns_to_send[i].clone();
command.data = vec![data.clone()];
match self.net_tcp_handler_sender_chan[i].send(command) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to send data to TcpHandler during emiting event", e.description());
}
}
}
}
} | NetworkCommand | identifier_name |
main.rs | #![allow(dead_code)]
extern crate mio;
use self::mio::{Ready, PollOpt, Token};
use node::{Node, NET_RECEIVER_CHANNEL_TOKEN};
use network::{ConnectionIdentity, Connection, TcpNetwork, SocketType, TcpHandlerCommand, TcpHandlerCMD};
use helper::{Log, NetHelper};
use event::{Event};
use std::error::Error;
use std::process;
use std::sync::Arc;
use std::collections::btree_map::Entry::{Occupied, Vacant};
pub enum NetworkCMD {
None,
ConnectionClose,
HandleConnection,
HandleEvent
}
pub struct NetworkCommand {
pub cmd: NetworkCMD,
pub token: Vec<String>,
pub value: Vec<u64>,
pub conn_identity: Vec<ConnectionIdentity>,
pub event: Vec<Event>
}
pub trait Networking {
/// Main function to init Networking
fn init_networking(&mut self);
/// Handle Networking channel events as a NetworkCommand
fn notify(&mut self, command: &mut NetworkCommand);
/// Generating handshake information for sending it over networking handshake
fn handshake_info(&self) -> Vec<u8>;
/// main input from event loop to networking
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool;
/// sending event with specific path
fn emit(&mut self, event: Event);
}
impl NetworkCommand {
#[inline(always)]
pub fn new() -> NetworkCommand {
NetworkCommand {
cmd: NetworkCMD::None,
token: vec![],
value: vec![],
conn_identity: vec![],
event: vec![]
}
}
}
impl Networking for Node {
#[inline(always)]
fn notify(&mut self, command: &mut NetworkCommand) {
match command.cmd {
NetworkCMD::HandleConnection => {
// currently supporting only one connection per single command request
if command.token.len()!= 1
|| command.conn_identity.len()!= 1
|| command.value.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let value = command.value.remove(0);
let contained_token = match self.connections.entry(token.clone()) {
Vacant(entry) => {
entry.insert(Connection::new(token.clone(), value, identity));
false
},
Occupied(mut entry) => {
// adding connection identity
let conn = entry.get_mut();
conn.add_identity(identity);
true
},
};
if contained_token {
// handling new connection
self.on_new_connection_channel(&token);
} else {
// if we have API connection
if value == 0 {
self.on_new_api_connection(&token);
} else { // if we have regular Node connection
self.on_new_connection(&token, value);
}
}
}
NetworkCMD::ConnectionClose => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 || command.conn_identity.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let remove_conn = match self.connections.get_mut(&token) {
Some(conn) => {
conn.rm_identity(identity.socket_token, identity.handler_index);
// if identity count is 0, we need to close connection
conn.identity_count() == 0
},
None => return
};
// anyway we need to close channel of this connection
self.on_connection_channel_close(&token);
// if we need to close full connection
// letting node know about it
if remove_conn {
self.on_connection_close(&token);
self.connections.remove(&token);
}
}
NetworkCMD::HandleEvent => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 {
return;
}
// getting token out
let token = command.token.remove(0);
while!command.event.is_empty() {
let event = command.event.remove(0);
// if event processing passing fine
// emitting event based on his path
if self.on_event_data(&token, &event) &&!event.path.is_zero() {
// then trying to send event over available connections
self.emit(event);
}
}
}
NetworkCMD::None => {}
}
}
fn init_networking(&mut self) {
// Registering Networking receiver
match self.poll.register(&self.net_receiver_chan
, NET_RECEIVER_CHANNEL_TOKEN
, Ready::readable()
, PollOpt::level()) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to register networking receiver channel to Node POLL service"
, e.description());
process::exit(1);
}
}
self.register_tcp();
}
#[inline(always)]
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool {
if token == NET_RECEIVER_CHANNEL_TOKEN {
// trying to get commands while there is available data
// if we got error, then data is unavailable
// and breaking receive loop
while let Ok(mut cmd) = self.net_receiver_chan.try_recv() {
self.notify(&mut cmd);
}
return true;
}
self.tcp_ready(token, event_kind)
}
#[inline(always)]
fn handshake_info(&self) -> Vec<u8> |
#[inline(always)]
fn emit(&mut self, event: Event) {
let mut tcp_conns_to_send: Vec<Vec<Token>> = vec![Vec::new(); self.net_tcp_handler_sender_chan.len()];
let mut event = event;
for (_, mut conn) in &mut self.connections {
if conn.value == 0 {
continue;
}
if!event.path.dividable(conn.value) {
continue;
}
// if we trying to send to this connection
// removing it from path
event.path.div(conn.value);
let identity = conn.get_identity();
match identity.socket_type {
SocketType::TCP => {
tcp_conns_to_send[identity.handler_index].push(identity.socket_token);
}
SocketType::NONE => {}
}
}
if tcp_conns_to_send.len() == 0 {
return;
}
let data = Arc::new(match event.to_raw() {
Some(d) => d,
None => return
});
for i in 0..self.net_tcp_handler_sender_chan.len() {
if tcp_conns_to_send[i].len() == 0 {
continue;
}
let mut command = TcpHandlerCommand::new();
command.cmd = TcpHandlerCMD::WriteData;
command.token = tcp_conns_to_send[i].clone();
command.data = vec![data.clone()];
match self.net_tcp_handler_sender_chan[i].send(command) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to send data to TcpHandler during emiting event", e.description());
}
}
}
}
} | {
let token_len = self.token.len();
let total_value_len = token_len + 8;
// adding 4 byte API version
// 4 Bytes token string length
// N bytes for token string
// 8 bytes for Prime Value
let mut buffer = vec![0; (4 + 4 + token_len + 8)];
let mut offset = NetHelper::u32_to_bytes(self.api_version, &mut buffer, 0);
offset += NetHelper::u32_to_bytes(total_value_len as u32, &mut buffer, offset);
buffer[offset..offset + token_len].copy_from_slice(self.token.as_bytes());
offset += token_len;
NetHelper::u64_to_bytes(self.value, &mut buffer, offset);
buffer
} | identifier_body |
main.rs | #![allow(dead_code)]
extern crate mio;
use self::mio::{Ready, PollOpt, Token};
use node::{Node, NET_RECEIVER_CHANNEL_TOKEN};
use network::{ConnectionIdentity, Connection, TcpNetwork, SocketType, TcpHandlerCommand, TcpHandlerCMD};
use helper::{Log, NetHelper};
use event::{Event};
use std::error::Error;
use std::process;
use std::sync::Arc;
use std::collections::btree_map::Entry::{Occupied, Vacant};
pub enum NetworkCMD {
None,
ConnectionClose,
HandleConnection,
HandleEvent
}
pub struct NetworkCommand {
pub cmd: NetworkCMD,
pub token: Vec<String>,
pub value: Vec<u64>,
pub conn_identity: Vec<ConnectionIdentity>,
pub event: Vec<Event>
}
pub trait Networking {
/// Main function to init Networking
fn init_networking(&mut self);
/// Handle Networking channel events as a NetworkCommand
fn notify(&mut self, command: &mut NetworkCommand);
/// Generating handshake information for sending it over networking handshake
fn handshake_info(&self) -> Vec<u8>;
/// main input from event loop to networking
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool;
/// sending event with specific path
fn emit(&mut self, event: Event);
}
impl NetworkCommand {
#[inline(always)]
pub fn new() -> NetworkCommand {
NetworkCommand {
cmd: NetworkCMD::None,
token: vec![],
value: vec![],
conn_identity: vec![],
event: vec![]
}
}
}
| impl Networking for Node {
#[inline(always)]
fn notify(&mut self, command: &mut NetworkCommand) {
match command.cmd {
NetworkCMD::HandleConnection => {
// currently supporting only one connection per single command request
if command.token.len()!= 1
|| command.conn_identity.len()!= 1
|| command.value.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let value = command.value.remove(0);
let contained_token = match self.connections.entry(token.clone()) {
Vacant(entry) => {
entry.insert(Connection::new(token.clone(), value, identity));
false
},
Occupied(mut entry) => {
// adding connection identity
let conn = entry.get_mut();
conn.add_identity(identity);
true
},
};
if contained_token {
// handling new connection
self.on_new_connection_channel(&token);
} else {
// if we have API connection
if value == 0 {
self.on_new_api_connection(&token);
} else { // if we have regular Node connection
self.on_new_connection(&token, value);
}
}
}
NetworkCMD::ConnectionClose => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 || command.conn_identity.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let remove_conn = match self.connections.get_mut(&token) {
Some(conn) => {
conn.rm_identity(identity.socket_token, identity.handler_index);
// if identity count is 0, we need to close connection
conn.identity_count() == 0
},
None => return
};
// anyway we need to close channel of this connection
self.on_connection_channel_close(&token);
// if we need to close full connection
// letting node know about it
if remove_conn {
self.on_connection_close(&token);
self.connections.remove(&token);
}
}
NetworkCMD::HandleEvent => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 {
return;
}
// getting token out
let token = command.token.remove(0);
while!command.event.is_empty() {
let event = command.event.remove(0);
// if event processing passing fine
// emitting event based on his path
if self.on_event_data(&token, &event) &&!event.path.is_zero() {
// then trying to send event over available connections
self.emit(event);
}
}
}
NetworkCMD::None => {}
}
}
fn init_networking(&mut self) {
// Registering Networking receiver
match self.poll.register(&self.net_receiver_chan
, NET_RECEIVER_CHANNEL_TOKEN
, Ready::readable()
, PollOpt::level()) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to register networking receiver channel to Node POLL service"
, e.description());
process::exit(1);
}
}
self.register_tcp();
}
#[inline(always)]
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool {
if token == NET_RECEIVER_CHANNEL_TOKEN {
// trying to get commands while there is available data
// if we got error, then data is unavailable
// and breaking receive loop
while let Ok(mut cmd) = self.net_receiver_chan.try_recv() {
self.notify(&mut cmd);
}
return true;
}
self.tcp_ready(token, event_kind)
}
#[inline(always)]
fn handshake_info(&self) -> Vec<u8> {
let token_len = self.token.len();
let total_value_len = token_len + 8;
// adding 4 byte API version
// 4 Bytes token string length
// N bytes for token string
// 8 bytes for Prime Value
let mut buffer = vec![0; (4 + 4 + token_len + 8)];
let mut offset = NetHelper::u32_to_bytes(self.api_version, &mut buffer, 0);
offset += NetHelper::u32_to_bytes(total_value_len as u32, &mut buffer, offset);
buffer[offset..offset + token_len].copy_from_slice(self.token.as_bytes());
offset += token_len;
NetHelper::u64_to_bytes(self.value, &mut buffer, offset);
buffer
}
#[inline(always)]
fn emit(&mut self, event: Event) {
let mut tcp_conns_to_send: Vec<Vec<Token>> = vec![Vec::new(); self.net_tcp_handler_sender_chan.len()];
let mut event = event;
for (_, mut conn) in &mut self.connections {
if conn.value == 0 {
continue;
}
if!event.path.dividable(conn.value) {
continue;
}
// if we trying to send to this connection
// removing it from path
event.path.div(conn.value);
let identity = conn.get_identity();
match identity.socket_type {
SocketType::TCP => {
tcp_conns_to_send[identity.handler_index].push(identity.socket_token);
}
SocketType::NONE => {}
}
}
if tcp_conns_to_send.len() == 0 {
return;
}
let data = Arc::new(match event.to_raw() {
Some(d) => d,
None => return
});
for i in 0..self.net_tcp_handler_sender_chan.len() {
if tcp_conns_to_send[i].len() == 0 {
continue;
}
let mut command = TcpHandlerCommand::new();
command.cmd = TcpHandlerCMD::WriteData;
command.token = tcp_conns_to_send[i].clone();
command.data = vec![data.clone()];
match self.net_tcp_handler_sender_chan[i].send(command) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to send data to TcpHandler during emiting event", e.description());
}
}
}
}
} | random_line_split |
|
main.rs | #![allow(dead_code)]
extern crate mio;
use self::mio::{Ready, PollOpt, Token};
use node::{Node, NET_RECEIVER_CHANNEL_TOKEN};
use network::{ConnectionIdentity, Connection, TcpNetwork, SocketType, TcpHandlerCommand, TcpHandlerCMD};
use helper::{Log, NetHelper};
use event::{Event};
use std::error::Error;
use std::process;
use std::sync::Arc;
use std::collections::btree_map::Entry::{Occupied, Vacant};
pub enum NetworkCMD {
None,
ConnectionClose,
HandleConnection,
HandleEvent
}
pub struct NetworkCommand {
pub cmd: NetworkCMD,
pub token: Vec<String>,
pub value: Vec<u64>,
pub conn_identity: Vec<ConnectionIdentity>,
pub event: Vec<Event>
}
pub trait Networking {
/// Main function to init Networking
fn init_networking(&mut self);
/// Handle Networking channel events as a NetworkCommand
fn notify(&mut self, command: &mut NetworkCommand);
/// Generating handshake information for sending it over networking handshake
fn handshake_info(&self) -> Vec<u8>;
/// main input from event loop to networking
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool;
/// sending event with specific path
fn emit(&mut self, event: Event);
}
impl NetworkCommand {
#[inline(always)]
pub fn new() -> NetworkCommand {
NetworkCommand {
cmd: NetworkCMD::None,
token: vec![],
value: vec![],
conn_identity: vec![],
event: vec![]
}
}
}
impl Networking for Node {
#[inline(always)]
fn notify(&mut self, command: &mut NetworkCommand) {
match command.cmd {
NetworkCMD::HandleConnection => {
// currently supporting only one connection per single command request
if command.token.len()!= 1
|| command.conn_identity.len()!= 1
|| command.value.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let value = command.value.remove(0);
let contained_token = match self.connections.entry(token.clone()) {
Vacant(entry) => {
entry.insert(Connection::new(token.clone(), value, identity));
false
},
Occupied(mut entry) => {
// adding connection identity
let conn = entry.get_mut();
conn.add_identity(identity);
true
},
};
if contained_token {
// handling new connection
self.on_new_connection_channel(&token);
} else |
}
NetworkCMD::ConnectionClose => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 || command.conn_identity.len()!= 1 {
return;
}
let token = command.token.remove(0);
let identity = command.conn_identity.remove(0);
let remove_conn = match self.connections.get_mut(&token) {
Some(conn) => {
conn.rm_identity(identity.socket_token, identity.handler_index);
// if identity count is 0, we need to close connection
conn.identity_count() == 0
},
None => return
};
// anyway we need to close channel of this connection
self.on_connection_channel_close(&token);
// if we need to close full connection
// letting node know about it
if remove_conn {
self.on_connection_close(&token);
self.connections.remove(&token);
}
}
NetworkCMD::HandleEvent => {
// currently supporting only one connection per single command request
if command.token.len()!= 1 {
return;
}
// getting token out
let token = command.token.remove(0);
while!command.event.is_empty() {
let event = command.event.remove(0);
// if event processing passing fine
// emitting event based on his path
if self.on_event_data(&token, &event) &&!event.path.is_zero() {
// then trying to send event over available connections
self.emit(event);
}
}
}
NetworkCMD::None => {}
}
}
fn init_networking(&mut self) {
// Registering Networking receiver
match self.poll.register(&self.net_receiver_chan
, NET_RECEIVER_CHANNEL_TOKEN
, Ready::readable()
, PollOpt::level()) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to register networking receiver channel to Node POLL service"
, e.description());
process::exit(1);
}
}
self.register_tcp();
}
#[inline(always)]
fn net_ready(&mut self, token: Token, event_kind: Ready) -> bool {
if token == NET_RECEIVER_CHANNEL_TOKEN {
// trying to get commands while there is available data
// if we got error, then data is unavailable
// and breaking receive loop
while let Ok(mut cmd) = self.net_receiver_chan.try_recv() {
self.notify(&mut cmd);
}
return true;
}
self.tcp_ready(token, event_kind)
}
#[inline(always)]
fn handshake_info(&self) -> Vec<u8> {
let token_len = self.token.len();
let total_value_len = token_len + 8;
// adding 4 byte API version
// 4 Bytes token string length
// N bytes for token string
// 8 bytes for Prime Value
let mut buffer = vec![0; (4 + 4 + token_len + 8)];
let mut offset = NetHelper::u32_to_bytes(self.api_version, &mut buffer, 0);
offset += NetHelper::u32_to_bytes(total_value_len as u32, &mut buffer, offset);
buffer[offset..offset + token_len].copy_from_slice(self.token.as_bytes());
offset += token_len;
NetHelper::u64_to_bytes(self.value, &mut buffer, offset);
buffer
}
#[inline(always)]
fn emit(&mut self, event: Event) {
let mut tcp_conns_to_send: Vec<Vec<Token>> = vec![Vec::new(); self.net_tcp_handler_sender_chan.len()];
let mut event = event;
for (_, mut conn) in &mut self.connections {
if conn.value == 0 {
continue;
}
if!event.path.dividable(conn.value) {
continue;
}
// if we trying to send to this connection
// removing it from path
event.path.div(conn.value);
let identity = conn.get_identity();
match identity.socket_type {
SocketType::TCP => {
tcp_conns_to_send[identity.handler_index].push(identity.socket_token);
}
SocketType::NONE => {}
}
}
if tcp_conns_to_send.len() == 0 {
return;
}
let data = Arc::new(match event.to_raw() {
Some(d) => d,
None => return
});
for i in 0..self.net_tcp_handler_sender_chan.len() {
if tcp_conns_to_send[i].len() == 0 {
continue;
}
let mut command = TcpHandlerCommand::new();
command.cmd = TcpHandlerCMD::WriteData;
command.token = tcp_conns_to_send[i].clone();
command.data = vec![data.clone()];
match self.net_tcp_handler_sender_chan[i].send(command) {
Ok(_) => {},
Err(e) => {
Log::error("Unable to send data to TcpHandler during emiting event", e.description());
}
}
}
}
} | {
// if we have API connection
if value == 0 {
self.on_new_api_connection(&token);
} else { // if we have regular Node connection
self.on_new_connection(&token, value);
}
} | conditional_block |
cube.rs | extern crate piston_window;
extern crate vecmath;
extern crate camera_controllers;
#[macro_use]
extern crate gfx;
extern crate gfx_device_gl;
extern crate sdl2_window;
extern crate piston_meta;
extern crate piston_meta_search;
extern crate shader_version;
use shader_version::Shaders;
use shader_version::glsl::GLSL;
use std::fs::File;
use std::io::Read;
use sdl2_window::Sdl2Window;
use piston_window::*;
use camera_controllers::{
FirstPersonSettings,
FirstPerson,
CameraPerspective,
model_view_projection
};
use gfx::traits::*;
use piston_meta::*;
use piston_meta_search::*;
//----------------------------------------
// Cube associated data
gfx_vertex_struct!( Vertex {
a_pos: [f32; 3] = "a_pos",
});
impl Vertex {
fn | (pos: [f32; 3]) -> Vertex {
Vertex {
a_pos: pos,
}
}
}
gfx_pipeline!( pipe {
vbuf: gfx::VertexBuffer<Vertex> = (),
u_model_view_proj: gfx::Global<[[f32; 4]; 4]> = "u_model_view_proj",
out_color: gfx::RenderTarget<gfx::format::Rgba8> = "o_Color",
out_depth: gfx::DepthTarget<gfx::format::DepthStencil> =
gfx::preset::depth::LESS_EQUAL_WRITE,
});
//----------------------------------------
fn main() {
let opengl = OpenGL::V3_2;
let mut events: PistonWindow<(), Sdl2Window> =
WindowSettings::new("piston: cube", [640, 480])
.exit_on_esc(true)
.samples(4)
.opengl(opengl)
.build()
.unwrap();
events.set_capture_cursor(true);
let ref mut factory = events.factory.borrow().clone();
// Read OpenGEX meta rules.
let mut file_h = File::open("examples/assets/opengex-syntax.txt").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let rules = stderr_unwrap(&source, syntax(&source));
// Read cube.ogex.
let mut file_h = File::open("examples/assets/cube.ogex").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let mut data = vec![];
stderr_unwrap(&source, parse(&rules, &source, &mut data));
let s = Search::new(&data);
let vertex_data: Vec<Vertex> = stderr_unwrap(&source, s.for_bool("position", true,
|ref mut s| {
let mut vs = Vec::with_capacity(24);
for _ in 0.. 24 {
vs.push(Vertex::new([
try!(s.f64("x")) as f32,
try!(s.f64("y")) as f32,
try!(s.f64("z")) as f32
]));
}
Ok(vs)
}));
let index_data: Vec<u8> = stderr_unwrap(&source, s.for_node("IndexArray",
|ref mut s| {
let mut is = Vec::with_capacity(36);
for _ in 0.. 12 {
is.push(try!(s.f64("a")) as u8);
is.push(try!(s.f64("b")) as u8);
is.push(try!(s.f64("c")) as u8);
}
Ok(is)
}));
let (vbuf, slice) = factory.create_vertex_buffer_indexed(&vertex_data,
&index_data[..]);
let glsl = opengl.to_glsl();
let pso = factory.create_pipeline_simple(
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslv"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslv"))
.get(glsl).unwrap().as_bytes(),
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslf"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslf"))
.get(glsl).unwrap().as_bytes(),
gfx::state::CullFace::Nothing,
pipe::new()
).unwrap();
let get_projection = |w: &PistonWindow<(), Sdl2Window>| {
let draw_size = w.window.borrow().draw_size();
CameraPerspective {
fov: 90.0, near_clip: 0.1, far_clip: 1000.0,
aspect_ratio: (draw_size.width as f32) / (draw_size.height as f32)
}.projection()
};
let model = vecmath::mat4_id();
let mut projection = get_projection(&events);
let mut first_person = FirstPerson::new(
[0.5, 0.5, 4.0],
FirstPersonSettings::keyboard_wasd()
);
for e in events {
first_person.event(&e);
e.draw_3d(|encoder| {
let args = e.render_args().unwrap();
encoder.clear(&e.output_color, [0.3, 0.3, 0.3, 1.0]);
encoder.clear_depth(&e.output_stencil, 1.0);
let data = pipe::Data {
vbuf: vbuf.clone(),
u_model_view_proj: model_view_projection(
model,
first_person.camera(args.ext_dt).orthogonal(),
projection
),
out_color: (*e.output_color).clone(),
out_depth: (*e.output_stencil).clone(),
};
encoder.draw(&slice, &pso, &data);
});
e.draw_2d(|c, g| {
ellipse([1.0, 1.0, 0.0, 1.0], [0.0, 0.0, 10.0, 10.0],
c.transform, g);
});
if let Some(_) = e.resize_args() {
projection = get_projection(&e);
}
}
}
| new | identifier_name |
cube.rs | extern crate piston_window;
extern crate vecmath;
extern crate camera_controllers;
#[macro_use]
extern crate gfx;
extern crate gfx_device_gl;
extern crate sdl2_window;
extern crate piston_meta;
extern crate piston_meta_search;
extern crate shader_version;
use shader_version::Shaders;
use shader_version::glsl::GLSL;
use std::fs::File;
use std::io::Read;
use sdl2_window::Sdl2Window;
use piston_window::*;
use camera_controllers::{
FirstPersonSettings,
FirstPerson,
CameraPerspective,
model_view_projection
};
use gfx::traits::*;
use piston_meta::*;
use piston_meta_search::*;
//----------------------------------------
// Cube associated data
gfx_vertex_struct!( Vertex {
a_pos: [f32; 3] = "a_pos",
});
impl Vertex {
fn new(pos: [f32; 3]) -> Vertex {
Vertex {
a_pos: pos,
}
}
}
gfx_pipeline!( pipe {
vbuf: gfx::VertexBuffer<Vertex> = (),
u_model_view_proj: gfx::Global<[[f32; 4]; 4]> = "u_model_view_proj",
out_color: gfx::RenderTarget<gfx::format::Rgba8> = "o_Color",
out_depth: gfx::DepthTarget<gfx::format::DepthStencil> =
gfx::preset::depth::LESS_EQUAL_WRITE,
});
//----------------------------------------
fn main() {
let opengl = OpenGL::V3_2;
let mut events: PistonWindow<(), Sdl2Window> =
WindowSettings::new("piston: cube", [640, 480])
.exit_on_esc(true)
.samples(4)
.opengl(opengl)
.build()
.unwrap();
events.set_capture_cursor(true);
let ref mut factory = events.factory.borrow().clone();
// Read OpenGEX meta rules.
let mut file_h = File::open("examples/assets/opengex-syntax.txt").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let rules = stderr_unwrap(&source, syntax(&source));
// Read cube.ogex.
let mut file_h = File::open("examples/assets/cube.ogex").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let mut data = vec![];
stderr_unwrap(&source, parse(&rules, &source, &mut data));
let s = Search::new(&data);
let vertex_data: Vec<Vertex> = stderr_unwrap(&source, s.for_bool("position", true,
|ref mut s| {
let mut vs = Vec::with_capacity(24);
for _ in 0.. 24 {
vs.push(Vertex::new([
try!(s.f64("x")) as f32,
try!(s.f64("y")) as f32,
try!(s.f64("z")) as f32
]));
}
Ok(vs)
}));
let index_data: Vec<u8> = stderr_unwrap(&source, s.for_node("IndexArray",
|ref mut s| {
let mut is = Vec::with_capacity(36);
for _ in 0.. 12 {
is.push(try!(s.f64("a")) as u8);
is.push(try!(s.f64("b")) as u8);
is.push(try!(s.f64("c")) as u8);
}
Ok(is)
}));
let (vbuf, slice) = factory.create_vertex_buffer_indexed(&vertex_data,
&index_data[..]);
let glsl = opengl.to_glsl();
let pso = factory.create_pipeline_simple(
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslv"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslv"))
.get(glsl).unwrap().as_bytes(),
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslf"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslf"))
.get(glsl).unwrap().as_bytes(),
gfx::state::CullFace::Nothing, | CameraPerspective {
fov: 90.0, near_clip: 0.1, far_clip: 1000.0,
aspect_ratio: (draw_size.width as f32) / (draw_size.height as f32)
}.projection()
};
let model = vecmath::mat4_id();
let mut projection = get_projection(&events);
let mut first_person = FirstPerson::new(
[0.5, 0.5, 4.0],
FirstPersonSettings::keyboard_wasd()
);
for e in events {
first_person.event(&e);
e.draw_3d(|encoder| {
let args = e.render_args().unwrap();
encoder.clear(&e.output_color, [0.3, 0.3, 0.3, 1.0]);
encoder.clear_depth(&e.output_stencil, 1.0);
let data = pipe::Data {
vbuf: vbuf.clone(),
u_model_view_proj: model_view_projection(
model,
first_person.camera(args.ext_dt).orthogonal(),
projection
),
out_color: (*e.output_color).clone(),
out_depth: (*e.output_stencil).clone(),
};
encoder.draw(&slice, &pso, &data);
});
e.draw_2d(|c, g| {
ellipse([1.0, 1.0, 0.0, 1.0], [0.0, 0.0, 10.0, 10.0],
c.transform, g);
});
if let Some(_) = e.resize_args() {
projection = get_projection(&e);
}
}
} | pipe::new()
).unwrap();
let get_projection = |w: &PistonWindow<(), Sdl2Window>| {
let draw_size = w.window.borrow().draw_size(); | random_line_split |
cube.rs | extern crate piston_window;
extern crate vecmath;
extern crate camera_controllers;
#[macro_use]
extern crate gfx;
extern crate gfx_device_gl;
extern crate sdl2_window;
extern crate piston_meta;
extern crate piston_meta_search;
extern crate shader_version;
use shader_version::Shaders;
use shader_version::glsl::GLSL;
use std::fs::File;
use std::io::Read;
use sdl2_window::Sdl2Window;
use piston_window::*;
use camera_controllers::{
FirstPersonSettings,
FirstPerson,
CameraPerspective,
model_view_projection
};
use gfx::traits::*;
use piston_meta::*;
use piston_meta_search::*;
//----------------------------------------
// Cube associated data
gfx_vertex_struct!( Vertex {
a_pos: [f32; 3] = "a_pos",
});
impl Vertex {
fn new(pos: [f32; 3]) -> Vertex {
Vertex {
a_pos: pos,
}
}
}
gfx_pipeline!( pipe {
vbuf: gfx::VertexBuffer<Vertex> = (),
u_model_view_proj: gfx::Global<[[f32; 4]; 4]> = "u_model_view_proj",
out_color: gfx::RenderTarget<gfx::format::Rgba8> = "o_Color",
out_depth: gfx::DepthTarget<gfx::format::DepthStencil> =
gfx::preset::depth::LESS_EQUAL_WRITE,
});
//----------------------------------------
fn main() {
let opengl = OpenGL::V3_2;
let mut events: PistonWindow<(), Sdl2Window> =
WindowSettings::new("piston: cube", [640, 480])
.exit_on_esc(true)
.samples(4)
.opengl(opengl)
.build()
.unwrap();
events.set_capture_cursor(true);
let ref mut factory = events.factory.borrow().clone();
// Read OpenGEX meta rules.
let mut file_h = File::open("examples/assets/opengex-syntax.txt").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let rules = stderr_unwrap(&source, syntax(&source));
// Read cube.ogex.
let mut file_h = File::open("examples/assets/cube.ogex").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let mut data = vec![];
stderr_unwrap(&source, parse(&rules, &source, &mut data));
let s = Search::new(&data);
let vertex_data: Vec<Vertex> = stderr_unwrap(&source, s.for_bool("position", true,
|ref mut s| {
let mut vs = Vec::with_capacity(24);
for _ in 0.. 24 {
vs.push(Vertex::new([
try!(s.f64("x")) as f32,
try!(s.f64("y")) as f32,
try!(s.f64("z")) as f32
]));
}
Ok(vs)
}));
let index_data: Vec<u8> = stderr_unwrap(&source, s.for_node("IndexArray",
|ref mut s| {
let mut is = Vec::with_capacity(36);
for _ in 0.. 12 {
is.push(try!(s.f64("a")) as u8);
is.push(try!(s.f64("b")) as u8);
is.push(try!(s.f64("c")) as u8);
}
Ok(is)
}));
let (vbuf, slice) = factory.create_vertex_buffer_indexed(&vertex_data,
&index_data[..]);
let glsl = opengl.to_glsl();
let pso = factory.create_pipeline_simple(
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslv"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslv"))
.get(glsl).unwrap().as_bytes(),
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslf"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslf"))
.get(glsl).unwrap().as_bytes(),
gfx::state::CullFace::Nothing,
pipe::new()
).unwrap();
let get_projection = |w: &PistonWindow<(), Sdl2Window>| {
let draw_size = w.window.borrow().draw_size();
CameraPerspective {
fov: 90.0, near_clip: 0.1, far_clip: 1000.0,
aspect_ratio: (draw_size.width as f32) / (draw_size.height as f32)
}.projection()
};
let model = vecmath::mat4_id();
let mut projection = get_projection(&events);
let mut first_person = FirstPerson::new(
[0.5, 0.5, 4.0],
FirstPersonSettings::keyboard_wasd()
);
for e in events {
first_person.event(&e);
e.draw_3d(|encoder| {
let args = e.render_args().unwrap();
encoder.clear(&e.output_color, [0.3, 0.3, 0.3, 1.0]);
encoder.clear_depth(&e.output_stencil, 1.0);
let data = pipe::Data {
vbuf: vbuf.clone(),
u_model_view_proj: model_view_projection(
model,
first_person.camera(args.ext_dt).orthogonal(),
projection
),
out_color: (*e.output_color).clone(),
out_depth: (*e.output_stencil).clone(),
};
encoder.draw(&slice, &pso, &data);
});
e.draw_2d(|c, g| {
ellipse([1.0, 1.0, 0.0, 1.0], [0.0, 0.0, 10.0, 10.0],
c.transform, g);
});
if let Some(_) = e.resize_args() |
}
}
| {
projection = get_projection(&e);
} | conditional_block |
cube.rs | extern crate piston_window;
extern crate vecmath;
extern crate camera_controllers;
#[macro_use]
extern crate gfx;
extern crate gfx_device_gl;
extern crate sdl2_window;
extern crate piston_meta;
extern crate piston_meta_search;
extern crate shader_version;
use shader_version::Shaders;
use shader_version::glsl::GLSL;
use std::fs::File;
use std::io::Read;
use sdl2_window::Sdl2Window;
use piston_window::*;
use camera_controllers::{
FirstPersonSettings,
FirstPerson,
CameraPerspective,
model_view_projection
};
use gfx::traits::*;
use piston_meta::*;
use piston_meta_search::*;
//----------------------------------------
// Cube associated data
gfx_vertex_struct!( Vertex {
a_pos: [f32; 3] = "a_pos",
});
impl Vertex {
fn new(pos: [f32; 3]) -> Vertex {
Vertex {
a_pos: pos,
}
}
}
gfx_pipeline!( pipe {
vbuf: gfx::VertexBuffer<Vertex> = (),
u_model_view_proj: gfx::Global<[[f32; 4]; 4]> = "u_model_view_proj",
out_color: gfx::RenderTarget<gfx::format::Rgba8> = "o_Color",
out_depth: gfx::DepthTarget<gfx::format::DepthStencil> =
gfx::preset::depth::LESS_EQUAL_WRITE,
});
//----------------------------------------
fn main() | // Read cube.ogex.
let mut file_h = File::open("examples/assets/cube.ogex").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let mut data = vec![];
stderr_unwrap(&source, parse(&rules, &source, &mut data));
let s = Search::new(&data);
let vertex_data: Vec<Vertex> = stderr_unwrap(&source, s.for_bool("position", true,
|ref mut s| {
let mut vs = Vec::with_capacity(24);
for _ in 0.. 24 {
vs.push(Vertex::new([
try!(s.f64("x")) as f32,
try!(s.f64("y")) as f32,
try!(s.f64("z")) as f32
]));
}
Ok(vs)
}));
let index_data: Vec<u8> = stderr_unwrap(&source, s.for_node("IndexArray",
|ref mut s| {
let mut is = Vec::with_capacity(36);
for _ in 0.. 12 {
is.push(try!(s.f64("a")) as u8);
is.push(try!(s.f64("b")) as u8);
is.push(try!(s.f64("c")) as u8);
}
Ok(is)
}));
let (vbuf, slice) = factory.create_vertex_buffer_indexed(&vertex_data,
&index_data[..]);
let glsl = opengl.to_glsl();
let pso = factory.create_pipeline_simple(
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslv"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslv"))
.get(glsl).unwrap().as_bytes(),
Shaders::new()
.set(GLSL::V1_20, include_str!("assets/cube_colored_120.glslf"))
.set(GLSL::V1_50, include_str!("assets/cube_colored_150.glslf"))
.get(glsl).unwrap().as_bytes(),
gfx::state::CullFace::Nothing,
pipe::new()
).unwrap();
let get_projection = |w: &PistonWindow<(), Sdl2Window>| {
let draw_size = w.window.borrow().draw_size();
CameraPerspective {
fov: 90.0, near_clip: 0.1, far_clip: 1000.0,
aspect_ratio: (draw_size.width as f32) / (draw_size.height as f32)
}.projection()
};
let model = vecmath::mat4_id();
let mut projection = get_projection(&events);
let mut first_person = FirstPerson::new(
[0.5, 0.5, 4.0],
FirstPersonSettings::keyboard_wasd()
);
for e in events {
first_person.event(&e);
e.draw_3d(|encoder| {
let args = e.render_args().unwrap();
encoder.clear(&e.output_color, [0.3, 0.3, 0.3, 1.0]);
encoder.clear_depth(&e.output_stencil, 1.0);
let data = pipe::Data {
vbuf: vbuf.clone(),
u_model_view_proj: model_view_projection(
model,
first_person.camera(args.ext_dt).orthogonal(),
projection
),
out_color: (*e.output_color).clone(),
out_depth: (*e.output_stencil).clone(),
};
encoder.draw(&slice, &pso, &data);
});
e.draw_2d(|c, g| {
ellipse([1.0, 1.0, 0.0, 1.0], [0.0, 0.0, 10.0, 10.0],
c.transform, g);
});
if let Some(_) = e.resize_args() {
projection = get_projection(&e);
}
}
}
| {
let opengl = OpenGL::V3_2;
let mut events: PistonWindow<(), Sdl2Window> =
WindowSettings::new("piston: cube", [640, 480])
.exit_on_esc(true)
.samples(4)
.opengl(opengl)
.build()
.unwrap();
events.set_capture_cursor(true);
let ref mut factory = events.factory.borrow().clone();
// Read OpenGEX meta rules.
let mut file_h = File::open("examples/assets/opengex-syntax.txt").unwrap();
let mut source = String::new();
file_h.read_to_string(&mut source).unwrap();
let rules = stderr_unwrap(&source, syntax(&source));
| identifier_body |
regions-early-bound-error-method.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that you can use a fn lifetime parameter as part of
// the value for a type parameter in a bound.
trait GetRef<'a> {
fn get(&self) -> &'a int;
}
struct Box<'a> {
t: &'a int
}
impl<'a> GetRef<'a> for Box<'a> {
fn get(&self) -> &'a int {
self.t
}
}
impl<'a> Box<'a> {
fn or<'b,G:GetRef<'b>>(&self, g2: G) -> &'a int { | } | g2.get() //~ ERROR cannot infer an appropriate lifetime for automatic coercion due to
}
}
fn main() { | random_line_split |
regions-early-bound-error-method.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that you can use a fn lifetime parameter as part of
// the value for a type parameter in a bound.
trait GetRef<'a> {
fn get(&self) -> &'a int;
}
struct Box<'a> {
t: &'a int
}
impl<'a> GetRef<'a> for Box<'a> {
fn get(&self) -> &'a int {
self.t
}
}
impl<'a> Box<'a> {
fn or<'b,G:GetRef<'b>>(&self, g2: G) -> &'a int {
g2.get() //~ ERROR cannot infer an appropriate lifetime for automatic coercion due to
}
}
fn | () {
}
| main | identifier_name |
cssgroupingrule.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::CSSGroupingRuleBinding::CSSGroupingRuleMethods;
use dom::bindings::error::{ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::DomObject;
use dom::bindings::root::{DomRoot, MutNullableDom};
use dom::bindings::str::DOMString;
use dom::cssrule::CSSRule;
use dom::cssrulelist::{CSSRuleList, RulesSource};
use dom::cssstylesheet::CSSStyleSheet;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::shared_lock::{SharedRwLock, Locked};
use style::stylesheets::CssRules as StyleCssRules;
#[dom_struct]
pub struct CSSGroupingRule {
cssrule: CSSRule,
#[ignore_heap_size_of = "Arc"]
rules: Arc<Locked<StyleCssRules>>,
rulelist: MutNullableDom<CSSRuleList>,
}
impl CSSGroupingRule {
pub fn new_inherited(parent_stylesheet: &CSSStyleSheet,
rules: Arc<Locked<StyleCssRules>>) -> CSSGroupingRule {
CSSGroupingRule {
cssrule: CSSRule::new_inherited(parent_stylesheet),
rules: rules,
rulelist: MutNullableDom::new(None),
}
}
fn rulelist(&self) -> DomRoot<CSSRuleList> {
let parent_stylesheet = self.upcast::<CSSRule>().parent_stylesheet();
self.rulelist.or_init(|| CSSRuleList::new(self.global().as_window(),
parent_stylesheet,
RulesSource::Rules(self.rules.clone())))
}
pub fn parent_stylesheet(&self) -> &CSSStyleSheet {
self.cssrule.parent_stylesheet()
}
pub fn shared_lock(&self) -> &SharedRwLock {
self.cssrule.shared_lock()
}
}
impl CSSGroupingRuleMethods for CSSGroupingRule {
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-cssrules
fn CssRules(&self) -> DomRoot<CSSRuleList> {
// XXXManishearth check origin clean flag
self.rulelist()
} | }
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-deleterule
fn DeleteRule(&self, index: u32) -> ErrorResult {
self.rulelist().remove_rule(index)
}
} |
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-insertrule
fn InsertRule(&self, rule: DOMString, index: u32) -> Fallible<u32> {
self.rulelist().insert_rule(&rule, index, /* nested */ true) | random_line_split |
cssgroupingrule.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::CSSGroupingRuleBinding::CSSGroupingRuleMethods;
use dom::bindings::error::{ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::DomObject;
use dom::bindings::root::{DomRoot, MutNullableDom};
use dom::bindings::str::DOMString;
use dom::cssrule::CSSRule;
use dom::cssrulelist::{CSSRuleList, RulesSource};
use dom::cssstylesheet::CSSStyleSheet;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::shared_lock::{SharedRwLock, Locked};
use style::stylesheets::CssRules as StyleCssRules;
#[dom_struct]
pub struct CSSGroupingRule {
cssrule: CSSRule,
#[ignore_heap_size_of = "Arc"]
rules: Arc<Locked<StyleCssRules>>,
rulelist: MutNullableDom<CSSRuleList>,
}
impl CSSGroupingRule {
pub fn new_inherited(parent_stylesheet: &CSSStyleSheet,
rules: Arc<Locked<StyleCssRules>>) -> CSSGroupingRule {
CSSGroupingRule {
cssrule: CSSRule::new_inherited(parent_stylesheet),
rules: rules,
rulelist: MutNullableDom::new(None),
}
}
fn | (&self) -> DomRoot<CSSRuleList> {
let parent_stylesheet = self.upcast::<CSSRule>().parent_stylesheet();
self.rulelist.or_init(|| CSSRuleList::new(self.global().as_window(),
parent_stylesheet,
RulesSource::Rules(self.rules.clone())))
}
pub fn parent_stylesheet(&self) -> &CSSStyleSheet {
self.cssrule.parent_stylesheet()
}
pub fn shared_lock(&self) -> &SharedRwLock {
self.cssrule.shared_lock()
}
}
impl CSSGroupingRuleMethods for CSSGroupingRule {
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-cssrules
fn CssRules(&self) -> DomRoot<CSSRuleList> {
// XXXManishearth check origin clean flag
self.rulelist()
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-insertrule
fn InsertRule(&self, rule: DOMString, index: u32) -> Fallible<u32> {
self.rulelist().insert_rule(&rule, index, /* nested */ true)
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-deleterule
fn DeleteRule(&self, index: u32) -> ErrorResult {
self.rulelist().remove_rule(index)
}
}
| rulelist | identifier_name |
cssgroupingrule.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::CSSGroupingRuleBinding::CSSGroupingRuleMethods;
use dom::bindings::error::{ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::DomObject;
use dom::bindings::root::{DomRoot, MutNullableDom};
use dom::bindings::str::DOMString;
use dom::cssrule::CSSRule;
use dom::cssrulelist::{CSSRuleList, RulesSource};
use dom::cssstylesheet::CSSStyleSheet;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::shared_lock::{SharedRwLock, Locked};
use style::stylesheets::CssRules as StyleCssRules;
#[dom_struct]
pub struct CSSGroupingRule {
cssrule: CSSRule,
#[ignore_heap_size_of = "Arc"]
rules: Arc<Locked<StyleCssRules>>,
rulelist: MutNullableDom<CSSRuleList>,
}
impl CSSGroupingRule {
pub fn new_inherited(parent_stylesheet: &CSSStyleSheet,
rules: Arc<Locked<StyleCssRules>>) -> CSSGroupingRule {
CSSGroupingRule {
cssrule: CSSRule::new_inherited(parent_stylesheet),
rules: rules,
rulelist: MutNullableDom::new(None),
}
}
fn rulelist(&self) -> DomRoot<CSSRuleList> {
let parent_stylesheet = self.upcast::<CSSRule>().parent_stylesheet();
self.rulelist.or_init(|| CSSRuleList::new(self.global().as_window(),
parent_stylesheet,
RulesSource::Rules(self.rules.clone())))
}
pub fn parent_stylesheet(&self) -> &CSSStyleSheet {
self.cssrule.parent_stylesheet()
}
pub fn shared_lock(&self) -> &SharedRwLock |
}
impl CSSGroupingRuleMethods for CSSGroupingRule {
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-cssrules
fn CssRules(&self) -> DomRoot<CSSRuleList> {
// XXXManishearth check origin clean flag
self.rulelist()
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-insertrule
fn InsertRule(&self, rule: DOMString, index: u32) -> Fallible<u32> {
self.rulelist().insert_rule(&rule, index, /* nested */ true)
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-deleterule
fn DeleteRule(&self, index: u32) -> ErrorResult {
self.rulelist().remove_rule(index)
}
}
| {
self.cssrule.shared_lock()
} | identifier_body |
directory.rs | use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use hex;
use serde_json;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; |
pub struct DirectorySource<'cfg> {
source_id: SourceId,
root: PathBuf,
packages: HashMap<PackageId, (Package, Checksum)>,
config: &'cfg Config,
}
#[derive(Deserialize)]
struct Checksum {
package: Option<String>,
files: HashMap<String, String>,
}
impl<'cfg> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
config,
packages: HashMap::new(),
}
}
}
impl<'cfg> Debug for DirectorySource<'cfg> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "DirectorySource {{ root: {:?} }}", self.root)
}
}
impl<'cfg> Source for DirectorySource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let packages = self.packages.values().map(|p| &p.0);
let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
for summary in matches.map(|pkg| pkg.summary().clone()) {
f(summary);
}
Ok(())
}
fn supports_checksums(&self) -> bool {
true
}
fn requires_precise(&self) -> bool {
true
}
fn source_id(&self) -> &SourceId {
&self.source_id
}
fn update(&mut self) -> CargoResult<()> {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
format!(
"failed to read root of directory source: {}",
self.root.display()
)
})?;
for entry in entries {
let entry = entry?;
let path = entry.path();
// Ignore hidden/dot directories as they typically don't contain
// crates and otherwise may conflict with a VCS
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
if s.starts_with('.') {
continue;
}
}
// Vendor directories are often checked into a VCS, but throughout
// the lifetime of a vendor dir crates are often added and deleted.
// Some VCS implementations don't always fully delete the directory
// when a dir is removed from a different checkout. Sometimes a
// mostly-empty dir is left behind.
//
// Additionally vendor directories are sometimes accompanied with
// readme files and other auxiliary information not too interesting
// to Cargo.
//
// To help handle all this we only try processing folders with a
// `Cargo.toml` in them. This has the upside of being pretty
// flexible with the contents of vendor directories but has the
// downside of accidentally misconfigured vendor directories
// silently returning less crates.
if!path.join("Cargo.toml").exists() {
continue;
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.root_package()?;
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
format!(
"failed to load checksum `.cargo-checksum.json` \
of {} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
format!(
"failed to decode `.cargo-checksum.json` of \
{} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let mut manifest = pkg.manifest().clone();
let mut summary = manifest.summary().clone();
if let Some(ref package) = cksum.package {
summary = summary.set_checksum(package.clone());
}
manifest.set_summary(summary);
let pkg = Package::new(manifest, pkg.manifest_path());
self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
}
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
self.packages
.get(id)
.map(|p| &p.0)
.cloned()
.ok_or_else(|| format_err!("failed to find package with id: {}", id))
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
Ok(pkg.package_id().version().to_string())
}
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let (pkg, cksum) = match self.packages.get(id) {
Some(&(ref pkg, ref cksum)) => (pkg, cksum),
None => bail!("failed to find entry for `{}` in directory source", id),
};
let mut buf = [0; 16 * 1024];
for (file, cksum) in cksum.files.iter() {
let mut h = Sha256::new();
let file = pkg.root().join(file);
(|| -> CargoResult<()> {
let mut f = File::open(&file)?;
loop {
match f.read(&mut buf)? {
0 => return Ok(()),
n => h.update(&buf[..n]),
}
}
})()
.chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;
let actual = hex::encode(h.finish());
if &*actual!= cksum {
bail!(
"\
the listed checksum of `{}` has changed:\n\
expected: {}\n\
actual: {}\n\
\n\
directory sources are not intended to be edited, if \
modifications are required then it is recommended \
that [replace] is used with a forked copy of the \
source\
",
file.display(),
cksum,
actual
);
}
}
Ok(())
}
} | use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
use util::paths; | random_line_split |
directory.rs | use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use hex;
use serde_json;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
pub struct DirectorySource<'cfg> {
source_id: SourceId,
root: PathBuf,
packages: HashMap<PackageId, (Package, Checksum)>,
config: &'cfg Config,
}
#[derive(Deserialize)]
struct Checksum {
package: Option<String>,
files: HashMap<String, String>,
}
impl<'cfg> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
config,
packages: HashMap::new(),
}
}
}
impl<'cfg> Debug for DirectorySource<'cfg> {
fn | (&self, f: &mut Formatter) -> fmt::Result {
write!(f, "DirectorySource {{ root: {:?} }}", self.root)
}
}
impl<'cfg> Source for DirectorySource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let packages = self.packages.values().map(|p| &p.0);
let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
for summary in matches.map(|pkg| pkg.summary().clone()) {
f(summary);
}
Ok(())
}
fn supports_checksums(&self) -> bool {
true
}
fn requires_precise(&self) -> bool {
true
}
fn source_id(&self) -> &SourceId {
&self.source_id
}
fn update(&mut self) -> CargoResult<()> {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
format!(
"failed to read root of directory source: {}",
self.root.display()
)
})?;
for entry in entries {
let entry = entry?;
let path = entry.path();
// Ignore hidden/dot directories as they typically don't contain
// crates and otherwise may conflict with a VCS
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
if s.starts_with('.') {
continue;
}
}
// Vendor directories are often checked into a VCS, but throughout
// the lifetime of a vendor dir crates are often added and deleted.
// Some VCS implementations don't always fully delete the directory
// when a dir is removed from a different checkout. Sometimes a
// mostly-empty dir is left behind.
//
// Additionally vendor directories are sometimes accompanied with
// readme files and other auxiliary information not too interesting
// to Cargo.
//
// To help handle all this we only try processing folders with a
// `Cargo.toml` in them. This has the upside of being pretty
// flexible with the contents of vendor directories but has the
// downside of accidentally misconfigured vendor directories
// silently returning less crates.
if!path.join("Cargo.toml").exists() {
continue;
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.root_package()?;
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
format!(
"failed to load checksum `.cargo-checksum.json` \
of {} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
format!(
"failed to decode `.cargo-checksum.json` of \
{} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let mut manifest = pkg.manifest().clone();
let mut summary = manifest.summary().clone();
if let Some(ref package) = cksum.package {
summary = summary.set_checksum(package.clone());
}
manifest.set_summary(summary);
let pkg = Package::new(manifest, pkg.manifest_path());
self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
}
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
self.packages
.get(id)
.map(|p| &p.0)
.cloned()
.ok_or_else(|| format_err!("failed to find package with id: {}", id))
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
Ok(pkg.package_id().version().to_string())
}
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let (pkg, cksum) = match self.packages.get(id) {
Some(&(ref pkg, ref cksum)) => (pkg, cksum),
None => bail!("failed to find entry for `{}` in directory source", id),
};
let mut buf = [0; 16 * 1024];
for (file, cksum) in cksum.files.iter() {
let mut h = Sha256::new();
let file = pkg.root().join(file);
(|| -> CargoResult<()> {
let mut f = File::open(&file)?;
loop {
match f.read(&mut buf)? {
0 => return Ok(()),
n => h.update(&buf[..n]),
}
}
})()
.chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;
let actual = hex::encode(h.finish());
if &*actual!= cksum {
bail!(
"\
the listed checksum of `{}` has changed:\n\
expected: {}\n\
actual: {}\n\
\n\
directory sources are not intended to be edited, if \
modifications are required then it is recommended \
that [replace] is used with a forked copy of the \
source\
",
file.display(),
cksum,
actual
);
}
}
Ok(())
}
}
| fmt | identifier_name |
directory.rs | use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use hex;
use serde_json;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
pub struct DirectorySource<'cfg> {
source_id: SourceId,
root: PathBuf,
packages: HashMap<PackageId, (Package, Checksum)>,
config: &'cfg Config,
}
#[derive(Deserialize)]
struct Checksum {
package: Option<String>,
files: HashMap<String, String>,
}
impl<'cfg> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
config,
packages: HashMap::new(),
}
}
}
impl<'cfg> Debug for DirectorySource<'cfg> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "DirectorySource {{ root: {:?} }}", self.root)
}
}
impl<'cfg> Source for DirectorySource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let packages = self.packages.values().map(|p| &p.0);
let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
for summary in matches.map(|pkg| pkg.summary().clone()) {
f(summary);
}
Ok(())
}
fn supports_checksums(&self) -> bool {
true
}
fn requires_precise(&self) -> bool {
true
}
fn source_id(&self) -> &SourceId {
&self.source_id
}
fn update(&mut self) -> CargoResult<()> {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
format!(
"failed to read root of directory source: {}",
self.root.display()
)
})?;
for entry in entries {
let entry = entry?;
let path = entry.path();
// Ignore hidden/dot directories as they typically don't contain
// crates and otherwise may conflict with a VCS
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) |
// Vendor directories are often checked into a VCS, but throughout
// the lifetime of a vendor dir crates are often added and deleted.
// Some VCS implementations don't always fully delete the directory
// when a dir is removed from a different checkout. Sometimes a
// mostly-empty dir is left behind.
//
// Additionally vendor directories are sometimes accompanied with
// readme files and other auxiliary information not too interesting
// to Cargo.
//
// To help handle all this we only try processing folders with a
// `Cargo.toml` in them. This has the upside of being pretty
// flexible with the contents of vendor directories but has the
// downside of accidentally misconfigured vendor directories
// silently returning less crates.
if!path.join("Cargo.toml").exists() {
continue;
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.root_package()?;
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
format!(
"failed to load checksum `.cargo-checksum.json` \
of {} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
format!(
"failed to decode `.cargo-checksum.json` of \
{} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let mut manifest = pkg.manifest().clone();
let mut summary = manifest.summary().clone();
if let Some(ref package) = cksum.package {
summary = summary.set_checksum(package.clone());
}
manifest.set_summary(summary);
let pkg = Package::new(manifest, pkg.manifest_path());
self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
}
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
self.packages
.get(id)
.map(|p| &p.0)
.cloned()
.ok_or_else(|| format_err!("failed to find package with id: {}", id))
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
Ok(pkg.package_id().version().to_string())
}
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let (pkg, cksum) = match self.packages.get(id) {
Some(&(ref pkg, ref cksum)) => (pkg, cksum),
None => bail!("failed to find entry for `{}` in directory source", id),
};
let mut buf = [0; 16 * 1024];
for (file, cksum) in cksum.files.iter() {
let mut h = Sha256::new();
let file = pkg.root().join(file);
(|| -> CargoResult<()> {
let mut f = File::open(&file)?;
loop {
match f.read(&mut buf)? {
0 => return Ok(()),
n => h.update(&buf[..n]),
}
}
})()
.chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;
let actual = hex::encode(h.finish());
if &*actual!= cksum {
bail!(
"\
the listed checksum of `{}` has changed:\n\
expected: {}\n\
actual: {}\n\
\n\
directory sources are not intended to be edited, if \
modifications are required then it is recommended \
that [replace] is used with a forked copy of the \
source\
",
file.display(),
cksum,
actual
);
}
}
Ok(())
}
}
| {
if s.starts_with('.') {
continue;
}
} | conditional_block |
directory.rs | use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use hex;
use serde_json;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
pub struct DirectorySource<'cfg> {
source_id: SourceId,
root: PathBuf,
packages: HashMap<PackageId, (Package, Checksum)>,
config: &'cfg Config,
}
#[derive(Deserialize)]
struct Checksum {
package: Option<String>,
files: HashMap<String, String>,
}
impl<'cfg> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
config,
packages: HashMap::new(),
}
}
}
impl<'cfg> Debug for DirectorySource<'cfg> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "DirectorySource {{ root: {:?} }}", self.root)
}
}
impl<'cfg> Source for DirectorySource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let packages = self.packages.values().map(|p| &p.0);
let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
for summary in matches.map(|pkg| pkg.summary().clone()) {
f(summary);
}
Ok(())
}
fn supports_checksums(&self) -> bool {
true
}
fn requires_precise(&self) -> bool {
true
}
fn source_id(&self) -> &SourceId {
&self.source_id
}
fn update(&mut self) -> CargoResult<()> | }
// Vendor directories are often checked into a VCS, but throughout
// the lifetime of a vendor dir crates are often added and deleted.
// Some VCS implementations don't always fully delete the directory
// when a dir is removed from a different checkout. Sometimes a
// mostly-empty dir is left behind.
//
// Additionally vendor directories are sometimes accompanied with
// readme files and other auxiliary information not too interesting
// to Cargo.
//
// To help handle all this we only try processing folders with a
// `Cargo.toml` in them. This has the upside of being pretty
// flexible with the contents of vendor directories but has the
// downside of accidentally misconfigured vendor directories
// silently returning less crates.
if!path.join("Cargo.toml").exists() {
continue;
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.root_package()?;
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
format!(
"failed to load checksum `.cargo-checksum.json` \
of {} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
format!(
"failed to decode `.cargo-checksum.json` of \
{} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let mut manifest = pkg.manifest().clone();
let mut summary = manifest.summary().clone();
if let Some(ref package) = cksum.package {
summary = summary.set_checksum(package.clone());
}
manifest.set_summary(summary);
let pkg = Package::new(manifest, pkg.manifest_path());
self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
}
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
self.packages
.get(id)
.map(|p| &p.0)
.cloned()
.ok_or_else(|| format_err!("failed to find package with id: {}", id))
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
Ok(pkg.package_id().version().to_string())
}
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let (pkg, cksum) = match self.packages.get(id) {
Some(&(ref pkg, ref cksum)) => (pkg, cksum),
None => bail!("failed to find entry for `{}` in directory source", id),
};
let mut buf = [0; 16 * 1024];
for (file, cksum) in cksum.files.iter() {
let mut h = Sha256::new();
let file = pkg.root().join(file);
(|| -> CargoResult<()> {
let mut f = File::open(&file)?;
loop {
match f.read(&mut buf)? {
0 => return Ok(()),
n => h.update(&buf[..n]),
}
}
})()
.chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;
let actual = hex::encode(h.finish());
if &*actual!= cksum {
bail!(
"\
the listed checksum of `{}` has changed:\n\
expected: {}\n\
actual: {}\n\
\n\
directory sources are not intended to be edited, if \
modifications are required then it is recommended \
that [replace] is used with a forked copy of the \
source\
",
file.display(),
cksum,
actual
);
}
}
Ok(())
}
}
| {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
format!(
"failed to read root of directory source: {}",
self.root.display()
)
})?;
for entry in entries {
let entry = entry?;
let path = entry.path();
// Ignore hidden/dot directories as they typically don't contain
// crates and otherwise may conflict with a VCS
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
if s.starts_with('.') {
continue;
} | identifier_body |
literals.rs | fn | () {
// Suffixed literals, their types are known at initialization
let x = 1u8;
let y = 2u32;
let z = 3f32;
// Unsuffixed literal, their types depend on how they are used
let i = 1;
let f = 1.0;
// `size_of_val` returns the size of a variable in bytes
println!("size of `x` in bytes: {}", std::mem::size_of_val(&x));
println!("size of `y` in bytes: {}", std::mem::size_of_val(&y));
println!("size of `z` in bytes: {}", std::mem::size_of_val(&z));
println!("size of `i` in bytes: {}", std::mem::size_of_val(&i));
println!("size of `f` in bytes: {}", std::mem::size_of_val(&f));
}
| main | identifier_name |
literals.rs | fn main() {
// Suffixed literals, their types are known at initialization
let x = 1u8;
let y = 2u32;
let z = 3f32;
// Unsuffixed literal, their types depend on how they are used
let i = 1;
let f = 1.0;
// `size_of_val` returns the size of a variable in bytes
println!("size of `x` in bytes: {}", std::mem::size_of_val(&x));
println!("size of `y` in bytes: {}", std::mem::size_of_val(&y));
println!("size of `z` in bytes: {}", std::mem::size_of_val(&z));
println!("size of `i` in bytes: {}", std::mem::size_of_val(&i)); | println!("size of `f` in bytes: {}", std::mem::size_of_val(&f));
} | random_line_split |
|
literals.rs | fn main() | {
// Suffixed literals, their types are known at initialization
let x = 1u8;
let y = 2u32;
let z = 3f32;
// Unsuffixed literal, their types depend on how they are used
let i = 1;
let f = 1.0;
// `size_of_val` returns the size of a variable in bytes
println!("size of `x` in bytes: {}", std::mem::size_of_val(&x));
println!("size of `y` in bytes: {}", std::mem::size_of_val(&y));
println!("size of `z` in bytes: {}", std::mem::size_of_val(&z));
println!("size of `i` in bytes: {}", std::mem::size_of_val(&i));
println!("size of `f` in bytes: {}", std::mem::size_of_val(&f));
} | identifier_body |
|
const-binops.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(macro_rules)]
macro_rules! assert_approx_eq(
($a:expr, $b:expr) => ({
let (a, b) = (&$a, &$b);
assert!((*a - *b).abs() < 1.0e-6,
"{} is not approximately equal to {}", *a, *b);
})
)
static A: int = -4 + 3;
static A2: uint = 3 + 3;
static B: f64 = 3.0 + 2.7;
static C: int = 3 - 4;
static D: uint = 3 - 3;
static E: f64 = 3.0 - 2.7;
static E2: int = -3 * 3;
static F: uint = 3 * 3;
static G: f64 = 3.3 * 3.3;
static H: int = 3 / -1;
static I: uint = 3 / 3;
static J: f64 = 3.3 / 3.3;
static N: bool = true && false;
static O: bool = true || false;
static P: int = 3 & 1;
static Q: uint = 1 & 3;
static R: int = 3 | 1;
static S: uint = 1 | 3;
static T: int = 3 ^ 1;
static U: uint = 1 ^ 3;
static V: int = 1 << 3;
// NOTE: better shr coverage
static W: int = 1024 >> 4;
static X: uint = 1024 >> 4;
static Y: bool = 1i == 1;
static Z: bool = 1.0f64 == 1.0;
static AA: bool = 1i <= 2;
static AB: bool = -1i <= 2;
static AC: bool = 1.0f64 <= 2.0;
static AD: bool = 1i < 2;
static AE: bool = -1i < 2;
static AF: bool = 1.0f64 < 2.0;
static AG: bool = 1i!= 2;
static AH: bool = -1i!= 2;
static AI: bool = 1.0f64!= 2.0;
static AJ: bool = 2i >= 1;
static AK: bool = 2i >= -2;
static AL: bool = 1.0f64 >= -2.0;
static AM: bool = 2i > 1;
static AN: bool = 2i > -2;
static AO: bool = 1.0f64 > -2.0;
pub fn | () {
assert_eq!(A, -1);
assert_eq!(A2, 6);
assert_approx_eq!(B, 5.7);
assert_eq!(C, -1);
assert_eq!(D, 0);
assert_approx_eq!(E, 0.3);
assert_eq!(E2, -9);
assert_eq!(F, 9);
assert_approx_eq!(G, 10.89);
assert_eq!(H, -3);
assert_eq!(I, 1);
assert_approx_eq!(J, 1.0);
assert_eq!(N, false);
assert_eq!(O, true);
assert_eq!(P, 1);
assert_eq!(Q, 1);
assert_eq!(R, 3);
assert_eq!(S, 3);
assert_eq!(T, 2);
assert_eq!(U, 2);
assert_eq!(V, 8);
assert_eq!(W, 64);
assert_eq!(X, 64);
assert_eq!(Y, true);
assert_eq!(Z, true);
assert_eq!(AA, true);
assert_eq!(AB, true);
assert_eq!(AC, true);
assert_eq!(AD, true);
assert_eq!(AE, true);
assert_eq!(AF, true);
assert_eq!(AG, true);
assert_eq!(AH, true);
assert_eq!(AI, true);
assert_eq!(AJ, true);
assert_eq!(AK, true);
assert_eq!(AL, true);
assert_eq!(AM, true);
assert_eq!(AN, true);
assert_eq!(AO, true);
}
| main | identifier_name |
const-binops.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(macro_rules)]
macro_rules! assert_approx_eq(
($a:expr, $b:expr) => ({
let (a, b) = (&$a, &$b);
assert!((*a - *b).abs() < 1.0e-6,
"{} is not approximately equal to {}", *a, *b);
})
)
static A: int = -4 + 3;
static A2: uint = 3 + 3;
static B: f64 = 3.0 + 2.7;
static C: int = 3 - 4;
static D: uint = 3 - 3;
static E: f64 = 3.0 - 2.7;
static E2: int = -3 * 3;
static F: uint = 3 * 3;
static G: f64 = 3.3 * 3.3;
static H: int = 3 / -1;
static I: uint = 3 / 3;
static J: f64 = 3.3 / 3.3;
static N: bool = true && false;
static O: bool = true || false;
static P: int = 3 & 1;
static Q: uint = 1 & 3;
static R: int = 3 | 1;
static S: uint = 1 | 3; | static U: uint = 1 ^ 3;
static V: int = 1 << 3;
// NOTE: better shr coverage
static W: int = 1024 >> 4;
static X: uint = 1024 >> 4;
static Y: bool = 1i == 1;
static Z: bool = 1.0f64 == 1.0;
static AA: bool = 1i <= 2;
static AB: bool = -1i <= 2;
static AC: bool = 1.0f64 <= 2.0;
static AD: bool = 1i < 2;
static AE: bool = -1i < 2;
static AF: bool = 1.0f64 < 2.0;
static AG: bool = 1i!= 2;
static AH: bool = -1i!= 2;
static AI: bool = 1.0f64!= 2.0;
static AJ: bool = 2i >= 1;
static AK: bool = 2i >= -2;
static AL: bool = 1.0f64 >= -2.0;
static AM: bool = 2i > 1;
static AN: bool = 2i > -2;
static AO: bool = 1.0f64 > -2.0;
pub fn main() {
assert_eq!(A, -1);
assert_eq!(A2, 6);
assert_approx_eq!(B, 5.7);
assert_eq!(C, -1);
assert_eq!(D, 0);
assert_approx_eq!(E, 0.3);
assert_eq!(E2, -9);
assert_eq!(F, 9);
assert_approx_eq!(G, 10.89);
assert_eq!(H, -3);
assert_eq!(I, 1);
assert_approx_eq!(J, 1.0);
assert_eq!(N, false);
assert_eq!(O, true);
assert_eq!(P, 1);
assert_eq!(Q, 1);
assert_eq!(R, 3);
assert_eq!(S, 3);
assert_eq!(T, 2);
assert_eq!(U, 2);
assert_eq!(V, 8);
assert_eq!(W, 64);
assert_eq!(X, 64);
assert_eq!(Y, true);
assert_eq!(Z, true);
assert_eq!(AA, true);
assert_eq!(AB, true);
assert_eq!(AC, true);
assert_eq!(AD, true);
assert_eq!(AE, true);
assert_eq!(AF, true);
assert_eq!(AG, true);
assert_eq!(AH, true);
assert_eq!(AI, true);
assert_eq!(AJ, true);
assert_eq!(AK, true);
assert_eq!(AL, true);
assert_eq!(AM, true);
assert_eq!(AN, true);
assert_eq!(AO, true);
} |
static T: int = 3 ^ 1; | random_line_split |
const-binops.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(macro_rules)]
macro_rules! assert_approx_eq(
($a:expr, $b:expr) => ({
let (a, b) = (&$a, &$b);
assert!((*a - *b).abs() < 1.0e-6,
"{} is not approximately equal to {}", *a, *b);
})
)
static A: int = -4 + 3;
static A2: uint = 3 + 3;
static B: f64 = 3.0 + 2.7;
static C: int = 3 - 4;
static D: uint = 3 - 3;
static E: f64 = 3.0 - 2.7;
static E2: int = -3 * 3;
static F: uint = 3 * 3;
static G: f64 = 3.3 * 3.3;
static H: int = 3 / -1;
static I: uint = 3 / 3;
static J: f64 = 3.3 / 3.3;
static N: bool = true && false;
static O: bool = true || false;
static P: int = 3 & 1;
static Q: uint = 1 & 3;
static R: int = 3 | 1;
static S: uint = 1 | 3;
static T: int = 3 ^ 1;
static U: uint = 1 ^ 3;
static V: int = 1 << 3;
// NOTE: better shr coverage
static W: int = 1024 >> 4;
static X: uint = 1024 >> 4;
static Y: bool = 1i == 1;
static Z: bool = 1.0f64 == 1.0;
static AA: bool = 1i <= 2;
static AB: bool = -1i <= 2;
static AC: bool = 1.0f64 <= 2.0;
static AD: bool = 1i < 2;
static AE: bool = -1i < 2;
static AF: bool = 1.0f64 < 2.0;
static AG: bool = 1i!= 2;
static AH: bool = -1i!= 2;
static AI: bool = 1.0f64!= 2.0;
static AJ: bool = 2i >= 1;
static AK: bool = 2i >= -2;
static AL: bool = 1.0f64 >= -2.0;
static AM: bool = 2i > 1;
static AN: bool = 2i > -2;
static AO: bool = 1.0f64 > -2.0;
pub fn main() |
assert_eq!(P, 1);
assert_eq!(Q, 1);
assert_eq!(R, 3);
assert_eq!(S, 3);
assert_eq!(T, 2);
assert_eq!(U, 2);
assert_eq!(V, 8);
assert_eq!(W, 64);
assert_eq!(X, 64);
assert_eq!(Y, true);
assert_eq!(Z, true);
assert_eq!(AA, true);
assert_eq!(AB, true);
assert_eq!(AC, true);
assert_eq!(AD, true);
assert_eq!(AE, true);
assert_eq!(AF, true);
assert_eq!(AG, true);
assert_eq!(AH, true);
assert_eq!(AI, true);
assert_eq!(AJ, true);
assert_eq!(AK, true);
assert_eq!(AL, true);
assert_eq!(AM, true);
assert_eq!(AN, true);
assert_eq!(AO, true);
}
| {
assert_eq!(A, -1);
assert_eq!(A2, 6);
assert_approx_eq!(B, 5.7);
assert_eq!(C, -1);
assert_eq!(D, 0);
assert_approx_eq!(E, 0.3);
assert_eq!(E2, -9);
assert_eq!(F, 9);
assert_approx_eq!(G, 10.89);
assert_eq!(H, -3);
assert_eq!(I, 1);
assert_approx_eq!(J, 1.0);
assert_eq!(N, false);
assert_eq!(O, true); | identifier_body |
main.rs | //! # Standalone Pact Verifier
//!
//! This project provides a command line interface to verify pact files against a running provider. It is a single executable binary. It implements the [V2 Pact specification](https://github.com/pact-foundation/pact-specification/tree/version-2).
//!
//! [Online rust docs](https://docs.rs/pact_verifier_cli/)
//!
//! The Pact Verifier works by taking all the interactions (requests and responses) from a number of pact files. For each interaction, it will make the request defined in the pact to a running service provider and check the response received back against the one defined in the pact file. All mismatches will then be reported.
//!
//! ## Command line interface
//!
//! The pact verifier is bundled as a single binary executable `pact_verifier_cli`. Running this with out any options displays the standard help.
//!
//! ```console,ignore
//! pact_verifier_cli 0.9.8
//! Standalone Pact verifier
//!
//! USAGE:
//! pact_verifier_cli [FLAGS] [OPTIONS] --broker-url <broker-url> --dir <dir>... --file <file>... --provider-name <provider-name> --url <url>...
//!
//! FLAGS:
//! --disable-ssl-verification Disables validation of SSL certificates
//! --enable-pending Enables Pending Pacts
//! --help Prints help information
//! --publish Enables publishing of verification results back to the Pact Broker. Requires the
//! broker-url and provider-version parameters.
//! --state-change-as-query State change request data will be sent as query parameters instead of in the
//! request body
//! --state-change-teardown State change teardown requests are to be made after each interaction
//! -v, --version Prints version information
//!
//! OPTIONS:
//! --base-path <base-path> Base path to add to all requests
//! -b, --broker-url <broker-url>
//! URL of the pact broker to fetch pacts from to verify (requires the provider name parameter) [env:
//! PACT_BROKER_BASE_URL=]
//! --build-url <build-url>
//! URL of the build to associate with the published verification results.
//!
//! --consumer-version-selectors <consumer-version-selectors>...
//! Consumer version selectors to use when fetching pacts from the Broker. Accepts a JSON string as per
//! https://docs.pact.io/pact_broker/advanced_topics/consumer_version_selectors/
//! --consumer-version-tags <consumer-version-tags>
//! Consumer tags to use when fetching pacts from the Broker. Accepts comma-separated values.
//!
//! --header <custom-header>...
//! Add a custom header to be included in the calls to the provider. Values must be in the form KEY=VALUE, where
//! KEY and VALUE contain ASCII characters (32-127) only. Can be repeated.
//! -d, --dir <dir>...
//! Directory of pact files to verify (can be repeated)
//!
//! -f, --file <file>... Pact file to verify (can be repeated)
//! -c, --filter-consumer <filter-consumer>...
//! Consumer name to filter the pacts to be verified (can be repeated)
//!
//! --filter-description <filter-description>
//! Only validate interactions whose descriptions match this filter [env: PACT_DESCRIPTION=]
//!
//! --filter-no-state <filter-no-state>
//! Only validate interactions that have no defined provider state [env: PACT_PROVIDER_NO_STATE=]
//!
//! --filter-state <filter-state>
//! Only validate interactions whose provider states match this filter [env: PACT_PROVIDER_STATE=]
//!
//! -h, --hostname <hostname> Provider hostname (defaults to localhost)
//! --include-wip-pacts-since <include-wip-pacts-since>
//! Allow pacts that don't match given consumer selectors (or tags) to be verified, without causing the overall
//! task to fail. For more information, see https://pact.io/wip
//! -j, --json <json-file> Generate a JSON report of the verification
//! -l, --loglevel <loglevel>
//! Log level (defaults to warn) [possible values: error, warn, info, debug,
//! trace, none]
//! --password <password>
//! Password to use when fetching pacts from URLS [env: PACT_BROKER_PASSWORD=]
//!
//! -p, --port <port>
//! Provider port (defaults to protocol default 80/443)
//!
//! --provider-branch <provider-branch> Provider branch to use when publishing results
//! -n, --provider-name <provider-name> Provider name (defaults to provider)
//! --provider-tags <provider-tags>
//! Provider tags to use when publishing results. Accepts comma-separated values.
//!
//! --provider-version <provider-version>
//! Provider version that is being verified. This is required when publishing results.
//!
//! --request-timeout <request-timeout>
//! Sets the HTTP request timeout in milliseconds for requests to the target API and for state change requests.
//!
//! --scheme <scheme>
//! Provider URI scheme (defaults to http) [default: http] [possible values: http, https]
//!
//! -s, --state-change-url <state-change-url> URL to post state change requests to
//! -t, --token <token>
//! Bearer token to use when fetching pacts from URLS [env: PACT_BROKER_TOKEN=]
//!
//! -u, --url <url>... URL of pact file to verify (can be repeated)
//! --user
//! Username to use when fetching pacts from URLS [env: PACT_BROKER_USERNAME=]
//! ```
//!
//! ## Options
//!
//! ### Log Level
//!
//! You can control the log level with the `-l, --loglevel <loglevel>` option. It defaults to warn, and the options that you can specify are: error, warn, info, debug, trace, none.
//!
//! ### Pact File Sources
//!
//! You can specify the pacts to verify with the following options. They can be repeated to set multiple sources.
//!
//! | Option | Type | Description |
//! |--------|------|-------------|
//! | `-f, --file <file>` | File | Loads a pact from the given file |
//! | `-u, --url <url>` | URL | Loads a pact from a URL resource |
//! | `-d, --dir <dir>` | Directory | Loads all the pacts from the given directory |
//! | `-b, --broker-url <broker-url>` | Pact Broker | Loads all the pacts for the provider from the pact broker. Requires the `-n, --provider-name <provider-name>` option |
//!
//! ### Provider Options
//!
//! The running provider can be specified with the following options:
//!
//! | Option | Description |
//! |--------|-------------|
//! | `-h, --hostname <hostname>` | The provider hostname, defaults to `localhost` |
//! | `-p, --port <port>` | The provider port (defaults to 8080) |
//! | `-n, --provider-name <provider-name>` | The name of the provider. Required if you are loading pacts from a pact broker |
//!
//! ### Filtering the interactions
//!
//! The interactions that are verified can be filtered by the following options:
//!
//! #### `-c, --filter-consumer <filter-consumer>`
//!
//! This will only verify the interactions of matching consumers. You can specify multiple consumers by either seperating the names with a comma, or repeating the option.
//!
//! #### `--filter-description <filter-description>`
//!
//! This option will filter the interactions that are verified that match by desciption. You can use a regular expression to match.
//!
//! #### `--filter-state <filter-state>`
//!
//! This option will filter the interactions that are verified that match by provider state. You can use a regular expression to match. Can't be used with the `--filter-no-state` option.
//!
//! #### `--filter-no-state`
//!
//! This option will filter the interactions that are verified that don't have a defined provider state. Can't be used with the `--filter-state` option.
//!
//! ### State change requests
//!
//! Provider states are a mechanism to define the state that the provider needs to be in to be able to verify a particular request. This is achieved by setting a state change URL that will receive a POST request with the provider state before the actual request is made.
//!
//! #### `-s, --state-change-url <state-change-url>`
//!
//! This sets the URL that the POST requests will be made to before each actual request.
//!
//! #### `--state-change-as-query`
//!
//! By default, the state for the state change request will be sent as a JSON document in the body of the request. This option forces it to be sent as a query parameter instead.
//!
//! #### `--state-change-teardown`
//!
//! This option will cause the verifier to also make a tear down request after the main request is made. It will receive a second field in the body or a query parameter named `action` with the value `teardown`.
//!
//! ## Example run
//!
//! This will verify all the pacts for the `happy_provider` found in the pact broker (running on localhost) against the provider running on localhost port 5050. Only the pacts for the consumers `Consumer` and `Consumer2` will be verified.
//!
//! ```console,ignore
//! $ pact_verifier_cli -b http://localhost -n 'happy_provider' -p 5050 --filter-consumer Consumer --filter-consumer Consumer2
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer/version/1.0.0", assuming V1.1 specification
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer2/version/1.0.0", assuming V1.1 specification
//!
//! Verifying a pact between Consumer and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Verifying a pact between Consumer2 and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Failures:
//!
//! 0) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 1) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 2) Verifying a pact between Consumer and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 3) Verifying a pact between Consumer and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//! 4) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 5) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 6) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 7) Verifying a pact between Consumer2 and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//!
//! There were 8 pact failures
//!
//! ```
#![warn(missing_docs)]
// Due to large generated future for async fns
#![type_length_limit="100000000"]
use std::env;
use std::fs::File;
use std::io::Write;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use clap::{AppSettings, ArgMatches, ErrorKind};
use log::{debug, error, LevelFilter, warn};
use maplit::hashmap;
use pact_models::{PACT_RUST_VERSION, PactSpecification};
use pact_models::prelude::HttpAuth;
use serde_json::Value;
use simplelog::{ColorChoice, Config, TerminalMode, TermLogger};
use tokio::time::sleep;
use pact_verifier::{
FilterInfo,
NullRequestFilterExecutor,
PactSource,
ProviderInfo,
PublishOptions,
VerificationOptions,
verify_provider_async
};
use pact_verifier::callback_executors::HttpRequestProviderStateExecutor;
use pact_verifier::metrics::VerificationMetrics;
use pact_verifier::selectors::{consumer_tags_to_selectors, json_to_selectors};
use pact_verifier::verification_result::VerificationExecutionResult;
mod args;
/// Handles the command line arguments from the running process
pub async fn handle_cli(version: &str) -> Result<(), i32> {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let app = args::setup_app(program, version);
let matches = app
.setting(AppSettings::ArgRequiredElseHelp)
.setting(AppSettings::ColoredHelp)
.get_matches_safe();
match matches {
Ok(results) => handle_matches(&results).await,
Err(ref err) => {
match err.kind {
ErrorKind::HelpDisplayed => {
println!("{}", err.message);
Ok(())
},
ErrorKind::VersionDisplayed => {
print_version(version);
println!();
Ok(())
},
_ => {
err.exit()
}
}
}
}
}
async fn handle_matches(matches: &clap::ArgMatches<'_>) -> Result<(), i32> {
let level = matches.value_of("loglevel").unwrap_or("warn");
let log_level = match level {
"none" => LevelFilter::Off,
_ => LevelFilter::from_str(level).unwrap()
};
TermLogger::init(log_level, Config::default(), TerminalMode::Mixed, ColorChoice::Auto).unwrap_or_default();
let provider = ProviderInfo {
host: matches.value_of("hostname").unwrap_or("localhost").to_string(),
port: matches.value_of("port").map(|port| port.parse::<u16>().unwrap()),
path: matches.value_of("base-path").unwrap_or("/").into(),
protocol: matches.value_of("scheme").unwrap_or("http").to_string(),
.. ProviderInfo::default()
};
let source = pact_source(matches);
let filter = interaction_filter(matches);
let provider_state_executor = Arc::new(HttpRequestProviderStateExecutor {
state_change_url: matches.value_of("state-change-url").map(|s| s.to_string()),
state_change_body:!matches.is_present("state-change-as-query"),
state_change_teardown: matches.is_present("state-change-teardown")
});
let mut custom_headers = hashmap!{};
if let Some(headers) = matches.values_of("custom-header") {
for header in headers {
let (key, value) = header.split_once('=').ok_or_else(|| {
error!("Custom header values must be in the form KEY=VALUE, where KEY and VALUE contain ASCII characters (32-127) only.");
3
})?;
custom_headers.insert(key.to_string(), value.to_string());
}
}
let verification_options = VerificationOptions {
request_filter: None::<Arc<NullRequestFilterExecutor>>,
disable_ssl_verification: matches.is_present("disable-ssl-verification"),
request_timeout: matches.value_of("request-timeout")
.map(|t| t.parse::<u64>().unwrap_or(5000)).unwrap_or(5000),
custom_headers
};
let publish_options = if matches.is_present("publish") {
Some(PublishOptions {
provider_version: matches.value_of("provider-version").map(|v| v.to_string()),
build_url: matches.value_of("build-url").map(|v| v.to_string()),
provider_tags: matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect()),
provider_branch: matches.value_of("provider-branch").map(|v| v.to_string())
})
} else {
None
};
for s in &source {
debug!("Pact source to verify = {}", s);
};
verify_provider_async(
provider,
source,
filter,
matches.values_of_lossy("filter-consumer").unwrap_or_default(),
&verification_options,
publish_options.as_ref(),
&provider_state_executor,
Some(VerificationMetrics {
test_framework: "pact_verifier_cli".to_string(),
app_name: "pact_verifier_cli".to_string(),
app_version: env!("CARGO_PKG_VERSION").to_string()
}),
).await
.map_err(|err| {
error!("Verification failed with error: {}", err);
2
})
.and_then(|result| {
if let Some(json_file) = matches.value_of("json-file") {
if let Err(err) = write_json_report(&result, json_file) {
error!("Failed to write JSON report to '{json_file}' - {err}");
return Err(2)
}
}
if result.result { Ok(()) } else { Err(1) }
})
}
fn write_json_report(result: &VerificationExecutionResult, file_name: &str) -> anyhow::Result<()> {
debug!("Writing JSON result of the verification to '{file_name}'");
let mut f = File::create(file_name)?;
let json: Value = result.into();
f.write_all(json.to_string().as_bytes())?;
Ok(())
}
fn print_version(version: &str) {
println!("\npact verifier version : v{}", version);
println!("pact specification : v{}", PactSpecification::V4.version_str());
println!("models version : v{}", PACT_RUST_VERSION.unwrap_or_default());
}
fn pact_source(matches: &ArgMatches) -> Vec<PactSource> {
let mut sources = vec![];
if let Some(values) = matches.values_of("file") {
sources.extend(values.map(|v| PactSource::File(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("dir") {
sources.extend(values.map(|v| PactSource::Dir(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("url") {
sources.extend(values.map(|v| {
if matches.is_present("user") {
PactSource::URL(v.to_string(), matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}))
} else if matches.is_present("token") {
PactSource::URL(v.to_string(), matches.value_of("token").map(|token| HttpAuth::Token(token.to_string())))
} else {
PactSource::URL(v.to_string(), None)
}
}).collect::<Vec<PactSource>>());
};
if let Some(broker_url) = matches.value_of("broker-url") {
let name = matches.value_of("provider-name").map(|n| n.to_string()).unwrap_or_default();
let auth = matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}).or_else(|| matches.value_of("token").map(|t| HttpAuth::Token(t.to_string())));
let source = if matches.is_present("consumer-version-selectors") || matches.is_present("consumer-version-tags") {
let pending = matches.is_present("enable-pending");
let wip = matches.value_of("include-wip-pacts-since").map(|wip| wip.to_string());
let provider_tags = matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect());
let provider_branch = matches.value_of("provider-branch").map(|v| v.to_string());
let selectors = if matches.is_present("consumer-version-selectors") {
matches.values_of("consumer-version-selectors")
.map_or_else(Vec::new, |s| json_to_selectors(s.collect::<Vec<_>>()))
} else if matches.is_present("consumer-version-tags") {
matches.values_of("consumer-version-tags")
.map_or_else(Vec::new, |tags| consumer_tags_to_selectors(tags.collect::<Vec<_>>()))
} else {
vec![]
};
PactSource::BrokerWithDynamicConfiguration {
provider_name: name,
broker_url: broker_url.into(),
enable_pending: pending,
include_wip_pacts_since: wip,
provider_tags,
provider_branch,
selectors,
auth,
links: vec![]
}
} else {
PactSource::BrokerUrl(name, broker_url.to_string(), auth, vec![])
};
sources.push(source);
};
sources
}
fn interaction_filter(matches: &ArgMatches) -> FilterInfo {
if matches.is_present("filter-description") &&
(matches.is_present("filter-state") || matches.is_present("filter-no-state")) {
if matches.is_present("filter-state") {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
matches.value_of("filter-state").unwrap().to_string())
} else {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
String::new())
}
} else if matches.is_present("filter-description") {
FilterInfo::Description(matches.value_of("filter-description").unwrap().to_string())
} else if matches.is_present("filter-state") {
FilterInfo::State(matches.value_of("filter-state").unwrap().to_string())
} else if matches.is_present("filter-no-state") {
FilterInfo::State(String::new())
} else {
FilterInfo::None
}
}
fn main() | std::process::exit(err);
}
}
#[cfg(windows)]
fn init_windows() {
if let Err(err) = ansi_term::enable_ansi_support() {
warn!("Could not enable ANSI console support - {err}");
}
}
#[cfg(not(windows))]
fn init_windows() { }
| {
init_windows();
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.expect("Could not start a Tokio runtime for running async tasks");
let result = runtime.block_on(async {
let result = handle_cli(clap::crate_version!()).await;
// Add a small delay to let asynchronous tasks to complete
sleep(Duration::from_millis(500)).await;
result
});
runtime.shutdown_timeout(Duration::from_millis(500));
if let Err(err) = result { | identifier_body |
main.rs | //! # Standalone Pact Verifier
//!
//! This project provides a command line interface to verify pact files against a running provider. It is a single executable binary. It implements the [V2 Pact specification](https://github.com/pact-foundation/pact-specification/tree/version-2).
//!
//! [Online rust docs](https://docs.rs/pact_verifier_cli/)
//!
//! The Pact Verifier works by taking all the interactions (requests and responses) from a number of pact files. For each interaction, it will make the request defined in the pact to a running service provider and check the response received back against the one defined in the pact file. All mismatches will then be reported.
//!
//! ## Command line interface
//!
//! The pact verifier is bundled as a single binary executable `pact_verifier_cli`. Running this with out any options displays the standard help.
//!
//! ```console,ignore
//! pact_verifier_cli 0.9.8
//! Standalone Pact verifier
//!
//! USAGE:
//! pact_verifier_cli [FLAGS] [OPTIONS] --broker-url <broker-url> --dir <dir>... --file <file>... --provider-name <provider-name> --url <url>...
//!
//! FLAGS:
//! --disable-ssl-verification Disables validation of SSL certificates
//! --enable-pending Enables Pending Pacts
//! --help Prints help information
//! --publish Enables publishing of verification results back to the Pact Broker. Requires the
//! broker-url and provider-version parameters.
//! --state-change-as-query State change request data will be sent as query parameters instead of in the
//! request body
//! --state-change-teardown State change teardown requests are to be made after each interaction
//! -v, --version Prints version information
//!
//! OPTIONS:
//! --base-path <base-path> Base path to add to all requests
//! -b, --broker-url <broker-url>
//! URL of the pact broker to fetch pacts from to verify (requires the provider name parameter) [env:
//! PACT_BROKER_BASE_URL=]
//! --build-url <build-url>
//! URL of the build to associate with the published verification results.
//!
//! --consumer-version-selectors <consumer-version-selectors>...
//! Consumer version selectors to use when fetching pacts from the Broker. Accepts a JSON string as per
//! https://docs.pact.io/pact_broker/advanced_topics/consumer_version_selectors/
//! --consumer-version-tags <consumer-version-tags>
//! Consumer tags to use when fetching pacts from the Broker. Accepts comma-separated values.
//!
//! --header <custom-header>...
//! Add a custom header to be included in the calls to the provider. Values must be in the form KEY=VALUE, where
//! KEY and VALUE contain ASCII characters (32-127) only. Can be repeated.
//! -d, --dir <dir>...
//! Directory of pact files to verify (can be repeated)
//!
//! -f, --file <file>... Pact file to verify (can be repeated)
//! -c, --filter-consumer <filter-consumer>...
//! Consumer name to filter the pacts to be verified (can be repeated)
//!
//! --filter-description <filter-description>
//! Only validate interactions whose descriptions match this filter [env: PACT_DESCRIPTION=]
//!
//! --filter-no-state <filter-no-state>
//! Only validate interactions that have no defined provider state [env: PACT_PROVIDER_NO_STATE=]
//!
//! --filter-state <filter-state>
//! Only validate interactions whose provider states match this filter [env: PACT_PROVIDER_STATE=]
//!
//! -h, --hostname <hostname> Provider hostname (defaults to localhost)
//! --include-wip-pacts-since <include-wip-pacts-since>
//! Allow pacts that don't match given consumer selectors (or tags) to be verified, without causing the overall
//! task to fail. For more information, see https://pact.io/wip
//! -j, --json <json-file> Generate a JSON report of the verification
//! -l, --loglevel <loglevel>
//! Log level (defaults to warn) [possible values: error, warn, info, debug,
//! trace, none]
//! --password <password>
//! Password to use when fetching pacts from URLS [env: PACT_BROKER_PASSWORD=]
//!
//! -p, --port <port>
//! Provider port (defaults to protocol default 80/443)
//!
//! --provider-branch <provider-branch> Provider branch to use when publishing results
//! -n, --provider-name <provider-name> Provider name (defaults to provider)
//! --provider-tags <provider-tags>
//! Provider tags to use when publishing results. Accepts comma-separated values.
//!
//! --provider-version <provider-version>
//! Provider version that is being verified. This is required when publishing results.
//!
//! --request-timeout <request-timeout>
//! Sets the HTTP request timeout in milliseconds for requests to the target API and for state change requests.
//!
//! --scheme <scheme>
//! Provider URI scheme (defaults to http) [default: http] [possible values: http, https]
//!
//! -s, --state-change-url <state-change-url> URL to post state change requests to
//! -t, --token <token>
//! Bearer token to use when fetching pacts from URLS [env: PACT_BROKER_TOKEN=]
//!
//! -u, --url <url>... URL of pact file to verify (can be repeated)
//! --user
//! Username to use when fetching pacts from URLS [env: PACT_BROKER_USERNAME=]
//! ```
//!
//! ## Options
//!
//! ### Log Level
//!
//! You can control the log level with the `-l, --loglevel <loglevel>` option. It defaults to warn, and the options that you can specify are: error, warn, info, debug, trace, none.
//! | //! | Option | Type | Description |
//! |--------|------|-------------|
//! | `-f, --file <file>` | File | Loads a pact from the given file |
//! | `-u, --url <url>` | URL | Loads a pact from a URL resource |
//! | `-d, --dir <dir>` | Directory | Loads all the pacts from the given directory |
//! | `-b, --broker-url <broker-url>` | Pact Broker | Loads all the pacts for the provider from the pact broker. Requires the `-n, --provider-name <provider-name>` option |
//!
//! ### Provider Options
//!
//! The running provider can be specified with the following options:
//!
//! | Option | Description |
//! |--------|-------------|
//! | `-h, --hostname <hostname>` | The provider hostname, defaults to `localhost` |
//! | `-p, --port <port>` | The provider port (defaults to 8080) |
//! | `-n, --provider-name <provider-name>` | The name of the provider. Required if you are loading pacts from a pact broker |
//!
//! ### Filtering the interactions
//!
//! The interactions that are verified can be filtered by the following options:
//!
//! #### `-c, --filter-consumer <filter-consumer>`
//!
//! This will only verify the interactions of matching consumers. You can specify multiple consumers by either seperating the names with a comma, or repeating the option.
//!
//! #### `--filter-description <filter-description>`
//!
//! This option will filter the interactions that are verified that match by desciption. You can use a regular expression to match.
//!
//! #### `--filter-state <filter-state>`
//!
//! This option will filter the interactions that are verified that match by provider state. You can use a regular expression to match. Can't be used with the `--filter-no-state` option.
//!
//! #### `--filter-no-state`
//!
//! This option will filter the interactions that are verified that don't have a defined provider state. Can't be used with the `--filter-state` option.
//!
//! ### State change requests
//!
//! Provider states are a mechanism to define the state that the provider needs to be in to be able to verify a particular request. This is achieved by setting a state change URL that will receive a POST request with the provider state before the actual request is made.
//!
//! #### `-s, --state-change-url <state-change-url>`
//!
//! This sets the URL that the POST requests will be made to before each actual request.
//!
//! #### `--state-change-as-query`
//!
//! By default, the state for the state change request will be sent as a JSON document in the body of the request. This option forces it to be sent as a query parameter instead.
//!
//! #### `--state-change-teardown`
//!
//! This option will cause the verifier to also make a tear down request after the main request is made. It will receive a second field in the body or a query parameter named `action` with the value `teardown`.
//!
//! ## Example run
//!
//! This will verify all the pacts for the `happy_provider` found in the pact broker (running on localhost) against the provider running on localhost port 5050. Only the pacts for the consumers `Consumer` and `Consumer2` will be verified.
//!
//! ```console,ignore
//! $ pact_verifier_cli -b http://localhost -n 'happy_provider' -p 5050 --filter-consumer Consumer --filter-consumer Consumer2
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer/version/1.0.0", assuming V1.1 specification
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer2/version/1.0.0", assuming V1.1 specification
//!
//! Verifying a pact between Consumer and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Verifying a pact between Consumer2 and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Failures:
//!
//! 0) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 1) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 2) Verifying a pact between Consumer and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 3) Verifying a pact between Consumer and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//! 4) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 5) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 6) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 7) Verifying a pact between Consumer2 and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//!
//! There were 8 pact failures
//!
//! ```
#![warn(missing_docs)]
// Due to large generated future for async fns
#![type_length_limit="100000000"]
use std::env;
use std::fs::File;
use std::io::Write;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use clap::{AppSettings, ArgMatches, ErrorKind};
use log::{debug, error, LevelFilter, warn};
use maplit::hashmap;
use pact_models::{PACT_RUST_VERSION, PactSpecification};
use pact_models::prelude::HttpAuth;
use serde_json::Value;
use simplelog::{ColorChoice, Config, TerminalMode, TermLogger};
use tokio::time::sleep;
use pact_verifier::{
FilterInfo,
NullRequestFilterExecutor,
PactSource,
ProviderInfo,
PublishOptions,
VerificationOptions,
verify_provider_async
};
use pact_verifier::callback_executors::HttpRequestProviderStateExecutor;
use pact_verifier::metrics::VerificationMetrics;
use pact_verifier::selectors::{consumer_tags_to_selectors, json_to_selectors};
use pact_verifier::verification_result::VerificationExecutionResult;
mod args;
/// Handles the command line arguments from the running process
pub async fn handle_cli(version: &str) -> Result<(), i32> {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let app = args::setup_app(program, version);
let matches = app
.setting(AppSettings::ArgRequiredElseHelp)
.setting(AppSettings::ColoredHelp)
.get_matches_safe();
match matches {
Ok(results) => handle_matches(&results).await,
Err(ref err) => {
match err.kind {
ErrorKind::HelpDisplayed => {
println!("{}", err.message);
Ok(())
},
ErrorKind::VersionDisplayed => {
print_version(version);
println!();
Ok(())
},
_ => {
err.exit()
}
}
}
}
}
async fn handle_matches(matches: &clap::ArgMatches<'_>) -> Result<(), i32> {
let level = matches.value_of("loglevel").unwrap_or("warn");
let log_level = match level {
"none" => LevelFilter::Off,
_ => LevelFilter::from_str(level).unwrap()
};
TermLogger::init(log_level, Config::default(), TerminalMode::Mixed, ColorChoice::Auto).unwrap_or_default();
let provider = ProviderInfo {
host: matches.value_of("hostname").unwrap_or("localhost").to_string(),
port: matches.value_of("port").map(|port| port.parse::<u16>().unwrap()),
path: matches.value_of("base-path").unwrap_or("/").into(),
protocol: matches.value_of("scheme").unwrap_or("http").to_string(),
.. ProviderInfo::default()
};
let source = pact_source(matches);
let filter = interaction_filter(matches);
let provider_state_executor = Arc::new(HttpRequestProviderStateExecutor {
state_change_url: matches.value_of("state-change-url").map(|s| s.to_string()),
state_change_body:!matches.is_present("state-change-as-query"),
state_change_teardown: matches.is_present("state-change-teardown")
});
let mut custom_headers = hashmap!{};
if let Some(headers) = matches.values_of("custom-header") {
for header in headers {
let (key, value) = header.split_once('=').ok_or_else(|| {
error!("Custom header values must be in the form KEY=VALUE, where KEY and VALUE contain ASCII characters (32-127) only.");
3
})?;
custom_headers.insert(key.to_string(), value.to_string());
}
}
let verification_options = VerificationOptions {
request_filter: None::<Arc<NullRequestFilterExecutor>>,
disable_ssl_verification: matches.is_present("disable-ssl-verification"),
request_timeout: matches.value_of("request-timeout")
.map(|t| t.parse::<u64>().unwrap_or(5000)).unwrap_or(5000),
custom_headers
};
let publish_options = if matches.is_present("publish") {
Some(PublishOptions {
provider_version: matches.value_of("provider-version").map(|v| v.to_string()),
build_url: matches.value_of("build-url").map(|v| v.to_string()),
provider_tags: matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect()),
provider_branch: matches.value_of("provider-branch").map(|v| v.to_string())
})
} else {
None
};
for s in &source {
debug!("Pact source to verify = {}", s);
};
verify_provider_async(
provider,
source,
filter,
matches.values_of_lossy("filter-consumer").unwrap_or_default(),
&verification_options,
publish_options.as_ref(),
&provider_state_executor,
Some(VerificationMetrics {
test_framework: "pact_verifier_cli".to_string(),
app_name: "pact_verifier_cli".to_string(),
app_version: env!("CARGO_PKG_VERSION").to_string()
}),
).await
.map_err(|err| {
error!("Verification failed with error: {}", err);
2
})
.and_then(|result| {
if let Some(json_file) = matches.value_of("json-file") {
if let Err(err) = write_json_report(&result, json_file) {
error!("Failed to write JSON report to '{json_file}' - {err}");
return Err(2)
}
}
if result.result { Ok(()) } else { Err(1) }
})
}
fn write_json_report(result: &VerificationExecutionResult, file_name: &str) -> anyhow::Result<()> {
debug!("Writing JSON result of the verification to '{file_name}'");
let mut f = File::create(file_name)?;
let json: Value = result.into();
f.write_all(json.to_string().as_bytes())?;
Ok(())
}
fn print_version(version: &str) {
println!("\npact verifier version : v{}", version);
println!("pact specification : v{}", PactSpecification::V4.version_str());
println!("models version : v{}", PACT_RUST_VERSION.unwrap_or_default());
}
fn pact_source(matches: &ArgMatches) -> Vec<PactSource> {
let mut sources = vec![];
if let Some(values) = matches.values_of("file") {
sources.extend(values.map(|v| PactSource::File(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("dir") {
sources.extend(values.map(|v| PactSource::Dir(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("url") {
sources.extend(values.map(|v| {
if matches.is_present("user") {
PactSource::URL(v.to_string(), matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}))
} else if matches.is_present("token") {
PactSource::URL(v.to_string(), matches.value_of("token").map(|token| HttpAuth::Token(token.to_string())))
} else {
PactSource::URL(v.to_string(), None)
}
}).collect::<Vec<PactSource>>());
};
if let Some(broker_url) = matches.value_of("broker-url") {
let name = matches.value_of("provider-name").map(|n| n.to_string()).unwrap_or_default();
let auth = matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}).or_else(|| matches.value_of("token").map(|t| HttpAuth::Token(t.to_string())));
let source = if matches.is_present("consumer-version-selectors") || matches.is_present("consumer-version-tags") {
let pending = matches.is_present("enable-pending");
let wip = matches.value_of("include-wip-pacts-since").map(|wip| wip.to_string());
let provider_tags = matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect());
let provider_branch = matches.value_of("provider-branch").map(|v| v.to_string());
let selectors = if matches.is_present("consumer-version-selectors") {
matches.values_of("consumer-version-selectors")
.map_or_else(Vec::new, |s| json_to_selectors(s.collect::<Vec<_>>()))
} else if matches.is_present("consumer-version-tags") {
matches.values_of("consumer-version-tags")
.map_or_else(Vec::new, |tags| consumer_tags_to_selectors(tags.collect::<Vec<_>>()))
} else {
vec![]
};
PactSource::BrokerWithDynamicConfiguration {
provider_name: name,
broker_url: broker_url.into(),
enable_pending: pending,
include_wip_pacts_since: wip,
provider_tags,
provider_branch,
selectors,
auth,
links: vec![]
}
} else {
PactSource::BrokerUrl(name, broker_url.to_string(), auth, vec![])
};
sources.push(source);
};
sources
}
fn interaction_filter(matches: &ArgMatches) -> FilterInfo {
if matches.is_present("filter-description") &&
(matches.is_present("filter-state") || matches.is_present("filter-no-state")) {
if matches.is_present("filter-state") {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
matches.value_of("filter-state").unwrap().to_string())
} else {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
String::new())
}
} else if matches.is_present("filter-description") {
FilterInfo::Description(matches.value_of("filter-description").unwrap().to_string())
} else if matches.is_present("filter-state") {
FilterInfo::State(matches.value_of("filter-state").unwrap().to_string())
} else if matches.is_present("filter-no-state") {
FilterInfo::State(String::new())
} else {
FilterInfo::None
}
}
fn main() {
init_windows();
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.expect("Could not start a Tokio runtime for running async tasks");
let result = runtime.block_on(async {
let result = handle_cli(clap::crate_version!()).await;
// Add a small delay to let asynchronous tasks to complete
sleep(Duration::from_millis(500)).await;
result
});
runtime.shutdown_timeout(Duration::from_millis(500));
if let Err(err) = result {
std::process::exit(err);
}
}
#[cfg(windows)]
fn init_windows() {
if let Err(err) = ansi_term::enable_ansi_support() {
warn!("Could not enable ANSI console support - {err}");
}
}
#[cfg(not(windows))]
fn init_windows() { } | //! ### Pact File Sources
//!
//! You can specify the pacts to verify with the following options. They can be repeated to set multiple sources.
//! | random_line_split |
main.rs | //! # Standalone Pact Verifier
//!
//! This project provides a command line interface to verify pact files against a running provider. It is a single executable binary. It implements the [V2 Pact specification](https://github.com/pact-foundation/pact-specification/tree/version-2).
//!
//! [Online rust docs](https://docs.rs/pact_verifier_cli/)
//!
//! The Pact Verifier works by taking all the interactions (requests and responses) from a number of pact files. For each interaction, it will make the request defined in the pact to a running service provider and check the response received back against the one defined in the pact file. All mismatches will then be reported.
//!
//! ## Command line interface
//!
//! The pact verifier is bundled as a single binary executable `pact_verifier_cli`. Running this with out any options displays the standard help.
//!
//! ```console,ignore
//! pact_verifier_cli 0.9.8
//! Standalone Pact verifier
//!
//! USAGE:
//! pact_verifier_cli [FLAGS] [OPTIONS] --broker-url <broker-url> --dir <dir>... --file <file>... --provider-name <provider-name> --url <url>...
//!
//! FLAGS:
//! --disable-ssl-verification Disables validation of SSL certificates
//! --enable-pending Enables Pending Pacts
//! --help Prints help information
//! --publish Enables publishing of verification results back to the Pact Broker. Requires the
//! broker-url and provider-version parameters.
//! --state-change-as-query State change request data will be sent as query parameters instead of in the
//! request body
//! --state-change-teardown State change teardown requests are to be made after each interaction
//! -v, --version Prints version information
//!
//! OPTIONS:
//! --base-path <base-path> Base path to add to all requests
//! -b, --broker-url <broker-url>
//! URL of the pact broker to fetch pacts from to verify (requires the provider name parameter) [env:
//! PACT_BROKER_BASE_URL=]
//! --build-url <build-url>
//! URL of the build to associate with the published verification results.
//!
//! --consumer-version-selectors <consumer-version-selectors>...
//! Consumer version selectors to use when fetching pacts from the Broker. Accepts a JSON string as per
//! https://docs.pact.io/pact_broker/advanced_topics/consumer_version_selectors/
//! --consumer-version-tags <consumer-version-tags>
//! Consumer tags to use when fetching pacts from the Broker. Accepts comma-separated values.
//!
//! --header <custom-header>...
//! Add a custom header to be included in the calls to the provider. Values must be in the form KEY=VALUE, where
//! KEY and VALUE contain ASCII characters (32-127) only. Can be repeated.
//! -d, --dir <dir>...
//! Directory of pact files to verify (can be repeated)
//!
//! -f, --file <file>... Pact file to verify (can be repeated)
//! -c, --filter-consumer <filter-consumer>...
//! Consumer name to filter the pacts to be verified (can be repeated)
//!
//! --filter-description <filter-description>
//! Only validate interactions whose descriptions match this filter [env: PACT_DESCRIPTION=]
//!
//! --filter-no-state <filter-no-state>
//! Only validate interactions that have no defined provider state [env: PACT_PROVIDER_NO_STATE=]
//!
//! --filter-state <filter-state>
//! Only validate interactions whose provider states match this filter [env: PACT_PROVIDER_STATE=]
//!
//! -h, --hostname <hostname> Provider hostname (defaults to localhost)
//! --include-wip-pacts-since <include-wip-pacts-since>
//! Allow pacts that don't match given consumer selectors (or tags) to be verified, without causing the overall
//! task to fail. For more information, see https://pact.io/wip
//! -j, --json <json-file> Generate a JSON report of the verification
//! -l, --loglevel <loglevel>
//! Log level (defaults to warn) [possible values: error, warn, info, debug,
//! trace, none]
//! --password <password>
//! Password to use when fetching pacts from URLS [env: PACT_BROKER_PASSWORD=]
//!
//! -p, --port <port>
//! Provider port (defaults to protocol default 80/443)
//!
//! --provider-branch <provider-branch> Provider branch to use when publishing results
//! -n, --provider-name <provider-name> Provider name (defaults to provider)
//! --provider-tags <provider-tags>
//! Provider tags to use when publishing results. Accepts comma-separated values.
//!
//! --provider-version <provider-version>
//! Provider version that is being verified. This is required when publishing results.
//!
//! --request-timeout <request-timeout>
//! Sets the HTTP request timeout in milliseconds for requests to the target API and for state change requests.
//!
//! --scheme <scheme>
//! Provider URI scheme (defaults to http) [default: http] [possible values: http, https]
//!
//! -s, --state-change-url <state-change-url> URL to post state change requests to
//! -t, --token <token>
//! Bearer token to use when fetching pacts from URLS [env: PACT_BROKER_TOKEN=]
//!
//! -u, --url <url>... URL of pact file to verify (can be repeated)
//! --user
//! Username to use when fetching pacts from URLS [env: PACT_BROKER_USERNAME=]
//! ```
//!
//! ## Options
//!
//! ### Log Level
//!
//! You can control the log level with the `-l, --loglevel <loglevel>` option. It defaults to warn, and the options that you can specify are: error, warn, info, debug, trace, none.
//!
//! ### Pact File Sources
//!
//! You can specify the pacts to verify with the following options. They can be repeated to set multiple sources.
//!
//! | Option | Type | Description |
//! |--------|------|-------------|
//! | `-f, --file <file>` | File | Loads a pact from the given file |
//! | `-u, --url <url>` | URL | Loads a pact from a URL resource |
//! | `-d, --dir <dir>` | Directory | Loads all the pacts from the given directory |
//! | `-b, --broker-url <broker-url>` | Pact Broker | Loads all the pacts for the provider from the pact broker. Requires the `-n, --provider-name <provider-name>` option |
//!
//! ### Provider Options
//!
//! The running provider can be specified with the following options:
//!
//! | Option | Description |
//! |--------|-------------|
//! | `-h, --hostname <hostname>` | The provider hostname, defaults to `localhost` |
//! | `-p, --port <port>` | The provider port (defaults to 8080) |
//! | `-n, --provider-name <provider-name>` | The name of the provider. Required if you are loading pacts from a pact broker |
//!
//! ### Filtering the interactions
//!
//! The interactions that are verified can be filtered by the following options:
//!
//! #### `-c, --filter-consumer <filter-consumer>`
//!
//! This will only verify the interactions of matching consumers. You can specify multiple consumers by either seperating the names with a comma, or repeating the option.
//!
//! #### `--filter-description <filter-description>`
//!
//! This option will filter the interactions that are verified that match by desciption. You can use a regular expression to match.
//!
//! #### `--filter-state <filter-state>`
//!
//! This option will filter the interactions that are verified that match by provider state. You can use a regular expression to match. Can't be used with the `--filter-no-state` option.
//!
//! #### `--filter-no-state`
//!
//! This option will filter the interactions that are verified that don't have a defined provider state. Can't be used with the `--filter-state` option.
//!
//! ### State change requests
//!
//! Provider states are a mechanism to define the state that the provider needs to be in to be able to verify a particular request. This is achieved by setting a state change URL that will receive a POST request with the provider state before the actual request is made.
//!
//! #### `-s, --state-change-url <state-change-url>`
//!
//! This sets the URL that the POST requests will be made to before each actual request.
//!
//! #### `--state-change-as-query`
//!
//! By default, the state for the state change request will be sent as a JSON document in the body of the request. This option forces it to be sent as a query parameter instead.
//!
//! #### `--state-change-teardown`
//!
//! This option will cause the verifier to also make a tear down request after the main request is made. It will receive a second field in the body or a query parameter named `action` with the value `teardown`.
//!
//! ## Example run
//!
//! This will verify all the pacts for the `happy_provider` found in the pact broker (running on localhost) against the provider running on localhost port 5050. Only the pacts for the consumers `Consumer` and `Consumer2` will be verified.
//!
//! ```console,ignore
//! $ pact_verifier_cli -b http://localhost -n 'happy_provider' -p 5050 --filter-consumer Consumer --filter-consumer Consumer2
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer/version/1.0.0", assuming V1.1 specification
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer2/version/1.0.0", assuming V1.1 specification
//!
//! Verifying a pact between Consumer and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Verifying a pact between Consumer2 and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Failures:
//!
//! 0) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 1) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 2) Verifying a pact between Consumer and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 3) Verifying a pact between Consumer and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//! 4) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 5) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 6) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 7) Verifying a pact between Consumer2 and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//!
//! There were 8 pact failures
//!
//! ```
#![warn(missing_docs)]
// Due to large generated future for async fns
#![type_length_limit="100000000"]
use std::env;
use std::fs::File;
use std::io::Write;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use clap::{AppSettings, ArgMatches, ErrorKind};
use log::{debug, error, LevelFilter, warn};
use maplit::hashmap;
use pact_models::{PACT_RUST_VERSION, PactSpecification};
use pact_models::prelude::HttpAuth;
use serde_json::Value;
use simplelog::{ColorChoice, Config, TerminalMode, TermLogger};
use tokio::time::sleep;
use pact_verifier::{
FilterInfo,
NullRequestFilterExecutor,
PactSource,
ProviderInfo,
PublishOptions,
VerificationOptions,
verify_provider_async
};
use pact_verifier::callback_executors::HttpRequestProviderStateExecutor;
use pact_verifier::metrics::VerificationMetrics;
use pact_verifier::selectors::{consumer_tags_to_selectors, json_to_selectors};
use pact_verifier::verification_result::VerificationExecutionResult;
mod args;
/// Handles the command line arguments from the running process
pub async fn handle_cli(version: &str) -> Result<(), i32> {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let app = args::setup_app(program, version);
let matches = app
.setting(AppSettings::ArgRequiredElseHelp)
.setting(AppSettings::ColoredHelp)
.get_matches_safe();
match matches {
Ok(results) => handle_matches(&results).await,
Err(ref err) => {
match err.kind {
ErrorKind::HelpDisplayed => | ,
ErrorKind::VersionDisplayed => {
print_version(version);
println!();
Ok(())
},
_ => {
err.exit()
}
}
}
}
}
async fn handle_matches(matches: &clap::ArgMatches<'_>) -> Result<(), i32> {
let level = matches.value_of("loglevel").unwrap_or("warn");
let log_level = match level {
"none" => LevelFilter::Off,
_ => LevelFilter::from_str(level).unwrap()
};
TermLogger::init(log_level, Config::default(), TerminalMode::Mixed, ColorChoice::Auto).unwrap_or_default();
let provider = ProviderInfo {
host: matches.value_of("hostname").unwrap_or("localhost").to_string(),
port: matches.value_of("port").map(|port| port.parse::<u16>().unwrap()),
path: matches.value_of("base-path").unwrap_or("/").into(),
protocol: matches.value_of("scheme").unwrap_or("http").to_string(),
.. ProviderInfo::default()
};
let source = pact_source(matches);
let filter = interaction_filter(matches);
let provider_state_executor = Arc::new(HttpRequestProviderStateExecutor {
state_change_url: matches.value_of("state-change-url").map(|s| s.to_string()),
state_change_body:!matches.is_present("state-change-as-query"),
state_change_teardown: matches.is_present("state-change-teardown")
});
let mut custom_headers = hashmap!{};
if let Some(headers) = matches.values_of("custom-header") {
for header in headers {
let (key, value) = header.split_once('=').ok_or_else(|| {
error!("Custom header values must be in the form KEY=VALUE, where KEY and VALUE contain ASCII characters (32-127) only.");
3
})?;
custom_headers.insert(key.to_string(), value.to_string());
}
}
let verification_options = VerificationOptions {
request_filter: None::<Arc<NullRequestFilterExecutor>>,
disable_ssl_verification: matches.is_present("disable-ssl-verification"),
request_timeout: matches.value_of("request-timeout")
.map(|t| t.parse::<u64>().unwrap_or(5000)).unwrap_or(5000),
custom_headers
};
let publish_options = if matches.is_present("publish") {
Some(PublishOptions {
provider_version: matches.value_of("provider-version").map(|v| v.to_string()),
build_url: matches.value_of("build-url").map(|v| v.to_string()),
provider_tags: matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect()),
provider_branch: matches.value_of("provider-branch").map(|v| v.to_string())
})
} else {
None
};
for s in &source {
debug!("Pact source to verify = {}", s);
};
verify_provider_async(
provider,
source,
filter,
matches.values_of_lossy("filter-consumer").unwrap_or_default(),
&verification_options,
publish_options.as_ref(),
&provider_state_executor,
Some(VerificationMetrics {
test_framework: "pact_verifier_cli".to_string(),
app_name: "pact_verifier_cli".to_string(),
app_version: env!("CARGO_PKG_VERSION").to_string()
}),
).await
.map_err(|err| {
error!("Verification failed with error: {}", err);
2
})
.and_then(|result| {
if let Some(json_file) = matches.value_of("json-file") {
if let Err(err) = write_json_report(&result, json_file) {
error!("Failed to write JSON report to '{json_file}' - {err}");
return Err(2)
}
}
if result.result { Ok(()) } else { Err(1) }
})
}
fn write_json_report(result: &VerificationExecutionResult, file_name: &str) -> anyhow::Result<()> {
debug!("Writing JSON result of the verification to '{file_name}'");
let mut f = File::create(file_name)?;
let json: Value = result.into();
f.write_all(json.to_string().as_bytes())?;
Ok(())
}
fn print_version(version: &str) {
println!("\npact verifier version : v{}", version);
println!("pact specification : v{}", PactSpecification::V4.version_str());
println!("models version : v{}", PACT_RUST_VERSION.unwrap_or_default());
}
fn pact_source(matches: &ArgMatches) -> Vec<PactSource> {
let mut sources = vec![];
if let Some(values) = matches.values_of("file") {
sources.extend(values.map(|v| PactSource::File(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("dir") {
sources.extend(values.map(|v| PactSource::Dir(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("url") {
sources.extend(values.map(|v| {
if matches.is_present("user") {
PactSource::URL(v.to_string(), matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}))
} else if matches.is_present("token") {
PactSource::URL(v.to_string(), matches.value_of("token").map(|token| HttpAuth::Token(token.to_string())))
} else {
PactSource::URL(v.to_string(), None)
}
}).collect::<Vec<PactSource>>());
};
if let Some(broker_url) = matches.value_of("broker-url") {
let name = matches.value_of("provider-name").map(|n| n.to_string()).unwrap_or_default();
let auth = matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}).or_else(|| matches.value_of("token").map(|t| HttpAuth::Token(t.to_string())));
let source = if matches.is_present("consumer-version-selectors") || matches.is_present("consumer-version-tags") {
let pending = matches.is_present("enable-pending");
let wip = matches.value_of("include-wip-pacts-since").map(|wip| wip.to_string());
let provider_tags = matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect());
let provider_branch = matches.value_of("provider-branch").map(|v| v.to_string());
let selectors = if matches.is_present("consumer-version-selectors") {
matches.values_of("consumer-version-selectors")
.map_or_else(Vec::new, |s| json_to_selectors(s.collect::<Vec<_>>()))
} else if matches.is_present("consumer-version-tags") {
matches.values_of("consumer-version-tags")
.map_or_else(Vec::new, |tags| consumer_tags_to_selectors(tags.collect::<Vec<_>>()))
} else {
vec![]
};
PactSource::BrokerWithDynamicConfiguration {
provider_name: name,
broker_url: broker_url.into(),
enable_pending: pending,
include_wip_pacts_since: wip,
provider_tags,
provider_branch,
selectors,
auth,
links: vec![]
}
} else {
PactSource::BrokerUrl(name, broker_url.to_string(), auth, vec![])
};
sources.push(source);
};
sources
}
fn interaction_filter(matches: &ArgMatches) -> FilterInfo {
if matches.is_present("filter-description") &&
(matches.is_present("filter-state") || matches.is_present("filter-no-state")) {
if matches.is_present("filter-state") {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
matches.value_of("filter-state").unwrap().to_string())
} else {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
String::new())
}
} else if matches.is_present("filter-description") {
FilterInfo::Description(matches.value_of("filter-description").unwrap().to_string())
} else if matches.is_present("filter-state") {
FilterInfo::State(matches.value_of("filter-state").unwrap().to_string())
} else if matches.is_present("filter-no-state") {
FilterInfo::State(String::new())
} else {
FilterInfo::None
}
}
fn main() {
init_windows();
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.expect("Could not start a Tokio runtime for running async tasks");
let result = runtime.block_on(async {
let result = handle_cli(clap::crate_version!()).await;
// Add a small delay to let asynchronous tasks to complete
sleep(Duration::from_millis(500)).await;
result
});
runtime.shutdown_timeout(Duration::from_millis(500));
if let Err(err) = result {
std::process::exit(err);
}
}
#[cfg(windows)]
fn init_windows() {
if let Err(err) = ansi_term::enable_ansi_support() {
warn!("Could not enable ANSI console support - {err}");
}
}
#[cfg(not(windows))]
fn init_windows() { }
| {
println!("{}", err.message);
Ok(())
} | conditional_block |
main.rs | //! # Standalone Pact Verifier
//!
//! This project provides a command line interface to verify pact files against a running provider. It is a single executable binary. It implements the [V2 Pact specification](https://github.com/pact-foundation/pact-specification/tree/version-2).
//!
//! [Online rust docs](https://docs.rs/pact_verifier_cli/)
//!
//! The Pact Verifier works by taking all the interactions (requests and responses) from a number of pact files. For each interaction, it will make the request defined in the pact to a running service provider and check the response received back against the one defined in the pact file. All mismatches will then be reported.
//!
//! ## Command line interface
//!
//! The pact verifier is bundled as a single binary executable `pact_verifier_cli`. Running this with out any options displays the standard help.
//!
//! ```console,ignore
//! pact_verifier_cli 0.9.8
//! Standalone Pact verifier
//!
//! USAGE:
//! pact_verifier_cli [FLAGS] [OPTIONS] --broker-url <broker-url> --dir <dir>... --file <file>... --provider-name <provider-name> --url <url>...
//!
//! FLAGS:
//! --disable-ssl-verification Disables validation of SSL certificates
//! --enable-pending Enables Pending Pacts
//! --help Prints help information
//! --publish Enables publishing of verification results back to the Pact Broker. Requires the
//! broker-url and provider-version parameters.
//! --state-change-as-query State change request data will be sent as query parameters instead of in the
//! request body
//! --state-change-teardown State change teardown requests are to be made after each interaction
//! -v, --version Prints version information
//!
//! OPTIONS:
//! --base-path <base-path> Base path to add to all requests
//! -b, --broker-url <broker-url>
//! URL of the pact broker to fetch pacts from to verify (requires the provider name parameter) [env:
//! PACT_BROKER_BASE_URL=]
//! --build-url <build-url>
//! URL of the build to associate with the published verification results.
//!
//! --consumer-version-selectors <consumer-version-selectors>...
//! Consumer version selectors to use when fetching pacts from the Broker. Accepts a JSON string as per
//! https://docs.pact.io/pact_broker/advanced_topics/consumer_version_selectors/
//! --consumer-version-tags <consumer-version-tags>
//! Consumer tags to use when fetching pacts from the Broker. Accepts comma-separated values.
//!
//! --header <custom-header>...
//! Add a custom header to be included in the calls to the provider. Values must be in the form KEY=VALUE, where
//! KEY and VALUE contain ASCII characters (32-127) only. Can be repeated.
//! -d, --dir <dir>...
//! Directory of pact files to verify (can be repeated)
//!
//! -f, --file <file>... Pact file to verify (can be repeated)
//! -c, --filter-consumer <filter-consumer>...
//! Consumer name to filter the pacts to be verified (can be repeated)
//!
//! --filter-description <filter-description>
//! Only validate interactions whose descriptions match this filter [env: PACT_DESCRIPTION=]
//!
//! --filter-no-state <filter-no-state>
//! Only validate interactions that have no defined provider state [env: PACT_PROVIDER_NO_STATE=]
//!
//! --filter-state <filter-state>
//! Only validate interactions whose provider states match this filter [env: PACT_PROVIDER_STATE=]
//!
//! -h, --hostname <hostname> Provider hostname (defaults to localhost)
//! --include-wip-pacts-since <include-wip-pacts-since>
//! Allow pacts that don't match given consumer selectors (or tags) to be verified, without causing the overall
//! task to fail. For more information, see https://pact.io/wip
//! -j, --json <json-file> Generate a JSON report of the verification
//! -l, --loglevel <loglevel>
//! Log level (defaults to warn) [possible values: error, warn, info, debug,
//! trace, none]
//! --password <password>
//! Password to use when fetching pacts from URLS [env: PACT_BROKER_PASSWORD=]
//!
//! -p, --port <port>
//! Provider port (defaults to protocol default 80/443)
//!
//! --provider-branch <provider-branch> Provider branch to use when publishing results
//! -n, --provider-name <provider-name> Provider name (defaults to provider)
//! --provider-tags <provider-tags>
//! Provider tags to use when publishing results. Accepts comma-separated values.
//!
//! --provider-version <provider-version>
//! Provider version that is being verified. This is required when publishing results.
//!
//! --request-timeout <request-timeout>
//! Sets the HTTP request timeout in milliseconds for requests to the target API and for state change requests.
//!
//! --scheme <scheme>
//! Provider URI scheme (defaults to http) [default: http] [possible values: http, https]
//!
//! -s, --state-change-url <state-change-url> URL to post state change requests to
//! -t, --token <token>
//! Bearer token to use when fetching pacts from URLS [env: PACT_BROKER_TOKEN=]
//!
//! -u, --url <url>... URL of pact file to verify (can be repeated)
//! --user
//! Username to use when fetching pacts from URLS [env: PACT_BROKER_USERNAME=]
//! ```
//!
//! ## Options
//!
//! ### Log Level
//!
//! You can control the log level with the `-l, --loglevel <loglevel>` option. It defaults to warn, and the options that you can specify are: error, warn, info, debug, trace, none.
//!
//! ### Pact File Sources
//!
//! You can specify the pacts to verify with the following options. They can be repeated to set multiple sources.
//!
//! | Option | Type | Description |
//! |--------|------|-------------|
//! | `-f, --file <file>` | File | Loads a pact from the given file |
//! | `-u, --url <url>` | URL | Loads a pact from a URL resource |
//! | `-d, --dir <dir>` | Directory | Loads all the pacts from the given directory |
//! | `-b, --broker-url <broker-url>` | Pact Broker | Loads all the pacts for the provider from the pact broker. Requires the `-n, --provider-name <provider-name>` option |
//!
//! ### Provider Options
//!
//! The running provider can be specified with the following options:
//!
//! | Option | Description |
//! |--------|-------------|
//! | `-h, --hostname <hostname>` | The provider hostname, defaults to `localhost` |
//! | `-p, --port <port>` | The provider port (defaults to 8080) |
//! | `-n, --provider-name <provider-name>` | The name of the provider. Required if you are loading pacts from a pact broker |
//!
//! ### Filtering the interactions
//!
//! The interactions that are verified can be filtered by the following options:
//!
//! #### `-c, --filter-consumer <filter-consumer>`
//!
//! This will only verify the interactions of matching consumers. You can specify multiple consumers by either seperating the names with a comma, or repeating the option.
//!
//! #### `--filter-description <filter-description>`
//!
//! This option will filter the interactions that are verified that match by desciption. You can use a regular expression to match.
//!
//! #### `--filter-state <filter-state>`
//!
//! This option will filter the interactions that are verified that match by provider state. You can use a regular expression to match. Can't be used with the `--filter-no-state` option.
//!
//! #### `--filter-no-state`
//!
//! This option will filter the interactions that are verified that don't have a defined provider state. Can't be used with the `--filter-state` option.
//!
//! ### State change requests
//!
//! Provider states are a mechanism to define the state that the provider needs to be in to be able to verify a particular request. This is achieved by setting a state change URL that will receive a POST request with the provider state before the actual request is made.
//!
//! #### `-s, --state-change-url <state-change-url>`
//!
//! This sets the URL that the POST requests will be made to before each actual request.
//!
//! #### `--state-change-as-query`
//!
//! By default, the state for the state change request will be sent as a JSON document in the body of the request. This option forces it to be sent as a query parameter instead.
//!
//! #### `--state-change-teardown`
//!
//! This option will cause the verifier to also make a tear down request after the main request is made. It will receive a second field in the body or a query parameter named `action` with the value `teardown`.
//!
//! ## Example run
//!
//! This will verify all the pacts for the `happy_provider` found in the pact broker (running on localhost) against the provider running on localhost port 5050. Only the pacts for the consumers `Consumer` and `Consumer2` will be verified.
//!
//! ```console,ignore
//! $ pact_verifier_cli -b http://localhost -n 'happy_provider' -p 5050 --filter-consumer Consumer --filter-consumer Consumer2
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer/version/1.0.0", assuming V1.1 specification
//! 21:59:28 [WARN] pact_matching::models: No metadata found in pact file "http://localhost/pacts/provider/happy_provider/consumer/Consumer2/version/1.0.0", assuming V1.1 specification
//!
//! Verifying a pact between Consumer and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Verifying a pact between Consumer2 and happy_provider
//! Given I am friends with Fred
//! WARNING: State Change ignored as there is no state change URL
//! Given I have no friends
//! WARNING: State Change ignored as there is no state change URL
//! a request to unfriend but no friends
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! has a matching body (OK)
//! a request friends
//! returns a response which
//! has status code 200 (FAILED)
//! includes headers
//! "Content-Type" with value "application/json" (FAILED)
//! has a matching body (FAILED)
//! a request to unfriend
//! returns a response which
//! has status code 200 (OK)
//! includes headers
//! "Content-Type" with value "application/json" (OK)
//! has a matching body (FAILED)
//!
//!
//! Failures:
//!
//! 0) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 1) Verifying a pact between Consumer and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 2) Verifying a pact between Consumer and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 3) Verifying a pact between Consumer and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//! 4) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has a matching body
//! expected "application/json" body but was "text/plain"
//!
//! 5) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which has status code 200
//! expected 200 but was 404
//!
//! 6) Verifying a pact between Consumer2 and happy_provider - a request friends returns a response which includes header "Content-Type" with value "application/json"
//! Expected header "Content-Type" to have value "application/json" but was "text/plain"
//!
//! 7) Verifying a pact between Consumer2 and happy_provider Given I am friends with Fred - a request to unfriend returns a response which has a matching body
//! $.body -> Type mismatch: Expected Map {"reply":"Bye"} but received "Ok"
//!
//!
//!
//! There were 8 pact failures
//!
//! ```
#![warn(missing_docs)]
// Due to large generated future for async fns
#![type_length_limit="100000000"]
use std::env;
use std::fs::File;
use std::io::Write;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use clap::{AppSettings, ArgMatches, ErrorKind};
use log::{debug, error, LevelFilter, warn};
use maplit::hashmap;
use pact_models::{PACT_RUST_VERSION, PactSpecification};
use pact_models::prelude::HttpAuth;
use serde_json::Value;
use simplelog::{ColorChoice, Config, TerminalMode, TermLogger};
use tokio::time::sleep;
use pact_verifier::{
FilterInfo,
NullRequestFilterExecutor,
PactSource,
ProviderInfo,
PublishOptions,
VerificationOptions,
verify_provider_async
};
use pact_verifier::callback_executors::HttpRequestProviderStateExecutor;
use pact_verifier::metrics::VerificationMetrics;
use pact_verifier::selectors::{consumer_tags_to_selectors, json_to_selectors};
use pact_verifier::verification_result::VerificationExecutionResult;
mod args;
/// Handles the command line arguments from the running process
pub async fn handle_cli(version: &str) -> Result<(), i32> {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let app = args::setup_app(program, version);
let matches = app
.setting(AppSettings::ArgRequiredElseHelp)
.setting(AppSettings::ColoredHelp)
.get_matches_safe();
match matches {
Ok(results) => handle_matches(&results).await,
Err(ref err) => {
match err.kind {
ErrorKind::HelpDisplayed => {
println!("{}", err.message);
Ok(())
},
ErrorKind::VersionDisplayed => {
print_version(version);
println!();
Ok(())
},
_ => {
err.exit()
}
}
}
}
}
async fn handle_matches(matches: &clap::ArgMatches<'_>) -> Result<(), i32> {
let level = matches.value_of("loglevel").unwrap_or("warn");
let log_level = match level {
"none" => LevelFilter::Off,
_ => LevelFilter::from_str(level).unwrap()
};
TermLogger::init(log_level, Config::default(), TerminalMode::Mixed, ColorChoice::Auto).unwrap_or_default();
let provider = ProviderInfo {
host: matches.value_of("hostname").unwrap_or("localhost").to_string(),
port: matches.value_of("port").map(|port| port.parse::<u16>().unwrap()),
path: matches.value_of("base-path").unwrap_or("/").into(),
protocol: matches.value_of("scheme").unwrap_or("http").to_string(),
.. ProviderInfo::default()
};
let source = pact_source(matches);
let filter = interaction_filter(matches);
let provider_state_executor = Arc::new(HttpRequestProviderStateExecutor {
state_change_url: matches.value_of("state-change-url").map(|s| s.to_string()),
state_change_body:!matches.is_present("state-change-as-query"),
state_change_teardown: matches.is_present("state-change-teardown")
});
let mut custom_headers = hashmap!{};
if let Some(headers) = matches.values_of("custom-header") {
for header in headers {
let (key, value) = header.split_once('=').ok_or_else(|| {
error!("Custom header values must be in the form KEY=VALUE, where KEY and VALUE contain ASCII characters (32-127) only.");
3
})?;
custom_headers.insert(key.to_string(), value.to_string());
}
}
let verification_options = VerificationOptions {
request_filter: None::<Arc<NullRequestFilterExecutor>>,
disable_ssl_verification: matches.is_present("disable-ssl-verification"),
request_timeout: matches.value_of("request-timeout")
.map(|t| t.parse::<u64>().unwrap_or(5000)).unwrap_or(5000),
custom_headers
};
let publish_options = if matches.is_present("publish") {
Some(PublishOptions {
provider_version: matches.value_of("provider-version").map(|v| v.to_string()),
build_url: matches.value_of("build-url").map(|v| v.to_string()),
provider_tags: matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect()),
provider_branch: matches.value_of("provider-branch").map(|v| v.to_string())
})
} else {
None
};
for s in &source {
debug!("Pact source to verify = {}", s);
};
verify_provider_async(
provider,
source,
filter,
matches.values_of_lossy("filter-consumer").unwrap_or_default(),
&verification_options,
publish_options.as_ref(),
&provider_state_executor,
Some(VerificationMetrics {
test_framework: "pact_verifier_cli".to_string(),
app_name: "pact_verifier_cli".to_string(),
app_version: env!("CARGO_PKG_VERSION").to_string()
}),
).await
.map_err(|err| {
error!("Verification failed with error: {}", err);
2
})
.and_then(|result| {
if let Some(json_file) = matches.value_of("json-file") {
if let Err(err) = write_json_report(&result, json_file) {
error!("Failed to write JSON report to '{json_file}' - {err}");
return Err(2)
}
}
if result.result { Ok(()) } else { Err(1) }
})
}
fn write_json_report(result: &VerificationExecutionResult, file_name: &str) -> anyhow::Result<()> {
debug!("Writing JSON result of the verification to '{file_name}'");
let mut f = File::create(file_name)?;
let json: Value = result.into();
f.write_all(json.to_string().as_bytes())?;
Ok(())
}
fn | (version: &str) {
println!("\npact verifier version : v{}", version);
println!("pact specification : v{}", PactSpecification::V4.version_str());
println!("models version : v{}", PACT_RUST_VERSION.unwrap_or_default());
}
fn pact_source(matches: &ArgMatches) -> Vec<PactSource> {
let mut sources = vec![];
if let Some(values) = matches.values_of("file") {
sources.extend(values.map(|v| PactSource::File(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("dir") {
sources.extend(values.map(|v| PactSource::Dir(v.to_string())).collect::<Vec<PactSource>>());
};
if let Some(values) = matches.values_of("url") {
sources.extend(values.map(|v| {
if matches.is_present("user") {
PactSource::URL(v.to_string(), matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}))
} else if matches.is_present("token") {
PactSource::URL(v.to_string(), matches.value_of("token").map(|token| HttpAuth::Token(token.to_string())))
} else {
PactSource::URL(v.to_string(), None)
}
}).collect::<Vec<PactSource>>());
};
if let Some(broker_url) = matches.value_of("broker-url") {
let name = matches.value_of("provider-name").map(|n| n.to_string()).unwrap_or_default();
let auth = matches.value_of("user").map(|user| {
HttpAuth::User(user.to_string(), matches.value_of("password").map(|p| p.to_string()))
}).or_else(|| matches.value_of("token").map(|t| HttpAuth::Token(t.to_string())));
let source = if matches.is_present("consumer-version-selectors") || matches.is_present("consumer-version-tags") {
let pending = matches.is_present("enable-pending");
let wip = matches.value_of("include-wip-pacts-since").map(|wip| wip.to_string());
let provider_tags = matches.values_of("provider-tags")
.map_or_else(Vec::new, |tags| tags.map(|tag| tag.to_string()).collect());
let provider_branch = matches.value_of("provider-branch").map(|v| v.to_string());
let selectors = if matches.is_present("consumer-version-selectors") {
matches.values_of("consumer-version-selectors")
.map_or_else(Vec::new, |s| json_to_selectors(s.collect::<Vec<_>>()))
} else if matches.is_present("consumer-version-tags") {
matches.values_of("consumer-version-tags")
.map_or_else(Vec::new, |tags| consumer_tags_to_selectors(tags.collect::<Vec<_>>()))
} else {
vec![]
};
PactSource::BrokerWithDynamicConfiguration {
provider_name: name,
broker_url: broker_url.into(),
enable_pending: pending,
include_wip_pacts_since: wip,
provider_tags,
provider_branch,
selectors,
auth,
links: vec![]
}
} else {
PactSource::BrokerUrl(name, broker_url.to_string(), auth, vec![])
};
sources.push(source);
};
sources
}
fn interaction_filter(matches: &ArgMatches) -> FilterInfo {
if matches.is_present("filter-description") &&
(matches.is_present("filter-state") || matches.is_present("filter-no-state")) {
if matches.is_present("filter-state") {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
matches.value_of("filter-state").unwrap().to_string())
} else {
FilterInfo::DescriptionAndState(matches.value_of("filter-description").unwrap().to_string(),
String::new())
}
} else if matches.is_present("filter-description") {
FilterInfo::Description(matches.value_of("filter-description").unwrap().to_string())
} else if matches.is_present("filter-state") {
FilterInfo::State(matches.value_of("filter-state").unwrap().to_string())
} else if matches.is_present("filter-no-state") {
FilterInfo::State(String::new())
} else {
FilterInfo::None
}
}
fn main() {
init_windows();
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.expect("Could not start a Tokio runtime for running async tasks");
let result = runtime.block_on(async {
let result = handle_cli(clap::crate_version!()).await;
// Add a small delay to let asynchronous tasks to complete
sleep(Duration::from_millis(500)).await;
result
});
runtime.shutdown_timeout(Duration::from_millis(500));
if let Err(err) = result {
std::process::exit(err);
}
}
#[cfg(windows)]
fn init_windows() {
if let Err(err) = ansi_term::enable_ansi_support() {
warn!("Could not enable ANSI console support - {err}");
}
}
#[cfg(not(windows))]
fn init_windows() { }
| print_version | identifier_name |
lib.rs | // Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings: | use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use fs::RelativePath;
pub mod data;
pub mod file;
pub mod path;
pub fn owned_string_vec(args: &[&str]) -> Vec<String> {
args.iter().map(<&str>::to_string).collect()
}
pub fn relative_paths<'a>(paths: &'a [&str]) -> impl Iterator<Item = RelativePath> + 'a {
paths.iter().map(|p| RelativePath::new(p).unwrap())
}
pub fn as_byte_owned_vec(str: &str) -> Vec<u8> {
Vec::from(str.as_bytes())
}
pub fn as_bytes(str: &str) -> Bytes {
Bytes::copy_from_slice(str.as_bytes())
}
pub fn make_file(path: &Path, contents: &[u8], mode: u32) {
let mut file = std::fs::File::create(&path).unwrap();
file.write_all(contents).unwrap();
let mut permissions = std::fs::metadata(path).unwrap().permissions();
permissions.set_mode(mode);
file.set_permissions(permissions).unwrap();
}
pub fn append_to_existing_file(path: &Path, contents: &[u8]) {
let mut file = std::fs::OpenOptions::new().write(true).open(&path).unwrap();
file.write_all(contents).unwrap();
} | #![allow(clippy::mutex_atomic)]
use bytes::Bytes; | random_line_split |
lib.rs | // Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use bytes::Bytes;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use fs::RelativePath;
pub mod data;
pub mod file;
pub mod path;
pub fn owned_string_vec(args: &[&str]) -> Vec<String> {
args.iter().map(<&str>::to_string).collect()
}
pub fn relative_paths<'a>(paths: &'a [&str]) -> impl Iterator<Item = RelativePath> + 'a {
paths.iter().map(|p| RelativePath::new(p).unwrap())
}
pub fn as_byte_owned_vec(str: &str) -> Vec<u8> {
Vec::from(str.as_bytes())
}
pub fn as_bytes(str: &str) -> Bytes |
pub fn make_file(path: &Path, contents: &[u8], mode: u32) {
let mut file = std::fs::File::create(&path).unwrap();
file.write_all(contents).unwrap();
let mut permissions = std::fs::metadata(path).unwrap().permissions();
permissions.set_mode(mode);
file.set_permissions(permissions).unwrap();
}
pub fn append_to_existing_file(path: &Path, contents: &[u8]) {
let mut file = std::fs::OpenOptions::new().write(true).open(&path).unwrap();
file.write_all(contents).unwrap();
}
| {
Bytes::copy_from_slice(str.as_bytes())
} | identifier_body |
lib.rs | // Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use bytes::Bytes;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use fs::RelativePath;
pub mod data;
pub mod file;
pub mod path;
pub fn owned_string_vec(args: &[&str]) -> Vec<String> {
args.iter().map(<&str>::to_string).collect()
}
pub fn relative_paths<'a>(paths: &'a [&str]) -> impl Iterator<Item = RelativePath> + 'a {
paths.iter().map(|p| RelativePath::new(p).unwrap())
}
pub fn as_byte_owned_vec(str: &str) -> Vec<u8> {
Vec::from(str.as_bytes())
}
pub fn | (str: &str) -> Bytes {
Bytes::copy_from_slice(str.as_bytes())
}
pub fn make_file(path: &Path, contents: &[u8], mode: u32) {
let mut file = std::fs::File::create(&path).unwrap();
file.write_all(contents).unwrap();
let mut permissions = std::fs::metadata(path).unwrap().permissions();
permissions.set_mode(mode);
file.set_permissions(permissions).unwrap();
}
pub fn append_to_existing_file(path: &Path, contents: &[u8]) {
let mut file = std::fs::OpenOptions::new().write(true).open(&path).unwrap();
file.write_all(contents).unwrap();
}
| as_bytes | identifier_name |
sphere.rs | use shapes::geometry::Geometry;
use na::{Vector3};
use ray::Ray;
use intersection::RawIntersection;
use shapes::bbox::BBox;
#[derive(PartialEq, Clone)]
pub struct Sphere {
pub center: Vector3<f64>,
pub radius: f64,
}
impl Sphere{
pub fn new(center:Vector3<f64>, radius: f64) -> Sphere {
Sphere {
center: center,
radius: radius,
}
}
}
impl Geometry for Sphere {
fn intersects(&self, r: &Ray) -> Option<RawIntersection> |
if dist.is_sign_negative() {
// If dist is negative, ray started inside sphere so find other root
dist = (-b + d.sqrt()) / a;
}
if dist < 0. { return None; }
let point = r.ro + (r.rd.normalize() * dist);
return Some(
RawIntersection {
dist: dist,
point: point,
normal: (point - self.center).normalize()
})
}
fn bounds(&self) -> BBox {
BBox::new(
Vector3::new(&self.center.x - &self.radius,
&self.center.y - &self.radius,
&self.center.z - &self.radius
),
Vector3::new(&self.center.x + &self.radius,
&self.center.y + &self.radius,
&self.center.z + &self.radius
),
)
}
}
| {
let dst = r.ro - self.center;
let a = r.rd.dot(&r.rd);
let b = dst.dot(&r.rd.normalize());
let c = dst.dot(&dst) - self.radius * self.radius;
/*
if c > 0. && b > 0. {
// Exit if r’s origin outside s (c > 0) and r pointing away from s (b > 0)
return None;
}
*/
let d = b * b - a*c;
if d < 0. {
return None
}
let mut dist = (-b - d.sqrt()) / a; | identifier_body |
sphere.rs | use shapes::geometry::Geometry;
use na::{Vector3};
use ray::Ray;
use intersection::RawIntersection;
use shapes::bbox::BBox;
#[derive(PartialEq, Clone)]
pub struct Sphere {
pub center: Vector3<f64>,
pub radius: f64,
}
impl Sphere{
pub fn new(center:Vector3<f64>, radius: f64) -> Sphere {
Sphere {
center: center,
radius: radius,
}
}
}
impl Geometry for Sphere {
fn intersects(&self, r: &Ray) -> Option<RawIntersection> {
let dst = r.ro - self.center;
let a = r.rd.dot(&r.rd);
let b = dst.dot(&r.rd.normalize());
let c = dst.dot(&dst) - self.radius * self.radius;
/*
if c > 0. && b > 0. {
// Exit if r’s origin outside s (c > 0) and r pointing away from s (b > 0)
return None;
}
*/
let d = b * b - a*c;
if d < 0. {
return None
}
let mut dist = (-b - d.sqrt()) / a;
if dist.is_sign_negative() {
// If dist is negative, ray started inside sphere so find other root
dist = (-b + d.sqrt()) / a;
}
if dist < 0. { return None; }
let point = r.ro + (r.rd.normalize() * dist);
return Some(
RawIntersection {
dist: dist,
point: point,
normal: (point - self.center).normalize()
})
}
fn bo | self) -> BBox {
BBox::new(
Vector3::new(&self.center.x - &self.radius,
&self.center.y - &self.radius,
&self.center.z - &self.radius
),
Vector3::new(&self.center.x + &self.radius,
&self.center.y + &self.radius,
&self.center.z + &self.radius
),
)
}
}
| unds(& | identifier_name |
sphere.rs | use shapes::geometry::Geometry;
use na::{Vector3};
use ray::Ray;
use intersection::RawIntersection;
use shapes::bbox::BBox;
#[derive(PartialEq, Clone)]
pub struct Sphere {
pub center: Vector3<f64>,
pub radius: f64,
}
impl Sphere{
pub fn new(center:Vector3<f64>, radius: f64) -> Sphere {
Sphere {
center: center,
radius: radius,
}
}
}
impl Geometry for Sphere {
fn intersects(&self, r: &Ray) -> Option<RawIntersection> {
let dst = r.ro - self.center;
let a = r.rd.dot(&r.rd);
let b = dst.dot(&r.rd.normalize());
let c = dst.dot(&dst) - self.radius * self.radius;
/*
if c > 0. && b > 0. {
// Exit if r’s origin outside s (c > 0) and r pointing away from s (b > 0)
return None;
}
*/
let d = b * b - a*c;
if d < 0. {
return None
}
let mut dist = (-b - d.sqrt()) / a;
if dist.is_sign_negative() {
// If dist is negative, ray started inside sphere so find other root
dist = (-b + d.sqrt()) / a;
}
if dist < 0. { return None; }
let point = r.ro + (r.rd.normalize() * dist);
return Some(
RawIntersection {
dist: dist,
point: point,
normal: (point - self.center).normalize()
})
}
fn bounds(&self) -> BBox { | ),
Vector3::new(&self.center.x + &self.radius,
&self.center.y + &self.radius,
&self.center.z + &self.radius
),
)
}
} | BBox::new(
Vector3::new(&self.center.x - &self.radius,
&self.center.y - &self.radius,
&self.center.z - &self.radius | random_line_split |
sphere.rs | use shapes::geometry::Geometry;
use na::{Vector3};
use ray::Ray;
use intersection::RawIntersection;
use shapes::bbox::BBox;
#[derive(PartialEq, Clone)]
pub struct Sphere {
pub center: Vector3<f64>,
pub radius: f64,
}
impl Sphere{
pub fn new(center:Vector3<f64>, radius: f64) -> Sphere {
Sphere {
center: center,
radius: radius,
}
}
}
impl Geometry for Sphere {
fn intersects(&self, r: &Ray) -> Option<RawIntersection> {
let dst = r.ro - self.center;
let a = r.rd.dot(&r.rd);
let b = dst.dot(&r.rd.normalize());
let c = dst.dot(&dst) - self.radius * self.radius;
/*
if c > 0. && b > 0. {
// Exit if r’s origin outside s (c > 0) and r pointing away from s (b > 0)
return None;
}
*/
let d = b * b - a*c;
if d < 0. {
return None
}
let mut dist = (-b - d.sqrt()) / a;
if dist.is_sign_negative() {
// If dist is negative, ray started inside sphere so find other root
dist = (-b + d.sqrt()) / a;
}
if dist < 0. { | let point = r.ro + (r.rd.normalize() * dist);
return Some(
RawIntersection {
dist: dist,
point: point,
normal: (point - self.center).normalize()
})
}
fn bounds(&self) -> BBox {
BBox::new(
Vector3::new(&self.center.x - &self.radius,
&self.center.y - &self.radius,
&self.center.z - &self.radius
),
Vector3::new(&self.center.x + &self.radius,
&self.center.y + &self.radius,
&self.center.z + &self.radius
),
)
}
}
| return None; }
| conditional_block |
led.rs | //! LED *(Dialog-specification language)* functionalities.
//!
//! LED is a dialog-specification language whose purpose is not to be a complete programming language,
//! but rather to make dialog specification simpler than in C. Additionally it allows users to easily
//! edit your application layout from external files without touching any source code.
//!
//! In LED, attributes and expressions follow this form:
//!
//! `elem = element[attribute1=value1,attribute2=value2,...](...expression...)`
//!
//! The names of the elements must not contain the “iup” prefix.
//! Attribute values are always interpreted as strings, but they need to be in quotes (“…”) only
//! when they include spaces. The “IUP_” prefix must not be added to the names of the attributes
//! and predefined values. Expressions contain parameters for creating the element.
//!
//! In LED there is no distinction between upper and lower case, except for attribute names.
//!
//! Also there is no optional parameters, in arrays at least one parameter must exist.
//!
//! To simply view a LED file objects use the LED Viewer application called [IupView][1], in the
//! applications included in the distribution. Pre-compiled binaries are available at the
//! [Download][2].
//!
//! You need to check out the [IUP documentation][0] for each control to see their
//! respective function signatures in LED.
//!
//! **Note:** Using LED may allow you to create controls not yet implemented in iup-rust and
//! that's *fine*. Use a `Handle` to have access to controls created from LED.
//!
//! [0]: http://webserver2.tecgraf.puc-rio.br/iup/
//! [1]: http://webserver2.tecgraf.puc-rio.br/iup/en/led.html
//! [2]: http://webserver2.tecgraf.puc-rio.br/iup/en/download.html
use iup_sys;
use std::path::Path;
use std::result::Result;
use std::ffi::CString;
/// Compiles a LED specification from a file.
///
/// Each time the function loads a LED file, the elements contained in it are created.
/// Therefore, the same LED file cannot be loaded several times, otherwise the elements will also
/// be created several times.
///
/// In case of failure returns the compilation error message.
pub fn load<P: AsRef<Path>>(path: P) -> Result<(), String> {
let path = path.as_ref();
let str = path.to_str().ok_or_else(|| "Failed to convert Path to string".to_string())?;
let cstr = CString::new(str).unwrap();
|
/// Compiles a LED specification from a string.
///
/// See the `load` function for additional semantic details.
pub fn load_buffer<S: Into<String>>(buf: S) -> Result<(), String> {
let cstr = CString::new(buf.into()).unwrap();
match unsafe { iup_sys::IupLoadBuffer(cstr.as_ptr()) } {
err if err.is_null() => Ok(()),
err => Err(string_from_cstr!(err)),
}
} | match unsafe { iup_sys::IupLoad(cstr.as_ptr()) } {
err if err.is_null() => Ok(()),
err => Err(string_from_cstr!(err)),
}
} | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.