file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
issue-16272.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::process::Command;
use std::env;
fn main() {
let len = env::args().len();
if len == 1 {
test();
} else
|
}
fn test() {
let status = Command::new(&env::current_exe().unwrap())
.arg("foo").arg("")
.status().unwrap();
assert!(status.success());
}
|
{
assert_eq!(len, 3);
}
|
conditional_block
|
issue-16272.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::process::Command;
use std::env;
fn main() {
let len = env::args().len();
if len == 1 {
test();
} else {
assert_eq!(len, 3);
}
|
let status = Command::new(&env::current_exe().unwrap())
.arg("foo").arg("")
.status().unwrap();
assert!(status.success());
}
|
}
fn test() {
|
random_line_split
|
issue-16272.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::process::Command;
use std::env;
fn main() {
let len = env::args().len();
if len == 1 {
test();
} else {
assert_eq!(len, 3);
}
}
fn test()
|
{
let status = Command::new(&env::current_exe().unwrap())
.arg("foo").arg("")
.status().unwrap();
assert!(status.success());
}
|
identifier_body
|
|
lexer.rs
|
use lexeme::Lexeme;
/***
* Structure Lexer
* -> Lexical Analyzer structure used to analyze given source code
*/
#[allow(dead_code)]
pub struct Lexer<'a>{
index: usize,
errors: u32,
code: String,
whitespaces: String,
terminals: Vec<&'a str>,
lexemes: Vec<Lexeme>
}
#[allow(dead_code)]
impl<'a> Lexer<'a> {
// trait new for Lexer structure
pub fn
|
(code: String) -> Lexer<'a> {
Lexer {
index: 0,
errors: 0,
code: code,
whitespaces: String::from(" \t"),
terminals: vec![
"let","for","while","do_while","if",
"else_if","else","continue","break","return",
"true","false","fn",
">",">=","<","<=",
"==","!=","+","-","*","/","%",
"=",",",";","{","}","(",")","[",
"]","\n","&&", "||", "!"
],
lexemes: vec![
Lexeme::Let, Lexeme::For, Lexeme::While, Lexeme::DoWhile, Lexeme::If, Lexeme::ElseIf, Lexeme::Else,
Lexeme::Continue, Lexeme::Break, Lexeme::Return, Lexeme::True, Lexeme::False, Lexeme::Fn, Lexeme::Greater,
Lexeme::GreaterEqual, Lexeme::Less, Lexeme::LessEqual, Lexeme::IsEqual, Lexeme::IsNotEqual, Lexeme::Plus,
Lexeme::Minus, Lexeme::Multiply, Lexeme::Divide, Lexeme::Modulo, Lexeme::Equals, Lexeme::Comma, Lexeme::SemiColon,
Lexeme::OpenBrace, Lexeme::CloseBrace, Lexeme::OpenParenthesis, Lexeme::CloseParenthesis, Lexeme::OpenBracket,
Lexeme::CloseBracket, Lexeme::Newline, Lexeme::And, Lexeme::Or, Lexeme::Not
],
}
}
// returns lexemes for each token scanned from the source code
pub fn analyze(&mut self) -> Vec<Lexeme> {
let mut lexemes = Vec::<Lexeme>::new();
while self.index < self.code.len() - 1{
let mut construct = false;
self.skip();
for i in 0..self.terminals.len() {
let is_fit = self.terminals[i].len() + self.index < self.code.len();
if is_fit && self.peek(self.terminals[i].len()) == self.terminals[i] {
lexemes.push(self.lexemes[i].clone());
self.index += self.terminals[i].len();
construct = true;
break;
}
}
if!construct {
if self.getc().is_numeric() {
lexemes.push(self.number());
} else if self.getc() == '\"' {
lexemes.push(self.string());
} else if self.getc().is_alphabetic() {
lexemes.push(self.identifier());
} else {
self.errors += 1;
println!("Syntax Error at {} : {}.", self.index, self.code.len());
}
}
}
if self.errors > 0 {
println!("Total Errors : {}", self.errors);
} else {
println!("Build Successful!");
println!("_________________");
}
lexemes
}
// returns number of errors
pub fn count_errors(&mut self) -> u32 {
self.errors
}
// returns current character
fn getc(&self) -> char {
self.code.chars().nth(self.index).unwrap()
}
// skips whitespaces
fn skip(&mut self) {
while self.whitespaces.contains(self.getc()) {
self.index += 1;
}
}
// returns part of remaining slice
fn peek(&self, length: usize) -> &str {
&self.code[self.index..self.index+length]
}
// returns identifier token and its value
fn identifier(&mut self) -> Lexeme {
let mut varname = String::new();
if self.getc().is_alphabetic() {
varname.push(self.getc());
self.index += 1;
while self.getc().is_alphanumeric() {
varname.push(self.getc());
self.index += 1;
}
}
Lexeme::Identifier(varname)
}
// returns number token and its value
fn number(&mut self) -> Lexeme {
let mut n = String::new();
while self.getc().is_numeric() {
n.push(self.getc());
self.index += 1;
}
Lexeme::Number(n)
}
// returns string token and its value
fn string(&mut self) -> Lexeme {
let mut s = String::new();
self.index += 1;
while self.getc()!= '\"' {
s.push(self.getc());
self.index += 1;
}
self.index += 1;
Lexeme::StringLiteral(s)
}
}
|
new
|
identifier_name
|
lexer.rs
|
use lexeme::Lexeme;
/***
* Structure Lexer
* -> Lexical Analyzer structure used to analyze given source code
*/
#[allow(dead_code)]
pub struct Lexer<'a>{
index: usize,
errors: u32,
code: String,
whitespaces: String,
terminals: Vec<&'a str>,
lexemes: Vec<Lexeme>
}
#[allow(dead_code)]
impl<'a> Lexer<'a> {
// trait new for Lexer structure
pub fn new(code: String) -> Lexer<'a> {
Lexer {
index: 0,
errors: 0,
code: code,
whitespaces: String::from(" \t"),
terminals: vec![
"let","for","while","do_while","if",
"else_if","else","continue","break","return",
"true","false","fn",
">",">=","<","<=",
"==","!=","+","-","*","/","%",
"=",",",";","{","}","(",")","[",
"]","\n","&&", "||", "!"
],
lexemes: vec![
Lexeme::Let, Lexeme::For, Lexeme::While, Lexeme::DoWhile, Lexeme::If, Lexeme::ElseIf, Lexeme::Else,
Lexeme::Continue, Lexeme::Break, Lexeme::Return, Lexeme::True, Lexeme::False, Lexeme::Fn, Lexeme::Greater,
Lexeme::GreaterEqual, Lexeme::Less, Lexeme::LessEqual, Lexeme::IsEqual, Lexeme::IsNotEqual, Lexeme::Plus,
Lexeme::Minus, Lexeme::Multiply, Lexeme::Divide, Lexeme::Modulo, Lexeme::Equals, Lexeme::Comma, Lexeme::SemiColon,
Lexeme::OpenBrace, Lexeme::CloseBrace, Lexeme::OpenParenthesis, Lexeme::CloseParenthesis, Lexeme::OpenBracket,
Lexeme::CloseBracket, Lexeme::Newline, Lexeme::And, Lexeme::Or, Lexeme::Not
],
}
}
// returns lexemes for each token scanned from the source code
pub fn analyze(&mut self) -> Vec<Lexeme> {
let mut lexemes = Vec::<Lexeme>::new();
while self.index < self.code.len() - 1{
let mut construct = false;
self.skip();
for i in 0..self.terminals.len() {
let is_fit = self.terminals[i].len() + self.index < self.code.len();
if is_fit && self.peek(self.terminals[i].len()) == self.terminals[i] {
lexemes.push(self.lexemes[i].clone());
self.index += self.terminals[i].len();
construct = true;
break;
}
}
if!construct {
if self.getc().is_numeric() {
lexemes.push(self.number());
} else if self.getc() == '\"' {
lexemes.push(self.string());
} else if self.getc().is_alphabetic() {
lexemes.push(self.identifier());
} else {
self.errors += 1;
println!("Syntax Error at {} : {}.", self.index, self.code.len());
}
}
}
if self.errors > 0 {
println!("Total Errors : {}", self.errors);
} else {
println!("Build Successful!");
println!("_________________");
}
lexemes
}
// returns number of errors
pub fn count_errors(&mut self) -> u32 {
self.errors
}
// returns current character
fn getc(&self) -> char {
self.code.chars().nth(self.index).unwrap()
}
// skips whitespaces
fn skip(&mut self) {
while self.whitespaces.contains(self.getc()) {
self.index += 1;
}
}
// returns part of remaining slice
fn peek(&self, length: usize) -> &str {
&self.code[self.index..self.index+length]
}
// returns identifier token and its value
fn identifier(&mut self) -> Lexeme {
let mut varname = String::new();
if self.getc().is_alphabetic() {
varname.push(self.getc());
self.index += 1;
while self.getc().is_alphanumeric() {
varname.push(self.getc());
self.index += 1;
}
}
Lexeme::Identifier(varname)
}
// returns number token and its value
fn number(&mut self) -> Lexeme {
let mut n = String::new();
while self.getc().is_numeric() {
n.push(self.getc());
self.index += 1;
}
Lexeme::Number(n)
}
// returns string token and its value
fn string(&mut self) -> Lexeme
|
}
|
{
let mut s = String::new();
self.index += 1;
while self.getc() != '\"' {
s.push(self.getc());
self.index += 1;
}
self.index += 1;
Lexeme::StringLiteral(s)
}
|
identifier_body
|
lexer.rs
|
use lexeme::Lexeme;
/***
* Structure Lexer
* -> Lexical Analyzer structure used to analyze given source code
*/
#[allow(dead_code)]
pub struct Lexer<'a>{
index: usize,
errors: u32,
code: String,
whitespaces: String,
terminals: Vec<&'a str>,
lexemes: Vec<Lexeme>
}
#[allow(dead_code)]
impl<'a> Lexer<'a> {
// trait new for Lexer structure
pub fn new(code: String) -> Lexer<'a> {
Lexer {
index: 0,
errors: 0,
code: code,
whitespaces: String::from(" \t"),
terminals: vec![
"let","for","while","do_while","if",
"else_if","else","continue","break","return",
"true","false","fn",
">",">=","<","<=",
"==","!=","+","-","*","/","%",
"=",",",";","{","}","(",")","[",
"]","\n","&&", "||", "!"
],
lexemes: vec![
Lexeme::Let, Lexeme::For, Lexeme::While, Lexeme::DoWhile, Lexeme::If, Lexeme::ElseIf, Lexeme::Else,
Lexeme::Continue, Lexeme::Break, Lexeme::Return, Lexeme::True, Lexeme::False, Lexeme::Fn, Lexeme::Greater,
Lexeme::GreaterEqual, Lexeme::Less, Lexeme::LessEqual, Lexeme::IsEqual, Lexeme::IsNotEqual, Lexeme::Plus,
Lexeme::Minus, Lexeme::Multiply, Lexeme::Divide, Lexeme::Modulo, Lexeme::Equals, Lexeme::Comma, Lexeme::SemiColon,
Lexeme::OpenBrace, Lexeme::CloseBrace, Lexeme::OpenParenthesis, Lexeme::CloseParenthesis, Lexeme::OpenBracket,
Lexeme::CloseBracket, Lexeme::Newline, Lexeme::And, Lexeme::Or, Lexeme::Not
],
}
}
// returns lexemes for each token scanned from the source code
pub fn analyze(&mut self) -> Vec<Lexeme> {
let mut lexemes = Vec::<Lexeme>::new();
while self.index < self.code.len() - 1{
let mut construct = false;
self.skip();
for i in 0..self.terminals.len() {
let is_fit = self.terminals[i].len() + self.index < self.code.len();
if is_fit && self.peek(self.terminals[i].len()) == self.terminals[i]
|
}
if!construct {
if self.getc().is_numeric() {
lexemes.push(self.number());
} else if self.getc() == '\"' {
lexemes.push(self.string());
} else if self.getc().is_alphabetic() {
lexemes.push(self.identifier());
} else {
self.errors += 1;
println!("Syntax Error at {} : {}.", self.index, self.code.len());
}
}
}
if self.errors > 0 {
println!("Total Errors : {}", self.errors);
} else {
println!("Build Successful!");
println!("_________________");
}
lexemes
}
// returns number of errors
pub fn count_errors(&mut self) -> u32 {
self.errors
}
// returns current character
fn getc(&self) -> char {
self.code.chars().nth(self.index).unwrap()
}
// skips whitespaces
fn skip(&mut self) {
while self.whitespaces.contains(self.getc()) {
self.index += 1;
}
}
// returns part of remaining slice
fn peek(&self, length: usize) -> &str {
&self.code[self.index..self.index+length]
}
// returns identifier token and its value
fn identifier(&mut self) -> Lexeme {
let mut varname = String::new();
if self.getc().is_alphabetic() {
varname.push(self.getc());
self.index += 1;
while self.getc().is_alphanumeric() {
varname.push(self.getc());
self.index += 1;
}
}
Lexeme::Identifier(varname)
}
// returns number token and its value
fn number(&mut self) -> Lexeme {
let mut n = String::new();
while self.getc().is_numeric() {
n.push(self.getc());
self.index += 1;
}
Lexeme::Number(n)
}
// returns string token and its value
fn string(&mut self) -> Lexeme {
let mut s = String::new();
self.index += 1;
while self.getc()!= '\"' {
s.push(self.getc());
self.index += 1;
}
self.index += 1;
Lexeme::StringLiteral(s)
}
}
|
{
lexemes.push(self.lexemes[i].clone());
self.index += self.terminals[i].len();
construct = true;
break;
}
|
conditional_block
|
lexer.rs
|
use lexeme::Lexeme;
/***
* Structure Lexer
* -> Lexical Analyzer structure used to analyze given source code
*/
#[allow(dead_code)]
pub struct Lexer<'a>{
index: usize,
errors: u32,
code: String,
whitespaces: String,
terminals: Vec<&'a str>,
lexemes: Vec<Lexeme>
}
#[allow(dead_code)]
impl<'a> Lexer<'a> {
// trait new for Lexer structure
pub fn new(code: String) -> Lexer<'a> {
Lexer {
index: 0,
errors: 0,
code: code,
whitespaces: String::from(" \t"),
terminals: vec![
"let","for","while","do_while","if",
"else_if","else","continue","break","return",
"true","false","fn",
">",">=","<","<=",
"==","!=","+","-","*","/","%",
"=",",",";","{","}","(",")","[",
"]","\n","&&", "||", "!"
],
lexemes: vec![
Lexeme::Let, Lexeme::For, Lexeme::While, Lexeme::DoWhile, Lexeme::If, Lexeme::ElseIf, Lexeme::Else,
Lexeme::Continue, Lexeme::Break, Lexeme::Return, Lexeme::True, Lexeme::False, Lexeme::Fn, Lexeme::Greater,
Lexeme::GreaterEqual, Lexeme::Less, Lexeme::LessEqual, Lexeme::IsEqual, Lexeme::IsNotEqual, Lexeme::Plus,
Lexeme::Minus, Lexeme::Multiply, Lexeme::Divide, Lexeme::Modulo, Lexeme::Equals, Lexeme::Comma, Lexeme::SemiColon,
Lexeme::OpenBrace, Lexeme::CloseBrace, Lexeme::OpenParenthesis, Lexeme::CloseParenthesis, Lexeme::OpenBracket,
Lexeme::CloseBracket, Lexeme::Newline, Lexeme::And, Lexeme::Or, Lexeme::Not
],
}
}
// returns lexemes for each token scanned from the source code
pub fn analyze(&mut self) -> Vec<Lexeme> {
let mut lexemes = Vec::<Lexeme>::new();
while self.index < self.code.len() - 1{
let mut construct = false;
self.skip();
for i in 0..self.terminals.len() {
let is_fit = self.terminals[i].len() + self.index < self.code.len();
if is_fit && self.peek(self.terminals[i].len()) == self.terminals[i] {
lexemes.push(self.lexemes[i].clone());
self.index += self.terminals[i].len();
construct = true;
break;
}
}
if!construct {
if self.getc().is_numeric() {
lexemes.push(self.number());
} else if self.getc() == '\"' {
lexemes.push(self.string());
} else if self.getc().is_alphabetic() {
lexemes.push(self.identifier());
} else {
self.errors += 1;
println!("Syntax Error at {} : {}.", self.index, self.code.len());
|
if self.errors > 0 {
println!("Total Errors : {}", self.errors);
} else {
println!("Build Successful!");
println!("_________________");
}
lexemes
}
// returns number of errors
pub fn count_errors(&mut self) -> u32 {
self.errors
}
// returns current character
fn getc(&self) -> char {
self.code.chars().nth(self.index).unwrap()
}
// skips whitespaces
fn skip(&mut self) {
while self.whitespaces.contains(self.getc()) {
self.index += 1;
}
}
// returns part of remaining slice
fn peek(&self, length: usize) -> &str {
&self.code[self.index..self.index+length]
}
// returns identifier token and its value
fn identifier(&mut self) -> Lexeme {
let mut varname = String::new();
if self.getc().is_alphabetic() {
varname.push(self.getc());
self.index += 1;
while self.getc().is_alphanumeric() {
varname.push(self.getc());
self.index += 1;
}
}
Lexeme::Identifier(varname)
}
// returns number token and its value
fn number(&mut self) -> Lexeme {
let mut n = String::new();
while self.getc().is_numeric() {
n.push(self.getc());
self.index += 1;
}
Lexeme::Number(n)
}
// returns string token and its value
fn string(&mut self) -> Lexeme {
let mut s = String::new();
self.index += 1;
while self.getc()!= '\"' {
s.push(self.getc());
self.index += 1;
}
self.index += 1;
Lexeme::StringLiteral(s)
}
}
|
}
}
}
|
random_line_split
|
sphere.rs
|
use material::Material;
use vector::Vector4;
use ray::Ray;
use renderable::{ IntersectionRecord, Renderable };
use std::rc::Rc;
pub struct Sphere {
center: Vector4,
radius: f32,
material: Rc<Box<Material>>,
}
impl Sphere {
pub fn new<TMaterial: Material +'static>(center: Vector4, radius: f32, material: TMaterial) -> Self {
Sphere {
center: center,
radius: radius,
material: Rc::new(Box::new(material))
}
}
}
impl Renderable for Sphere {
fn intersects(&self, ray: &Ray, distance_min: f32, distance_max: f32)
-> Option<IntersectionRecord>
|
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
let distance = (-b + sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min {
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
}
None
}
}
|
{
let o_minus_c = ray.origin() - self.center;
let a = ray.direction().dot3(ray.direction());
let b = o_minus_c.dot3(ray.direction());
let c = (o_minus_c).dot3(o_minus_c) - self.radius * self.radius;
let discriminant = b * b - a * c;
// Get the distance (d) value if a hit occurred ahead of the ray
if discriminant > 0.0 {
let sqrt_discriminant = discriminant.sqrt();
let distance = (-b - sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min {
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
|
identifier_body
|
sphere.rs
|
use material::Material;
use vector::Vector4;
use ray::Ray;
use renderable::{ IntersectionRecord, Renderable };
use std::rc::Rc;
pub struct Sphere {
center: Vector4,
radius: f32,
material: Rc<Box<Material>>,
}
impl Sphere {
pub fn new<TMaterial: Material +'static>(center: Vector4, radius: f32, material: TMaterial) -> Self {
Sphere {
center: center,
radius: radius,
material: Rc::new(Box::new(material))
}
}
}
impl Renderable for Sphere {
fn intersects(&self, ray: &Ray, distance_min: f32, distance_max: f32)
-> Option<IntersectionRecord> {
let o_minus_c = ray.origin() - self.center;
let a = ray.direction().dot3(ray.direction());
let b = o_minus_c.dot3(ray.direction());
let c = (o_minus_c).dot3(o_minus_c) - self.radius * self.radius;
let discriminant = b * b - a * c;
// Get the distance (d) value if a hit occurred ahead of the ray
if discriminant > 0.0 {
let sqrt_discriminant = discriminant.sqrt();
let distance = (-b - sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min {
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
let distance = (-b + sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min
|
}
None
}
}
|
{
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
|
conditional_block
|
sphere.rs
|
use material::Material;
use vector::Vector4;
use ray::Ray;
use renderable::{ IntersectionRecord, Renderable };
use std::rc::Rc;
pub struct Sphere {
center: Vector4,
radius: f32,
material: Rc<Box<Material>>,
}
impl Sphere {
pub fn new<TMaterial: Material +'static>(center: Vector4, radius: f32, material: TMaterial) -> Self {
Sphere {
center: center,
radius: radius,
material: Rc::new(Box::new(material))
}
}
}
impl Renderable for Sphere {
fn
|
(&self, ray: &Ray, distance_min: f32, distance_max: f32)
-> Option<IntersectionRecord> {
let o_minus_c = ray.origin() - self.center;
let a = ray.direction().dot3(ray.direction());
let b = o_minus_c.dot3(ray.direction());
let c = (o_minus_c).dot3(o_minus_c) - self.radius * self.radius;
let discriminant = b * b - a * c;
// Get the distance (d) value if a hit occurred ahead of the ray
if discriminant > 0.0 {
let sqrt_discriminant = discriminant.sqrt();
let distance = (-b - sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min {
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
let distance = (-b + sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min {
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
}
None
}
}
|
intersects
|
identifier_name
|
sphere.rs
|
use material::Material;
use vector::Vector4;
use ray::Ray;
use renderable::{ IntersectionRecord, Renderable };
use std::rc::Rc;
pub struct Sphere {
center: Vector4,
radius: f32,
material: Rc<Box<Material>>,
}
impl Sphere {
pub fn new<TMaterial: Material +'static>(center: Vector4, radius: f32, material: TMaterial) -> Self {
Sphere {
center: center,
radius: radius,
material: Rc::new(Box::new(material))
}
}
}
impl Renderable for Sphere {
fn intersects(&self, ray: &Ray, distance_min: f32, distance_max: f32)
-> Option<IntersectionRecord> {
let o_minus_c = ray.origin() - self.center;
let a = ray.direction().dot3(ray.direction());
let b = o_minus_c.dot3(ray.direction());
let c = (o_minus_c).dot3(o_minus_c) - self.radius * self.radius;
let discriminant = b * b - a * c;
// Get the distance (d) value if a hit occurred ahead of the ray
if discriminant > 0.0 {
let sqrt_discriminant = discriminant.sqrt();
let distance = (-b - sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min {
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
let distance = (-b + sqrt_discriminant) / a;
if distance < distance_max && distance > distance_min {
let intersection_point = ray.point_at_distance(distance);
return Some(IntersectionRecord::new(
distance,
intersection_point,
(intersection_point - self.center) / self.radius,
self.material.clone(),
));
}
}
None
|
}
|
}
|
random_line_split
|
message_builder.rs
|
//! Builder for constructing Asynchronous message interactions
use std::collections::HashMap;
use bytes::Bytes;
use log::*;
use maplit::hashmap;
use pact_models::content_types::ContentType;
use pact_models::json_utils::json_to_string;
use pact_models::matchingrules::MatchingRuleCategory;
use pact_models::path_exp::DocPath;
use pact_models::plugins::PluginData;
use pact_models::prelude::{MatchingRules, OptionalBody, ProviderState};
use pact_models::v4::async_message::AsynchronousMessage;
use pact_models::v4::interaction::InteractionMarkup;
use pact_models::v4::message_parts::MessageContents;
use pact_plugin_driver::catalogue_manager::find_content_matcher;
use pact_plugin_driver::content::{ContentMatcher, InteractionContents, PluginConfiguration};
use pact_plugin_driver::plugin_models::PactPluginManifest;
use serde_json::{json, Map, Value};
use crate::patterns::JsonPattern;
use crate::prelude::Pattern;
#[derive(Clone, Debug)]
/// Asynchronous message interaction builder. Normally created via PactBuilder::message_interaction.
pub struct MessageInteractionBuilder {
description: String,
provider_states: Vec<ProviderState>,
comments: Vec<String>,
test_name: Option<String>,
interaction_type: String,
message_contents: InteractionContents,
contents_plugin: Option<PactPluginManifest>,
plugin_config: HashMap<String, PluginConfiguration>
}
impl MessageInteractionBuilder {
/// Create a new message interaction builder, Description is the interaction description
/// and interaction_type is the type of message (leave empty for the default type).
pub fn new<D: Into<String>>(description: D, interaction_type: D) -> MessageInteractionBuilder {
MessageInteractionBuilder {
description: description.into(),
provider_states: vec![],
comments: vec![],
test_name: None,
interaction_type: interaction_type.into(),
message_contents: Default::default(),
contents_plugin: None,
plugin_config: Default::default()
}
}
/// Specify a "provider state" for this interaction. This is normally use to
/// set up database fixtures when using a pact to test a provider.
pub fn
|
<G: Into<String>>(&mut self, given: G) -> &mut Self {
self.provider_states.push(ProviderState::default(&given.into()));
self
}
/// Adds a text comment to this interaction. This allows to specify just a bit more information
/// about the interaction. It has no functional impact, but can be displayed in the broker HTML
/// page, and potentially in the test output.
pub fn comment<G: Into<String>>(&mut self, comment: G) -> &mut Self {
self.comments.push(comment.into());
self
}
/// Sets the test name for this interaction. This allows to specify just a bit more information
/// about the interaction. It has no functional impact, but can be displayed in the broker HTML
/// page, and potentially in the test output.
pub fn test_name<G: Into<String>>(&mut self, name: G) -> &mut Self {
self.test_name = Some(name.into());
self
}
/// The interaction we've built (in V4 format).
pub fn build(&self) -> AsynchronousMessage {
debug!("Building V4 AsynchronousMessage interaction: {:?}", self);
let mut rules = MatchingRules::default();
rules.add_category("body")
.add_rules(self.message_contents.rules.as_ref().cloned().unwrap_or_default());
AsynchronousMessage {
id: None,
key: None,
description: self.description.clone(),
provider_states: self.provider_states.clone(),
contents: MessageContents {
contents: self.message_contents.body.clone(),
metadata: self.message_contents.metadata.as_ref().cloned().unwrap_or_default(),
matching_rules: rules,
generators: self.message_contents.generators.as_ref().cloned().unwrap_or_default()
},
comments: hashmap!{
"text".to_string() => json!(self.comments),
"testname".to_string() => json!(self.test_name)
},
pending: false,
plugin_config: self.contents_plugin.as_ref().map(|plugin| {
hashmap!{
plugin.name.clone() => self.message_contents.plugin_config.interaction_configuration.clone()
}
}).unwrap_or_default(),
interaction_markup: InteractionMarkup {
markup: self.message_contents.interaction_markup.clone(),
markup_type: self.message_contents.interaction_markup_type.clone()
}
}
}
/// Configure the interaction contents from a map
pub async fn contents_from(&mut self, contents: Value) -> &mut Self {
debug!("Configuring interaction from {:?}", contents);
let contents_map = contents.as_object().cloned().unwrap_or(Map::default());
let contents_hashmap = contents_map.iter()
.map(|(k, v)| (k.clone(), v.clone())).collect();
if let Some(content_type) = contents_map.get("pact:content-type") {
let ct = ContentType::parse(json_to_string(content_type).as_str()).unwrap();
if let Some(content_matcher) = find_content_matcher(&ct) {
debug!("Found a matcher for '{}': {:?}", ct, content_matcher);
if content_matcher.is_core() {
debug!("Content matcher is a core matcher, will use the internal implementation");
self.setup_core_matcher(&ct, &contents_hashmap, Some(content_matcher));
} else {
debug!("Plugin matcher, will get the plugin to provide the interaction contents");
match content_matcher.configure_interation(&ct, contents_hashmap).await {
Ok((contents, plugin_config)) => {
if let Some(contents) = contents.first() {
self.message_contents = contents.clone();
if!contents.plugin_config.is_empty() {
self.plugin_config.insert(content_matcher.plugin_name(), contents.plugin_config.clone());
}
}
self.contents_plugin = content_matcher.plugin();
if let Some(plugin_config) = plugin_config {
let plugin_name = content_matcher.plugin_name();
if self.plugin_config.contains_key(&*plugin_name) {
let entry = self.plugin_config.get_mut(&*plugin_name).unwrap();
for (k, v) in plugin_config.pact_configuration {
entry.pact_configuration.insert(k.clone(), v.clone());
}
} else {
self.plugin_config.insert(plugin_name.to_string(), plugin_config.clone());
}
}
}
Err(err) => panic!("Failed to call out to plugin - {}", err)
}
}
} else {
debug!("No content matcher found, will use the internal implementation");
self.setup_core_matcher(&ct, &contents_hashmap, None);
}
} else {
self.message_contents = InteractionContents {
body : OptionalBody::from(Value::Object(contents_map.clone())),
.. InteractionContents::default()
};
}
self
}
fn setup_core_matcher(
&mut self,
content_type: &ContentType,
config: &HashMap<String, Value>,
content_matcher: Option<ContentMatcher>
) {
self.message_contents = InteractionContents {
body: if let Some(contents) = config.get("contents") {
OptionalBody::Present(
Bytes::from(contents.to_string()),
Some(content_type.clone()),
None
)
} else {
OptionalBody::Missing
},
.. InteractionContents::default()
};
if let Some(_content_matcher) = content_matcher {
// TODO: get the content matcher to apply the matching rules and generators
// val (body, rules, generators, _, _) = contentMatcher.setupBodyFromConfig(bodyConfig)
// val matchingRules = MatchingRulesImpl()
// if (rules!= null) {
// matchingRules.addCategory(rules)
// }
// MessageContents(body, mapOf(), matchingRules, generators?: Generators())
}
}
/// Any global plugin config required to add to the Pact
pub fn plugin_config(&self) -> Option<PluginData> {
self.contents_plugin.as_ref().map(|plugin| {
let config = if let Some(config) = self.plugin_config.get(plugin.name.as_str()) {
config.pact_configuration.clone()
} else {
hashmap!{}
};
PluginData {
name: plugin.name.clone(),
version: plugin.version.clone(),
configuration: config
}
})
}
/// Specify the body as `JsonPattern`, possibly including special matching
/// rules.
///
/// ```
/// use pact_consumer::prelude::*;
/// use pact_consumer::*;
/// use pact_consumer::builders::MessageInteractionBuilder;
///
/// MessageInteractionBuilder::new("hello message", "core/interaction/message").json_body(json_pattern!({
/// "message": like!("Hello"),
/// }));
/// ```
pub fn json_body<B: Into<JsonPattern>>(&mut self, body: B) -> &mut Self {
let body = body.into();
{
let message_body = OptionalBody::Present(body.to_example().to_string().into(), Some("application/json".into()), None);
let mut rules = MatchingRuleCategory::empty("content");
body.extract_matching_rules(DocPath::root(), &mut rules);
self.message_contents.body = message_body;
if rules.is_not_empty() {
match &mut self.message_contents.rules {
None => self.message_contents.rules = Some(rules.clone()),
Some(mr) => mr.add_rules(rules.clone())
}
}
}
self
}
}
|
given
|
identifier_name
|
message_builder.rs
|
//! Builder for constructing Asynchronous message interactions
use std::collections::HashMap;
use bytes::Bytes;
use log::*;
use maplit::hashmap;
use pact_models::content_types::ContentType;
use pact_models::json_utils::json_to_string;
use pact_models::matchingrules::MatchingRuleCategory;
use pact_models::path_exp::DocPath;
use pact_models::plugins::PluginData;
use pact_models::prelude::{MatchingRules, OptionalBody, ProviderState};
use pact_models::v4::async_message::AsynchronousMessage;
use pact_models::v4::interaction::InteractionMarkup;
use pact_models::v4::message_parts::MessageContents;
use pact_plugin_driver::catalogue_manager::find_content_matcher;
use pact_plugin_driver::content::{ContentMatcher, InteractionContents, PluginConfiguration};
use pact_plugin_driver::plugin_models::PactPluginManifest;
use serde_json::{json, Map, Value};
use crate::patterns::JsonPattern;
use crate::prelude::Pattern;
#[derive(Clone, Debug)]
/// Asynchronous message interaction builder. Normally created via PactBuilder::message_interaction.
pub struct MessageInteractionBuilder {
description: String,
provider_states: Vec<ProviderState>,
comments: Vec<String>,
test_name: Option<String>,
interaction_type: String,
message_contents: InteractionContents,
contents_plugin: Option<PactPluginManifest>,
plugin_config: HashMap<String, PluginConfiguration>
}
impl MessageInteractionBuilder {
/// Create a new message interaction builder, Description is the interaction description
/// and interaction_type is the type of message (leave empty for the default type).
pub fn new<D: Into<String>>(description: D, interaction_type: D) -> MessageInteractionBuilder {
MessageInteractionBuilder {
description: description.into(),
provider_states: vec![],
comments: vec![],
test_name: None,
interaction_type: interaction_type.into(),
message_contents: Default::default(),
contents_plugin: None,
plugin_config: Default::default()
}
}
/// Specify a "provider state" for this interaction. This is normally use to
/// set up database fixtures when using a pact to test a provider.
pub fn given<G: Into<String>>(&mut self, given: G) -> &mut Self {
self.provider_states.push(ProviderState::default(&given.into()));
self
}
/// Adds a text comment to this interaction. This allows to specify just a bit more information
/// about the interaction. It has no functional impact, but can be displayed in the broker HTML
/// page, and potentially in the test output.
pub fn comment<G: Into<String>>(&mut self, comment: G) -> &mut Self {
self.comments.push(comment.into());
self
}
/// Sets the test name for this interaction. This allows to specify just a bit more information
/// about the interaction. It has no functional impact, but can be displayed in the broker HTML
/// page, and potentially in the test output.
pub fn test_name<G: Into<String>>(&mut self, name: G) -> &mut Self {
self.test_name = Some(name.into());
self
}
/// The interaction we've built (in V4 format).
pub fn build(&self) -> AsynchronousMessage {
debug!("Building V4 AsynchronousMessage interaction: {:?}", self);
let mut rules = MatchingRules::default();
rules.add_category("body")
.add_rules(self.message_contents.rules.as_ref().cloned().unwrap_or_default());
AsynchronousMessage {
id: None,
key: None,
description: self.description.clone(),
provider_states: self.provider_states.clone(),
contents: MessageContents {
contents: self.message_contents.body.clone(),
metadata: self.message_contents.metadata.as_ref().cloned().unwrap_or_default(),
matching_rules: rules,
generators: self.message_contents.generators.as_ref().cloned().unwrap_or_default()
},
comments: hashmap!{
"text".to_string() => json!(self.comments),
"testname".to_string() => json!(self.test_name)
},
pending: false,
plugin_config: self.contents_plugin.as_ref().map(|plugin| {
hashmap!{
plugin.name.clone() => self.message_contents.plugin_config.interaction_configuration.clone()
}
}).unwrap_or_default(),
interaction_markup: InteractionMarkup {
markup: self.message_contents.interaction_markup.clone(),
markup_type: self.message_contents.interaction_markup_type.clone()
}
}
}
/// Configure the interaction contents from a map
pub async fn contents_from(&mut self, contents: Value) -> &mut Self {
debug!("Configuring interaction from {:?}", contents);
let contents_map = contents.as_object().cloned().unwrap_or(Map::default());
let contents_hashmap = contents_map.iter()
.map(|(k, v)| (k.clone(), v.clone())).collect();
if let Some(content_type) = contents_map.get("pact:content-type") {
let ct = ContentType::parse(json_to_string(content_type).as_str()).unwrap();
if let Some(content_matcher) = find_content_matcher(&ct) {
debug!("Found a matcher for '{}': {:?}", ct, content_matcher);
if content_matcher.is_core() {
debug!("Content matcher is a core matcher, will use the internal implementation");
self.setup_core_matcher(&ct, &contents_hashmap, Some(content_matcher));
} else {
debug!("Plugin matcher, will get the plugin to provide the interaction contents");
match content_matcher.configure_interation(&ct, contents_hashmap).await {
Ok((contents, plugin_config)) => {
if let Some(contents) = contents.first() {
self.message_contents = contents.clone();
if!contents.plugin_config.is_empty() {
self.plugin_config.insert(content_matcher.plugin_name(), contents.plugin_config.clone());
}
}
self.contents_plugin = content_matcher.plugin();
if let Some(plugin_config) = plugin_config {
let plugin_name = content_matcher.plugin_name();
if self.plugin_config.contains_key(&*plugin_name) {
let entry = self.plugin_config.get_mut(&*plugin_name).unwrap();
for (k, v) in plugin_config.pact_configuration {
entry.pact_configuration.insert(k.clone(), v.clone());
}
} else {
self.plugin_config.insert(plugin_name.to_string(), plugin_config.clone());
}
}
}
Err(err) => panic!("Failed to call out to plugin - {}", err)
}
}
} else {
|
self.message_contents = InteractionContents {
body : OptionalBody::from(Value::Object(contents_map.clone())),
.. InteractionContents::default()
};
}
self
}
fn setup_core_matcher(
&mut self,
content_type: &ContentType,
config: &HashMap<String, Value>,
content_matcher: Option<ContentMatcher>
) {
self.message_contents = InteractionContents {
body: if let Some(contents) = config.get("contents") {
OptionalBody::Present(
Bytes::from(contents.to_string()),
Some(content_type.clone()),
None
)
} else {
OptionalBody::Missing
},
.. InteractionContents::default()
};
if let Some(_content_matcher) = content_matcher {
// TODO: get the content matcher to apply the matching rules and generators
// val (body, rules, generators, _, _) = contentMatcher.setupBodyFromConfig(bodyConfig)
// val matchingRules = MatchingRulesImpl()
// if (rules!= null) {
// matchingRules.addCategory(rules)
// }
// MessageContents(body, mapOf(), matchingRules, generators?: Generators())
}
}
/// Any global plugin config required to add to the Pact
pub fn plugin_config(&self) -> Option<PluginData> {
self.contents_plugin.as_ref().map(|plugin| {
let config = if let Some(config) = self.plugin_config.get(plugin.name.as_str()) {
config.pact_configuration.clone()
} else {
hashmap!{}
};
PluginData {
name: plugin.name.clone(),
version: plugin.version.clone(),
configuration: config
}
})
}
/// Specify the body as `JsonPattern`, possibly including special matching
/// rules.
///
/// ```
/// use pact_consumer::prelude::*;
/// use pact_consumer::*;
/// use pact_consumer::builders::MessageInteractionBuilder;
///
/// MessageInteractionBuilder::new("hello message", "core/interaction/message").json_body(json_pattern!({
/// "message": like!("Hello"),
/// }));
/// ```
pub fn json_body<B: Into<JsonPattern>>(&mut self, body: B) -> &mut Self {
let body = body.into();
{
let message_body = OptionalBody::Present(body.to_example().to_string().into(), Some("application/json".into()), None);
let mut rules = MatchingRuleCategory::empty("content");
body.extract_matching_rules(DocPath::root(), &mut rules);
self.message_contents.body = message_body;
if rules.is_not_empty() {
match &mut self.message_contents.rules {
None => self.message_contents.rules = Some(rules.clone()),
Some(mr) => mr.add_rules(rules.clone())
}
}
}
self
}
}
|
debug!("No content matcher found, will use the internal implementation");
self.setup_core_matcher(&ct, &contents_hashmap, None);
}
} else {
|
random_line_split
|
message_builder.rs
|
//! Builder for constructing Asynchronous message interactions
use std::collections::HashMap;
use bytes::Bytes;
use log::*;
use maplit::hashmap;
use pact_models::content_types::ContentType;
use pact_models::json_utils::json_to_string;
use pact_models::matchingrules::MatchingRuleCategory;
use pact_models::path_exp::DocPath;
use pact_models::plugins::PluginData;
use pact_models::prelude::{MatchingRules, OptionalBody, ProviderState};
use pact_models::v4::async_message::AsynchronousMessage;
use pact_models::v4::interaction::InteractionMarkup;
use pact_models::v4::message_parts::MessageContents;
use pact_plugin_driver::catalogue_manager::find_content_matcher;
use pact_plugin_driver::content::{ContentMatcher, InteractionContents, PluginConfiguration};
use pact_plugin_driver::plugin_models::PactPluginManifest;
use serde_json::{json, Map, Value};
use crate::patterns::JsonPattern;
use crate::prelude::Pattern;
#[derive(Clone, Debug)]
/// Asynchronous message interaction builder. Normally created via PactBuilder::message_interaction.
pub struct MessageInteractionBuilder {
description: String,
provider_states: Vec<ProviderState>,
comments: Vec<String>,
test_name: Option<String>,
interaction_type: String,
message_contents: InteractionContents,
contents_plugin: Option<PactPluginManifest>,
plugin_config: HashMap<String, PluginConfiguration>
}
impl MessageInteractionBuilder {
/// Create a new message interaction builder, Description is the interaction description
/// and interaction_type is the type of message (leave empty for the default type).
pub fn new<D: Into<String>>(description: D, interaction_type: D) -> MessageInteractionBuilder {
MessageInteractionBuilder {
description: description.into(),
provider_states: vec![],
comments: vec![],
test_name: None,
interaction_type: interaction_type.into(),
message_contents: Default::default(),
contents_plugin: None,
plugin_config: Default::default()
}
}
/// Specify a "provider state" for this interaction. This is normally use to
/// set up database fixtures when using a pact to test a provider.
pub fn given<G: Into<String>>(&mut self, given: G) -> &mut Self {
self.provider_states.push(ProviderState::default(&given.into()));
self
}
/// Adds a text comment to this interaction. This allows to specify just a bit more information
/// about the interaction. It has no functional impact, but can be displayed in the broker HTML
/// page, and potentially in the test output.
pub fn comment<G: Into<String>>(&mut self, comment: G) -> &mut Self {
self.comments.push(comment.into());
self
}
/// Sets the test name for this interaction. This allows to specify just a bit more information
/// about the interaction. It has no functional impact, but can be displayed in the broker HTML
/// page, and potentially in the test output.
pub fn test_name<G: Into<String>>(&mut self, name: G) -> &mut Self {
self.test_name = Some(name.into());
self
}
/// The interaction we've built (in V4 format).
pub fn build(&self) -> AsynchronousMessage {
debug!("Building V4 AsynchronousMessage interaction: {:?}", self);
let mut rules = MatchingRules::default();
rules.add_category("body")
.add_rules(self.message_contents.rules.as_ref().cloned().unwrap_or_default());
AsynchronousMessage {
id: None,
key: None,
description: self.description.clone(),
provider_states: self.provider_states.clone(),
contents: MessageContents {
contents: self.message_contents.body.clone(),
metadata: self.message_contents.metadata.as_ref().cloned().unwrap_or_default(),
matching_rules: rules,
generators: self.message_contents.generators.as_ref().cloned().unwrap_or_default()
},
comments: hashmap!{
"text".to_string() => json!(self.comments),
"testname".to_string() => json!(self.test_name)
},
pending: false,
plugin_config: self.contents_plugin.as_ref().map(|plugin| {
hashmap!{
plugin.name.clone() => self.message_contents.plugin_config.interaction_configuration.clone()
}
}).unwrap_or_default(),
interaction_markup: InteractionMarkup {
markup: self.message_contents.interaction_markup.clone(),
markup_type: self.message_contents.interaction_markup_type.clone()
}
}
}
/// Configure the interaction contents from a map
pub async fn contents_from(&mut self, contents: Value) -> &mut Self {
debug!("Configuring interaction from {:?}", contents);
let contents_map = contents.as_object().cloned().unwrap_or(Map::default());
let contents_hashmap = contents_map.iter()
.map(|(k, v)| (k.clone(), v.clone())).collect();
if let Some(content_type) = contents_map.get("pact:content-type") {
let ct = ContentType::parse(json_to_string(content_type).as_str()).unwrap();
if let Some(content_matcher) = find_content_matcher(&ct) {
debug!("Found a matcher for '{}': {:?}", ct, content_matcher);
if content_matcher.is_core() {
debug!("Content matcher is a core matcher, will use the internal implementation");
self.setup_core_matcher(&ct, &contents_hashmap, Some(content_matcher));
} else {
debug!("Plugin matcher, will get the plugin to provide the interaction contents");
match content_matcher.configure_interation(&ct, contents_hashmap).await {
Ok((contents, plugin_config)) => {
if let Some(contents) = contents.first() {
self.message_contents = contents.clone();
if!contents.plugin_config.is_empty() {
self.plugin_config.insert(content_matcher.plugin_name(), contents.plugin_config.clone());
}
}
self.contents_plugin = content_matcher.plugin();
if let Some(plugin_config) = plugin_config {
let plugin_name = content_matcher.plugin_name();
if self.plugin_config.contains_key(&*plugin_name) {
let entry = self.plugin_config.get_mut(&*plugin_name).unwrap();
for (k, v) in plugin_config.pact_configuration {
entry.pact_configuration.insert(k.clone(), v.clone());
}
} else {
self.plugin_config.insert(plugin_name.to_string(), plugin_config.clone());
}
}
}
Err(err) => panic!("Failed to call out to plugin - {}", err)
}
}
} else {
debug!("No content matcher found, will use the internal implementation");
self.setup_core_matcher(&ct, &contents_hashmap, None);
}
} else {
self.message_contents = InteractionContents {
body : OptionalBody::from(Value::Object(contents_map.clone())),
.. InteractionContents::default()
};
}
self
}
fn setup_core_matcher(
&mut self,
content_type: &ContentType,
config: &HashMap<String, Value>,
content_matcher: Option<ContentMatcher>
) {
self.message_contents = InteractionContents {
body: if let Some(contents) = config.get("contents") {
OptionalBody::Present(
Bytes::from(contents.to_string()),
Some(content_type.clone()),
None
)
} else {
OptionalBody::Missing
},
.. InteractionContents::default()
};
if let Some(_content_matcher) = content_matcher {
// TODO: get the content matcher to apply the matching rules and generators
// val (body, rules, generators, _, _) = contentMatcher.setupBodyFromConfig(bodyConfig)
// val matchingRules = MatchingRulesImpl()
// if (rules!= null) {
// matchingRules.addCategory(rules)
// }
// MessageContents(body, mapOf(), matchingRules, generators?: Generators())
}
}
/// Any global plugin config required to add to the Pact
pub fn plugin_config(&self) -> Option<PluginData>
|
/// Specify the body as `JsonPattern`, possibly including special matching
/// rules.
///
/// ```
/// use pact_consumer::prelude::*;
/// use pact_consumer::*;
/// use pact_consumer::builders::MessageInteractionBuilder;
///
/// MessageInteractionBuilder::new("hello message", "core/interaction/message").json_body(json_pattern!({
/// "message": like!("Hello"),
/// }));
/// ```
pub fn json_body<B: Into<JsonPattern>>(&mut self, body: B) -> &mut Self {
let body = body.into();
{
let message_body = OptionalBody::Present(body.to_example().to_string().into(), Some("application/json".into()), None);
let mut rules = MatchingRuleCategory::empty("content");
body.extract_matching_rules(DocPath::root(), &mut rules);
self.message_contents.body = message_body;
if rules.is_not_empty() {
match &mut self.message_contents.rules {
None => self.message_contents.rules = Some(rules.clone()),
Some(mr) => mr.add_rules(rules.clone())
}
}
}
self
}
}
|
{
self.contents_plugin.as_ref().map(|plugin| {
let config = if let Some(config) = self.plugin_config.get(plugin.name.as_str()) {
config.pact_configuration.clone()
} else {
hashmap!{}
};
PluginData {
name: plugin.name.clone(),
version: plugin.version.clone(),
configuration: config
}
})
}
|
identifier_body
|
pp.rs
|
i += 1us;
i %= n;
}
s.push(']');
s
}
#[derive(Copy)]
pub enum PrintStackBreak {
Fits,
Broken(Breaks),
}
#[derive(Copy)]
pub struct PrintStackElem {
offset: isize,
pbreak: PrintStackBreak
}
static SIZE_INFINITY: isize = 0xffff;
pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer {
// Yes 3, it makes the ring buffers big enough to never
// fall behind.
let n: usize = 3 * linewidth;
debug!("mk_printer {}", linewidth);
let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
let size: Vec<isize> = repeat(0is).take(n).collect();
let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
Printer {
out: out,
buf_len: n,
margin: linewidth as isize,
space: linewidth as isize,
left: 0,
right: 0,
token: token,
size: size,
left_total: 0,
right_total: 0,
scan_stack: scan_stack,
scan_stack_empty: true,
top: 0,
bottom: 0,
print_stack: Vec::new(),
pending_indentation: 0
}
}
/// In case you do not have the paper, here is an explanation of what's going
/// on.
///
/// There is a stream of input tokens flowing through this printer.
///
/// The printer buffers up to 3N tokens inside itself, where N is linewidth.
/// Yes, linewidth is chars and tokens are multi-char, but in the worst
/// case every token worth buffering is 1 char long, so it's ok.
///
/// Tokens are String, Break, and Begin/End to delimit blocks.
///
/// Begin tokens can carry an offset, saying "how far to indent when you break
/// inside here", as well as a flag indicating "consistent" or "inconsistent"
/// breaking. Consistent breaking means that after the first break, no attempt
/// will be made to flow subsequent breaks together onto lines. Inconsistent
/// is the opposite. Inconsistent breaking example would be, say:
///
/// foo(hello, there, good, friends)
///
/// breaking inconsistently to become
///
/// foo(hello, there
/// good, friends);
///
/// whereas a consistent breaking would yield:
///
/// foo(hello,
/// there
/// good,
/// friends);
///
/// That is, in the consistent-break blocks we value vertical alignment
/// more than the ability to cram stuff onto a line. But in all cases if it
/// can make a block a one-liner, it'll do so.
///
/// Carrying on with high-level logic:
///
/// The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and
/// 'right' indices denote the active portion of the ring buffer as well as
/// describing hypothetical points-in-the-infinite-stream at most 3N tokens
/// apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch
/// between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
/// and point-in-infinite-stream senses freely.
///
/// There is a parallel ring buffer,'size', that holds the calculated size of
/// each token. Why calculated? Because for Begin/End pairs, the "size"
/// includes everything between the pair. That is, the "size" of Begin is
/// actually the sum of the sizes of everything between Begin and the paired
/// End that follows. Since that is arbitrarily far in the future,'size' is
/// being rewritten regularly while the printer runs; in fact most of the
/// machinery is here to work out'size' entries on the fly (and give up when
/// they're so obviously over-long that "infinity" is a good enough
/// approximation for purposes of line breaking).
///
/// The "input side" of the printer is managed as an abstract process called
/// SCAN, which uses'scan_stack','scan_stack_empty', 'top' and 'bottom', to
/// manage calculating'size'. SCAN is, in other words, the process of
/// calculating'size' entries.
///
/// The "output side" of the printer is managed by an abstract process called
/// PRINT, which uses 'print_stack','margin' and'space' to figure out what to
/// do with each token/size pair it consumes as it goes. It's trying to consume
/// the entire buffered window, but can't output anything until the size is >=
/// 0 (sizes are set to negative while they're pending calculation).
///
/// So SCAN takes input and buffers tokens and pending calculations, while
/// PRINT gobbles up completed calculations and tokens from the buffer. The
/// theory is that the two can never get more than 3N tokens apart, because
/// once there's "obviously" too much data to fit on a line, in a size
/// calculation, SCAN will write "infinity" to the size and let PRINT consume
/// it.
///
/// In this implementation (following the paper, again) the SCAN process is
/// the method called 'pretty_print', and the 'PRINT' process is the method
/// called 'print'.
pub struct Printer {
pub out: Box<io::Writer+'static>,
buf_len: usize,
/// Width of lines we're constrained to
margin: isize,
/// Number of spaces left on line
space: isize,
/// Index of left side of input stream
left: usize,
/// Index of right side of input stream
right: usize,
/// Ring-buffer stream goes through
token: Vec<Token>,
/// Ring-buffer of calculated sizes
size: Vec<isize>,
/// Running size of stream "...left"
left_total: isize,
/// Running size of stream "...right"
right_total: isize,
/// Pseudo-stack, really a ring too. Holds the
/// primary-ring-buffers index of the Begin that started the
/// current block, possibly with the most recent Break after that
/// Begin (if there is any) on top of it. Stuff is flushed off the
/// bottom as it becomes irrelevant due to the primary ring-buffer
/// advancing.
scan_stack: Vec<usize>,
/// Top==bottom disambiguator
scan_stack_empty: bool,
/// Index of top of scan_stack
top: usize,
/// Index of bottom of scan_stack
bottom: usize,
/// Stack of blocks-in-progress being flushed by print
print_stack: Vec<PrintStackElem>,
/// Buffered indentation to avoid writing trailing whitespace
pending_indentation: isize,
}
impl Printer {
pub fn last_token(&mut self) -> Token {
self.token[self.right].clone()
}
// be very careful with this!
pub fn replace_last_token(&mut self, t: Token) {
self.token[self.right] = t;
}
pub fn pretty_print(&mut self, token: Token) -> io::IoResult<()> {
debug!("pp ~[{},{}]", self.left, self.right);
match token {
Token::Eof => {
if!self.scan_stack_empty {
self.check_stack(0);
try!(self.advance_left());
}
self.indent(0);
Ok(())
}
Token::Begin(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0us;
self.right = 0us;
} else { self.advance_right(); }
debug!("pp Begin({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
let right = self.right;
self.scan_push(right);
Ok(())
}
Token::End => {
if self.scan_stack_empty {
debug!("pp End/print ~[{},{}]", self.left, self.right);
self.print(token, 0)
} else {
debug!("pp End/buffer ~[{},{}]", self.left, self.right);
self.advance_right();
self.token[self.right] = token;
self.size[self.right] = -1;
let right = self.right;
self.scan_push(right);
Ok(())
}
}
Token::Break(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0us;
self.right = 0us;
} else { self.advance_right(); }
debug!("pp Break({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.check_stack(0);
let right = self.right;
self.scan_push(right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
self.right_total += b.blank_space;
Ok(())
}
Token::String(s, len) => {
if self.scan_stack_empty {
debug!("pp String('{}')/print ~[{},{}]",
s, self.left, self.right);
self.print(Token::String(s, len), len)
} else {
debug!("pp String('{}')/buffer ~[{},{}]",
s, self.left, self.right);
self.advance_right();
self.token[self.right] = Token::String(s, len);
self.size[self.right] = len;
self.right_total += len;
self.check_stream()
}
}
}
}
pub fn check_stream(&mut self) -> io::IoResult<()> {
debug!("check_stream ~[{}, {}] with left_total={}, right_total={}",
self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space {
debug!("scan window is {}, longer than space on line ({})",
self.right_total - self.left_total, self.space);
if!self.scan_stack_empty {
if self.left == self.scan_stack[self.bottom] {
debug!("setting {} to infinity and popping", self.left);
let scanned = self.scan_pop_bottom();
self.size[scanned] = SIZE_INFINITY;
}
}
try!(self.advance_left());
if self.left!= self.right {
try!(self.check_stream());
}
}
Ok(())
}
pub fn scan_push(&mut self, x: usize) {
debug!("scan_push {}", x);
if self.scan_stack_empty {
self.scan_stack_empty = false;
} else {
self.top += 1us;
self.top %= self.buf_len;
assert!((self.top!= self.bottom));
}
self.scan_stack[self.top] = x;
}
pub fn scan_pop(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.top];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.top += self.buf_len - 1us; self.top %= self.buf_len;
}
return x;
}
pub fn scan_top(&mut self) -> usize {
assert!((!self.scan_stack_empty));
return self.scan_stack[self.top];
}
pub fn scan_pop_bottom(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.bottom];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.bottom += 1us; self.bottom %= self.buf_len;
}
return x;
}
pub fn advance_right(&mut self) {
self.right += 1us;
self.right %= self.buf_len;
assert!((self.right!= self.left));
}
pub fn advance_left(&mut self) -> io::IoResult<()> {
debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
self.left, self.size[self.left]);
let mut left_size = self.size[self.left];
while left_size >= 0 {
let left = self.token[self.left].clone();
let len = match left {
Token::Break(b) => b.blank_space,
Token::String(_, len) => {
assert_eq!(len, left_size);
len
}
_ => 0
};
try!(self.print(left, left_size));
self.left_total += len;
if self.left == self.right {
break;
}
self.left += 1us;
self.left %= self.buf_len;
left_size = self.size[self.left];
}
Ok(())
}
pub fn check_stack(&mut self, k: isize) {
if!self.scan_stack_empty {
let x = self.scan_top();
match self.token[x] {
Token::Begin(_) => {
if k > 0 {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
self.check_stack(k - 1);
}
}
Token::End => {
// paper says + not =, but that makes no sense.
let popped = self.scan_pop();
self.size[popped] = 1;
self.check_stack(k + 1);
}
_ => {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
if k > 0 {
self.check_stack(k);
}
}
}
}
}
pub fn print_newline(&mut self, amount: isize) -> io::IoResult<()> {
debug!("NEWLINE {}", amount);
let ret = write!(self.out, "\n");
self.pending_indentation = 0;
self.indent(amount);
return ret;
}
pub fn indent(&mut self, amount: isize) {
debug!("INDENT {}", amount);
self.pending_indentation += amount;
}
pub fn get_top(&mut self) -> PrintStackElem {
let print_stack = &mut self.print_stack;
let n = print_stack.len();
if n!= 0us {
(*print_stack)[n - 1]
} else {
PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Broken(Breaks::Inconsistent)
}
}
}
pub fn print_str(&mut self, s: &str) -> io::IoResult<()> {
while self.pending_indentation > 0 {
try!(write!(self.out, " "));
self.pending_indentation -= 1;
}
write!(self.out, "{}", s)
}
pub fn print(&mut self, token: Token, l: isize) -> io::IoResult<()> {
debug!("print {} {} (remaining line space={})", tok_str(&token), l,
self.space);
debug!("{}", buf_str(&self.token[],
&self.size[],
self.left,
self.right,
6));
match token {
Token::Begin(b) => {
if l > self.space {
let col = self.margin - self.space + b.offset;
debug!("print Begin -> push broken block at col {}", col);
self.print_stack.push(PrintStackElem {
offset: col,
pbreak: PrintStackBreak::Broken(b.breaks)
});
} else {
debug!("print Begin -> push fitting block");
self.print_stack.push(PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Fits
});
}
Ok(())
}
Token::End => {
debug!("print End -> pop End");
let print_stack = &mut self.print_stack;
assert!((print_stack.len()!= 0us));
print_stack.pop().unwrap();
Ok(())
}
Token::Break(b) => {
let top = self.get_top();
match top.pbreak {
PrintStackBreak::Fits => {
debug!("print Break({}) in fitting block", b.blank_space);
self.space -= b.blank_space;
self.indent(b.blank_space);
Ok(())
}
PrintStackBreak::Broken(Breaks::Consistent) => {
debug!("print Break({}+{}) in consistent block",
top.offset, b.offset);
let ret = self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset);
ret
}
PrintStackBreak::Broken(Breaks::Inconsistent) => {
if l > self.space {
debug!("print Break({}+{}) w/ newline in inconsistent",
top.offset, b.offset);
let ret = self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset);
ret
} else {
debug!("print Break({}) w/o newline in inconsistent",
b.blank_space);
self.indent(b.blank_space);
self.space -= b.blank_space;
Ok(())
}
}
}
}
Token::String(s, len) => {
debug!("print String({})", s);
assert_eq!(l, len);
// assert!(l <= space);
self.space -= len;
self.print_str(&s[])
}
Token::Eof => {
// Eof should never get here.
panic!();
}
}
}
}
// Convenience functions to talk to the printer.
//
// "raw box"
pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::IoResult<()> {
p.pretty_print(Token::Begin(BeginToken {
offset: indent as isize,
breaks: b
}))
}
pub fn ibox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
rbox(p, indent, Breaks::Inconsistent)
}
pub fn cbox(p: &mut Printer, indent: usize) -> io::IoResult<()>
|
{
rbox(p, indent, Breaks::Consistent)
}
|
identifier_body
|
|
pp.rs
|
Printer {
out: out,
buf_len: n,
margin: linewidth as isize,
space: linewidth as isize,
left: 0,
right: 0,
token: token,
size: size,
left_total: 0,
right_total: 0,
scan_stack: scan_stack,
scan_stack_empty: true,
top: 0,
bottom: 0,
print_stack: Vec::new(),
pending_indentation: 0
}
}
/// In case you do not have the paper, here is an explanation of what's going
/// on.
///
/// There is a stream of input tokens flowing through this printer.
///
/// The printer buffers up to 3N tokens inside itself, where N is linewidth.
/// Yes, linewidth is chars and tokens are multi-char, but in the worst
/// case every token worth buffering is 1 char long, so it's ok.
///
/// Tokens are String, Break, and Begin/End to delimit blocks.
///
/// Begin tokens can carry an offset, saying "how far to indent when you break
/// inside here", as well as a flag indicating "consistent" or "inconsistent"
/// breaking. Consistent breaking means that after the first break, no attempt
/// will be made to flow subsequent breaks together onto lines. Inconsistent
/// is the opposite. Inconsistent breaking example would be, say:
///
/// foo(hello, there, good, friends)
///
/// breaking inconsistently to become
///
/// foo(hello, there
/// good, friends);
///
/// whereas a consistent breaking would yield:
///
/// foo(hello,
/// there
/// good,
/// friends);
///
/// That is, in the consistent-break blocks we value vertical alignment
/// more than the ability to cram stuff onto a line. But in all cases if it
/// can make a block a one-liner, it'll do so.
///
/// Carrying on with high-level logic:
///
/// The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and
/// 'right' indices denote the active portion of the ring buffer as well as
/// describing hypothetical points-in-the-infinite-stream at most 3N tokens
/// apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch
/// between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
/// and point-in-infinite-stream senses freely.
///
/// There is a parallel ring buffer,'size', that holds the calculated size of
/// each token. Why calculated? Because for Begin/End pairs, the "size"
/// includes everything between the pair. That is, the "size" of Begin is
/// actually the sum of the sizes of everything between Begin and the paired
/// End that follows. Since that is arbitrarily far in the future,'size' is
/// being rewritten regularly while the printer runs; in fact most of the
/// machinery is here to work out'size' entries on the fly (and give up when
/// they're so obviously over-long that "infinity" is a good enough
/// approximation for purposes of line breaking).
///
/// The "input side" of the printer is managed as an abstract process called
/// SCAN, which uses 'scan_stack', 'scan_stack_empty', 'top' and 'bottom', to
/// manage calculating 'size'. SCAN is, in other words, the process of
/// calculating 'size' entries.
///
/// The "output side" of the printer is managed by an abstract process called
/// PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
/// do with each token/size pair it consumes as it goes. It's trying to consume
/// the entire buffered window, but can't output anything until the size is >=
/// 0 (sizes are set to negative while they're pending calculation).
///
/// So SCAN takes input and buffers tokens and pending calculations, while
/// PRINT gobbles up completed calculations and tokens from the buffer. The
/// theory is that the two can never get more than 3N tokens apart, because
/// once there's "obviously" too much data to fit on a line, in a size
/// calculation, SCAN will write "infinity" to the size and let PRINT consume
/// it.
///
/// In this implementation (following the paper, again) the SCAN process is
/// the method called 'pretty_print', and the 'PRINT' process is the method
/// called 'print'.
pub struct Printer {
    /// Sink that receives the formatted output.
    pub out: Box<io::Writer+'static>,
    /// Capacity of the ring buffers below (3 * line width).
    buf_len: usize,
    /// Width of lines we're constrained to
    margin: isize,
    /// Number of spaces left on line
    space: isize,
    /// Index of left side of input stream
    left: usize,
    /// Index of right side of input stream
    right: usize,
    /// Ring-buffer stream goes through
    token: Vec<Token>,
    /// Ring-buffer of calculated sizes
    size: Vec<isize>,
    /// Running size of stream "...left"
    left_total: isize,
    /// Running size of stream "...right"
    right_total: isize,
    /// Pseudo-stack, really a ring too. Holds the
    /// primary-ring-buffers index of the Begin that started the
    /// current block, possibly with the most recent Break after that
    /// Begin (if there is any) on top of it. Stuff is flushed off the
    /// bottom as it becomes irrelevant due to the primary ring-buffer
    /// advancing.
    scan_stack: Vec<usize>,
    /// Top==bottom disambiguator
    scan_stack_empty: bool,
    /// Index of top of scan_stack
    top: usize,
    /// Index of bottom of scan_stack
    bottom: usize,
    /// Stack of blocks-in-progress being flushed by print
    print_stack: Vec<PrintStackElem>,
    /// Buffered indentation to avoid writing trailing whitespace
    pending_indentation: isize,
}
impl Printer {
    /// Returns a clone of the most recently buffered token (the one at the
    /// 'right' edge of the ring buffer).
    pub fn last_token(&mut self) -> Token {
        self.token[self.right].clone()
    }
    // be very careful with this!
    /// Overwrites the most recently buffered token in place, without
    /// adjusting any of the size bookkeeping SCAN maintains.
    pub fn replace_last_token(&mut self, t: Token) {
        self.token[self.right] = t;
    }
    /// The SCAN process from the paper: accepts one input token, buffers it
    /// (or prints it immediately when no block/break is pending) and updates
    /// the pending size calculations.
    pub fn pretty_print(&mut self, token: Token) -> io::IoResult<()> {
        debug!("pp ~[{},{}]", self.left, self.right);
        match token {
            Token::Eof => {
                // Flush everything still buffered before the stream ends.
                if!self.scan_stack_empty {
                    self.check_stack(0);
                    try!(self.advance_left());
                }
                self.indent(0);
                Ok(())
            }
            Token::Begin(b) => {
                if self.scan_stack_empty {
                    // Nothing pending: reset the ring buffer to a known state.
                    self.left_total = 1;
                    self.right_total = 1;
                    self.left = 0us;
                    self.right = 0us;
                } else { self.advance_right(); }
                debug!("pp Begin({})/buffer ~[{},{}]",
                       b.offset, self.left, self.right);
                // Negative size marks "not yet known" (see module docs).
                self.token[self.right] = token;
                self.size[self.right] = -self.right_total;
                let right = self.right;
                self.scan_push(right);
                Ok(())
            }
            Token::End => {
                if self.scan_stack_empty {
                    debug!("pp End/print ~[{},{}]", self.left, self.right);
                    self.print(token, 0)
                } else {
                    debug!("pp End/buffer ~[{},{}]", self.left, self.right);
                    self.advance_right();
                    self.token[self.right] = token;
                    self.size[self.right] = -1;
                    let right = self.right;
                    self.scan_push(right);
                    Ok(())
                }
            }
            Token::Break(b) => {
                if self.scan_stack_empty {
                    self.left_total = 1;
                    self.right_total = 1;
                    self.left = 0us;
                    self.right = 0us;
                } else { self.advance_right(); }
                debug!("pp Break({})/buffer ~[{},{}]",
                       b.offset, self.left, self.right);
                // A Break resolves pending sizes on the scan stack before
                // being pushed itself.
                self.check_stack(0);
                let right = self.right;
                self.scan_push(right);
                self.token[self.right] = token;
                self.size[self.right] = -self.right_total;
                self.right_total += b.blank_space;
                Ok(())
            }
            Token::String(s, len) => {
                if self.scan_stack_empty {
                    debug!("pp String('{}')/print ~[{},{}]",
                           s, self.left, self.right);
                    self.print(Token::String(s, len), len)
                } else {
                    debug!("pp String('{}')/buffer ~[{},{}]",
                           s, self.left, self.right);
                    self.advance_right();
                    // Strings have a known size immediately.
                    self.token[self.right] = Token::String(s, len);
                    self.size[self.right] = len;
                    self.right_total += len;
                    self.check_stream()
                }
            }
        }
    }
    /// If the buffered window no longer fits in the remaining line space,
    /// give up on the oldest pending size (set it to "infinity") and advance
    /// the left edge so PRINT can make progress.
    pub fn check_stream(&mut self) -> io::IoResult<()> {
        debug!("check_stream ~[{}, {}] with left_total={}, right_total={}",
               self.left, self.right, self.left_total, self.right_total);
        if self.right_total - self.left_total > self.space {
            debug!("scan window is {}, longer than space on line ({})",
                   self.right_total - self.left_total, self.space);
            if!self.scan_stack_empty {
                if self.left == self.scan_stack[self.bottom] {
                    debug!("setting {} to infinity and popping", self.left);
                    let scanned = self.scan_pop_bottom();
                    self.size[scanned] = SIZE_INFINITY;
                }
            }
            try!(self.advance_left());
            if self.left!= self.right {
                try!(self.check_stream());
            }
        }
        Ok(())
    }
    /// Pushes a primary-ring-buffer index onto the scan stack.
    pub fn scan_push(&mut self, x: usize) {
        debug!("scan_push {}", x);
        if self.scan_stack_empty {
            self.scan_stack_empty = false;
        } else {
            self.top += 1us;
            self.top %= self.buf_len;
            assert!((self.top!= self.bottom));
        }
        self.scan_stack[self.top] = x;
    }
    /// Pops the most recently pushed index off the scan stack.
    pub fn scan_pop(&mut self) -> usize {
        assert!((!self.scan_stack_empty));
        let x = self.scan_stack[self.top];
        if self.top == self.bottom {
            self.scan_stack_empty = true;
        } else {
            // Decrement modulo buf_len without underflowing the usize.
            self.top += self.buf_len - 1us; self.top %= self.buf_len;
        }
        return x;
    }
    /// Returns the index on top of the scan stack without popping it.
    pub fn scan_top(&mut self) -> usize {
        assert!((!self.scan_stack_empty));
        return self.scan_stack[self.top];
    }
    /// Pops the oldest entry off the bottom of the scan stack (used when the
    /// left edge of the window catches up with it).
    pub fn scan_pop_bottom(&mut self) -> usize {
        assert!((!self.scan_stack_empty));
        let x = self.scan_stack[self.bottom];
        if self.top == self.bottom {
            self.scan_stack_empty = true;
        } else {
            self.bottom += 1us; self.bottom %= self.buf_len;
        }
        return x;
    }
    /// Advances the right edge of the ring buffer by one slot (with wrap).
    pub fn advance_right(&mut self) {
        self.right += 1us;
        self.right %= self.buf_len;
        assert!((self.right!= self.left));
    }
    /// Drains tokens whose sizes are known (>= 0) from the left edge of the
    /// buffer, handing each one to PRINT.
    pub fn advance_left(&mut self) -> io::IoResult<()> {
        debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
               self.left, self.size[self.left]);
        let mut left_size = self.size[self.left];
        while left_size >= 0 {
            let left = self.token[self.left].clone();
            let len = match left {
                Token::Break(b) => b.blank_space,
                Token::String(_, len) => {
                    assert_eq!(len, left_size);
                    len
                }
                _ => 0
            };
            try!(self.print(left, left_size));
            self.left_total += len;
            if self.left == self.right {
                break;
            }
            self.left += 1us;
            self.left %= self.buf_len;
            left_size = self.size[self.left];
        }
        Ok(())
    }
    /// Resolves pending (negative) size entries on the scan stack now that a
    /// Break or End has arrived; `k` counts End tokens seen in this pass.
    pub fn check_stack(&mut self, k: isize) {
        if!self.scan_stack_empty {
            let x = self.scan_top();
            match self.token[x] {
                Token::Begin(_) => {
                    if k > 0 {
                        let popped = self.scan_pop();
                        self.size[popped] = self.size[x] + self.right_total;
                        self.check_stack(k - 1);
                    }
                }
                Token::End => {
                    // paper says + not =, but that makes no sense.
                    let popped = self.scan_pop();
                    self.size[popped] = 1;
                    self.check_stack(k + 1);
                }
                _ => {
                    let popped = self.scan_pop();
                    self.size[popped] = self.size[x] + self.right_total;
                    if k > 0 {
                        self.check_stack(k);
                    }
                }
            }
        }
    }
    /// Emits a newline and queues `amount` columns of indentation for the
    /// next string that gets printed.
    pub fn print_newline(&mut self, amount: isize) -> io::IoResult<()> {
        debug!("NEWLINE {}", amount);
        let ret = write!(self.out, "\n");
        self.pending_indentation = 0;
        self.indent(amount);
        return ret;
    }
    /// Buffers `amount` columns of indentation; nothing is written until the
    /// next string, which avoids emitting trailing whitespace.
    pub fn indent(&mut self, amount: isize) {
        debug!("INDENT {}", amount);
        self.pending_indentation += amount;
    }
    /// Returns the innermost block on the print stack, or a synthetic
    /// broken-inconsistent block when the stack is empty.
    pub fn get_top(&mut self) -> PrintStackElem {
        let print_stack = &mut self.print_stack;
        let n = print_stack.len();
        if n!= 0us {
            (*print_stack)[n - 1]
        } else {
            PrintStackElem {
                offset: 0,
                pbreak: PrintStackBreak::Broken(Breaks::Inconsistent)
            }
        }
    }
    /// Writes any pending indentation followed by the string itself.
    pub fn print_str(&mut self, s: &str) -> io::IoResult<()> {
        while self.pending_indentation > 0 {
            try!(write!(self.out, " "));
            self.pending_indentation -= 1;
        }
        write!(self.out, "{}", s)
    }
    /// The PRINT process from the paper: consumes one token whose size `l`
    /// is now known and produces output, tracking remaining line space.
    pub fn print(&mut self, token: Token, l: isize) -> io::IoResult<()> {
        debug!("print {} {} (remaining line space={})", tok_str(&token), l,
               self.space);
        debug!("{}", buf_str(&self.token[],
                             &self.size[],
                             self.left,
                             self.right,
                             6));
        match token {
            Token::Begin(b) => {
                if l > self.space {
                    // Block won't fit: push it broken at the current column
                    // plus its own offset.
                    let col = self.margin - self.space + b.offset;
                    debug!("print Begin -> push broken block at col {}", col);
                    self.print_stack.push(PrintStackElem {
                        offset: col,
                        pbreak: PrintStackBreak::Broken(b.breaks)
                    });
                } else {
                    debug!("print Begin -> push fitting block");
                    self.print_stack.push(PrintStackElem {
                        offset: 0,
                        pbreak: PrintStackBreak::Fits
                    });
                }
                Ok(())
            }
            Token::End => {
                debug!("print End -> pop End");
                let print_stack = &mut self.print_stack;
                assert!((print_stack.len()!= 0us));
                print_stack.pop().unwrap();
                Ok(())
            }
            Token::Break(b) => {
                let top = self.get_top();
                match top.pbreak {
                    PrintStackBreak::Fits => {
                        // Block fits: render the break as blank space.
                        debug!("print Break({}) in fitting block", b.blank_space);
                        self.space -= b.blank_space;
                        self.indent(b.blank_space);
                        Ok(())
                    }
                    PrintStackBreak::Broken(Breaks::Consistent) => {
                        // Consistent blocks break at every Break token.
                        debug!("print Break({}+{}) in consistent block",
                               top.offset, b.offset);
                        let ret = self.print_newline(top.offset + b.offset);
                        self.space = self.margin - (top.offset + b.offset);
                        ret
                    }
                    PrintStackBreak::Broken(Breaks::Inconsistent) => {
                        // Inconsistent blocks break only when needed.
                        if l > self.space {
                            debug!("print Break({}+{}) w/ newline in inconsistent",
                                   top.offset, b.offset);
                            let ret = self.print_newline(top.offset + b.offset);
                            self.space = self.margin - (top.offset + b.offset);
                            ret
                        } else {
                            debug!("print Break({}) w/o newline in inconsistent",
                                   b.blank_space);
                            self.indent(b.blank_space);
                            self.space -= b.blank_space;
                            Ok(())
                        }
                    }
                }
            }
            Token::String(s, len) => {
                debug!("print String({})", s);
                assert_eq!(l, len);
                // assert!(l <= space);
                self.space -= len;
                self.print_str(&s[])
            }
            Token::Eof => {
                // Eof should never get here.
                panic!();
            }
        }
    }
}
// Convenience functions to talk to the printer.
//
// "raw box"
/// Opens a "raw box": a Begin token carrying the given indent and the
/// requested break style.
pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::IoResult<()> {
    let begin = BeginToken {
        offset: indent as isize,
        breaks: b,
    };
    p.pretty_print(Token::Begin(begin))
}
pub fn ibox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
rbox(p, indent, Breaks::Inconsistent)
}
pub fn cbox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
rbox(p, indent, Breaks::Consistent)
}
/// Emits a Break rendered as `n` spaces when unbroken, with an extra indent
/// offset of `off` columns when it turns into a newline.
pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> io::IoResult<()> {
    let tok = BreakToken {
        offset: off,
        blank_space: n as isize,
    };
    p.pretty_print(Token::Break(tok))
}
/// Closes the innermost open box.
pub fn end(p: &mut Printer) -> io::IoResult<()> {
    let tok = Token::End;
    p.pretty_print(tok)
}
/// Terminates the token stream, flushing anything still buffered.
pub fn eof(p: &mut Printer) -> io::IoResult<()> {
    let tok = Token::Eof;
    p.pretty_print(tok)
}
/// Emits literal text whose layout width equals its byte length.
pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
    let owned = wrd.to_string(); /* bad */
    let width = wrd.len() as isize;
    p.pretty_print(Token::String(owned, width))
}
/// Emits literal text with an "infinite" layout width, forcing it onto its
/// own line (see module docs on isolated content).
pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
    let owned = wrd.to_string(); /* bad */
    p.pretty_print(Token::String(owned, SIZE_INFINITY))
}
pub fn
|
zero_word
|
identifier_name
|
|
pp.rs
|
(or before each) and the breaking algorithm decides to break
//! there anyways (because the functions themselves are long) you wind up with
//! extra blank lines. If you don't put hardbreaks you can wind up with the
//! "thing which should be on its own line" not getting its own line in the
//! rare case of "really small functions" or such. This re-occurs with comments
//! and explicit blank lines. So in those cases we use a string with a payload
//! we want isolated to a line and an explicit length that's huge, surrounded
//! by two zero-length breaks. The algorithm will try its best to fit it on a
//! line (which it can't) and so naturally place the content on its own line to
//! avoid combining it with other lines and making matters even worse.
use std::io;
use std::string;
use std::iter::repeat;
/// How a block treats its internal Break tokens once the block itself has
/// to be broken across lines.
#[derive(Clone, Copy, PartialEq)]
pub enum Breaks {
    // Every Break in the block becomes a newline.
    Consistent,
    // Breaks become newlines only when the following chunk would overflow.
    Inconsistent,
}
/// Payload of a `Token::Break`.
#[derive(Clone, Copy)]
pub struct BreakToken {
    // Extra indentation applied if this break turns into a newline.
    offset: isize,
    // Number of spaces rendered when the break stays on the same line.
    blank_space: isize
}
/// Payload of a `Token::Begin` (block opener).
#[derive(Clone, Copy)]
pub struct BeginToken {
    // Indentation added for lines broken inside this block.
    offset: isize,
    // Consistent or inconsistent breaking for this block.
    breaks: Breaks
}
/// One element of the pretty-printer's input stream.
#[derive(Clone)]
pub enum Token {
    // Literal text plus its layout width (the width used for line-break
    // decisions; may be SIZE_INFINITY to force isolation).
    String(String, isize),
    // A potential line break.
    Break(BreakToken),
    // Start of a block.
    Begin(BeginToken),
    // End of a block.
    End,
    // End of the token stream.
    Eof,
}
impl Token {
    /// Returns true for the end-of-stream marker.
    pub fn is_eof(&self) -> bool {
        if let Token::Eof = *self { true } else { false }
    }
    /// Returns true for the zero-offset, infinite-width Break that forces
    /// an unconditional line break.
    pub fn is_hardbreak_tok(&self) -> bool {
        match *self {
            Token::Break(b) =>
                b.offset == 0 && b.blank_space == SIZE_INFINITY,
            _ =>
                false
        }
    }
}
/// Renders a token as a short human-readable tag for debug logging.
pub fn tok_str(token: &Token) -> String {
    match *token {
        Token::String(ref s, len) => format!("STR({},{})", s, len),
        Token::Break(_) => "BREAK".to_string(),
        Token::Begin(_) => "BEGIN".to_string(),
        Token::End => "END".to_string(),
        Token::Eof => "EOF".to_string()
    }
}
/// Formats up to `lim` entries of the parallel token/size ring buffers for
/// debug logging, starting at `left` and wrapping modulo the buffer length
/// until `right` is reached.
pub fn buf_str(toks: &[Token],
               szs: &[isize],
               left: usize,
               right: usize,
               lim: usize)
               -> String {
    let n = toks.len();
    assert_eq!(n, szs.len());
    let mut i = left;
    let mut l = lim;
    let mut s = string::String::from_str("[");
    while i!= right && l!= 0us {
        l -= 1us;
        if i!= left {
            s.push_str(", ");
        }
        s.push_str(&format!("{}={}",
                            szs[i],
                            tok_str(&toks[i]))[]);
        // Advance with wrap-around, mirroring the printer's ring indexing.
        i += 1us;
        i %= n;
    }
    s.push(']');
    s
}
/// Whether the block currently being printed fits on the line or has been
/// broken (and if broken, with which break style).
#[derive(Copy)]
pub enum PrintStackBreak {
    Fits,
    Broken(Breaks),
}
/// One entry of the PRINT process's stack of blocks-in-progress.
#[derive(Copy)]
pub struct PrintStackElem {
    // Indentation column for lines broken inside this block.
    offset: isize,
    // Fits-or-broken state of this block.
    pbreak: PrintStackBreak
}
/// Sentinel "infinite" size: larger than any practical line width, so a
/// token marked with it always forces a break.
static SIZE_INFINITY: isize = 0xffff;
/// Creates a `Printer` that wraps output at `linewidth` columns.
///
/// Fix: removed a stray `|` junk line that had been spliced into the middle
/// of the struct literal (between `scan_stack_empty` and `top`), which made
/// the function syntactically invalid.
pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer {
    // Yes 3, it makes the ring buffers big enough to never
    // fall behind.
    let n: usize = 3 * linewidth;
    debug!("mk_printer {}", linewidth);
    let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
    let size: Vec<isize> = repeat(0is).take(n).collect();
    let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
    Printer {
        out: out,
        buf_len: n,
        margin: linewidth as isize,
        space: linewidth as isize,
        left: 0,
        right: 0,
        token: token,
        size: size,
        left_total: 0,
        right_total: 0,
        scan_stack: scan_stack,
        scan_stack_empty: true,
        top: 0,
        bottom: 0,
        print_stack: Vec::new(),
        pending_indentation: 0
    }
}
/// In case you do not have the paper, here is an explanation of what's going
/// on.
///
/// There is a stream of input tokens flowing through this printer.
///
/// The printer buffers up to 3N tokens inside itself, where N is linewidth.
/// Yes, linewidth is chars and tokens are multi-char, but in the worst
/// case every token worth buffering is 1 char long, so it's ok.
///
/// Tokens are String, Break, and Begin/End to delimit blocks.
///
/// Begin tokens can carry an offset, saying "how far to indent when you break
/// inside here", as well as a flag indicating "consistent" or "inconsistent"
/// breaking. Consistent breaking means that after the first break, no attempt
/// will be made to flow subsequent breaks together onto lines. Inconsistent
/// is the opposite. Inconsistent breaking example would be, say:
///
/// foo(hello, there, good, friends)
///
/// breaking inconsistently to become
///
/// foo(hello, there
/// good, friends);
///
/// whereas a consistent breaking would yield:
///
/// foo(hello,
/// there
/// good,
/// friends);
///
/// That is, in the consistent-break blocks we value vertical alignment
/// more than the ability to cram stuff onto a line. But in all cases if it
/// can make a block a one-liner, it'll do so.
///
/// Carrying on with high-level logic:
///
/// The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and
/// 'right' indices denote the active portion of the ring buffer as well as
/// describing hypothetical points-in-the-infinite-stream at most 3N tokens
/// apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch
/// between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
/// and point-in-infinite-stream senses freely.
///
/// There is a parallel ring buffer, 'size', that holds the calculated size of
/// each token. Why calculated? Because for Begin/End pairs, the "size"
/// includes everything between the pair. That is, the "size" of Begin is
/// actually the sum of the sizes of everything between Begin and the paired
/// End that follows. Since that is arbitrarily far in the future, 'size' is
/// being rewritten regularly while the printer runs; in fact most of the
/// machinery is here to work out 'size' entries on the fly (and give up when
/// they're so obviously over-long that "infinity" is a good enough
/// approximation for purposes of line breaking).
///
/// The "input side" of the printer is managed as an abstract process called
/// SCAN, which uses 'scan_stack', 'scan_stack_empty', 'top' and 'bottom', to
/// manage calculating 'size'. SCAN is, in other words, the process of
/// calculating 'size' entries.
///
/// The "output side" of the printer is managed by an abstract process called
/// PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
/// do with each token/size pair it consumes as it goes. It's trying to consume
/// the entire buffered window, but can't output anything until the size is >=
/// 0 (sizes are set to negative while they're pending calculation).
///
/// So SCAN takes input and buffers tokens and pending calculations, while
/// PRINT gobbles up completed calculations and tokens from the buffer. The
/// theory is that the two can never get more than 3N tokens apart, because
/// once there's "obviously" too much data to fit on a line, in a size
/// calculation, SCAN will write "infinity" to the size and let PRINT consume
/// it.
///
/// In this implementation (following the paper, again) the SCAN process is
/// the method called 'pretty_print', and the 'PRINT' process is the method
/// called 'print'.
pub struct Printer {
    /// Sink that receives the formatted output.
    pub out: Box<io::Writer+'static>,
    /// Capacity of the ring buffers below (3 * line width).
    buf_len: usize,
    /// Width of lines we're constrained to
    margin: isize,
    /// Number of spaces left on line
    space: isize,
    /// Index of left side of input stream
    left: usize,
    /// Index of right side of input stream
    right: usize,
    /// Ring-buffer stream goes through
    token: Vec<Token>,
    /// Ring-buffer of calculated sizes
    size: Vec<isize>,
    /// Running size of stream "...left"
    left_total: isize,
    /// Running size of stream "...right"
    right_total: isize,
    /// Pseudo-stack, really a ring too. Holds the
    /// primary-ring-buffers index of the Begin that started the
    /// current block, possibly with the most recent Break after that
    /// Begin (if there is any) on top of it. Stuff is flushed off the
    /// bottom as it becomes irrelevant due to the primary ring-buffer
    /// advancing.
    scan_stack: Vec<usize>,
    /// Top==bottom disambiguator
    scan_stack_empty: bool,
    /// Index of top of scan_stack
    top: usize,
    /// Index of bottom of scan_stack
    bottom: usize,
    /// Stack of blocks-in-progress being flushed by print
    print_stack: Vec<PrintStackElem>,
    /// Buffered indentation to avoid writing trailing whitespace
    pending_indentation: isize,
}
impl Printer {
pub fn last_token(&mut self) -> Token {
self.token[self.right].clone()
}
// be very careful with this!
pub fn replace_last_token(&mut self, t: Token) {
self.token[self.right] = t;
}
pub fn pretty_print(&mut self, token: Token) -> io::IoResult<()> {
debug!("pp ~[{},{}]", self.left, self.right);
match token {
Token::Eof => {
if!self.scan_stack_empty {
self.check_stack(0);
try!(self.advance_left());
}
self.indent(0);
Ok(())
}
Token::Begin(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0us;
self.right = 0us;
} else { self.advance_right(); }
debug!("pp Begin({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
let right = self.right;
self.scan_push(right);
Ok(())
}
Token::End => {
if self.scan_stack_empty {
debug!("pp End/print ~[{},{}]", self.left, self.right);
self.print(token, 0)
} else {
debug!("pp End/buffer ~[{},{}]", self.left, self.right);
self.advance_right();
self.token[self.right] = token;
self.size[self.right] = -1;
let right = self.right;
self.scan_push(right);
Ok(())
}
}
Token::Break(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0us;
self.right = 0us;
} else { self.advance_right(); }
debug!("pp Break({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.check_stack(0);
let right = self.right;
self.scan_push(right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
self.right_total += b.blank_space;
Ok(())
}
Token::String(s, len) => {
if self.scan_stack_empty {
debug!("pp String('{}')/print ~[{},{}]",
s, self.left, self.right);
self.print(Token::String(s, len), len)
} else {
debug!("pp String('{}')/buffer ~[{},{}]",
s, self.left, self.right);
self.advance_right();
self.token[self.right] = Token::String(s, len);
self.size[self.right] = len;
self.right_total += len;
self.check_stream()
}
}
}
}
pub fn check_stream(&mut self) -> io::IoResult<()> {
debug!("check_stream ~[{}, {}] with left_total={}, right_total={}",
self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space {
debug!("scan window is {}, longer than space on line ({})",
self.right_total - self.left_total, self.space);
if!self.scan_stack_empty {
if self.left == self.scan_stack[self.bottom] {
debug!("setting {} to infinity and popping", self.left);
let scanned = self.scan_pop_bottom();
self.size[scanned] = SIZE_INFINITY;
}
}
try!(self.advance_left());
if self.left!= self.right {
try!(self.check_stream());
}
}
Ok(())
}
pub fn scan_push(&mut self, x: usize) {
debug!("scan_push {}", x);
if self.scan_stack_empty {
self.scan_stack_empty = false;
} else {
self.top += 1us;
self.top %= self.buf_len;
assert!((self.top!= self.bottom));
}
self.scan_stack[self.top] = x;
}
pub fn scan_pop(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.top];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.top += self.buf_len - 1us; self.top %= self.buf_len;
}
return x;
}
pub fn scan_top(&mut self) -> usize {
assert!((!self.scan_stack_empty));
return self.scan_stack[self.top];
}
pub fn scan_pop_bottom(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.bottom];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.bottom += 1us; self.bottom %= self.buf_len;
}
return x;
}
pub fn advance_right(&mut self) {
self.right += 1us;
self.right %= self.buf_len;
assert!((self.right!= self.left));
}
pub fn advance_left(&mut self) -> io::IoResult<()> {
debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
self.left, self.size[self.left]);
let mut left_size = self.size[self.left];
while left_size >= 0 {
let left = self.token[self.left].clone();
let len = match left {
Token::Break(b) => b.blank_space,
Token::String(_, len) => {
assert_eq!(len, left_size);
len
}
_ => 0
};
try!(self.print(left, left_size));
self.left_total += len;
if self.left == self.right {
break;
}
self.left += 1us;
self.left %= self.buf_len;
left_size = self.size[self.left];
}
Ok(())
}
pub fn check_stack(&mut self, k: isize) {
if!self.scan_stack_empty {
let x = self.scan_top();
match self.token[x] {
Token::Begin(_) => {
if k > 0 {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
self.check_stack(k - 1);
}
}
Token::End => {
// paper says + not =, but that makes no sense.
let popped = self.scan_pop();
self.size[popped] = 1;
self.check_stack(k + 1);
}
_ => {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
if k > 0 {
self.check_stack(k);
}
}
}
}
}
pub fn print_newline(&mut self, amount: isize) -> io::IoResult<()> {
debug!("NEWLINE {}", amount);
let ret = write!(self.out, "\n");
self.pending_indentation = 0;
self.indent(amount);
return ret;
}
pub fn indent(&mut self, amount: isize) {
debug!("INDENT {}", amount);
self.pending_indentation += amount;
}
pub fn get_top(&mut self) -> PrintStackElem {
let print_stack = &mut self.print_stack;
let n = print_stack.len();
if n!= 0us {
(*print_stack)[n - 1]
} else {
PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Broken(Breaks::Inconsistent)
}
}
}
pub fn print_str(&mut self, s: &str) -> io::IoResult<()> {
while self.pending_indentation > 0 {
try!(write!(self.out, " "));
self.pending_indentation -= 1;
}
write!(self.out, "{}", s)
}
pub fn print(&mut self, token: Token, l: isize) -> io::IoResult<()> {
debug!("print {} {} (remaining line space={})", tok_str(&token), l,
self.space);
debug!("{}", buf_str(&self.token[],
&self.size[],
self.left,
self.right,
6));
match token {
Token::Begin(b) => {
if l > self.space {
let col = self.margin - self.space + b.offset;
debug!("print Begin -> push broken block at col {}", col);
self.print_stack.push(PrintStackElem {
offset: col,
pbreak: PrintStackBreak::Broken(b.breaks)
});
} else {
debug!("print Begin -> push fitting block");
self.print_stack.push(PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Fits
});
}
Ok(())
}
Token::End => {
debug!("print End -> pop End");
let print_stack = &mut self.print_stack;
assert!((print_stack.len()!= 0us));
print_stack.pop().unwrap();
Ok(())
}
Token::Break(b) => {
let top = self.get_top();
match top.pbreak {
PrintStackBreak::Fits => {
debug!("print Break({}) in fitting block", b.blank_space);
|
random_line_split
|
|
pp.rs
|
or before each) and the breaking algorithm decides to break
//! there anyways (because the functions themselves are long) you wind up with
//! extra blank lines. If you don't put hardbreaks you can wind up with the
//! "thing which should be on its own line" not getting its own line in the
//! rare case of "really small functions" or such. This re-occurs with comments
//! and explicit blank lines. So in those cases we use a string with a payload
//! we want isolated to a line and an explicit length that's huge, surrounded
//! by two zero-length breaks. The algorithm will try its best to fit it on a
//! line (which it can't) and so naturally place the content on its own line to
//! avoid combining it with other lines and making matters even worse.
use std::io;
use std::string;
use std::iter::repeat;
/// How a block treats its internal Break tokens once the block itself has
/// to be broken across lines.
#[derive(Clone, Copy, PartialEq)]
pub enum Breaks {
    // Every Break in the block becomes a newline.
    Consistent,
    // Breaks become newlines only when the following chunk would overflow.
    Inconsistent,
}
/// Payload of a `Token::Break`.
#[derive(Clone, Copy)]
pub struct BreakToken {
    // Extra indentation applied if this break turns into a newline.
    offset: isize,
    // Number of spaces rendered when the break stays on the same line.
    blank_space: isize
}
/// Payload of a `Token::Begin` (block opener).
#[derive(Clone, Copy)]
pub struct BeginToken {
    // Indentation added for lines broken inside this block.
    offset: isize,
    // Consistent or inconsistent breaking for this block.
    breaks: Breaks
}
/// One element of the pretty-printer's input stream.
#[derive(Clone)]
pub enum Token {
    // Literal text plus its layout width (the width used for line-break
    // decisions; may be SIZE_INFINITY to force isolation).
    String(String, isize),
    // A potential line break.
    Break(BreakToken),
    // Start of a block.
    Begin(BeginToken),
    // End of a block.
    End,
    // End of the token stream.
    Eof,
}
impl Token {
    /// Returns true for the end-of-stream marker.
    pub fn is_eof(&self) -> bool {
        if let Token::Eof = *self { true } else { false }
    }
    /// Returns true for the zero-offset, infinite-width Break that forces
    /// an unconditional line break.
    pub fn is_hardbreak_tok(&self) -> bool {
        match *self {
            Token::Break(b) =>
                b.offset == 0 && b.blank_space == SIZE_INFINITY,
            _ =>
                false
        }
    }
}
/// Renders a token as a short human-readable tag for debug logging.
pub fn tok_str(token: &Token) -> String {
    match *token {
        Token::String(ref s, len) => format!("STR({},{})", s, len),
        Token::Break(_) => "BREAK".to_string(),
        Token::Begin(_) => "BEGIN".to_string(),
        Token::End => "END".to_string(),
        Token::Eof => "EOF".to_string()
    }
}
/// Formats up to `lim` entries of the parallel token/size ring buffers for
/// debug logging, starting at `left` and wrapping modulo the buffer length
/// until `right` is reached.
pub fn buf_str(toks: &[Token],
               szs: &[isize],
               left: usize,
               right: usize,
               lim: usize)
               -> String {
    let n = toks.len();
    assert_eq!(n, szs.len());
    let mut i = left;
    let mut l = lim;
    let mut s = string::String::from_str("[");
    while i!= right && l!= 0us {
        l -= 1us;
        if i!= left {
            s.push_str(", ");
        }
        s.push_str(&format!("{}={}",
                            szs[i],
                            tok_str(&toks[i]))[]);
        // Advance with wrap-around, mirroring the printer's ring indexing.
        i += 1us;
        i %= n;
    }
    s.push(']');
    s
}
/// Whether the block currently being printed fits on the line or has been
/// broken (and if broken, with which break style).
#[derive(Copy)]
pub enum PrintStackBreak {
    Fits,
    Broken(Breaks),
}
/// One entry of the PRINT process's stack of blocks-in-progress.
#[derive(Copy)]
pub struct PrintStackElem {
    // Indentation column for lines broken inside this block.
    offset: isize,
    // Fits-or-broken state of this block.
    pbreak: PrintStackBreak
}
/// Sentinel "infinite" size: larger than any practical line width, so a
/// token marked with it always forces a break.
static SIZE_INFINITY: isize = 0xffff;
/// Creates a `Printer` that wraps output at `linewidth` columns.
pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer {
    // Yes 3, it makes the ring buffers big enough to never
    // fall behind.
    let n: usize = 3 * linewidth;
    debug!("mk_printer {}", linewidth);
    let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
    let size: Vec<isize> = repeat(0is).take(n).collect();
    let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
    Printer {
        out: out,
        buf_len: n,
        margin: linewidth as isize,
        space: linewidth as isize,
        left: 0,
        right: 0,
        token: token,
        size: size,
        left_total: 0,
        right_total: 0,
        scan_stack: scan_stack,
        scan_stack_empty: true,
        top: 0,
        bottom: 0,
        print_stack: Vec::new(),
        pending_indentation: 0
    }
}
/// In case you do not have the paper, here is an explanation of what's going
/// on.
///
/// There is a stream of input tokens flowing through this printer.
///
/// The printer buffers up to 3N tokens inside itself, where N is linewidth.
/// Yes, linewidth is chars and tokens are multi-char, but in the worst
/// case every token worth buffering is 1 char long, so it's ok.
///
/// Tokens are String, Break, and Begin/End to delimit blocks.
///
/// Begin tokens can carry an offset, saying "how far to indent when you break
/// inside here", as well as a flag indicating "consistent" or "inconsistent"
/// breaking. Consistent breaking means that after the first break, no attempt
/// will be made to flow subsequent breaks together onto lines. Inconsistent
/// is the opposite. Inconsistent breaking example would be, say:
///
/// foo(hello, there, good, friends)
///
/// breaking inconsistently to become
///
/// foo(hello, there
/// good, friends);
///
/// whereas a consistent breaking would yield:
///
/// foo(hello,
/// there
/// good,
/// friends);
///
/// That is, in the consistent-break blocks we value vertical alignment
/// more than the ability to cram stuff onto a line. But in all cases if it
/// can make a block a one-liner, it'll do so.
///
/// Carrying on with high-level logic:
///
/// The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and
/// 'right' indices denote the active portion of the ring buffer as well as
/// describing hypothetical points-in-the-infinite-stream at most 3N tokens
/// apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch
/// between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
/// and point-in-infinite-stream senses freely.
///
/// There is a parallel ring buffer, 'size', that holds the calculated size of
/// each token. Why calculated? Because for Begin/End pairs, the "size"
/// includes everything between the pair. That is, the "size" of Begin is
/// actually the sum of the sizes of everything between Begin and the paired
/// End that follows. Since that is arbitrarily far in the future, 'size' is
/// being rewritten regularly while the printer runs; in fact most of the
/// machinery is here to work out 'size' entries on the fly (and give up when
/// they're so obviously over-long that "infinity" is a good enough
/// approximation for purposes of line breaking).
///
/// The "input side" of the printer is managed as an abstract process called
/// SCAN, which uses 'scan_stack', 'scan_stack_empty', 'top' and 'bottom', to
/// manage calculating 'size'. SCAN is, in other words, the process of
/// calculating 'size' entries.
///
/// The "output side" of the printer is managed by an abstract process called
/// PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
/// do with each token/size pair it consumes as it goes. It's trying to consume
/// the entire buffered window, but can't output anything until the size is >=
/// 0 (sizes are set to negative while they're pending calculation).
///
/// So SCAN takes input and buffers tokens and pending calculations, while
/// PRINT gobbles up completed calculations and tokens from the buffer. The
/// theory is that the two can never get more than 3N tokens apart, because
/// once there's "obviously" too much data to fit on a line, in a size
/// calculation, SCAN will write "infinity" to the size and let PRINT consume
/// it.
///
/// In this implementation (following the paper, again) the SCAN process is
/// the method called 'pretty_print', and the 'PRINT' process is the method
/// called 'print'.
pub struct Printer {
    /// Sink that receives the formatted output.
    pub out: Box<io::Writer+'static>,
    /// Capacity of the ring buffers below (3 * line width).
    buf_len: usize,
    /// Width of lines we're constrained to
    margin: isize,
    /// Number of spaces left on line
    space: isize,
    /// Index of left side of input stream
    left: usize,
    /// Index of right side of input stream
    right: usize,
    /// Ring-buffer stream goes through
    token: Vec<Token>,
    /// Ring-buffer of calculated sizes
    size: Vec<isize>,
    /// Running size of stream "...left"
    left_total: isize,
    /// Running size of stream "...right"
    right_total: isize,
    /// Pseudo-stack, really a ring too. Holds the
    /// primary-ring-buffers index of the Begin that started the
    /// current block, possibly with the most recent Break after that
    /// Begin (if there is any) on top of it. Stuff is flushed off the
    /// bottom as it becomes irrelevant due to the primary ring-buffer
    /// advancing.
    scan_stack: Vec<usize>,
    /// Top==bottom disambiguator
    scan_stack_empty: bool,
    /// Index of top of scan_stack
    top: usize,
    /// Index of bottom of scan_stack
    bottom: usize,
    /// Stack of blocks-in-progress being flushed by print
    print_stack: Vec<PrintStackElem>,
    /// Buffered indentation to avoid writing trailing whitespace
    pending_indentation: isize,
}
impl Printer {
pub fn last_token(&mut self) -> Token {
self.token[self.right].clone()
}
// be very careful with this!
pub fn replace_last_token(&mut self, t: Token) {
self.token[self.right] = t;
}
pub fn pretty_print(&mut self, token: Token) -> io::IoResult<()> {
debug!("pp ~[{},{}]", self.left, self.right);
match token {
Token::Eof => {
if!self.scan_stack_empty {
self.check_stack(0);
try!(self.advance_left());
}
self.indent(0);
Ok(())
}
Token::Begin(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0us;
self.right = 0us;
} else { self.advance_right(); }
debug!("pp Begin({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
let right = self.right;
self.scan_push(right);
Ok(())
}
Token::End => {
if self.scan_stack_empty {
debug!("pp End/print ~[{},{}]", self.left, self.right);
self.print(token, 0)
} else {
debug!("pp End/buffer ~[{},{}]", self.left, self.right);
self.advance_right();
self.token[self.right] = token;
self.size[self.right] = -1;
let right = self.right;
self.scan_push(right);
Ok(())
}
}
Token::Break(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0us;
self.right = 0us;
} else { self.advance_right(); }
debug!("pp Break({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.check_stack(0);
let right = self.right;
self.scan_push(right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
self.right_total += b.blank_space;
Ok(())
}
Token::String(s, len) => {
if self.scan_stack_empty {
debug!("pp String('{}')/print ~[{},{}]",
s, self.left, self.right);
self.print(Token::String(s, len), len)
} else {
debug!("pp String('{}')/buffer ~[{},{}]",
s, self.left, self.right);
self.advance_right();
self.token[self.right] = Token::String(s, len);
self.size[self.right] = len;
self.right_total += len;
self.check_stream()
}
}
}
}
pub fn check_stream(&mut self) -> io::IoResult<()> {
debug!("check_stream ~[{}, {}] with left_total={}, right_total={}",
self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space {
debug!("scan window is {}, longer than space on line ({})",
self.right_total - self.left_total, self.space);
if!self.scan_stack_empty {
if self.left == self.scan_stack[self.bottom] {
debug!("setting {} to infinity and popping", self.left);
let scanned = self.scan_pop_bottom();
self.size[scanned] = SIZE_INFINITY;
}
}
try!(self.advance_left());
if self.left!= self.right {
try!(self.check_stream());
}
}
Ok(())
}
pub fn scan_push(&mut self, x: usize) {
debug!("scan_push {}", x);
if self.scan_stack_empty {
self.scan_stack_empty = false;
} else {
self.top += 1us;
self.top %= self.buf_len;
assert!((self.top!= self.bottom));
}
self.scan_stack[self.top] = x;
}
pub fn scan_pop(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.top];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.top += self.buf_len - 1us; self.top %= self.buf_len;
}
return x;
}
pub fn scan_top(&mut self) -> usize {
assert!((!self.scan_stack_empty));
return self.scan_stack[self.top];
}
pub fn scan_pop_bottom(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.bottom];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.bottom += 1us; self.bottom %= self.buf_len;
}
return x;
}
pub fn advance_right(&mut self) {
self.right += 1us;
self.right %= self.buf_len;
assert!((self.right!= self.left));
}
pub fn advance_left(&mut self) -> io::IoResult<()> {
debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
self.left, self.size[self.left]);
let mut left_size = self.size[self.left];
while left_size >= 0 {
let left = self.token[self.left].clone();
let len = match left {
Token::Break(b) => b.blank_space,
Token::String(_, len) => {
assert_eq!(len, left_size);
len
}
_ => 0
};
try!(self.print(left, left_size));
self.left_total += len;
if self.left == self.right {
break;
}
self.left += 1us;
self.left %= self.buf_len;
left_size = self.size[self.left];
}
Ok(())
}
pub fn check_stack(&mut self, k: isize) {
if!self.scan_stack_empty {
let x = self.scan_top();
match self.token[x] {
Token::Begin(_) => {
if k > 0 {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
self.check_stack(k - 1);
}
}
Token::End => {
// paper says + not =, but that makes no sense.
let popped = self.scan_pop();
self.size[popped] = 1;
self.check_stack(k + 1);
}
_ => {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
if k > 0 {
self.check_stack(k);
}
}
}
}
}
pub fn print_newline(&mut self, amount: isize) -> io::IoResult<()> {
debug!("NEWLINE {}", amount);
let ret = write!(self.out, "\n");
self.pending_indentation = 0;
self.indent(amount);
return ret;
}
pub fn indent(&mut self, amount: isize) {
debug!("INDENT {}", amount);
self.pending_indentation += amount;
}
pub fn get_top(&mut self) -> PrintStackElem {
let print_stack = &mut self.print_stack;
let n = print_stack.len();
if n!= 0us {
(*print_stack)[n - 1]
} else {
PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Broken(Breaks::Inconsistent)
}
}
}
pub fn print_str(&mut self, s: &str) -> io::IoResult<()> {
while self.pending_indentation > 0 {
try!(write!(self.out, " "));
self.pending_indentation -= 1;
}
write!(self.out, "{}", s)
}
pub fn print(&mut self, token: Token, l: isize) -> io::IoResult<()> {
debug!("print {} {} (remaining line space={})", tok_str(&token), l,
self.space);
debug!("{}", buf_str(&self.token[],
&self.size[],
self.left,
self.right,
6));
match token {
Token::Begin(b) => {
if l > self.space {
let col = self.margin - self.space + b.offset;
debug!("print Begin -> push broken block at col {}", col);
self.print_stack.push(PrintStackElem {
offset: col,
pbreak: PrintStackBreak::Broken(b.breaks)
});
} else
|
Ok(())
}
Token::End => {
debug!("print End -> pop End");
let print_stack = &mut self.print_stack;
assert!((print_stack.len()!= 0us));
print_stack.pop().unwrap();
Ok(())
}
Token::Break(b) => {
let top = self.get_top();
match top.pbreak {
PrintStackBreak::Fits => {
debug!("print Break({}) in fitting block", b.blank_space);
|
{
debug!("print Begin -> push fitting block");
self.print_stack.push(PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Fits
});
}
|
conditional_block
|
owned.rs
|
use super::*;
use std::borrow::Cow;
#[cfg(feature = "lazy")]
include!("lazy.rs");
#[allow(non_snake_case)]
#[derive(Debug, Default, Clone, PartialEq)]
pub struct OwnedIpInfo {
pub city_id: u32,
pub country: String,
pub region: String,
pub province: String,
pub city: String,
pub ISP: String,
}
impl OwnedIpInfo {
pub fn as_ref<'a>(&'a self) -> IpInfo<'a> {
IpInfo {
city_id: self.city_id,
country: &self.country,
region: &self.region,
province: &self.province,
city: &self.city,
ISP: &self.ISP,
}
}
}
impl fmt::Display for OwnedIpInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.as_ref())
}
}
pub struct OwnedIp2Region {
// super block index info
first_index_ptr: u32,
// last_index_ptr: u32,
total_blocks: u32,
db_bin_bytes: Cow<'static, [u8]>,
}
impl OwnedIp2Region {
pub fn new(path: &str) -> io::Result<Self> {
let mut file = File::open(path)?;
Self::new2(&mut file)
}
pub(crate) fn new2(file: &mut File) -> io::Result<Self> {
let file_size = file.metadata()?.len();
let mut bytes = Vec::with_capacity(file_size as usize);
file.read_to_end(&mut bytes)?;
let first_index_ptr = get_u32(&bytes[..], 0);
let last_index_ptr = get_u32(&bytes[..], 4);
let total_blocks = (last_index_ptr - first_index_ptr) / INDEX_BLOCK_LENGTH + 1;
let db_bin_bytes = Cow::Owned(bytes);
Ok(OwnedIp2Region {
first_index_ptr,
total_blocks,
db_bin_bytes,
})
}
pub fn memory_search<S: AsRef<str>>(&self, ip_str: S) -> Result<IpInfo> {
let ip = ip_str.as_ref().parse()?;
self.memory_search_ip(&ip)
}
pub fn memory_search_ip(&self, ip_addr: &IpAddr) -> Result<IpInfo> {
let ip = ip2u32(ip_addr)?;
let mut h = self.total_blocks;
let (mut data_ptr, mut l) = (0u32, 0u32);
while l <= h {
let m = (l + h) >> 1;
let p = self.first_index_ptr + m * INDEX_BLOCK_LENGTH;
let sip = get_u32(&self.db_bin_bytes[..], p);
if ip < sip {
h = m - 1;
} else {
let eip = get_u32(&self.db_bin_bytes[..], p + 4);
if ip > eip {
l = m + 1;
} else {
data_ptr = get_u32(&self.db_bin_bytes[..], p + 8);
break;
}
}
|
if data_ptr == 0 {
Err(Error::NotFound)?;
}
let data_len = (data_ptr >> 24) & 0xff;
data_ptr = data_ptr & 0x00FFFFFF;
get_ip_info(
get_u32(&self.db_bin_bytes[..], data_ptr),
&self.db_bin_bytes[(data_ptr + 4) as usize..(data_ptr + data_len) as usize],
)
}
}
|
}
|
random_line_split
|
owned.rs
|
use super::*;
use std::borrow::Cow;
#[cfg(feature = "lazy")]
include!("lazy.rs");
#[allow(non_snake_case)]
#[derive(Debug, Default, Clone, PartialEq)]
pub struct OwnedIpInfo {
pub city_id: u32,
pub country: String,
pub region: String,
pub province: String,
pub city: String,
pub ISP: String,
}
impl OwnedIpInfo {
pub fn as_ref<'a>(&'a self) -> IpInfo<'a> {
IpInfo {
city_id: self.city_id,
country: &self.country,
region: &self.region,
province: &self.province,
city: &self.city,
ISP: &self.ISP,
}
}
}
impl fmt::Display for OwnedIpInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.as_ref())
}
}
pub struct OwnedIp2Region {
// super block index info
first_index_ptr: u32,
// last_index_ptr: u32,
total_blocks: u32,
db_bin_bytes: Cow<'static, [u8]>,
}
impl OwnedIp2Region {
pub fn new(path: &str) -> io::Result<Self> {
let mut file = File::open(path)?;
Self::new2(&mut file)
}
pub(crate) fn new2(file: &mut File) -> io::Result<Self> {
let file_size = file.metadata()?.len();
let mut bytes = Vec::with_capacity(file_size as usize);
file.read_to_end(&mut bytes)?;
let first_index_ptr = get_u32(&bytes[..], 0);
let last_index_ptr = get_u32(&bytes[..], 4);
let total_blocks = (last_index_ptr - first_index_ptr) / INDEX_BLOCK_LENGTH + 1;
let db_bin_bytes = Cow::Owned(bytes);
Ok(OwnedIp2Region {
first_index_ptr,
total_blocks,
db_bin_bytes,
})
}
pub fn memory_search<S: AsRef<str>>(&self, ip_str: S) -> Result<IpInfo> {
let ip = ip_str.as_ref().parse()?;
self.memory_search_ip(&ip)
}
pub fn memory_search_ip(&self, ip_addr: &IpAddr) -> Result<IpInfo> {
let ip = ip2u32(ip_addr)?;
let mut h = self.total_blocks;
let (mut data_ptr, mut l) = (0u32, 0u32);
while l <= h {
let m = (l + h) >> 1;
let p = self.first_index_ptr + m * INDEX_BLOCK_LENGTH;
let sip = get_u32(&self.db_bin_bytes[..], p);
if ip < sip {
h = m - 1;
} else
|
}
if data_ptr == 0 {
Err(Error::NotFound)?;
}
let data_len = (data_ptr >> 24) & 0xff;
data_ptr = data_ptr & 0x00FFFFFF;
get_ip_info(
get_u32(&self.db_bin_bytes[..], data_ptr),
&self.db_bin_bytes[(data_ptr + 4) as usize..(data_ptr + data_len) as usize],
)
}
}
|
{
let eip = get_u32(&self.db_bin_bytes[..], p + 4);
if ip > eip {
l = m + 1;
} else {
data_ptr = get_u32(&self.db_bin_bytes[..], p + 8);
break;
}
}
|
conditional_block
|
owned.rs
|
use super::*;
use std::borrow::Cow;
#[cfg(feature = "lazy")]
include!("lazy.rs");
#[allow(non_snake_case)]
#[derive(Debug, Default, Clone, PartialEq)]
pub struct OwnedIpInfo {
pub city_id: u32,
pub country: String,
pub region: String,
pub province: String,
pub city: String,
pub ISP: String,
}
impl OwnedIpInfo {
pub fn as_ref<'a>(&'a self) -> IpInfo<'a> {
IpInfo {
city_id: self.city_id,
country: &self.country,
region: &self.region,
province: &self.province,
city: &self.city,
ISP: &self.ISP,
}
}
}
impl fmt::Display for OwnedIpInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
|
}
pub struct OwnedIp2Region {
// super block index info
first_index_ptr: u32,
// last_index_ptr: u32,
total_blocks: u32,
db_bin_bytes: Cow<'static, [u8]>,
}
impl OwnedIp2Region {
pub fn new(path: &str) -> io::Result<Self> {
let mut file = File::open(path)?;
Self::new2(&mut file)
}
pub(crate) fn new2(file: &mut File) -> io::Result<Self> {
let file_size = file.metadata()?.len();
let mut bytes = Vec::with_capacity(file_size as usize);
file.read_to_end(&mut bytes)?;
let first_index_ptr = get_u32(&bytes[..], 0);
let last_index_ptr = get_u32(&bytes[..], 4);
let total_blocks = (last_index_ptr - first_index_ptr) / INDEX_BLOCK_LENGTH + 1;
let db_bin_bytes = Cow::Owned(bytes);
Ok(OwnedIp2Region {
first_index_ptr,
total_blocks,
db_bin_bytes,
})
}
pub fn memory_search<S: AsRef<str>>(&self, ip_str: S) -> Result<IpInfo> {
let ip = ip_str.as_ref().parse()?;
self.memory_search_ip(&ip)
}
pub fn memory_search_ip(&self, ip_addr: &IpAddr) -> Result<IpInfo> {
let ip = ip2u32(ip_addr)?;
let mut h = self.total_blocks;
let (mut data_ptr, mut l) = (0u32, 0u32);
while l <= h {
let m = (l + h) >> 1;
let p = self.first_index_ptr + m * INDEX_BLOCK_LENGTH;
let sip = get_u32(&self.db_bin_bytes[..], p);
if ip < sip {
h = m - 1;
} else {
let eip = get_u32(&self.db_bin_bytes[..], p + 4);
if ip > eip {
l = m + 1;
} else {
data_ptr = get_u32(&self.db_bin_bytes[..], p + 8);
break;
}
}
}
if data_ptr == 0 {
Err(Error::NotFound)?;
}
let data_len = (data_ptr >> 24) & 0xff;
data_ptr = data_ptr & 0x00FFFFFF;
get_ip_info(
get_u32(&self.db_bin_bytes[..], data_ptr),
&self.db_bin_bytes[(data_ptr + 4) as usize..(data_ptr + data_len) as usize],
)
}
}
|
{
write!(f, "{}", self.as_ref())
}
|
identifier_body
|
owned.rs
|
use super::*;
use std::borrow::Cow;
#[cfg(feature = "lazy")]
include!("lazy.rs");
#[allow(non_snake_case)]
#[derive(Debug, Default, Clone, PartialEq)]
pub struct OwnedIpInfo {
pub city_id: u32,
pub country: String,
pub region: String,
pub province: String,
pub city: String,
pub ISP: String,
}
impl OwnedIpInfo {
pub fn as_ref<'a>(&'a self) -> IpInfo<'a> {
IpInfo {
city_id: self.city_id,
country: &self.country,
region: &self.region,
province: &self.province,
city: &self.city,
ISP: &self.ISP,
}
}
}
impl fmt::Display for OwnedIpInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.as_ref())
}
}
pub struct OwnedIp2Region {
// super block index info
first_index_ptr: u32,
// last_index_ptr: u32,
total_blocks: u32,
db_bin_bytes: Cow<'static, [u8]>,
}
impl OwnedIp2Region {
pub fn new(path: &str) -> io::Result<Self> {
let mut file = File::open(path)?;
Self::new2(&mut file)
}
pub(crate) fn
|
(file: &mut File) -> io::Result<Self> {
let file_size = file.metadata()?.len();
let mut bytes = Vec::with_capacity(file_size as usize);
file.read_to_end(&mut bytes)?;
let first_index_ptr = get_u32(&bytes[..], 0);
let last_index_ptr = get_u32(&bytes[..], 4);
let total_blocks = (last_index_ptr - first_index_ptr) / INDEX_BLOCK_LENGTH + 1;
let db_bin_bytes = Cow::Owned(bytes);
Ok(OwnedIp2Region {
first_index_ptr,
total_blocks,
db_bin_bytes,
})
}
pub fn memory_search<S: AsRef<str>>(&self, ip_str: S) -> Result<IpInfo> {
let ip = ip_str.as_ref().parse()?;
self.memory_search_ip(&ip)
}
pub fn memory_search_ip(&self, ip_addr: &IpAddr) -> Result<IpInfo> {
let ip = ip2u32(ip_addr)?;
let mut h = self.total_blocks;
let (mut data_ptr, mut l) = (0u32, 0u32);
while l <= h {
let m = (l + h) >> 1;
let p = self.first_index_ptr + m * INDEX_BLOCK_LENGTH;
let sip = get_u32(&self.db_bin_bytes[..], p);
if ip < sip {
h = m - 1;
} else {
let eip = get_u32(&self.db_bin_bytes[..], p + 4);
if ip > eip {
l = m + 1;
} else {
data_ptr = get_u32(&self.db_bin_bytes[..], p + 8);
break;
}
}
}
if data_ptr == 0 {
Err(Error::NotFound)?;
}
let data_len = (data_ptr >> 24) & 0xff;
data_ptr = data_ptr & 0x00FFFFFF;
get_ip_info(
get_u32(&self.db_bin_bytes[..], data_ptr),
&self.db_bin_bytes[(data_ptr + 4) as usize..(data_ptr + data_len) as usize],
)
}
}
|
new2
|
identifier_name
|
task-comm-10.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(std_misc)]
use std::thread;
use std::sync::mpsc::{channel, Sender};
fn
|
(tx: &Sender<Sender<String>>) {
let (tx2, rx) = channel();
tx.send(tx2).unwrap();
let mut a;
let mut b;
a = rx.recv().unwrap();
assert!(a == "A".to_string());
println!("{}", a);
b = rx.recv().unwrap();
assert!(b == "B".to_string());
println!("{}", b);
}
pub fn main() {
let (tx, rx) = channel();
let _child = thread::scoped(move|| { start(&tx) });
let mut c = rx.recv().unwrap();
c.send("A".to_string()).unwrap();
c.send("B".to_string()).unwrap();
thread::yield_now();
}
|
start
|
identifier_name
|
task-comm-10.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(std_misc)]
use std::thread;
use std::sync::mpsc::{channel, Sender};
fn start(tx: &Sender<Sender<String>>) {
let (tx2, rx) = channel();
tx.send(tx2).unwrap();
let mut a;
let mut b;
a = rx.recv().unwrap();
assert!(a == "A".to_string());
println!("{}", a);
b = rx.recv().unwrap();
|
let (tx, rx) = channel();
let _child = thread::scoped(move|| { start(&tx) });
let mut c = rx.recv().unwrap();
c.send("A".to_string()).unwrap();
c.send("B".to_string()).unwrap();
thread::yield_now();
}
|
assert!(b == "B".to_string());
println!("{}", b);
}
pub fn main() {
|
random_line_split
|
task-comm-10.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(std_misc)]
use std::thread;
use std::sync::mpsc::{channel, Sender};
fn start(tx: &Sender<Sender<String>>) {
let (tx2, rx) = channel();
tx.send(tx2).unwrap();
let mut a;
let mut b;
a = rx.recv().unwrap();
assert!(a == "A".to_string());
println!("{}", a);
b = rx.recv().unwrap();
assert!(b == "B".to_string());
println!("{}", b);
}
pub fn main()
|
{
let (tx, rx) = channel();
let _child = thread::scoped(move|| { start(&tx) });
let mut c = rx.recv().unwrap();
c.send("A".to_string()).unwrap();
c.send("B".to_string()).unwrap();
thread::yield_now();
}
|
identifier_body
|
|
main.rs
|
fn is_prime(number: i32) -> bool {
if number % 2 == 0 && number!= 2 || number == 1 {
return false;
}
let limit = (number as f32).sqrt() as i32 + 1;
// We test if the number is divisible by any odd number up to the limit
(3..limit).step_by(2).all(|x| number % x!= 0)
}
fn main() {
println!("{}", is_prime(15_485_863)); // The 1 000 000th prime.
println!("{}", is_prime(62_773_913)); // The product of the 1000th and 1001st primes.
}
#[test]
fn test_one() {
// https://primes.utm.edu/notes/faq/one.html
assert!(!is_prime(1));
}
#[test]
fn
|
() {
assert!(is_prime(2));
}
#[test]
fn test_many() {
let primes = [3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert!(primes.iter().all(|&x| is_prime(x)));
}
|
test_two
|
identifier_name
|
main.rs
|
fn is_prime(number: i32) -> bool {
if number % 2 == 0 && number!= 2 || number == 1 {
return false;
}
let limit = (number as f32).sqrt() as i32 + 1;
// We test if the number is divisible by any odd number up to the limit
(3..limit).step_by(2).all(|x| number % x!= 0)
}
fn main() {
println!("{}", is_prime(15_485_863)); // The 1 000 000th prime.
println!("{}", is_prime(62_773_913)); // The product of the 1000th and 1001st primes.
}
#[test]
fn test_one() {
// https://primes.utm.edu/notes/faq/one.html
assert!(!is_prime(1));
}
#[test]
fn test_two() {
assert!(is_prime(2));
}
#[test]
fn test_many() {
let primes = [3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert!(primes.iter().all(|&x| is_prime(x)));
|
}
|
random_line_split
|
|
main.rs
|
fn is_prime(number: i32) -> bool {
if number % 2 == 0 && number!= 2 || number == 1
|
let limit = (number as f32).sqrt() as i32 + 1;
// We test if the number is divisible by any odd number up to the limit
(3..limit).step_by(2).all(|x| number % x!= 0)
}
fn main() {
println!("{}", is_prime(15_485_863)); // The 1 000 000th prime.
println!("{}", is_prime(62_773_913)); // The product of the 1000th and 1001st primes.
}
#[test]
fn test_one() {
// https://primes.utm.edu/notes/faq/one.html
assert!(!is_prime(1));
}
#[test]
fn test_two() {
assert!(is_prime(2));
}
#[test]
fn test_many() {
let primes = [3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert!(primes.iter().all(|&x| is_prime(x)));
}
|
{
return false;
}
|
conditional_block
|
main.rs
|
fn is_prime(number: i32) -> bool {
if number % 2 == 0 && number!= 2 || number == 1 {
return false;
}
let limit = (number as f32).sqrt() as i32 + 1;
// We test if the number is divisible by any odd number up to the limit
(3..limit).step_by(2).all(|x| number % x!= 0)
}
fn main() {
println!("{}", is_prime(15_485_863)); // The 1 000 000th prime.
println!("{}", is_prime(62_773_913)); // The product of the 1000th and 1001st primes.
}
#[test]
fn test_one()
|
#[test]
fn test_two() {
assert!(is_prime(2));
}
#[test]
fn test_many() {
let primes = [3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert!(primes.iter().all(|&x| is_prime(x)));
}
|
{
// https://primes.utm.edu/notes/faq/one.html
assert!(!is_prime(1));
}
|
identifier_body
|
build_insert_query.rs
|
extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use uuid::Uuid;
use rustorm::query::Query;
use rustorm::dao::{Dao, IsDao};
use rustorm::pool::ManagedPool;
#[derive(Debug, Clone)]
pub struct Photo {
pub photo_id: Uuid,
pub url: Option<String>,
}
impl IsDao for Photo{
fn
|
(dao: &Dao) -> Self {
Photo {
photo_id: dao.get("photo_id"),
url: dao.get_opt("url"),
}
}
fn to_dao(&self) -> Dao {
let mut dao = Dao::new();
dao.set("photo_id", &self.photo_id);
match self.url {
Some(ref _value) => dao.set("url", _value),
None => dao.set_null("url"),
}
dao
}
}
#[test]
fn test_insert_query() {
let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6";
let pool = ManagedPool::init(&url, 1).unwrap();
let db = pool.connect().unwrap();
let mut query = Query::insert();
query.into_table("bazaar.product")
.set("name", &"product1")
.returns(vec!["category.name"]);
let frag = query.build(db.as_ref());
let expected = "
INSERT INTO bazaar.product( name )\x20
VALUES ($1 )\x20
RETURNING name
".to_string();
println!("actual: {{\n{}}} [{}]", frag.sql, frag.sql.len());
println!("expected: {{{}}} [{}]", expected, expected.len());
assert!(frag.sql.trim() == expected.trim());
}
|
from_dao
|
identifier_name
|
build_insert_query.rs
|
extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use uuid::Uuid;
use rustorm::query::Query;
use rustorm::dao::{Dao, IsDao};
use rustorm::pool::ManagedPool;
|
#[derive(Debug, Clone)]
pub struct Photo {
pub photo_id: Uuid,
pub url: Option<String>,
}
impl IsDao for Photo{
fn from_dao(dao: &Dao) -> Self {
Photo {
photo_id: dao.get("photo_id"),
url: dao.get_opt("url"),
}
}
fn to_dao(&self) -> Dao {
let mut dao = Dao::new();
dao.set("photo_id", &self.photo_id);
match self.url {
Some(ref _value) => dao.set("url", _value),
None => dao.set_null("url"),
}
dao
}
}
#[test]
fn test_insert_query() {
let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6";
let pool = ManagedPool::init(&url, 1).unwrap();
let db = pool.connect().unwrap();
let mut query = Query::insert();
query.into_table("bazaar.product")
.set("name", &"product1")
.returns(vec!["category.name"]);
let frag = query.build(db.as_ref());
let expected = "
INSERT INTO bazaar.product( name )\x20
VALUES ($1 )\x20
RETURNING name
".to_string();
println!("actual: {{\n{}}} [{}]", frag.sql, frag.sql.len());
println!("expected: {{{}}} [{}]", expected, expected.len());
assert!(frag.sql.trim() == expected.trim());
}
|
random_line_split
|
|
build_insert_query.rs
|
extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use uuid::Uuid;
use rustorm::query::Query;
use rustorm::dao::{Dao, IsDao};
use rustorm::pool::ManagedPool;
#[derive(Debug, Clone)]
pub struct Photo {
pub photo_id: Uuid,
pub url: Option<String>,
}
impl IsDao for Photo{
fn from_dao(dao: &Dao) -> Self {
Photo {
photo_id: dao.get("photo_id"),
url: dao.get_opt("url"),
}
}
fn to_dao(&self) -> Dao
|
}
#[test]
fn test_insert_query() {
let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6";
let pool = ManagedPool::init(&url, 1).unwrap();
let db = pool.connect().unwrap();
let mut query = Query::insert();
query.into_table("bazaar.product")
.set("name", &"product1")
.returns(vec!["category.name"]);
let frag = query.build(db.as_ref());
let expected = "
INSERT INTO bazaar.product( name )\x20
VALUES ($1 )\x20
RETURNING name
".to_string();
println!("actual: {{\n{}}} [{}]", frag.sql, frag.sql.len());
println!("expected: {{{}}} [{}]", expected, expected.len());
assert!(frag.sql.trim() == expected.trim());
}
|
{
let mut dao = Dao::new();
dao.set("photo_id", &self.photo_id);
match self.url {
Some(ref _value) => dao.set("url", _value),
None => dao.set_null("url"),
}
dao
}
|
identifier_body
|
type-params-in-for-each.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// pretty-expanded FIXME #23616
struct S<T> {
a: T,
b: usize,
}
fn range_<F>(lo: usize, hi: usize, mut it: F) where F: FnMut(usize) {
let mut lo_ = lo;
while lo_ < hi { it(lo_); lo_ += 1; }
}
fn create_index<T>(_index: Vec<S<T>>, _hash_fn: extern fn(T) -> usize) {
range_(0, 256, |_i| {
let _bucket: Vec<T> = Vec::new();
})
}
|
pub fn main() { }
|
random_line_split
|
|
type-params-in-for-each.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// pretty-expanded FIXME #23616
struct S<T> {
a: T,
b: usize,
}
fn
|
<F>(lo: usize, hi: usize, mut it: F) where F: FnMut(usize) {
let mut lo_ = lo;
while lo_ < hi { it(lo_); lo_ += 1; }
}
fn create_index<T>(_index: Vec<S<T>>, _hash_fn: extern fn(T) -> usize) {
range_(0, 256, |_i| {
let _bucket: Vec<T> = Vec::new();
})
}
pub fn main() { }
|
range_
|
identifier_name
|
mod.rs
|
use std::fmt;
use std::error::Error;
use std::sync::Mutex;
use CapabilitiesSource;
use gl;
use version::Api;
use version::Version;
pub use self::compute::{ComputeShader, ComputeCommand};
pub use self::program::Program;
pub use self::reflection::{Uniform, UniformBlock, BlockLayout, OutputPrimitives};
pub use self::reflection::{Attribute, TransformFeedbackVarying, TransformFeedbackBuffer, TransformFeedbackMode};
mod compute;
mod program;
mod raw;
mod reflection;
mod shader;
mod uniforms_storage;
/// Returns true if the backend supports geometry shaders.
#[inline]
pub fn is_geometry_shader_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
shader::check_shader_type_compatibility(ctxt, gl::GEOMETRY_SHADER)
}
/// Returns true if the backend supports tessellation shaders.
#[inline]
pub fn is_tessellation_shader_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
shader::check_shader_type_compatibility(ctxt, gl::TESS_CONTROL_SHADER)
}
/// Returns true if the backend supports creating and retreiving binary format.
#[inline]
pub fn is_binary_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
ctxt.get_version() >= &Version(Api::Gl, 4, 1) || ctxt.get_version() >= &Version(Api::GlEs, 2, 0)
|| ctxt.get_extensions().gl_arb_get_programy_binary
}
/// Some shader compilers have race-condition issues, so we lock this mutex
/// in the GL thread every time we compile a shader or link a program.
// TODO: replace by a StaticMutex
lazy_static! {
static ref COMPILER_GLOBAL_LOCK: Mutex<()> = Mutex::new(());
}
/// Error that can be triggered when creating a `Program`.
#[derive(Clone, Debug)]
pub enum ProgramCreationError {
/// Error while compiling one of the shaders.
CompilationError(String),
/// Error while linking the program.
LinkingError(String),
/// One of the requested shader types is not supported by the backend.
///
/// Usually the case for geometry shaders.
ShaderTypeNotSupported,
/// The OpenGL implementation doesn't provide a compiler.
CompilationNotSupported,
/// You have requested transform feedback varyings, but transform feedback is not supported
/// by the backend.
TransformFeedbackNotSupported,
/// You have requested point size setting from the shader, but it's not
/// supported by the backend.
PointSizeNotSupported,
}
impl fmt::Display for ProgramCreationError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
&ProgramCreationError::CompilationError(ref s) =>
formatter.write_fmt(format_args!("Compilation error in one of the shaders: {}", s)),
&ProgramCreationError::LinkingError(ref s) =>
formatter.write_fmt(format_args!("Error while linking shaders together: {}", s)),
&ProgramCreationError::ShaderTypeNotSupported =>
formatter.write_str("One of the request shader type is \
not supported by the backend"),
&ProgramCreationError::CompilationNotSupported =>
formatter.write_str("The backend doesn't support shaders compilation"),
&ProgramCreationError::TransformFeedbackNotSupported =>
formatter.write_str("You requested transform feedback, but this feature is not \
supported by the backend"),
&ProgramCreationError::PointSizeNotSupported =>
formatter.write_str("You requested point size setting, but it's not \
supported by the backend"),
}
}
}
impl Error for ProgramCreationError {
fn description(&self) -> &str {
match self {
&ProgramCreationError::CompilationError(_) => "Compilation error in one of the \
shaders",
&ProgramCreationError::LinkingError(_) => "Error while linking shaders together",
&ProgramCreationError::ShaderTypeNotSupported => "One of the request shader type is \
not supported by the backend",
&ProgramCreationError::CompilationNotSupported => "The backend doesn't support \
shaders compilation",
&ProgramCreationError::TransformFeedbackNotSupported => "Transform feedback is not \
supported by the backend.",
&ProgramCreationError::PointSizeNotSupported => "Point size is not supported by \
the backend.",
}
}
#[inline]
fn cause(&self) -> Option<&Error> {
None
}
}
/// Error while retreiving the binary representation of a program.
#[derive(Copy, Clone, Debug)]
pub enum GetBinaryError {
/// The backend doesn't support binary.
NotSupported,
}
/// Input when creating a program.
pub enum
|
<'a> {
/// Use GLSL source code.
SourceCode {
/// Source code of the vertex shader.
vertex_shader: &'a str,
/// Source code of the optional tessellation control shader.
tessellation_control_shader: Option<&'a str>,
/// Source code of the optional tessellation evaluation shader.
tessellation_evaluation_shader: Option<&'a str>,
/// Source code of the optional geometry shader.
geometry_shader: Option<&'a str>,
/// Source code of the fragment shader.
fragment_shader: &'a str,
/// The list of variables and mode to use for transform feedback.
///
/// The information specified here will be passed to the OpenGL linker. If you pass
/// `None`, then you won't be able to use transform feedback.
transform_feedback_varyings: Option<(Vec<String>, TransformFeedbackMode)>,
/// Whether the fragment shader outputs colors in `sRGB` or `RGB`. This is false by default,
/// meaning that the program outputs `RGB`.
///
/// If this is false, then `GL_FRAMEBUFFER_SRGB` will be enabled when this program is used
/// (if it is supported).
outputs_srgb: bool,
/// Whether the shader uses point size.
uses_point_size: bool,
},
/// Use a precompiled binary.
Binary {
/// The data.
data: Binary,
/// See `SourceCode::outputs_srgb`.
outputs_srgb: bool,
/// Whether the shader uses point size.
uses_point_size: bool,
}
}
/// Represents the source code of a program.
pub struct SourceCode<'a> {
/// Source code of the vertex shader.
pub vertex_shader: &'a str,
/// Source code of the optional tessellation control shader.
pub tessellation_control_shader: Option<&'a str>,
/// Source code of the optional tessellation evaluation shader.
pub tessellation_evaluation_shader: Option<&'a str>,
/// Source code of the optional geometry shader.
pub geometry_shader: Option<&'a str>,
/// Source code of the fragment shader.
pub fragment_shader: &'a str,
}
impl<'a> From<SourceCode<'a>> for ProgramCreationInput<'a> {
#[inline]
fn from(code: SourceCode<'a>) -> ProgramCreationInput<'a> {
let SourceCode { vertex_shader, fragment_shader, geometry_shader,
tessellation_control_shader, tessellation_evaluation_shader } = code;
ProgramCreationInput::SourceCode {
vertex_shader: vertex_shader,
tessellation_control_shader: tessellation_control_shader,
tessellation_evaluation_shader: tessellation_evaluation_shader,
geometry_shader: geometry_shader,
fragment_shader: fragment_shader,
transform_feedback_varyings: None,
outputs_srgb: false,
uses_point_size: false,
}
}
}
/// Represents the compiled binary data of a program.
pub struct Binary {
/// An implementation-defined format.
pub format: u32,
/// The binary data.
pub content: Vec<u8>,
}
impl<'a> From<Binary> for ProgramCreationInput<'a> {
#[inline]
fn from(binary: Binary) -> ProgramCreationInput<'a> {
ProgramCreationInput::Binary {
data: binary,
outputs_srgb: false,
uses_point_size: false,
}
}
}
|
ProgramCreationInput
|
identifier_name
|
mod.rs
|
use std::fmt;
use std::error::Error;
use std::sync::Mutex;
use CapabilitiesSource;
use gl;
use version::Api;
use version::Version;
pub use self::compute::{ComputeShader, ComputeCommand};
pub use self::program::Program;
pub use self::reflection::{Uniform, UniformBlock, BlockLayout, OutputPrimitives};
pub use self::reflection::{Attribute, TransformFeedbackVarying, TransformFeedbackBuffer, TransformFeedbackMode};
mod compute;
mod program;
mod raw;
mod reflection;
mod shader;
mod uniforms_storage;
/// Returns true if the backend supports geometry shaders.
#[inline]
pub fn is_geometry_shader_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
shader::check_shader_type_compatibility(ctxt, gl::GEOMETRY_SHADER)
}
/// Returns true if the backend supports tessellation shaders.
#[inline]
pub fn is_tessellation_shader_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
shader::check_shader_type_compatibility(ctxt, gl::TESS_CONTROL_SHADER)
}
/// Returns true if the backend supports creating and retreiving binary format.
#[inline]
pub fn is_binary_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
ctxt.get_version() >= &Version(Api::Gl, 4, 1) || ctxt.get_version() >= &Version(Api::GlEs, 2, 0)
|| ctxt.get_extensions().gl_arb_get_programy_binary
}
/// Some shader compilers have race-condition issues, so we lock this mutex
/// in the GL thread every time we compile a shader or link a program.
// TODO: replace by a StaticMutex
lazy_static! {
static ref COMPILER_GLOBAL_LOCK: Mutex<()> = Mutex::new(());
}
/// Error that can be triggered when creating a `Program`.
#[derive(Clone, Debug)]
pub enum ProgramCreationError {
/// Error while compiling one of the shaders.
CompilationError(String),
/// Error while linking the program.
LinkingError(String),
/// One of the requested shader types is not supported by the backend.
///
/// Usually the case for geometry shaders.
ShaderTypeNotSupported,
/// The OpenGL implementation doesn't provide a compiler.
CompilationNotSupported,
/// You have requested transform feedback varyings, but transform feedback is not supported
/// by the backend.
TransformFeedbackNotSupported,
/// You have requested point size setting from the shader, but it's not
/// supported by the backend.
PointSizeNotSupported,
}
impl fmt::Display for ProgramCreationError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
&ProgramCreationError::CompilationError(ref s) =>
formatter.write_fmt(format_args!("Compilation error in one of the shaders: {}", s)),
&ProgramCreationError::LinkingError(ref s) =>
formatter.write_fmt(format_args!("Error while linking shaders together: {}", s)),
&ProgramCreationError::ShaderTypeNotSupported =>
formatter.write_str("One of the request shader type is \
not supported by the backend"),
&ProgramCreationError::CompilationNotSupported =>
formatter.write_str("The backend doesn't support shaders compilation"),
&ProgramCreationError::TransformFeedbackNotSupported =>
formatter.write_str("You requested transform feedback, but this feature is not \
supported by the backend"),
&ProgramCreationError::PointSizeNotSupported =>
formatter.write_str("You requested point size setting, but it's not \
supported by the backend"),
}
}
}
impl Error for ProgramCreationError {
fn description(&self) -> &str {
match self {
&ProgramCreationError::CompilationError(_) => "Compilation error in one of the \
shaders",
&ProgramCreationError::LinkingError(_) => "Error while linking shaders together",
&ProgramCreationError::ShaderTypeNotSupported => "One of the request shader type is \
not supported by the backend",
&ProgramCreationError::CompilationNotSupported => "The backend doesn't support \
shaders compilation",
&ProgramCreationError::TransformFeedbackNotSupported => "Transform feedback is not \
supported by the backend.",
&ProgramCreationError::PointSizeNotSupported => "Point size is not supported by \
the backend.",
}
}
#[inline]
fn cause(&self) -> Option<&Error> {
None
}
}
/// Error while retreiving the binary representation of a program.
#[derive(Copy, Clone, Debug)]
pub enum GetBinaryError {
/// The backend doesn't support binary.
NotSupported,
}
/// Input when creating a program.
pub enum ProgramCreationInput<'a> {
/// Use GLSL source code.
SourceCode {
/// Source code of the vertex shader.
vertex_shader: &'a str,
/// Source code of the optional tessellation control shader.
tessellation_control_shader: Option<&'a str>,
/// Source code of the optional tessellation evaluation shader.
tessellation_evaluation_shader: Option<&'a str>,
/// Source code of the optional geometry shader.
geometry_shader: Option<&'a str>,
/// Source code of the fragment shader.
fragment_shader: &'a str,
/// The list of variables and mode to use for transform feedback.
///
/// The information specified here will be passed to the OpenGL linker. If you pass
/// `None`, then you won't be able to use transform feedback.
transform_feedback_varyings: Option<(Vec<String>, TransformFeedbackMode)>,
/// Whether the fragment shader outputs colors in `sRGB` or `RGB`. This is false by default,
/// meaning that the program outputs `RGB`.
///
/// If this is false, then `GL_FRAMEBUFFER_SRGB` will be enabled when this program is used
/// (if it is supported).
outputs_srgb: bool,
/// Whether the shader uses point size.
uses_point_size: bool,
},
/// Use a precompiled binary.
Binary {
/// The data.
data: Binary,
/// See `SourceCode::outputs_srgb`.
outputs_srgb: bool,
/// Whether the shader uses point size.
uses_point_size: bool,
}
}
/// Represents the source code of a program.
pub struct SourceCode<'a> {
/// Source code of the vertex shader.
pub vertex_shader: &'a str,
/// Source code of the optional tessellation control shader.
|
/// Source code of the optional tessellation evaluation shader.
pub tessellation_evaluation_shader: Option<&'a str>,
/// Source code of the optional geometry shader.
pub geometry_shader: Option<&'a str>,
/// Source code of the fragment shader.
pub fragment_shader: &'a str,
}
impl<'a> From<SourceCode<'a>> for ProgramCreationInput<'a> {
#[inline]
fn from(code: SourceCode<'a>) -> ProgramCreationInput<'a> {
let SourceCode { vertex_shader, fragment_shader, geometry_shader,
tessellation_control_shader, tessellation_evaluation_shader } = code;
ProgramCreationInput::SourceCode {
vertex_shader: vertex_shader,
tessellation_control_shader: tessellation_control_shader,
tessellation_evaluation_shader: tessellation_evaluation_shader,
geometry_shader: geometry_shader,
fragment_shader: fragment_shader,
transform_feedback_varyings: None,
outputs_srgb: false,
uses_point_size: false,
}
}
}
/// Represents the compiled binary data of a program.
pub struct Binary {
/// An implementation-defined format.
pub format: u32,
/// The binary data.
pub content: Vec<u8>,
}
impl<'a> From<Binary> for ProgramCreationInput<'a> {
#[inline]
fn from(binary: Binary) -> ProgramCreationInput<'a> {
ProgramCreationInput::Binary {
data: binary,
outputs_srgb: false,
uses_point_size: false,
}
}
}
|
pub tessellation_control_shader: Option<&'a str>,
|
random_line_split
|
mod.rs
|
use std::fmt;
use std::error::Error;
use std::sync::Mutex;
use CapabilitiesSource;
use gl;
use version::Api;
use version::Version;
pub use self::compute::{ComputeShader, ComputeCommand};
pub use self::program::Program;
pub use self::reflection::{Uniform, UniformBlock, BlockLayout, OutputPrimitives};
pub use self::reflection::{Attribute, TransformFeedbackVarying, TransformFeedbackBuffer, TransformFeedbackMode};
mod compute;
mod program;
mod raw;
mod reflection;
mod shader;
mod uniforms_storage;
/// Returns true if the backend supports geometry shaders.
#[inline]
pub fn is_geometry_shader_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
shader::check_shader_type_compatibility(ctxt, gl::GEOMETRY_SHADER)
}
/// Returns true if the backend supports tessellation shaders.
#[inline]
pub fn is_tessellation_shader_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource {
shader::check_shader_type_compatibility(ctxt, gl::TESS_CONTROL_SHADER)
}
/// Returns true if the backend supports creating and retreiving binary format.
#[inline]
pub fn is_binary_supported<C>(ctxt: &C) -> bool where C: CapabilitiesSource
|
/// Some shader compilers have race-condition issues, so we lock this mutex
/// in the GL thread every time we compile a shader or link a program.
// TODO: replace by a StaticMutex
lazy_static! {
static ref COMPILER_GLOBAL_LOCK: Mutex<()> = Mutex::new(());
}
/// Error that can be triggered when creating a `Program`.
#[derive(Clone, Debug)]
pub enum ProgramCreationError {
/// Error while compiling one of the shaders.
CompilationError(String),
/// Error while linking the program.
LinkingError(String),
/// One of the requested shader types is not supported by the backend.
///
/// Usually the case for geometry shaders.
ShaderTypeNotSupported,
/// The OpenGL implementation doesn't provide a compiler.
CompilationNotSupported,
/// You have requested transform feedback varyings, but transform feedback is not supported
/// by the backend.
TransformFeedbackNotSupported,
/// You have requested point size setting from the shader, but it's not
/// supported by the backend.
PointSizeNotSupported,
}
impl fmt::Display for ProgramCreationError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
&ProgramCreationError::CompilationError(ref s) =>
formatter.write_fmt(format_args!("Compilation error in one of the shaders: {}", s)),
&ProgramCreationError::LinkingError(ref s) =>
formatter.write_fmt(format_args!("Error while linking shaders together: {}", s)),
&ProgramCreationError::ShaderTypeNotSupported =>
formatter.write_str("One of the request shader type is \
not supported by the backend"),
&ProgramCreationError::CompilationNotSupported =>
formatter.write_str("The backend doesn't support shaders compilation"),
&ProgramCreationError::TransformFeedbackNotSupported =>
formatter.write_str("You requested transform feedback, but this feature is not \
supported by the backend"),
&ProgramCreationError::PointSizeNotSupported =>
formatter.write_str("You requested point size setting, but it's not \
supported by the backend"),
}
}
}
impl Error for ProgramCreationError {
fn description(&self) -> &str {
match self {
&ProgramCreationError::CompilationError(_) => "Compilation error in one of the \
shaders",
&ProgramCreationError::LinkingError(_) => "Error while linking shaders together",
&ProgramCreationError::ShaderTypeNotSupported => "One of the request shader type is \
not supported by the backend",
&ProgramCreationError::CompilationNotSupported => "The backend doesn't support \
shaders compilation",
&ProgramCreationError::TransformFeedbackNotSupported => "Transform feedback is not \
supported by the backend.",
&ProgramCreationError::PointSizeNotSupported => "Point size is not supported by \
the backend.",
}
}
#[inline]
fn cause(&self) -> Option<&Error> {
None
}
}
/// Error while retreiving the binary representation of a program.
#[derive(Copy, Clone, Debug)]
pub enum GetBinaryError {
/// The backend doesn't support binary.
NotSupported,
}
/// Input when creating a program.
pub enum ProgramCreationInput<'a> {
/// Use GLSL source code.
SourceCode {
/// Source code of the vertex shader.
vertex_shader: &'a str,
/// Source code of the optional tessellation control shader.
tessellation_control_shader: Option<&'a str>,
/// Source code of the optional tessellation evaluation shader.
tessellation_evaluation_shader: Option<&'a str>,
/// Source code of the optional geometry shader.
geometry_shader: Option<&'a str>,
/// Source code of the fragment shader.
fragment_shader: &'a str,
/// The list of variables and mode to use for transform feedback.
///
/// The information specified here will be passed to the OpenGL linker. If you pass
/// `None`, then you won't be able to use transform feedback.
transform_feedback_varyings: Option<(Vec<String>, TransformFeedbackMode)>,
/// Whether the fragment shader outputs colors in `sRGB` or `RGB`. This is false by default,
/// meaning that the program outputs `RGB`.
///
/// If this is false, then `GL_FRAMEBUFFER_SRGB` will be enabled when this program is used
/// (if it is supported).
outputs_srgb: bool,
/// Whether the shader uses point size.
uses_point_size: bool,
},
/// Use a precompiled binary.
Binary {
/// The data.
data: Binary,
/// See `SourceCode::outputs_srgb`.
outputs_srgb: bool,
/// Whether the shader uses point size.
uses_point_size: bool,
}
}
/// Represents the source code of a program.
pub struct SourceCode<'a> {
/// Source code of the vertex shader.
pub vertex_shader: &'a str,
/// Source code of the optional tessellation control shader.
pub tessellation_control_shader: Option<&'a str>,
/// Source code of the optional tessellation evaluation shader.
pub tessellation_evaluation_shader: Option<&'a str>,
/// Source code of the optional geometry shader.
pub geometry_shader: Option<&'a str>,
/// Source code of the fragment shader.
pub fragment_shader: &'a str,
}
impl<'a> From<SourceCode<'a>> for ProgramCreationInput<'a> {
#[inline]
fn from(code: SourceCode<'a>) -> ProgramCreationInput<'a> {
let SourceCode { vertex_shader, fragment_shader, geometry_shader,
tessellation_control_shader, tessellation_evaluation_shader } = code;
ProgramCreationInput::SourceCode {
vertex_shader: vertex_shader,
tessellation_control_shader: tessellation_control_shader,
tessellation_evaluation_shader: tessellation_evaluation_shader,
geometry_shader: geometry_shader,
fragment_shader: fragment_shader,
transform_feedback_varyings: None,
outputs_srgb: false,
uses_point_size: false,
}
}
}
/// Represents the compiled binary data of a program.
pub struct Binary {
/// An implementation-defined format.
pub format: u32,
/// The binary data.
pub content: Vec<u8>,
}
impl<'a> From<Binary> for ProgramCreationInput<'a> {
#[inline]
fn from(binary: Binary) -> ProgramCreationInput<'a> {
ProgramCreationInput::Binary {
data: binary,
outputs_srgb: false,
uses_point_size: false,
}
}
}
|
{
ctxt.get_version() >= &Version(Api::Gl, 4, 1) || ctxt.get_version() >= &Version(Api::GlEs, 2, 0)
|| ctxt.get_extensions().gl_arb_get_programy_binary
}
|
identifier_body
|
move-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
|
// except according to those terms.
struct Triple { x: int, y: int, z: int }
fn test(x: bool, foo: @Triple) -> int {
let bar = foo;
let mut y: @Triple;
if x { y = bar; } else { y = @Triple{x: 4, y: 5, z: 6}; }
return y.y;
}
pub fn main() {
let x = @Triple {x: 1, y: 2, z: 3};
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(false, x) == 5));
}
|
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
random_line_split
|
move-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Triple { x: int, y: int, z: int }
fn test(x: bool, foo: @Triple) -> int
|
pub fn main() {
let x = @Triple {x: 1, y: 2, z: 3};
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(false, x) == 5));
}
|
{
let bar = foo;
let mut y: @Triple;
if x { y = bar; } else { y = @Triple{x: 4, y: 5, z: 6}; }
return y.y;
}
|
identifier_body
|
move-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Triple { x: int, y: int, z: int }
fn test(x: bool, foo: @Triple) -> int {
let bar = foo;
let mut y: @Triple;
if x { y = bar; } else { y = @Triple{x: 4, y: 5, z: 6}; }
return y.y;
}
pub fn
|
() {
let x = @Triple {x: 1, y: 2, z: 3};
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(false, x) == 5));
}
|
main
|
identifier_name
|
move-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Triple { x: int, y: int, z: int }
fn test(x: bool, foo: @Triple) -> int {
let bar = foo;
let mut y: @Triple;
if x
|
else { y = @Triple{x: 4, y: 5, z: 6}; }
return y.y;
}
pub fn main() {
let x = @Triple {x: 1, y: 2, z: 3};
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(true, x) == 2));
assert!((test(false, x) == 5));
}
|
{ y = bar; }
|
conditional_block
|
event.rs
|
use collections::borrow::Cow;
use ::data::grid;
use ::data::shapes::tetromino::{Shape,RotatedShape,Rotation};
use ::game::data::Input;
//TODO: Document when the events triggers. If one triggers before or after mutation of the structure
///Events which can occur ingame.
///These should get signaled by the game state and listened to by a event listener.
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum Event<P,W>{//TODO: Merge with server packets in online multiplayer if possible and practical?
PlayerAdded{
player: P,
},
PlayerRemoved{//TODO: Implement
player: P,
},
PlayerMovedWorld{//TODO: Implement
player: P,
old: W,
new: W
},
PlayerCollidedOnRotation{//TODO: Implement
|
player: P,
current: Rotation,
target: Rotation,
cause: RotationCause,
},
PlayerCollidedOnMovement{//TODO: Implement
player: P,
current: grid::PosAxis,
target: grid::PosAxis,
cause: RotationCause,
},
PlayerRotated{//TODO: Implement
player: P,
old: Rotation,
new: Rotation,
cause: RotationCause,
},
PlayerMoved{
player: P,
old: grid::Pos,
new: grid::Pos,
cause: MovementCause,
},
PlayerChangedShape{
player: P,
shape: Shape,
pos: grid::Pos,
cause: ShapeChangeCause,
},
WorldImprintedShape{
world: W,
shape: (RotatedShape,grid::Pos),
full_rows: grid::SizeAxis,
cause: ShapeImprintCause<P>,
},
WorldAdded{//TODO: Implement
world: W,
},
WorldUpdated{//TODO: Implement
world: W,
},
WorldRemoved{//TODO: Implement
world: W,
},
WorldPaused{//TODO: Implement
world: W,
},
WorldUnpaused{//TODO: Implement
world: W,
},
GamePaused,//TODO: Implement
GameUnpaused,//TODO: Implement
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum MovementCause{
Gravity,
Input(Input),
Desync,
Other(Option<Cow<'static,str>>)
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum RotationCause{
Input(Input),
Desync,
Other(Option<Cow<'static,str>>)
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum ShapeChangeCause{
NewAfterImprint,
Desync,
Other(Option<Cow<'static,str>>)
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum ShapeImprintCause<P>{
PlayerInflicted(P),
Desync,
Other(Option<Cow<'static,str>>)
}
|
random_line_split
|
|
event.rs
|
use collections::borrow::Cow;
use ::data::grid;
use ::data::shapes::tetromino::{Shape,RotatedShape,Rotation};
use ::game::data::Input;
//TODO: Document when the events triggers. If one triggers before or after mutation of the structure
///Events which can occur ingame.
///These should get signaled by the game state and listened to by a event listener.
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum Event<P,W>{//TODO: Merge with server packets in online multiplayer if possible and practical?
PlayerAdded{
player: P,
},
PlayerRemoved{//TODO: Implement
player: P,
},
PlayerMovedWorld{//TODO: Implement
player: P,
old: W,
new: W
},
PlayerCollidedOnRotation{//TODO: Implement
player: P,
current: Rotation,
target: Rotation,
cause: RotationCause,
},
PlayerCollidedOnMovement{//TODO: Implement
player: P,
current: grid::PosAxis,
target: grid::PosAxis,
cause: RotationCause,
},
PlayerRotated{//TODO: Implement
player: P,
old: Rotation,
new: Rotation,
cause: RotationCause,
},
PlayerMoved{
player: P,
old: grid::Pos,
new: grid::Pos,
cause: MovementCause,
},
PlayerChangedShape{
player: P,
shape: Shape,
pos: grid::Pos,
cause: ShapeChangeCause,
},
WorldImprintedShape{
world: W,
shape: (RotatedShape,grid::Pos),
full_rows: grid::SizeAxis,
cause: ShapeImprintCause<P>,
},
WorldAdded{//TODO: Implement
world: W,
},
WorldUpdated{//TODO: Implement
world: W,
},
WorldRemoved{//TODO: Implement
world: W,
},
WorldPaused{//TODO: Implement
world: W,
},
WorldUnpaused{//TODO: Implement
world: W,
},
GamePaused,//TODO: Implement
GameUnpaused,//TODO: Implement
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum MovementCause{
Gravity,
Input(Input),
Desync,
Other(Option<Cow<'static,str>>)
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum RotationCause{
Input(Input),
Desync,
Other(Option<Cow<'static,str>>)
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum
|
{
NewAfterImprint,
Desync,
Other(Option<Cow<'static,str>>)
}
#[derive(Clone,Debug,Serialize,Deserialize)]
pub enum ShapeImprintCause<P>{
PlayerInflicted(P),
Desync,
Other(Option<Cow<'static,str>>)
}
|
ShapeChangeCause
|
identifier_name
|
workforce3.rs
|
// Copyright (c) 2016 Yusuke Sasaki
//
// This software is released under the MIT License.
|
// See http://opensource.org/licenses/mit-license.php or <LICENSE>.
extern crate gurobi;
extern crate itertools;
use gurobi::*;
use itertools::*;
mod workforce;
use workforce::make_model;
fn main() {
let mut env = Env::new("workforce3.log").unwrap();
env.set(param::LogToConsole, 0).unwrap();
let mut model = make_model(&env).unwrap();
model.optimize().unwrap();
match model.status().unwrap() {
Status::Infeasible => {
let mut model = model.copy().unwrap();
model.set(attr::ModelName, "assignment_relaxed".to_owned()).unwrap();
// do relaxation.
let constrs = model.get_constrs().cloned().collect_vec();
let slacks = {
let (_, svars, _, _) = model.feas_relax(RelaxType::Linear,
false,
&[],
&[],
&[],
&constrs[..],
RepeatN::new(1.0, constrs.len()).collect_vec().as_slice())
.unwrap();
svars.cloned().collect_vec()
};
model.optimize().unwrap();
println!("slack variables: ");
for slack in slacks {
let value = slack.get(&model, attr::X).unwrap();
let vname = slack.get(&model, attr::VarName).unwrap();
if value > 1e-6 {
println!(" * {} = {}", vname, value);
}
}
}
Status::Optimal => {
println!("The model is feasible and optimized.");
}
Status::InfOrUnbd | Status::Unbounded => {
println!("The model is unbounded.");
}
status => {
println!("Optimization is stopped with status {:?}", status);
}
}
}
|
random_line_split
|
|
workforce3.rs
|
// Copyright (c) 2016 Yusuke Sasaki
//
// This software is released under the MIT License.
// See http://opensource.org/licenses/mit-license.php or <LICENSE>.
extern crate gurobi;
extern crate itertools;
use gurobi::*;
use itertools::*;
mod workforce;
use workforce::make_model;
fn main()
|
&constrs[..],
RepeatN::new(1.0, constrs.len()).collect_vec().as_slice())
.unwrap();
svars.cloned().collect_vec()
};
model.optimize().unwrap();
println!("slack variables: ");
for slack in slacks {
let value = slack.get(&model, attr::X).unwrap();
let vname = slack.get(&model, attr::VarName).unwrap();
if value > 1e-6 {
println!(" * {} = {}", vname, value);
}
}
}
Status::Optimal => {
println!("The model is feasible and optimized.");
}
Status::InfOrUnbd | Status::Unbounded => {
println!("The model is unbounded.");
}
status => {
println!("Optimization is stopped with status {:?}", status);
}
}
}
|
{
let mut env = Env::new("workforce3.log").unwrap();
env.set(param::LogToConsole, 0).unwrap();
let mut model = make_model(&env).unwrap();
model.optimize().unwrap();
match model.status().unwrap() {
Status::Infeasible => {
let mut model = model.copy().unwrap();
model.set(attr::ModelName, "assignment_relaxed".to_owned()).unwrap();
// do relaxation.
let constrs = model.get_constrs().cloned().collect_vec();
let slacks = {
let (_, svars, _, _) = model.feas_relax(RelaxType::Linear,
false,
&[],
&[],
&[],
|
identifier_body
|
workforce3.rs
|
// Copyright (c) 2016 Yusuke Sasaki
//
// This software is released under the MIT License.
// See http://opensource.org/licenses/mit-license.php or <LICENSE>.
extern crate gurobi;
extern crate itertools;
use gurobi::*;
use itertools::*;
mod workforce;
use workforce::make_model;
fn main() {
let mut env = Env::new("workforce3.log").unwrap();
env.set(param::LogToConsole, 0).unwrap();
let mut model = make_model(&env).unwrap();
model.optimize().unwrap();
match model.status().unwrap() {
Status::Infeasible => {
let mut model = model.copy().unwrap();
model.set(attr::ModelName, "assignment_relaxed".to_owned()).unwrap();
// do relaxation.
let constrs = model.get_constrs().cloned().collect_vec();
let slacks = {
let (_, svars, _, _) = model.feas_relax(RelaxType::Linear,
false,
&[],
&[],
&[],
&constrs[..],
RepeatN::new(1.0, constrs.len()).collect_vec().as_slice())
.unwrap();
svars.cloned().collect_vec()
};
model.optimize().unwrap();
println!("slack variables: ");
for slack in slacks {
let value = slack.get(&model, attr::X).unwrap();
let vname = slack.get(&model, attr::VarName).unwrap();
if value > 1e-6 {
println!(" * {} = {}", vname, value);
}
}
}
Status::Optimal => {
println!("The model is feasible and optimized.");
}
Status::InfOrUnbd | Status::Unbounded => {
println!("The model is unbounded.");
}
status =>
|
}
}
|
{
println!("Optimization is stopped with status {:?}", status);
}
|
conditional_block
|
workforce3.rs
|
// Copyright (c) 2016 Yusuke Sasaki
//
// This software is released under the MIT License.
// See http://opensource.org/licenses/mit-license.php or <LICENSE>.
extern crate gurobi;
extern crate itertools;
use gurobi::*;
use itertools::*;
mod workforce;
use workforce::make_model;
fn
|
() {
let mut env = Env::new("workforce3.log").unwrap();
env.set(param::LogToConsole, 0).unwrap();
let mut model = make_model(&env).unwrap();
model.optimize().unwrap();
match model.status().unwrap() {
Status::Infeasible => {
let mut model = model.copy().unwrap();
model.set(attr::ModelName, "assignment_relaxed".to_owned()).unwrap();
// do relaxation.
let constrs = model.get_constrs().cloned().collect_vec();
let slacks = {
let (_, svars, _, _) = model.feas_relax(RelaxType::Linear,
false,
&[],
&[],
&[],
&constrs[..],
RepeatN::new(1.0, constrs.len()).collect_vec().as_slice())
.unwrap();
svars.cloned().collect_vec()
};
model.optimize().unwrap();
println!("slack variables: ");
for slack in slacks {
let value = slack.get(&model, attr::X).unwrap();
let vname = slack.get(&model, attr::VarName).unwrap();
if value > 1e-6 {
println!(" * {} = {}", vname, value);
}
}
}
Status::Optimal => {
println!("The model is feasible and optimized.");
}
Status::InfOrUnbd | Status::Unbounded => {
println!("The model is unbounded.");
}
status => {
println!("Optimization is stopped with status {:?}", status);
}
}
}
|
main
|
identifier_name
|
json.rs
|
extern crate serde_json;
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader, Lines};
use data::{Data, Number};
use expr::Expr;
use row::Row;
use source::{Source, SourceError};
pub struct JsonSource {
lines: Lines<BufReader<File>>,
}
impl JsonSource {
pub fn new(filename: &str) -> Result<Source, SourceError> {
let file = File::open(filename)?;
let reader = BufReader::new(file);
Ok(Box::new(JsonSource { lines: reader.lines() }))
}
}
impl Iterator for JsonSource {
type Item = Result<Row, SourceError>;
fn next(&mut self) -> Option<Self::Item> {
let line = match self.lines.next() {
None => return None,
Some(Err(e)) => return Some(Err(e.into())),
Some(Ok(l)) => l,
};
let map: HashMap<String, serde_json::Value> = match serde_json::from_str(&line) {
Ok(map) => map,
Err(e) => return Some(Err(e.into())),
};
let mut row = Row::new();
for (key, value) in map {
let val = match value {
serde_json::Value::Null => Data::Null,
serde_json::Value::Bool(b) => Data::Bool(b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
Data::Number(Number::Int(i))
} else {
|
}
serde_json::Value::String(s) => Data::String(s),
_ => continue,
};
row.fields.insert(Expr::Column(key), val);
}
return Some(Ok(row));
}
}
#[cfg(test)]
mod tests {
use super::*;
use row::make_rows;
use source::open_file;
#[test]
fn json_source() {
let source = open_file("fixtures/accounts.json").unwrap();
let expected =
make_rows(
vec!["id", "name", "balance", "frozen", "last_transaction_amount"],
vec![
data_vec![1000, "Alice", 15.5, false, -4.5],
data_vec![1001, "Bob", -50.08, true, -100.99],
data_vec![1002, "Charlie", 0.0, false, Data::Null],
data_vec![1003, "Denise", -1024.64, true, -1024.64],
],
);
let actual: Vec<Result<Row, SourceError>> = source.collect();
assert_eq!(expected, actual);
}
}
|
Data::Number(Number::Float(n.as_f64().unwrap()))
}
|
random_line_split
|
json.rs
|
extern crate serde_json;
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader, Lines};
use data::{Data, Number};
use expr::Expr;
use row::Row;
use source::{Source, SourceError};
pub struct JsonSource {
lines: Lines<BufReader<File>>,
}
impl JsonSource {
pub fn
|
(filename: &str) -> Result<Source, SourceError> {
let file = File::open(filename)?;
let reader = BufReader::new(file);
Ok(Box::new(JsonSource { lines: reader.lines() }))
}
}
impl Iterator for JsonSource {
type Item = Result<Row, SourceError>;
fn next(&mut self) -> Option<Self::Item> {
let line = match self.lines.next() {
None => return None,
Some(Err(e)) => return Some(Err(e.into())),
Some(Ok(l)) => l,
};
let map: HashMap<String, serde_json::Value> = match serde_json::from_str(&line) {
Ok(map) => map,
Err(e) => return Some(Err(e.into())),
};
let mut row = Row::new();
for (key, value) in map {
let val = match value {
serde_json::Value::Null => Data::Null,
serde_json::Value::Bool(b) => Data::Bool(b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
Data::Number(Number::Int(i))
} else {
Data::Number(Number::Float(n.as_f64().unwrap()))
}
}
serde_json::Value::String(s) => Data::String(s),
_ => continue,
};
row.fields.insert(Expr::Column(key), val);
}
return Some(Ok(row));
}
}
#[cfg(test)]
mod tests {
use super::*;
use row::make_rows;
use source::open_file;
#[test]
fn json_source() {
let source = open_file("fixtures/accounts.json").unwrap();
let expected =
make_rows(
vec!["id", "name", "balance", "frozen", "last_transaction_amount"],
vec![
data_vec![1000, "Alice", 15.5, false, -4.5],
data_vec![1001, "Bob", -50.08, true, -100.99],
data_vec![1002, "Charlie", 0.0, false, Data::Null],
data_vec![1003, "Denise", -1024.64, true, -1024.64],
],
);
let actual: Vec<Result<Row, SourceError>> = source.collect();
assert_eq!(expected, actual);
}
}
|
new
|
identifier_name
|
TestAsinpi.rs
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
|
#pragma version(1)
#pragma rs java_package_name(android.renderscript.cts)
// Don't edit this file! It is auto-generated by frameworks/rs/api/gen_runtime.
float __attribute__((kernel)) testAsinpiFloatFloat(float inV) {
return asinpi(inV);
}
float2 __attribute__((kernel)) testAsinpiFloat2Float2(float2 inV) {
return asinpi(inV);
}
float3 __attribute__((kernel)) testAsinpiFloat3Float3(float3 inV) {
return asinpi(inV);
}
float4 __attribute__((kernel)) testAsinpiFloat4Float4(float4 inV) {
return asinpi(inV);
}
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
|
random_line_split
|
method-missing-call.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests to make sure that parens are needed for method calls without arguments.
// outputs text to make sure either an anonymous function is provided or
// open-close '()' parens are given
struct Point {
x: isize,
y: isize
}
impl Point {
fn new() -> Point
|
fn get_x(&self) -> isize {
self.x
}
}
fn main() {
let point: Point = Point::new();
let px: isize = point
.get_x;//~ ERROR attempted to take value of method `get_x` on type `Point`
//~^ HELP maybe a `()` to call it is missing
// Ensure the span is useful
let ys = &[1,2,3,4,5,6,7];
let a = ys.iter()
.map(|x| x)
.filter(|&&x| x == 1)
.filter_map; //~ ERROR attempted to take value of method `filter_map` on type
//~^ HELP maybe a `()` to call it is missing
}
|
{
Point{x:0, y:0}
}
|
identifier_body
|
method-missing-call.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests to make sure that parens are needed for method calls without arguments.
// outputs text to make sure either an anonymous function is provided or
// open-close '()' parens are given
struct Point {
x: isize,
y: isize
}
impl Point {
fn new() -> Point {
Point{x:0, y:0}
}
fn get_x(&self) -> isize {
self.x
}
}
fn main() {
let point: Point = Point::new();
|
let ys = &[1,2,3,4,5,6,7];
let a = ys.iter()
.map(|x| x)
.filter(|&&x| x == 1)
.filter_map; //~ ERROR attempted to take value of method `filter_map` on type
//~^ HELP maybe a `()` to call it is missing
}
|
let px: isize = point
.get_x;//~ ERROR attempted to take value of method `get_x` on type `Point`
//~^ HELP maybe a `()` to call it is missing
// Ensure the span is useful
|
random_line_split
|
method-missing-call.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests to make sure that parens are needed for method calls without arguments.
// outputs text to make sure either an anonymous function is provided or
// open-close '()' parens are given
struct Point {
x: isize,
y: isize
}
impl Point {
fn
|
() -> Point {
Point{x:0, y:0}
}
fn get_x(&self) -> isize {
self.x
}
}
fn main() {
let point: Point = Point::new();
let px: isize = point
.get_x;//~ ERROR attempted to take value of method `get_x` on type `Point`
//~^ HELP maybe a `()` to call it is missing
// Ensure the span is useful
let ys = &[1,2,3,4,5,6,7];
let a = ys.iter()
.map(|x| x)
.filter(|&&x| x == 1)
.filter_map; //~ ERROR attempted to take value of method `filter_map` on type
//~^ HELP maybe a `()` to call it is missing
}
|
new
|
identifier_name
|
alternate_formats_country_code_set.rs
|
// Copyright (C) 2015 Guillaume Gomez
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
lazy_static! {
pub static ref countryCodeSet : HashSet<u32> = get_country_code_set();
}
fn get_country_code_set() -> HashSet<u32>
|
_countryCodeSet.insert(66);
_countryCodeSet.insert(81);
_countryCodeSet.insert(84);
_countryCodeSet.insert(90);
_countryCodeSet.insert(91);
_countryCodeSet.insert(94);
_countryCodeSet.insert(95);
_countryCodeSet.insert(255);
_countryCodeSet.insert(350);
_countryCodeSet.insert(351);
_countryCodeSet.insert(352);
_countryCodeSet.insert(358);
_countryCodeSet.insert(359);
_countryCodeSet.insert(372);
_countryCodeSet.insert(373);
_countryCodeSet.insert(380);
_countryCodeSet.insert(381);
_countryCodeSet.insert(385);
_countryCodeSet.insert(505);
_countryCodeSet.insert(506);
_countryCodeSet.insert(595);
_countryCodeSet.insert(675);
_countryCodeSet.insert(676);
_countryCodeSet.insert(679);
_countryCodeSet.insert(855);
_countryCodeSet.insert(971);
_countryCodeSet.insert(972);
_countryCodeSet.insert(995);
_countryCodeSet
}
|
{
// The capacity is set to 57 as there are 43 different entries,
// and this offers a load factor of roughly 0.75.
let mut _countryCodeSet = HashSet::with_capacity(57);
_countryCodeSet.insert(7);
_countryCodeSet.insert(27);
_countryCodeSet.insert(30);
_countryCodeSet.insert(31);
_countryCodeSet.insert(34);
_countryCodeSet.insert(36);
_countryCodeSet.insert(43);
_countryCodeSet.insert(44);
_countryCodeSet.insert(49);
_countryCodeSet.insert(54);
_countryCodeSet.insert(55);
_countryCodeSet.insert(58);
_countryCodeSet.insert(61);
_countryCodeSet.insert(62);
_countryCodeSet.insert(63);
|
identifier_body
|
alternate_formats_country_code_set.rs
|
// Copyright (C) 2015 Guillaume Gomez
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
lazy_static! {
pub static ref countryCodeSet : HashSet<u32> = get_country_code_set();
}
fn get_country_code_set() -> HashSet<u32> {
|
// and this offers a load factor of roughly 0.75.
let mut _countryCodeSet = HashSet::with_capacity(57);
_countryCodeSet.insert(7);
_countryCodeSet.insert(27);
_countryCodeSet.insert(30);
_countryCodeSet.insert(31);
_countryCodeSet.insert(34);
_countryCodeSet.insert(36);
_countryCodeSet.insert(43);
_countryCodeSet.insert(44);
_countryCodeSet.insert(49);
_countryCodeSet.insert(54);
_countryCodeSet.insert(55);
_countryCodeSet.insert(58);
_countryCodeSet.insert(61);
_countryCodeSet.insert(62);
_countryCodeSet.insert(63);
_countryCodeSet.insert(66);
_countryCodeSet.insert(81);
_countryCodeSet.insert(84);
_countryCodeSet.insert(90);
_countryCodeSet.insert(91);
_countryCodeSet.insert(94);
_countryCodeSet.insert(95);
_countryCodeSet.insert(255);
_countryCodeSet.insert(350);
_countryCodeSet.insert(351);
_countryCodeSet.insert(352);
_countryCodeSet.insert(358);
_countryCodeSet.insert(359);
_countryCodeSet.insert(372);
_countryCodeSet.insert(373);
_countryCodeSet.insert(380);
_countryCodeSet.insert(381);
_countryCodeSet.insert(385);
_countryCodeSet.insert(505);
_countryCodeSet.insert(506);
_countryCodeSet.insert(595);
_countryCodeSet.insert(675);
_countryCodeSet.insert(676);
_countryCodeSet.insert(679);
_countryCodeSet.insert(855);
_countryCodeSet.insert(971);
_countryCodeSet.insert(972);
_countryCodeSet.insert(995);
_countryCodeSet
}
|
// The capacity is set to 57 as there are 43 different entries,
|
random_line_split
|
alternate_formats_country_code_set.rs
|
// Copyright (C) 2015 Guillaume Gomez
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
lazy_static! {
pub static ref countryCodeSet : HashSet<u32> = get_country_code_set();
}
fn
|
() -> HashSet<u32> {
// The capacity is set to 57 as there are 43 different entries,
// and this offers a load factor of roughly 0.75.
let mut _countryCodeSet = HashSet::with_capacity(57);
_countryCodeSet.insert(7);
_countryCodeSet.insert(27);
_countryCodeSet.insert(30);
_countryCodeSet.insert(31);
_countryCodeSet.insert(34);
_countryCodeSet.insert(36);
_countryCodeSet.insert(43);
_countryCodeSet.insert(44);
_countryCodeSet.insert(49);
_countryCodeSet.insert(54);
_countryCodeSet.insert(55);
_countryCodeSet.insert(58);
_countryCodeSet.insert(61);
_countryCodeSet.insert(62);
_countryCodeSet.insert(63);
_countryCodeSet.insert(66);
_countryCodeSet.insert(81);
_countryCodeSet.insert(84);
_countryCodeSet.insert(90);
_countryCodeSet.insert(91);
_countryCodeSet.insert(94);
_countryCodeSet.insert(95);
_countryCodeSet.insert(255);
_countryCodeSet.insert(350);
_countryCodeSet.insert(351);
_countryCodeSet.insert(352);
_countryCodeSet.insert(358);
_countryCodeSet.insert(359);
_countryCodeSet.insert(372);
_countryCodeSet.insert(373);
_countryCodeSet.insert(380);
_countryCodeSet.insert(381);
_countryCodeSet.insert(385);
_countryCodeSet.insert(505);
_countryCodeSet.insert(506);
_countryCodeSet.insert(595);
_countryCodeSet.insert(675);
_countryCodeSet.insert(676);
_countryCodeSet.insert(679);
_countryCodeSet.insert(855);
_countryCodeSet.insert(971);
_countryCodeSet.insert(972);
_countryCodeSet.insert(995);
_countryCodeSet
}
|
get_country_code_set
|
identifier_name
|
enter.rs
|
use crate::errors::Result;
use crate::kernel::execve;
use crate::kernel::groups::syscall_group_from_sysnum;
use crate::kernel::groups::SyscallGroup::*;
use crate::kernel::heap::*;
use crate::kernel::ptrace::*;
use crate::kernel::socket::*;
use crate::kernel::standard::*;
use crate::process::proot::InfoBag;
use crate::process::tracee::Tracee;
use crate::register::Original;
pub fn
|
(info_bag: &InfoBag, tracee: &mut Tracee) -> Result<()> {
let sys_num = tracee.regs.get_sys_num(Original);
let sys_type = syscall_group_from_sysnum(sys_num);
match sys_type {
Accept => accept::enter(),
BindConnect => bind_connect::enter(),
Brk => brk::enter(),
Chdir => chdir::enter(tracee),
ChmodAccessMkNodAt => chmod_access_mknod_at::enter(tracee),
DirLinkAttr => dir_link_attr::enter(tracee),
Execve => execve::enter(tracee, &info_bag.loader),
GetCwd => getcwd::enter(tracee),
GetSockOrPeerName => get_sockorpeer_name::enter(),
InotifyAddWatch => inotify_add_watch::enter(),
Link => link_rename::enter(tracee),
LinkAt => link_at::enter(tracee),
Mount => mount::enter(),
Open => open::enter(tracee),
OpenAt => open_at::enter(tracee),
PivotRoot => pivot_root::enter(),
Ptrace => ptrace::enter(),
ReadLink => dir_link_attr::enter(tracee),
ReadLinkAt => unlink_mkdir_at::enter(tracee),
Rename => link_rename::enter(tracee),
RenameAt => rename_at::enter(tracee),
SocketCall => socketcall::enter(),
StandardSyscall => standard_syscall::enter(tracee),
StatAt => stat_at::enter(tracee),
SymLink => sym_link::enter(tracee),
SymLinkAt => sym_link_at::enter(tracee),
Wait => wait::enter(),
UnlinkMkdirAt => unlink_mkdir_at::enter(tracee),
_ => Ok(()),
}
}
|
translate
|
identifier_name
|
enter.rs
|
use crate::errors::Result;
use crate::kernel::execve;
use crate::kernel::groups::syscall_group_from_sysnum;
use crate::kernel::groups::SyscallGroup::*;
use crate::kernel::heap::*;
use crate::kernel::ptrace::*;
use crate::kernel::socket::*;
use crate::kernel::standard::*;
use crate::process::proot::InfoBag;
use crate::process::tracee::Tracee;
use crate::register::Original;
pub fn translate(info_bag: &InfoBag, tracee: &mut Tracee) -> Result<()>
|
PivotRoot => pivot_root::enter(),
Ptrace => ptrace::enter(),
ReadLink => dir_link_attr::enter(tracee),
ReadLinkAt => unlink_mkdir_at::enter(tracee),
Rename => link_rename::enter(tracee),
RenameAt => rename_at::enter(tracee),
SocketCall => socketcall::enter(),
StandardSyscall => standard_syscall::enter(tracee),
StatAt => stat_at::enter(tracee),
SymLink => sym_link::enter(tracee),
SymLinkAt => sym_link_at::enter(tracee),
Wait => wait::enter(),
UnlinkMkdirAt => unlink_mkdir_at::enter(tracee),
_ => Ok(()),
}
}
|
{
let sys_num = tracee.regs.get_sys_num(Original);
let sys_type = syscall_group_from_sysnum(sys_num);
match sys_type {
Accept => accept::enter(),
BindConnect => bind_connect::enter(),
Brk => brk::enter(),
Chdir => chdir::enter(tracee),
ChmodAccessMkNodAt => chmod_access_mknod_at::enter(tracee),
DirLinkAttr => dir_link_attr::enter(tracee),
Execve => execve::enter(tracee, &info_bag.loader),
GetCwd => getcwd::enter(tracee),
GetSockOrPeerName => get_sockorpeer_name::enter(),
InotifyAddWatch => inotify_add_watch::enter(),
Link => link_rename::enter(tracee),
LinkAt => link_at::enter(tracee),
Mount => mount::enter(),
Open => open::enter(tracee),
OpenAt => open_at::enter(tracee),
|
identifier_body
|
enter.rs
|
use crate::errors::Result;
use crate::kernel::execve;
use crate::kernel::groups::syscall_group_from_sysnum;
use crate::kernel::groups::SyscallGroup::*;
use crate::kernel::heap::*;
use crate::kernel::ptrace::*;
use crate::kernel::socket::*;
use crate::kernel::standard::*;
use crate::process::proot::InfoBag;
use crate::process::tracee::Tracee;
use crate::register::Original;
pub fn translate(info_bag: &InfoBag, tracee: &mut Tracee) -> Result<()> {
let sys_num = tracee.regs.get_sys_num(Original);
let sys_type = syscall_group_from_sysnum(sys_num);
match sys_type {
Accept => accept::enter(),
BindConnect => bind_connect::enter(),
Brk => brk::enter(),
Chdir => chdir::enter(tracee),
ChmodAccessMkNodAt => chmod_access_mknod_at::enter(tracee),
DirLinkAttr => dir_link_attr::enter(tracee),
Execve => execve::enter(tracee, &info_bag.loader),
GetCwd => getcwd::enter(tracee),
GetSockOrPeerName => get_sockorpeer_name::enter(),
InotifyAddWatch => inotify_add_watch::enter(),
Link => link_rename::enter(tracee),
LinkAt => link_at::enter(tracee),
Mount => mount::enter(),
Open => open::enter(tracee),
OpenAt => open_at::enter(tracee),
PivotRoot => pivot_root::enter(),
Ptrace => ptrace::enter(),
ReadLink => dir_link_attr::enter(tracee),
ReadLinkAt => unlink_mkdir_at::enter(tracee),
|
RenameAt => rename_at::enter(tracee),
SocketCall => socketcall::enter(),
StandardSyscall => standard_syscall::enter(tracee),
StatAt => stat_at::enter(tracee),
SymLink => sym_link::enter(tracee),
SymLinkAt => sym_link_at::enter(tracee),
Wait => wait::enter(),
UnlinkMkdirAt => unlink_mkdir_at::enter(tracee),
_ => Ok(()),
}
}
|
Rename => link_rename::enter(tracee),
|
random_line_split
|
ufcs-explicit-self-bad.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Foo {
f: int,
}
impl Foo {
fn foo(self: int, x: int) -> int
|
}
struct Bar<T> {
f: T,
}
impl<T> Bar<T> {
fn foo(self: Bar<int>, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
x
}
fn bar(self: &Bar<uint>, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
x
}
}
fn main() {
let foo = box Foo {
f: 1,
};
println!("{}", foo.foo(2));
let bar = box Bar {
f: 1,
};
println!("{} {}", bar.foo(2), bar.bar(2));
}
|
{ //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
self.f + x
}
|
identifier_body
|
ufcs-explicit-self-bad.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Foo {
f: int,
}
impl Foo {
fn foo(self: int, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
self.f + x
}
}
|
impl<T> Bar<T> {
fn foo(self: Bar<int>, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
x
}
fn bar(self: &Bar<uint>, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
x
}
}
fn main() {
let foo = box Foo {
f: 1,
};
println!("{}", foo.foo(2));
let bar = box Bar {
f: 1,
};
println!("{} {}", bar.foo(2), bar.bar(2));
}
|
struct Bar<T> {
f: T,
}
|
random_line_split
|
ufcs-explicit-self-bad.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Foo {
f: int,
}
impl Foo {
fn foo(self: int, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
self.f + x
}
}
struct
|
<T> {
f: T,
}
impl<T> Bar<T> {
fn foo(self: Bar<int>, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
x
}
fn bar(self: &Bar<uint>, x: int) -> int { //~ ERROR mismatched self type
//~^ ERROR not a valid type for `self`
x
}
}
fn main() {
let foo = box Foo {
f: 1,
};
println!("{}", foo.foo(2));
let bar = box Bar {
f: 1,
};
println!("{} {}", bar.foo(2), bar.bar(2));
}
|
Bar
|
identifier_name
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversals over the DOM and flow trees.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
#![allow(unsafe_code)]
use context::{LayoutContext, SharedLayoutContext};
use flow::{self, Flow, MutableFlowUtils, PostorderFlowTraversal, PreorderFlowTraversal};
use flow_ref::FlowRef;
use profile_traits::time::{self, TimerMetadata, profile};
use std::mem;
use std::sync::atomic::{AtomicIsize, Ordering};
use style::dom::UnsafeNode;
use style::parallel::{CHUNK_SIZE, WorkQueueData};
use style::parallel::run_queue_with_custom_work_data_type;
use style::workqueue::{WorkQueue, WorkUnit, WorkerProxy};
use traversal::{AssignISizes, BubbleISizes};
use traversal::AssignBSizes;
use util::opts;
pub use style::parallel::traverse_dom;
#[allow(dead_code)]
fn static_assertion(node: UnsafeNode) {
unsafe {
let _: UnsafeFlow = ::std::intrinsics::transmute(node);
}
}
/// Vtable + pointer representation of a Flow trait object.
pub type UnsafeFlow = (usize, usize);
fn null_unsafe_flow() -> UnsafeFlow {
(0, 0)
}
pub fn mut_owned_flow_to_unsafe_flow(flow: *mut FlowRef) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(&**flow)
}
}
pub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(flow)
}
}
pub type UnsafeFlowList = (Box<Vec<UnsafeNode>>, usize);
pub type ChunkedFlowTraversalFunction =
extern "Rust" fn(UnsafeFlowList, &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
pub type FlowTraversalFunction = extern "Rust" fn(UnsafeFlow, &SharedLayoutContext);
/// Information that we need stored in each flow.
pub struct FlowParallelInfo {
/// The number of children that still need work done.
pub children_count: AtomicIsize,
/// The address of the parent flow.
pub parent: UnsafeFlow,
}
impl FlowParallelInfo {
pub fn new() -> FlowParallelInfo {
FlowParallelInfo {
children_count: AtomicIsize::new(0),
parent: null_unsafe_flow(),
}
}
}
/// A parallel bottom-up flow traversal.
trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
/// Process current flow and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
fn run_parallel(&self, mut unsafe_flow: UnsafeFlow) {
loop {
// Get a real flow.
let flow: &mut Flow = unsafe {
mem::transmute(unsafe_flow)
};
// Perform the appropriate traversal.
if self.should_process(flow) {
self.process(flow);
}
let base = flow::mut_base(flow);
// Reset the count of children for the next layout traversal.
base.parallel.children_count.store(base.children.len() as isize,
Ordering::Relaxed);
// Possibly enqueue the parent.
let unsafe_parent = base.parallel.parent;
if unsafe_parent == null_unsafe_flow() {
// We're done!
break
}
// No, we're not at the root yet. Then are we the last child
// of our parent to finish processing? If so, we can continue
// on with our parent; otherwise, we've gotta wait.
let parent: &mut Flow = unsafe {
mem::transmute(unsafe_parent)
};
let parent_base = flow::mut_base(parent);
if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 {
// We were the last child of our parent. Reflow our parent.
unsafe_flow = unsafe_parent
} else {
// Stop.
break
}
}
}
}
/// A parallel top-down flow traversal.
trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
fn should_record_thread_ids(&self) -> bool;
#[inline(always)]
fn run_parallel_helper(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>,
top_down_func: ChunkedFlowTraversalFunction,
bottom_up_func: FlowTraversalFunction) {
let mut discovered_child_flows = Vec::new();
for unsafe_flow in *unsafe_flows.0 {
let mut had_children = false;
unsafe {
// Get a real flow.
let flow: &mut Flow = mem::transmute(unsafe_flow);
if self.should_record_thread_ids() {
flow::mut_base(flow).thread_id = proxy.worker_index();
}
if self.should_process(flow) {
// Perform the appropriate traversal.
self.process(flow);
}
// Possibly enqueue the children.
for kid in flow::child_iter_mut(flow) {
had_children = true;
discovered_child_flows.push(borrowed_flow_to_unsafe_flow(kid));
}
}
// If there were no more children, start assigning block-sizes.
if!had_children {
bottom_up_func(unsafe_flow, proxy.user_data())
}
}
for chunk in discovered_child_flows.chunks(CHUNK_SIZE) {
proxy.push(WorkUnit {
fun: top_down_func,
data: (box chunk.iter().cloned().collect(), 0),
});
}
}
}
impl<'a> ParallelPreorderFlowTraversal for AssignISizes<'a> {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
self.run_parallel_helper(unsafe_flows,
proxy,
assign_inline_sizes,
assign_block_sizes_and_store_overflow)
}
fn should_record_thread_ids(&self) -> bool {
true
}
}
impl<'a> ParallelPostorderFlowTraversal for AssignBSizes<'a> {}
fn assign_inline_sizes(unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
let shared_layout_context = proxy.user_data();
let assign_inline_sizes_traversal = AssignISizes {
shared_context: &shared_layout_context.style_context,
};
assign_inline_sizes_traversal.run_parallel(unsafe_flows, proxy)
}
fn assign_block_sizes_and_store_overflow(
unsafe_flow: UnsafeFlow,
shared_layout_context: &SharedLayoutContext) {
let layout_context = LayoutContext::new(shared_layout_context);
let assign_block_sizes_traversal = AssignBSizes {
layout_context: &layout_context,
};
assign_block_sizes_traversal.run_parallel(unsafe_flow)
}
pub fn traverse_flow_tree_preorder(
root: &mut Flow,
profiler_metadata: Option<TimerMetadata>,
time_profiler_chan: time::ProfilerChan,
shared_layout_context: &SharedLayoutContext,
queue: &mut WorkQueue<SharedLayoutContext, WorkQueueData>) {
if opts::get().bubble_inline_sizes_separately {
let layout_context = LayoutContext::new(shared_layout_context);
let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context };
root.traverse_postorder(&bubble_inline_sizes);
}
run_queue_with_custom_work_data_type(queue, |queue| {
profile(time::ProfilerCategory::LayoutParallelWarmup, profiler_metadata,
time_profiler_chan, || {
queue.push(WorkUnit {
fun: assign_inline_sizes,
data: (box vec![borrowed_flow_to_unsafe_flow(root)], 0),
})
|
});
}, shared_layout_context);
}
|
random_line_split
|
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversals over the DOM and flow trees.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
#![allow(unsafe_code)]
use context::{LayoutContext, SharedLayoutContext};
use flow::{self, Flow, MutableFlowUtils, PostorderFlowTraversal, PreorderFlowTraversal};
use flow_ref::FlowRef;
use profile_traits::time::{self, TimerMetadata, profile};
use std::mem;
use std::sync::atomic::{AtomicIsize, Ordering};
use style::dom::UnsafeNode;
use style::parallel::{CHUNK_SIZE, WorkQueueData};
use style::parallel::run_queue_with_custom_work_data_type;
use style::workqueue::{WorkQueue, WorkUnit, WorkerProxy};
use traversal::{AssignISizes, BubbleISizes};
use traversal::AssignBSizes;
use util::opts;
pub use style::parallel::traverse_dom;
#[allow(dead_code)]
fn static_assertion(node: UnsafeNode) {
unsafe {
let _: UnsafeFlow = ::std::intrinsics::transmute(node);
}
}
/// Vtable + pointer representation of a Flow trait object.
pub type UnsafeFlow = (usize, usize);
fn null_unsafe_flow() -> UnsafeFlow {
(0, 0)
}
pub fn mut_owned_flow_to_unsafe_flow(flow: *mut FlowRef) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(&**flow)
}
}
pub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(flow)
}
}
pub type UnsafeFlowList = (Box<Vec<UnsafeNode>>, usize);
pub type ChunkedFlowTraversalFunction =
extern "Rust" fn(UnsafeFlowList, &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
pub type FlowTraversalFunction = extern "Rust" fn(UnsafeFlow, &SharedLayoutContext);
/// Information that we need stored in each flow.
pub struct FlowParallelInfo {
/// The number of children that still need work done.
pub children_count: AtomicIsize,
/// The address of the parent flow.
pub parent: UnsafeFlow,
}
impl FlowParallelInfo {
pub fn new() -> FlowParallelInfo {
FlowParallelInfo {
children_count: AtomicIsize::new(0),
parent: null_unsafe_flow(),
}
}
}
/// A parallel bottom-up flow traversal.
trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
/// Process current flow and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
fn run_parallel(&self, mut unsafe_flow: UnsafeFlow) {
loop {
// Get a real flow.
let flow: &mut Flow = unsafe {
mem::transmute(unsafe_flow)
};
// Perform the appropriate traversal.
if self.should_process(flow) {
self.process(flow);
}
let base = flow::mut_base(flow);
// Reset the count of children for the next layout traversal.
base.parallel.children_count.store(base.children.len() as isize,
Ordering::Relaxed);
// Possibly enqueue the parent.
let unsafe_parent = base.parallel.parent;
if unsafe_parent == null_unsafe_flow() {
// We're done!
break
}
// No, we're not at the root yet. Then are we the last child
// of our parent to finish processing? If so, we can continue
// on with our parent; otherwise, we've gotta wait.
let parent: &mut Flow = unsafe {
mem::transmute(unsafe_parent)
};
let parent_base = flow::mut_base(parent);
if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 {
// We were the last child of our parent. Reflow our parent.
unsafe_flow = unsafe_parent
} else
|
}
}
}
/// A parallel top-down flow traversal.
trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
fn should_record_thread_ids(&self) -> bool;
#[inline(always)]
fn run_parallel_helper(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>,
top_down_func: ChunkedFlowTraversalFunction,
bottom_up_func: FlowTraversalFunction) {
let mut discovered_child_flows = Vec::new();
for unsafe_flow in *unsafe_flows.0 {
let mut had_children = false;
unsafe {
// Get a real flow.
let flow: &mut Flow = mem::transmute(unsafe_flow);
if self.should_record_thread_ids() {
flow::mut_base(flow).thread_id = proxy.worker_index();
}
if self.should_process(flow) {
// Perform the appropriate traversal.
self.process(flow);
}
// Possibly enqueue the children.
for kid in flow::child_iter_mut(flow) {
had_children = true;
discovered_child_flows.push(borrowed_flow_to_unsafe_flow(kid));
}
}
// If there were no more children, start assigning block-sizes.
if!had_children {
bottom_up_func(unsafe_flow, proxy.user_data())
}
}
for chunk in discovered_child_flows.chunks(CHUNK_SIZE) {
proxy.push(WorkUnit {
fun: top_down_func,
data: (box chunk.iter().cloned().collect(), 0),
});
}
}
}
impl<'a> ParallelPreorderFlowTraversal for AssignISizes<'a> {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
self.run_parallel_helper(unsafe_flows,
proxy,
assign_inline_sizes,
assign_block_sizes_and_store_overflow)
}
fn should_record_thread_ids(&self) -> bool {
true
}
}
impl<'a> ParallelPostorderFlowTraversal for AssignBSizes<'a> {}
fn assign_inline_sizes(unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
let shared_layout_context = proxy.user_data();
let assign_inline_sizes_traversal = AssignISizes {
shared_context: &shared_layout_context.style_context,
};
assign_inline_sizes_traversal.run_parallel(unsafe_flows, proxy)
}
fn assign_block_sizes_and_store_overflow(
unsafe_flow: UnsafeFlow,
shared_layout_context: &SharedLayoutContext) {
let layout_context = LayoutContext::new(shared_layout_context);
let assign_block_sizes_traversal = AssignBSizes {
layout_context: &layout_context,
};
assign_block_sizes_traversal.run_parallel(unsafe_flow)
}
pub fn traverse_flow_tree_preorder(
root: &mut Flow,
profiler_metadata: Option<TimerMetadata>,
time_profiler_chan: time::ProfilerChan,
shared_layout_context: &SharedLayoutContext,
queue: &mut WorkQueue<SharedLayoutContext, WorkQueueData>) {
if opts::get().bubble_inline_sizes_separately {
let layout_context = LayoutContext::new(shared_layout_context);
let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context };
root.traverse_postorder(&bubble_inline_sizes);
}
run_queue_with_custom_work_data_type(queue, |queue| {
profile(time::ProfilerCategory::LayoutParallelWarmup, profiler_metadata,
time_profiler_chan, || {
queue.push(WorkUnit {
fun: assign_inline_sizes,
data: (box vec![borrowed_flow_to_unsafe_flow(root)], 0),
})
});
}, shared_layout_context);
}
|
{
// Stop.
break
}
|
conditional_block
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversals over the DOM and flow trees.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
#![allow(unsafe_code)]
use context::{LayoutContext, SharedLayoutContext};
use flow::{self, Flow, MutableFlowUtils, PostorderFlowTraversal, PreorderFlowTraversal};
use flow_ref::FlowRef;
use profile_traits::time::{self, TimerMetadata, profile};
use std::mem;
use std::sync::atomic::{AtomicIsize, Ordering};
use style::dom::UnsafeNode;
use style::parallel::{CHUNK_SIZE, WorkQueueData};
use style::parallel::run_queue_with_custom_work_data_type;
use style::workqueue::{WorkQueue, WorkUnit, WorkerProxy};
use traversal::{AssignISizes, BubbleISizes};
use traversal::AssignBSizes;
use util::opts;
pub use style::parallel::traverse_dom;
#[allow(dead_code)]
fn static_assertion(node: UnsafeNode) {
unsafe {
let _: UnsafeFlow = ::std::intrinsics::transmute(node);
}
}
/// Vtable + pointer representation of a Flow trait object.
pub type UnsafeFlow = (usize, usize);
fn null_unsafe_flow() -> UnsafeFlow {
(0, 0)
}
pub fn mut_owned_flow_to_unsafe_flow(flow: *mut FlowRef) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(&**flow)
}
}
pub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow
|
pub type UnsafeFlowList = (Box<Vec<UnsafeNode>>, usize);
pub type ChunkedFlowTraversalFunction =
extern "Rust" fn(UnsafeFlowList, &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
pub type FlowTraversalFunction = extern "Rust" fn(UnsafeFlow, &SharedLayoutContext);
/// Information that we need stored in each flow.
pub struct FlowParallelInfo {
/// The number of children that still need work done.
pub children_count: AtomicIsize,
/// The address of the parent flow.
pub parent: UnsafeFlow,
}
impl FlowParallelInfo {
pub fn new() -> FlowParallelInfo {
FlowParallelInfo {
children_count: AtomicIsize::new(0),
parent: null_unsafe_flow(),
}
}
}
/// A parallel bottom-up flow traversal.
trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
/// Process current flow and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
fn run_parallel(&self, mut unsafe_flow: UnsafeFlow) {
loop {
// Get a real flow.
let flow: &mut Flow = unsafe {
mem::transmute(unsafe_flow)
};
// Perform the appropriate traversal.
if self.should_process(flow) {
self.process(flow);
}
let base = flow::mut_base(flow);
// Reset the count of children for the next layout traversal.
base.parallel.children_count.store(base.children.len() as isize,
Ordering::Relaxed);
// Possibly enqueue the parent.
let unsafe_parent = base.parallel.parent;
if unsafe_parent == null_unsafe_flow() {
// We're done!
break
}
// No, we're not at the root yet. Then are we the last child
// of our parent to finish processing? If so, we can continue
// on with our parent; otherwise, we've gotta wait.
let parent: &mut Flow = unsafe {
mem::transmute(unsafe_parent)
};
let parent_base = flow::mut_base(parent);
if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 {
// We were the last child of our parent. Reflow our parent.
unsafe_flow = unsafe_parent
} else {
// Stop.
break
}
}
}
}
/// A parallel top-down flow traversal.
trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
fn should_record_thread_ids(&self) -> bool;
#[inline(always)]
fn run_parallel_helper(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>,
top_down_func: ChunkedFlowTraversalFunction,
bottom_up_func: FlowTraversalFunction) {
let mut discovered_child_flows = Vec::new();
for unsafe_flow in *unsafe_flows.0 {
let mut had_children = false;
unsafe {
// Get a real flow.
let flow: &mut Flow = mem::transmute(unsafe_flow);
if self.should_record_thread_ids() {
flow::mut_base(flow).thread_id = proxy.worker_index();
}
if self.should_process(flow) {
// Perform the appropriate traversal.
self.process(flow);
}
// Possibly enqueue the children.
for kid in flow::child_iter_mut(flow) {
had_children = true;
discovered_child_flows.push(borrowed_flow_to_unsafe_flow(kid));
}
}
// If there were no more children, start assigning block-sizes.
if!had_children {
bottom_up_func(unsafe_flow, proxy.user_data())
}
}
for chunk in discovered_child_flows.chunks(CHUNK_SIZE) {
proxy.push(WorkUnit {
fun: top_down_func,
data: (box chunk.iter().cloned().collect(), 0),
});
}
}
}
impl<'a> ParallelPreorderFlowTraversal for AssignISizes<'a> {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
self.run_parallel_helper(unsafe_flows,
proxy,
assign_inline_sizes,
assign_block_sizes_and_store_overflow)
}
fn should_record_thread_ids(&self) -> bool {
true
}
}
impl<'a> ParallelPostorderFlowTraversal for AssignBSizes<'a> {}
fn assign_inline_sizes(unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
let shared_layout_context = proxy.user_data();
let assign_inline_sizes_traversal = AssignISizes {
shared_context: &shared_layout_context.style_context,
};
assign_inline_sizes_traversal.run_parallel(unsafe_flows, proxy)
}
fn assign_block_sizes_and_store_overflow(
unsafe_flow: UnsafeFlow,
shared_layout_context: &SharedLayoutContext) {
let layout_context = LayoutContext::new(shared_layout_context);
let assign_block_sizes_traversal = AssignBSizes {
layout_context: &layout_context,
};
assign_block_sizes_traversal.run_parallel(unsafe_flow)
}
pub fn traverse_flow_tree_preorder(
root: &mut Flow,
profiler_metadata: Option<TimerMetadata>,
time_profiler_chan: time::ProfilerChan,
shared_layout_context: &SharedLayoutContext,
queue: &mut WorkQueue<SharedLayoutContext, WorkQueueData>) {
if opts::get().bubble_inline_sizes_separately {
let layout_context = LayoutContext::new(shared_layout_context);
let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context };
root.traverse_postorder(&bubble_inline_sizes);
}
run_queue_with_custom_work_data_type(queue, |queue| {
profile(time::ProfilerCategory::LayoutParallelWarmup, profiler_metadata,
time_profiler_chan, || {
queue.push(WorkUnit {
fun: assign_inline_sizes,
data: (box vec![borrowed_flow_to_unsafe_flow(root)], 0),
})
});
}, shared_layout_context);
}
|
{
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(flow)
}
}
|
identifier_body
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversals over the DOM and flow trees.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
#![allow(unsafe_code)]
use context::{LayoutContext, SharedLayoutContext};
use flow::{self, Flow, MutableFlowUtils, PostorderFlowTraversal, PreorderFlowTraversal};
use flow_ref::FlowRef;
use profile_traits::time::{self, TimerMetadata, profile};
use std::mem;
use std::sync::atomic::{AtomicIsize, Ordering};
use style::dom::UnsafeNode;
use style::parallel::{CHUNK_SIZE, WorkQueueData};
use style::parallel::run_queue_with_custom_work_data_type;
use style::workqueue::{WorkQueue, WorkUnit, WorkerProxy};
use traversal::{AssignISizes, BubbleISizes};
use traversal::AssignBSizes;
use util::opts;
pub use style::parallel::traverse_dom;
#[allow(dead_code)]
fn static_assertion(node: UnsafeNode) {
unsafe {
let _: UnsafeFlow = ::std::intrinsics::transmute(node);
}
}
/// Vtable + pointer representation of a Flow trait object.
pub type UnsafeFlow = (usize, usize);
fn null_unsafe_flow() -> UnsafeFlow {
(0, 0)
}
pub fn mut_owned_flow_to_unsafe_flow(flow: *mut FlowRef) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(&**flow)
}
}
pub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(flow)
}
}
pub type UnsafeFlowList = (Box<Vec<UnsafeNode>>, usize);
pub type ChunkedFlowTraversalFunction =
extern "Rust" fn(UnsafeFlowList, &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
pub type FlowTraversalFunction = extern "Rust" fn(UnsafeFlow, &SharedLayoutContext);
/// Information that we need stored in each flow.
pub struct
|
{
/// The number of children that still need work done.
pub children_count: AtomicIsize,
/// The address of the parent flow.
pub parent: UnsafeFlow,
}
impl FlowParallelInfo {
pub fn new() -> FlowParallelInfo {
FlowParallelInfo {
children_count: AtomicIsize::new(0),
parent: null_unsafe_flow(),
}
}
}
/// A parallel bottom-up flow traversal.
trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
/// Process current flow and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
fn run_parallel(&self, mut unsafe_flow: UnsafeFlow) {
loop {
// Get a real flow.
let flow: &mut Flow = unsafe {
mem::transmute(unsafe_flow)
};
// Perform the appropriate traversal.
if self.should_process(flow) {
self.process(flow);
}
let base = flow::mut_base(flow);
// Reset the count of children for the next layout traversal.
base.parallel.children_count.store(base.children.len() as isize,
Ordering::Relaxed);
// Possibly enqueue the parent.
let unsafe_parent = base.parallel.parent;
if unsafe_parent == null_unsafe_flow() {
// We're done!
break
}
// No, we're not at the root yet. Then are we the last child
// of our parent to finish processing? If so, we can continue
// on with our parent; otherwise, we've gotta wait.
let parent: &mut Flow = unsafe {
mem::transmute(unsafe_parent)
};
let parent_base = flow::mut_base(parent);
if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 {
// We were the last child of our parent. Reflow our parent.
unsafe_flow = unsafe_parent
} else {
// Stop.
break
}
}
}
}
/// A parallel top-down flow traversal.
trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
fn should_record_thread_ids(&self) -> bool;
#[inline(always)]
fn run_parallel_helper(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>,
top_down_func: ChunkedFlowTraversalFunction,
bottom_up_func: FlowTraversalFunction) {
let mut discovered_child_flows = Vec::new();
for unsafe_flow in *unsafe_flows.0 {
let mut had_children = false;
unsafe {
// Get a real flow.
let flow: &mut Flow = mem::transmute(unsafe_flow);
if self.should_record_thread_ids() {
flow::mut_base(flow).thread_id = proxy.worker_index();
}
if self.should_process(flow) {
// Perform the appropriate traversal.
self.process(flow);
}
// Possibly enqueue the children.
for kid in flow::child_iter_mut(flow) {
had_children = true;
discovered_child_flows.push(borrowed_flow_to_unsafe_flow(kid));
}
}
// If there were no more children, start assigning block-sizes.
if!had_children {
bottom_up_func(unsafe_flow, proxy.user_data())
}
}
for chunk in discovered_child_flows.chunks(CHUNK_SIZE) {
proxy.push(WorkUnit {
fun: top_down_func,
data: (box chunk.iter().cloned().collect(), 0),
});
}
}
}
impl<'a> ParallelPreorderFlowTraversal for AssignISizes<'a> {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
self.run_parallel_helper(unsafe_flows,
proxy,
assign_inline_sizes,
assign_block_sizes_and_store_overflow)
}
fn should_record_thread_ids(&self) -> bool {
true
}
}
impl<'a> ParallelPostorderFlowTraversal for AssignBSizes<'a> {}
fn assign_inline_sizes(unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
let shared_layout_context = proxy.user_data();
let assign_inline_sizes_traversal = AssignISizes {
shared_context: &shared_layout_context.style_context,
};
assign_inline_sizes_traversal.run_parallel(unsafe_flows, proxy)
}
fn assign_block_sizes_and_store_overflow(
unsafe_flow: UnsafeFlow,
shared_layout_context: &SharedLayoutContext) {
let layout_context = LayoutContext::new(shared_layout_context);
let assign_block_sizes_traversal = AssignBSizes {
layout_context: &layout_context,
};
assign_block_sizes_traversal.run_parallel(unsafe_flow)
}
pub fn traverse_flow_tree_preorder(
root: &mut Flow,
profiler_metadata: Option<TimerMetadata>,
time_profiler_chan: time::ProfilerChan,
shared_layout_context: &SharedLayoutContext,
queue: &mut WorkQueue<SharedLayoutContext, WorkQueueData>) {
if opts::get().bubble_inline_sizes_separately {
let layout_context = LayoutContext::new(shared_layout_context);
let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context };
root.traverse_postorder(&bubble_inline_sizes);
}
run_queue_with_custom_work_data_type(queue, |queue| {
profile(time::ProfilerCategory::LayoutParallelWarmup, profiler_metadata,
time_profiler_chan, || {
queue.push(WorkUnit {
fun: assign_inline_sizes,
data: (box vec![borrowed_flow_to_unsafe_flow(root)], 0),
})
});
}, shared_layout_context);
}
|
FlowParallelInfo
|
identifier_name
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// Author: Frank Honza <[email protected]>
use std;
use std::fmt::Write;
use super::rfb::{RFBState, RFBTransaction};
use crate::jsonbuilder::{JsonBuilder, JsonError};
fn log_rfb(tx: &RFBTransaction, js: &mut JsonBuilder) -> Result<(), JsonError> {
js.open_object("rfb")?;
// Protocol version
if let Some(tx_spv) = &tx.tc_server_protocol_version {
js.open_object("server_protocol_version")?;
js.set_string("major", &tx_spv.major)?;
js.set_string("minor", &tx_spv.minor)?;
js.close()?;
}
if let Some(tx_cpv) = &tx.ts_client_protocol_version {
js.open_object("client_protocol_version")?;
js.set_string("major", &tx_cpv.major)?;
js.set_string("minor", &tx_cpv.minor)?;
js.close()?;
}
|
// Authentication
js.open_object("authentication")?;
if let Some(chosen_security_type) = tx.chosen_security_type {
js.set_uint("security_type", chosen_security_type as u64)?;
}
match tx.chosen_security_type {
Some(2) => {
js.open_object("vnc")?;
if let Some(ref sc) = tx.tc_vnc_challenge {
let mut s = String::new();
for &byte in &sc.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("challenge", &s)?;
}
if let Some(ref sr) = tx.ts_vnc_response {
let mut s = String::new();
for &byte in &sr.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("response", &s)?;
}
js.close()?;
}
_ => ()
}
if let Some(security_result) = &tx.tc_security_result {
let _ = match security_result.status {
0 => js.set_string("security_result", "OK")?,
1 => js.set_string("security-result", "FAIL")?,
2 => js.set_string("security_result", "TOOMANY")?,
_ => js.set_string("security_result",
&format!("UNKNOWN ({})", security_result.status))?,
};
}
js.close()?; // Close authentication.
if let Some(ref reason) = tx.tc_failure_reason {
js.set_string("server_security_failure_reason", &reason.reason_string)?;
}
// Client/Server init
if let Some(s) = &tx.ts_client_init {
js.set_bool("screen_shared", s.shared!= 0)?;
}
if let Some(tc_server_init) = &tx.tc_server_init {
js.open_object("framebuffer")?;
js.set_uint("width", tc_server_init.width as u64)?;
js.set_uint("height", tc_server_init.height as u64)?;
js.set_string_from_bytes("name", &tc_server_init.name)?;
js.open_object("pixel_format")?;
js.set_uint("bits_per_pixel", tc_server_init.pixel_format.bits_per_pixel as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.set_bool("big_endian", tc_server_init.pixel_format.big_endian_flag!= 0)?;
js.set_bool("true_color", tc_server_init.pixel_format.true_colour_flag!= 0)?;
js.set_uint("red_max", tc_server_init.pixel_format.red_max as u64)?;
js.set_uint("green_max", tc_server_init.pixel_format.green_max as u64)?;
js.set_uint("blue_max", tc_server_init.pixel_format.blue_max as u64)?;
js.set_uint("red_shift", tc_server_init.pixel_format.red_shift as u64)?;
js.set_uint("green_shift", tc_server_init.pixel_format.green_shift as u64)?;
js.set_uint("blue_shift", tc_server_init.pixel_format.blue_shift as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.close()?;
js.close()?;
}
js.close()?;
return Ok(());
}
#[no_mangle]
pub extern "C" fn rs_rfb_logger_log(_state: &mut RFBState,
tx: *mut std::os::raw::c_void,
js: &mut JsonBuilder) -> bool {
let tx = cast_pointer!(tx, RFBTransaction);
log_rfb(tx, js).is_ok()
}
|
random_line_split
|
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// Author: Frank Honza <[email protected]>
use std;
use std::fmt::Write;
use super::rfb::{RFBState, RFBTransaction};
use crate::jsonbuilder::{JsonBuilder, JsonError};
fn log_rfb(tx: &RFBTransaction, js: &mut JsonBuilder) -> Result<(), JsonError> {
js.open_object("rfb")?;
// Protocol version
if let Some(tx_spv) = &tx.tc_server_protocol_version {
js.open_object("server_protocol_version")?;
js.set_string("major", &tx_spv.major)?;
js.set_string("minor", &tx_spv.minor)?;
js.close()?;
}
if let Some(tx_cpv) = &tx.ts_client_protocol_version {
js.open_object("client_protocol_version")?;
js.set_string("major", &tx_cpv.major)?;
js.set_string("minor", &tx_cpv.minor)?;
js.close()?;
}
// Authentication
js.open_object("authentication")?;
if let Some(chosen_security_type) = tx.chosen_security_type {
js.set_uint("security_type", chosen_security_type as u64)?;
}
match tx.chosen_security_type {
Some(2) => {
js.open_object("vnc")?;
if let Some(ref sc) = tx.tc_vnc_challenge {
let mut s = String::new();
for &byte in &sc.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("challenge", &s)?;
}
if let Some(ref sr) = tx.ts_vnc_response
|
js.close()?;
}
_ => ()
}
if let Some(security_result) = &tx.tc_security_result {
let _ = match security_result.status {
0 => js.set_string("security_result", "OK")?,
1 => js.set_string("security-result", "FAIL")?,
2 => js.set_string("security_result", "TOOMANY")?,
_ => js.set_string("security_result",
&format!("UNKNOWN ({})", security_result.status))?,
};
}
js.close()?; // Close authentication.
if let Some(ref reason) = tx.tc_failure_reason {
js.set_string("server_security_failure_reason", &reason.reason_string)?;
}
// Client/Server init
if let Some(s) = &tx.ts_client_init {
js.set_bool("screen_shared", s.shared!= 0)?;
}
if let Some(tc_server_init) = &tx.tc_server_init {
js.open_object("framebuffer")?;
js.set_uint("width", tc_server_init.width as u64)?;
js.set_uint("height", tc_server_init.height as u64)?;
js.set_string_from_bytes("name", &tc_server_init.name)?;
js.open_object("pixel_format")?;
js.set_uint("bits_per_pixel", tc_server_init.pixel_format.bits_per_pixel as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.set_bool("big_endian", tc_server_init.pixel_format.big_endian_flag!= 0)?;
js.set_bool("true_color", tc_server_init.pixel_format.true_colour_flag!= 0)?;
js.set_uint("red_max", tc_server_init.pixel_format.red_max as u64)?;
js.set_uint("green_max", tc_server_init.pixel_format.green_max as u64)?;
js.set_uint("blue_max", tc_server_init.pixel_format.blue_max as u64)?;
js.set_uint("red_shift", tc_server_init.pixel_format.red_shift as u64)?;
js.set_uint("green_shift", tc_server_init.pixel_format.green_shift as u64)?;
js.set_uint("blue_shift", tc_server_init.pixel_format.blue_shift as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.close()?;
js.close()?;
}
js.close()?;
return Ok(());
}
#[no_mangle]
pub extern "C" fn rs_rfb_logger_log(_state: &mut RFBState,
tx: *mut std::os::raw::c_void,
js: &mut JsonBuilder) -> bool {
let tx = cast_pointer!(tx, RFBTransaction);
log_rfb(tx, js).is_ok()
}
|
{
let mut s = String::new();
for &byte in &sr.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("response", &s)?;
}
|
conditional_block
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// Author: Frank Honza <[email protected]>
use std;
use std::fmt::Write;
use super::rfb::{RFBState, RFBTransaction};
use crate::jsonbuilder::{JsonBuilder, JsonError};
fn log_rfb(tx: &RFBTransaction, js: &mut JsonBuilder) -> Result<(), JsonError>
|
js.set_uint("security_type", chosen_security_type as u64)?;
}
match tx.chosen_security_type {
Some(2) => {
js.open_object("vnc")?;
if let Some(ref sc) = tx.tc_vnc_challenge {
let mut s = String::new();
for &byte in &sc.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("challenge", &s)?;
}
if let Some(ref sr) = tx.ts_vnc_response {
let mut s = String::new();
for &byte in &sr.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("response", &s)?;
}
js.close()?;
}
_ => ()
}
if let Some(security_result) = &tx.tc_security_result {
let _ = match security_result.status {
0 => js.set_string("security_result", "OK")?,
1 => js.set_string("security-result", "FAIL")?,
2 => js.set_string("security_result", "TOOMANY")?,
_ => js.set_string("security_result",
&format!("UNKNOWN ({})", security_result.status))?,
};
}
js.close()?; // Close authentication.
if let Some(ref reason) = tx.tc_failure_reason {
js.set_string("server_security_failure_reason", &reason.reason_string)?;
}
// Client/Server init
if let Some(s) = &tx.ts_client_init {
js.set_bool("screen_shared", s.shared!= 0)?;
}
if let Some(tc_server_init) = &tx.tc_server_init {
js.open_object("framebuffer")?;
js.set_uint("width", tc_server_init.width as u64)?;
js.set_uint("height", tc_server_init.height as u64)?;
js.set_string_from_bytes("name", &tc_server_init.name)?;
js.open_object("pixel_format")?;
js.set_uint("bits_per_pixel", tc_server_init.pixel_format.bits_per_pixel as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.set_bool("big_endian", tc_server_init.pixel_format.big_endian_flag!= 0)?;
js.set_bool("true_color", tc_server_init.pixel_format.true_colour_flag!= 0)?;
js.set_uint("red_max", tc_server_init.pixel_format.red_max as u64)?;
js.set_uint("green_max", tc_server_init.pixel_format.green_max as u64)?;
js.set_uint("blue_max", tc_server_init.pixel_format.blue_max as u64)?;
js.set_uint("red_shift", tc_server_init.pixel_format.red_shift as u64)?;
js.set_uint("green_shift", tc_server_init.pixel_format.green_shift as u64)?;
js.set_uint("blue_shift", tc_server_init.pixel_format.blue_shift as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.close()?;
js.close()?;
}
js.close()?;
return Ok(());
}
#[no_mangle]
pub extern "C" fn rs_rfb_logger_log(_state: &mut RFBState,
tx: *mut std::os::raw::c_void,
js: &mut JsonBuilder) -> bool {
let tx = cast_pointer!(tx, RFBTransaction);
log_rfb(tx, js).is_ok()
}
|
{
js.open_object("rfb")?;
// Protocol version
if let Some(tx_spv) = &tx.tc_server_protocol_version {
js.open_object("server_protocol_version")?;
js.set_string("major", &tx_spv.major)?;
js.set_string("minor", &tx_spv.minor)?;
js.close()?;
}
if let Some(tx_cpv) = &tx.ts_client_protocol_version {
js.open_object("client_protocol_version")?;
js.set_string("major", &tx_cpv.major)?;
js.set_string("minor", &tx_cpv.minor)?;
js.close()?;
}
// Authentication
js.open_object("authentication")?;
if let Some(chosen_security_type) = tx.chosen_security_type {
|
identifier_body
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// Author: Frank Honza <[email protected]>
use std;
use std::fmt::Write;
use super::rfb::{RFBState, RFBTransaction};
use crate::jsonbuilder::{JsonBuilder, JsonError};
fn log_rfb(tx: &RFBTransaction, js: &mut JsonBuilder) -> Result<(), JsonError> {
js.open_object("rfb")?;
// Protocol version
if let Some(tx_spv) = &tx.tc_server_protocol_version {
js.open_object("server_protocol_version")?;
js.set_string("major", &tx_spv.major)?;
js.set_string("minor", &tx_spv.minor)?;
js.close()?;
}
if let Some(tx_cpv) = &tx.ts_client_protocol_version {
js.open_object("client_protocol_version")?;
js.set_string("major", &tx_cpv.major)?;
js.set_string("minor", &tx_cpv.minor)?;
js.close()?;
}
// Authentication
js.open_object("authentication")?;
if let Some(chosen_security_type) = tx.chosen_security_type {
js.set_uint("security_type", chosen_security_type as u64)?;
}
match tx.chosen_security_type {
Some(2) => {
js.open_object("vnc")?;
if let Some(ref sc) = tx.tc_vnc_challenge {
let mut s = String::new();
for &byte in &sc.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("challenge", &s)?;
}
if let Some(ref sr) = tx.ts_vnc_response {
let mut s = String::new();
for &byte in &sr.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("response", &s)?;
}
js.close()?;
}
_ => ()
}
if let Some(security_result) = &tx.tc_security_result {
let _ = match security_result.status {
0 => js.set_string("security_result", "OK")?,
1 => js.set_string("security-result", "FAIL")?,
2 => js.set_string("security_result", "TOOMANY")?,
_ => js.set_string("security_result",
&format!("UNKNOWN ({})", security_result.status))?,
};
}
js.close()?; // Close authentication.
if let Some(ref reason) = tx.tc_failure_reason {
js.set_string("server_security_failure_reason", &reason.reason_string)?;
}
// Client/Server init
if let Some(s) = &tx.ts_client_init {
js.set_bool("screen_shared", s.shared!= 0)?;
}
if let Some(tc_server_init) = &tx.tc_server_init {
js.open_object("framebuffer")?;
js.set_uint("width", tc_server_init.width as u64)?;
js.set_uint("height", tc_server_init.height as u64)?;
js.set_string_from_bytes("name", &tc_server_init.name)?;
js.open_object("pixel_format")?;
js.set_uint("bits_per_pixel", tc_server_init.pixel_format.bits_per_pixel as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.set_bool("big_endian", tc_server_init.pixel_format.big_endian_flag!= 0)?;
js.set_bool("true_color", tc_server_init.pixel_format.true_colour_flag!= 0)?;
js.set_uint("red_max", tc_server_init.pixel_format.red_max as u64)?;
js.set_uint("green_max", tc_server_init.pixel_format.green_max as u64)?;
js.set_uint("blue_max", tc_server_init.pixel_format.blue_max as u64)?;
js.set_uint("red_shift", tc_server_init.pixel_format.red_shift as u64)?;
js.set_uint("green_shift", tc_server_init.pixel_format.green_shift as u64)?;
js.set_uint("blue_shift", tc_server_init.pixel_format.blue_shift as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.close()?;
js.close()?;
}
js.close()?;
return Ok(());
}
#[no_mangle]
pub extern "C" fn
|
(_state: &mut RFBState,
tx: *mut std::os::raw::c_void,
js: &mut JsonBuilder) -> bool {
let tx = cast_pointer!(tx, RFBTransaction);
log_rfb(tx, js).is_ok()
}
|
rs_rfb_logger_log
|
identifier_name
|
tree_sortable.rs
|
// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use TreeModel;
use ffi;
use glib;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use std::boxed::Box as Box_;
use std::mem::transmute;
|
glib_wrapper! {
pub struct TreeSortable(Object<ffi::GtkTreeSortable>): TreeModel;
match fn {
get_type => || ffi::gtk_tree_sortable_get_type(),
}
}
pub trait TreeSortableExt {
fn has_default_sort_func(&self) -> bool;
//fn set_default_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q);
//fn set_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_column_id: i32, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q);
fn sort_column_changed(&self);
fn connect_sort_column_changed<F: Fn(&Self) +'static>(&self, f: F) -> u64;
}
impl<O: IsA<TreeSortable> + IsA<glib::object::Object>> TreeSortableExt for O {
fn has_default_sort_func(&self) -> bool {
unsafe {
from_glib(ffi::gtk_tree_sortable_has_default_sort_func(self.to_glib_none().0))
}
}
//fn set_default_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q) {
// unsafe { TODO: call ffi::gtk_tree_sortable_set_default_sort_func() }
//}
//fn set_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_column_id: i32, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q) {
// unsafe { TODO: call ffi::gtk_tree_sortable_set_sort_func() }
//}
fn sort_column_changed(&self) {
unsafe {
ffi::gtk_tree_sortable_sort_column_changed(self.to_glib_none().0);
}
}
fn connect_sort_column_changed<F: Fn(&Self) +'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) +'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "sort-column-changed",
transmute(sort_column_changed_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
unsafe extern "C" fn sort_column_changed_trampoline<P>(this: *mut ffi::GtkTreeSortable, f: glib_ffi::gpointer)
where P: IsA<TreeSortable> {
callback_guard!();
let f: &Box_<Fn(&P) +'static> = transmute(f);
f(&TreeSortable::from_glib_none(this).downcast_unchecked())
}
|
random_line_split
|
|
tree_sortable.rs
|
// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use TreeModel;
use ffi;
use glib;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use std::boxed::Box as Box_;
use std::mem::transmute;
glib_wrapper! {
pub struct TreeSortable(Object<ffi::GtkTreeSortable>): TreeModel;
match fn {
get_type => || ffi::gtk_tree_sortable_get_type(),
}
}
pub trait TreeSortableExt {
fn has_default_sort_func(&self) -> bool;
//fn set_default_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q);
//fn set_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_column_id: i32, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q);
fn sort_column_changed(&self);
fn connect_sort_column_changed<F: Fn(&Self) +'static>(&self, f: F) -> u64;
}
impl<O: IsA<TreeSortable> + IsA<glib::object::Object>> TreeSortableExt for O {
fn has_default_sort_func(&self) -> bool {
unsafe {
from_glib(ffi::gtk_tree_sortable_has_default_sort_func(self.to_glib_none().0))
}
}
//fn set_default_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q) {
// unsafe { TODO: call ffi::gtk_tree_sortable_set_default_sort_func() }
//}
//fn set_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_column_id: i32, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q) {
// unsafe { TODO: call ffi::gtk_tree_sortable_set_sort_func() }
//}
fn sort_column_changed(&self) {
unsafe {
ffi::gtk_tree_sortable_sort_column_changed(self.to_glib_none().0);
}
}
fn connect_sort_column_changed<F: Fn(&Self) +'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) +'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "sort-column-changed",
transmute(sort_column_changed_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
unsafe extern "C" fn sort_column_changed_trampoline<P>(this: *mut ffi::GtkTreeSortable, f: glib_ffi::gpointer)
where P: IsA<TreeSortable>
|
{
callback_guard!();
let f: &Box_<Fn(&P) + 'static> = transmute(f);
f(&TreeSortable::from_glib_none(this).downcast_unchecked())
}
|
identifier_body
|
|
tree_sortable.rs
|
// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use TreeModel;
use ffi;
use glib;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use std::boxed::Box as Box_;
use std::mem::transmute;
glib_wrapper! {
pub struct TreeSortable(Object<ffi::GtkTreeSortable>): TreeModel;
match fn {
get_type => || ffi::gtk_tree_sortable_get_type(),
}
}
pub trait TreeSortableExt {
fn has_default_sort_func(&self) -> bool;
//fn set_default_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q);
//fn set_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_column_id: i32, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q);
fn sort_column_changed(&self);
fn connect_sort_column_changed<F: Fn(&Self) +'static>(&self, f: F) -> u64;
}
impl<O: IsA<TreeSortable> + IsA<glib::object::Object>> TreeSortableExt for O {
fn has_default_sort_func(&self) -> bool {
unsafe {
from_glib(ffi::gtk_tree_sortable_has_default_sort_func(self.to_glib_none().0))
}
}
//fn set_default_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q) {
// unsafe { TODO: call ffi::gtk_tree_sortable_set_default_sort_func() }
//}
//fn set_sort_func<'a, P: Into<Option</*Unimplemented*/Fundamental: Pointer>>, Q: Into<Option<&'a /*Ignored*/glib::DestroyNotify>>>(&self, sort_column_id: i32, sort_func: /*Unknown conversion*//*Unimplemented*/TreeIterCompareFunc, user_data: P, destroy: Q) {
// unsafe { TODO: call ffi::gtk_tree_sortable_set_sort_func() }
//}
fn sort_column_changed(&self) {
unsafe {
ffi::gtk_tree_sortable_sort_column_changed(self.to_glib_none().0);
}
}
fn
|
<F: Fn(&Self) +'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) +'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "sort-column-changed",
transmute(sort_column_changed_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
unsafe extern "C" fn sort_column_changed_trampoline<P>(this: *mut ffi::GtkTreeSortable, f: glib_ffi::gpointer)
where P: IsA<TreeSortable> {
callback_guard!();
let f: &Box_<Fn(&P) +'static> = transmute(f);
f(&TreeSortable::from_glib_none(this).downcast_unchecked())
}
|
connect_sort_column_changed
|
identifier_name
|
tx.rs
|
use std::collections::HashMap;
use protobuf::Message;
use crypto::{PublicKey, SecretKey, Signature, sign, verify_signature};
use ironcoin_pb::{Commitment, DetachedSignature, Transaction, Transfer};
use error::{IroncError, IroncResult};
pub trait TransactionExt {
fn verify_signatures(&self) -> IroncResult<()>;
}
impl TransactionExt for Transaction {
fn verify_signatures(&self) -> IroncResult<()> {
let commit_bytes = &try!(self.get_commit().write_to_bytes());
let mut sign_map = HashMap::<&[u8], &[u8]>::new();
for sign in self.get_signatures().iter() {
sign_map.insert(sign.get_public_key(), sign.get_payload());
}
for transfer in self.get_commit().get_transfers().iter() {
match sign_map.get(transfer.get_source_pk()) {
Some(sign_bytes) =>
|
,
None => return Err(IroncError::new("Missing key."))
}
}
Ok(())
}
}
#[derive(Default)]
pub struct TransactionBuilder {
transfer_secret_keys: Vec<SecretKey>,
bounty_secret_key: Option<SecretKey>,
commit: Commitment
}
impl TransactionBuilder {
pub fn new() -> TransactionBuilder {
TransactionBuilder {
transfer_secret_keys: Vec::<SecretKey>::new(),
bounty_secret_key: None,
commit: Commitment::new()
}
}
pub fn add_transfer(
&mut self, sk: &SecretKey, source: &PublicKey, destination: &PublicKey,
tokens: u64, op_index:u32) -> &mut Self {
let mut transfer = Transfer::new();
transfer.set_op_index(op_index);
transfer.set_tokens(tokens);
transfer.mut_source_pk().push_all(&source.0);
transfer.mut_destination_pk().push_all(&destination.0);
self.transfer_secret_keys.push(sk.clone());
self.commit.mut_transfers().push(transfer);
self
}
pub fn set_bounty(&mut self, sk: &SecretKey, source: &PublicKey,
bounty: u64) -> &mut Self {
self.bounty_secret_key = Some(sk.clone());
self.commit.mut_bounty_pk().push_all(&source.0);
self.commit.set_bounty(bounty);
self
}
pub fn build(self) -> IroncResult<Transaction> {
let mut transaction = Transaction::new();
let commit_bytes = &self.commit.write_to_bytes().unwrap();
for (transfer, secret_key) in self.commit.get_transfers().iter()
.zip(self.transfer_secret_keys.iter())
{
let signature = sign(secret_key, commit_bytes);
let pk = try!(PublicKey::from_slice(transfer.get_source_pk()));
match verify_signature(&pk, commit_bytes, &signature) {
Ok(_) => {
let mut sign = DetachedSignature::new();
sign.set_public_key(pk.0.to_vec());
sign.set_payload(signature.0.to_vec());
transaction.mut_signatures().push(sign);
},
Err(_) => return Err(
IroncError::new("Invalid key for source account."))
}
}
transaction.set_commit(self.commit);
try!(transaction.verify_signatures());
Ok(transaction)
}
}
|
{
let public_key =
try!(PublicKey::from_slice(transfer.get_source_pk()));
let signature = try!(Signature::from_slice(sign_bytes));
try!(verify_signature(&public_key, commit_bytes, &signature));
}
|
conditional_block
|
tx.rs
|
use std::collections::HashMap;
use protobuf::Message;
use crypto::{PublicKey, SecretKey, Signature, sign, verify_signature};
use ironcoin_pb::{Commitment, DetachedSignature, Transaction, Transfer};
use error::{IroncError, IroncResult};
pub trait TransactionExt {
fn verify_signatures(&self) -> IroncResult<()>;
}
impl TransactionExt for Transaction {
fn verify_signatures(&self) -> IroncResult<()> {
let commit_bytes = &try!(self.get_commit().write_to_bytes());
let mut sign_map = HashMap::<&[u8], &[u8]>::new();
for sign in self.get_signatures().iter() {
sign_map.insert(sign.get_public_key(), sign.get_payload());
}
for transfer in self.get_commit().get_transfers().iter() {
match sign_map.get(transfer.get_source_pk()) {
Some(sign_bytes) => {
let public_key =
try!(PublicKey::from_slice(transfer.get_source_pk()));
let signature = try!(Signature::from_slice(sign_bytes));
try!(verify_signature(&public_key, commit_bytes, &signature));
},
None => return Err(IroncError::new("Missing key."))
}
}
Ok(())
}
}
#[derive(Default)]
pub struct TransactionBuilder {
transfer_secret_keys: Vec<SecretKey>,
bounty_secret_key: Option<SecretKey>,
commit: Commitment
}
impl TransactionBuilder {
pub fn new() -> TransactionBuilder {
TransactionBuilder {
transfer_secret_keys: Vec::<SecretKey>::new(),
bounty_secret_key: None,
commit: Commitment::new()
}
}
pub fn
|
(
&mut self, sk: &SecretKey, source: &PublicKey, destination: &PublicKey,
tokens: u64, op_index:u32) -> &mut Self {
let mut transfer = Transfer::new();
transfer.set_op_index(op_index);
transfer.set_tokens(tokens);
transfer.mut_source_pk().push_all(&source.0);
transfer.mut_destination_pk().push_all(&destination.0);
self.transfer_secret_keys.push(sk.clone());
self.commit.mut_transfers().push(transfer);
self
}
pub fn set_bounty(&mut self, sk: &SecretKey, source: &PublicKey,
bounty: u64) -> &mut Self {
self.bounty_secret_key = Some(sk.clone());
self.commit.mut_bounty_pk().push_all(&source.0);
self.commit.set_bounty(bounty);
self
}
pub fn build(self) -> IroncResult<Transaction> {
let mut transaction = Transaction::new();
let commit_bytes = &self.commit.write_to_bytes().unwrap();
for (transfer, secret_key) in self.commit.get_transfers().iter()
.zip(self.transfer_secret_keys.iter())
{
let signature = sign(secret_key, commit_bytes);
let pk = try!(PublicKey::from_slice(transfer.get_source_pk()));
match verify_signature(&pk, commit_bytes, &signature) {
Ok(_) => {
let mut sign = DetachedSignature::new();
sign.set_public_key(pk.0.to_vec());
sign.set_payload(signature.0.to_vec());
transaction.mut_signatures().push(sign);
},
Err(_) => return Err(
IroncError::new("Invalid key for source account."))
}
}
transaction.set_commit(self.commit);
try!(transaction.verify_signatures());
Ok(transaction)
}
}
|
add_transfer
|
identifier_name
|
tx.rs
|
use std::collections::HashMap;
use protobuf::Message;
use crypto::{PublicKey, SecretKey, Signature, sign, verify_signature};
use ironcoin_pb::{Commitment, DetachedSignature, Transaction, Transfer};
use error::{IroncError, IroncResult};
pub trait TransactionExt {
fn verify_signatures(&self) -> IroncResult<()>;
}
impl TransactionExt for Transaction {
fn verify_signatures(&self) -> IroncResult<()> {
let commit_bytes = &try!(self.get_commit().write_to_bytes());
let mut sign_map = HashMap::<&[u8], &[u8]>::new();
for sign in self.get_signatures().iter() {
sign_map.insert(sign.get_public_key(), sign.get_payload());
}
for transfer in self.get_commit().get_transfers().iter() {
match sign_map.get(transfer.get_source_pk()) {
Some(sign_bytes) => {
let public_key =
try!(PublicKey::from_slice(transfer.get_source_pk()));
let signature = try!(Signature::from_slice(sign_bytes));
try!(verify_signature(&public_key, commit_bytes, &signature));
},
None => return Err(IroncError::new("Missing key."))
}
}
Ok(())
}
}
#[derive(Default)]
pub struct TransactionBuilder {
transfer_secret_keys: Vec<SecretKey>,
bounty_secret_key: Option<SecretKey>,
commit: Commitment
}
impl TransactionBuilder {
pub fn new() -> TransactionBuilder
|
pub fn add_transfer(
&mut self, sk: &SecretKey, source: &PublicKey, destination: &PublicKey,
tokens: u64, op_index:u32) -> &mut Self {
let mut transfer = Transfer::new();
transfer.set_op_index(op_index);
transfer.set_tokens(tokens);
transfer.mut_source_pk().push_all(&source.0);
transfer.mut_destination_pk().push_all(&destination.0);
self.transfer_secret_keys.push(sk.clone());
self.commit.mut_transfers().push(transfer);
self
}
pub fn set_bounty(&mut self, sk: &SecretKey, source: &PublicKey,
bounty: u64) -> &mut Self {
self.bounty_secret_key = Some(sk.clone());
self.commit.mut_bounty_pk().push_all(&source.0);
self.commit.set_bounty(bounty);
self
}
pub fn build(self) -> IroncResult<Transaction> {
let mut transaction = Transaction::new();
let commit_bytes = &self.commit.write_to_bytes().unwrap();
for (transfer, secret_key) in self.commit.get_transfers().iter()
.zip(self.transfer_secret_keys.iter())
{
let signature = sign(secret_key, commit_bytes);
let pk = try!(PublicKey::from_slice(transfer.get_source_pk()));
match verify_signature(&pk, commit_bytes, &signature) {
Ok(_) => {
let mut sign = DetachedSignature::new();
sign.set_public_key(pk.0.to_vec());
sign.set_payload(signature.0.to_vec());
transaction.mut_signatures().push(sign);
},
Err(_) => return Err(
IroncError::new("Invalid key for source account."))
}
}
transaction.set_commit(self.commit);
try!(transaction.verify_signatures());
Ok(transaction)
}
}
|
{
TransactionBuilder {
transfer_secret_keys: Vec::<SecretKey>::new(),
bounty_secret_key: None,
commit: Commitment::new()
}
}
|
identifier_body
|
tx.rs
|
use std::collections::HashMap;
use protobuf::Message;
use crypto::{PublicKey, SecretKey, Signature, sign, verify_signature};
use ironcoin_pb::{Commitment, DetachedSignature, Transaction, Transfer};
use error::{IroncError, IroncResult};
pub trait TransactionExt {
fn verify_signatures(&self) -> IroncResult<()>;
}
impl TransactionExt for Transaction {
fn verify_signatures(&self) -> IroncResult<()> {
let commit_bytes = &try!(self.get_commit().write_to_bytes());
let mut sign_map = HashMap::<&[u8], &[u8]>::new();
for sign in self.get_signatures().iter() {
sign_map.insert(sign.get_public_key(), sign.get_payload());
}
for transfer in self.get_commit().get_transfers().iter() {
match sign_map.get(transfer.get_source_pk()) {
Some(sign_bytes) => {
let public_key =
try!(PublicKey::from_slice(transfer.get_source_pk()));
let signature = try!(Signature::from_slice(sign_bytes));
try!(verify_signature(&public_key, commit_bytes, &signature));
},
None => return Err(IroncError::new("Missing key."))
}
}
Ok(())
}
}
|
commit: Commitment
}
impl TransactionBuilder {
pub fn new() -> TransactionBuilder {
TransactionBuilder {
transfer_secret_keys: Vec::<SecretKey>::new(),
bounty_secret_key: None,
commit: Commitment::new()
}
}
pub fn add_transfer(
&mut self, sk: &SecretKey, source: &PublicKey, destination: &PublicKey,
tokens: u64, op_index:u32) -> &mut Self {
let mut transfer = Transfer::new();
transfer.set_op_index(op_index);
transfer.set_tokens(tokens);
transfer.mut_source_pk().push_all(&source.0);
transfer.mut_destination_pk().push_all(&destination.0);
self.transfer_secret_keys.push(sk.clone());
self.commit.mut_transfers().push(transfer);
self
}
pub fn set_bounty(&mut self, sk: &SecretKey, source: &PublicKey,
bounty: u64) -> &mut Self {
self.bounty_secret_key = Some(sk.clone());
self.commit.mut_bounty_pk().push_all(&source.0);
self.commit.set_bounty(bounty);
self
}
pub fn build(self) -> IroncResult<Transaction> {
let mut transaction = Transaction::new();
let commit_bytes = &self.commit.write_to_bytes().unwrap();
for (transfer, secret_key) in self.commit.get_transfers().iter()
.zip(self.transfer_secret_keys.iter())
{
let signature = sign(secret_key, commit_bytes);
let pk = try!(PublicKey::from_slice(transfer.get_source_pk()));
match verify_signature(&pk, commit_bytes, &signature) {
Ok(_) => {
let mut sign = DetachedSignature::new();
sign.set_public_key(pk.0.to_vec());
sign.set_payload(signature.0.to_vec());
transaction.mut_signatures().push(sign);
},
Err(_) => return Err(
IroncError::new("Invalid key for source account."))
}
}
transaction.set_commit(self.commit);
try!(transaction.verify_signatures());
Ok(transaction)
}
}
|
#[derive(Default)]
pub struct TransactionBuilder {
transfer_secret_keys: Vec<SecretKey>,
bounty_secret_key: Option<SecretKey>,
|
random_line_split
|
mod.rs
|
mod asset;
mod combat_screen;
mod input;
mod start_screen;
mod teams_screen;
mod text;
mod types;
use sdl2::render::Texture;
pub use asset::*;
pub use input::*;
pub use text::*;
pub use types::*;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::render::WindowCanvas;
use std::time::Instant;
use crate::core::{DisplayStr, Game, UserInput, Sprite};
pub fn render(
cvs: &mut WindowCanvas,
ui: &UI,
game: &Game,
assets: &mut AssetRepo,
) -> Result<ClickAreas, String> {
// let now = Instant::now();
let (mut scene, click_areas) = match game {
Game::Start(..) => start_screen::render(ui.viewport),
Game::TeamSelection(_, _, game_objects) => {
let (_, _, w, h) = ui.viewport;
teams_screen::render((w, h), game_objects)
}
Game::Combat(combat_data) => {
let scroll_offset = ui.scrolling.as_ref().map(|s| s.offset).unwrap_or((0, 0));
combat_screen::render(ui.viewport, scroll_offset, combat_data)
}
};
scene.texts.push(
ScreenText::new(
DisplayStr::new(format!("FPS: {}", ui.fps)),
ScreenPos(10, ui.viewport.3 as i32 - 60),
)
.color((20, 150, 20, 255))
.padding(10)
.background((252, 251, 250, 255)),
);
// let time_create_scene = Instant::now() - now;
// let now = Instant::now();
draw_scene(cvs, assets, scene)?;
// let time_draw_scene = Instant::now() - now;
// cvs.present();
// println!("create scene: {}ms, draw scene {}ms", time_create_scene.as_millis(), time_draw_scene.as_millis());
Ok(click_areas)
}
fn draw_scene(
cvs: &mut WindowCanvas,
assets: &mut AssetRepo,
scene: Scene,
) -> Result<(), String> {
let (r, g, b) = scene.background;
cvs.set_draw_color(Color::RGB(r, g, b));
cvs.clear();
for (tex_name, ScreenSprite(pos, align, sprite)) in scene.images {
if let Some(ref mut t) = assets.textures.get_mut(&tex_name) {
draw_sprite(pos, align, sprite, t, cvs)?;
}
}
for ScreenSprite(pos, align, sprite) in scene.sprites {
let mut texture = assets.texture.as_mut();
if let Some(ref mut t) = texture {
draw_sprite(pos, align, sprite, t, cvs)?;
}
}
for txt in scene.texts {
let font = assets.fonts[txt.font as usize].as_mut().unwrap();
font.draw(txt, cvs)?;
}
cvs.present();
Ok(())
}
pub fn init_ui(viewport: (i32, i32, u32, u32), pixel_ratio: u8) -> UI {
UI {
viewport,
pixel_ratio,
fps: 0,
frames: 0,
last_check: Instant::now(),
scrolling: None,
}
}
pub fn step_ui(mut ui: UI, g: &Game, i: &Option<UserInput>) -> UI {
ui = update_fps(ui);
ui = update_scrolling(ui, g, i);
ui
}
fn update_fps(ui: UI) -> UI
|
fn update_scrolling(ui: UI, g: &Game, i: &Option<UserInput>) -> UI {
let scrolling = ui.scrolling;
let (_, _, w, h) = ui.viewport;
UI {
scrolling: match (scrolling, g, i) {
(Some(sd), Game::Combat(..), None) => Some(sd),
(None, Game::Combat(combat_data), _) => Some(ScrollData {
is_scrolling: false,
has_scrolled: false,
offset: combat_screen::init_scroll_offset(combat_data, (w, h)),
}),
(Some(sd), Game::Combat(..), Some(i)) => Some(get_scrolling(sd, i)),
_ => None,
},
..ui
}
// if ui.is_scrolling {
// return match i {
// UserInput::ScrollTo(dx, dy) => UI {
// scroll_offset: (ui.scroll_offset.0 + dx, ui.scroll_offset.1 + dy),
// has_scrolled: true,
// ..ui
// },
// _ => UI {
// is_scrolling: false,
// ..ui
// },
// };
// } else {
// if let UserInput::StartScrolling() = i {
// return UI {
// is_scrolling: true,
// has_scrolled: false,
// ..ui
// };
// }
// }
}
fn get_scrolling(sd: ScrollData, i: &UserInput) -> ScrollData {
if sd.is_scrolling {
return match i {
UserInput::ScrollTo(dx, dy) => ScrollData {
offset: (sd.offset.0 + dx, sd.offset.1 + dy),
has_scrolled: true,
..sd
},
_ => ScrollData {
is_scrolling: false,
..sd
},
};
} else {
if let UserInput::StartScrolling() = i {
return ScrollData {
is_scrolling: true,
has_scrolled: false,
..sd
};
}
}
sd
}
// fn get_scrolling(ui: &UI, game: &Game) -> Option<ScrollData> {
// if let Some
// }
fn draw_sprite(
pos: ScreenPos,
align: Align,
sprite: Sprite,
t: &mut Texture,
cvs: &mut WindowCanvas,
) -> Result<(), String> {
let (x, y) = sprite.source;
let (dx, dy) = sprite.offset;
let (w, h) = sprite.dim;
let tw = (w as f32 * sprite.scale).round() as u32;
let th = (h as f32 * sprite.scale).round() as u32;
let pos = pos.align(align, tw, th);
let from = Rect::new(x, y, w, h);
let to = Rect::new(pos.0 + dx, pos.1 + dy, tw, th);
t.set_alpha_mod(sprite.alpha);
cvs.copy(t, from, to)
}
|
{
if ui.frames == 50 {
let time_for_50_frames = ui.last_check.elapsed().as_nanos();
UI {
frames: 0,
fps: 50_000_000_000 / time_for_50_frames,
last_check: std::time::Instant::now(),
..ui
}
} else {
UI {
frames: ui.frames + 1,
..ui
}
}
}
|
identifier_body
|
mod.rs
|
mod asset;
mod combat_screen;
mod input;
mod start_screen;
mod teams_screen;
mod text;
mod types;
use sdl2::render::Texture;
pub use asset::*;
pub use input::*;
pub use text::*;
pub use types::*;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::render::WindowCanvas;
use std::time::Instant;
use crate::core::{DisplayStr, Game, UserInput, Sprite};
pub fn render(
cvs: &mut WindowCanvas,
ui: &UI,
game: &Game,
assets: &mut AssetRepo,
) -> Result<ClickAreas, String> {
// let now = Instant::now();
let (mut scene, click_areas) = match game {
Game::Start(..) => start_screen::render(ui.viewport),
Game::TeamSelection(_, _, game_objects) => {
let (_, _, w, h) = ui.viewport;
teams_screen::render((w, h), game_objects)
}
Game::Combat(combat_data) => {
let scroll_offset = ui.scrolling.as_ref().map(|s| s.offset).unwrap_or((0, 0));
combat_screen::render(ui.viewport, scroll_offset, combat_data)
}
};
scene.texts.push(
ScreenText::new(
DisplayStr::new(format!("FPS: {}", ui.fps)),
ScreenPos(10, ui.viewport.3 as i32 - 60),
)
.color((20, 150, 20, 255))
.padding(10)
.background((252, 251, 250, 255)),
);
// let time_create_scene = Instant::now() - now;
// let now = Instant::now();
draw_scene(cvs, assets, scene)?;
// let time_draw_scene = Instant::now() - now;
// cvs.present();
// println!("create scene: {}ms, draw scene {}ms", time_create_scene.as_millis(), time_draw_scene.as_millis());
Ok(click_areas)
}
fn
|
(
cvs: &mut WindowCanvas,
assets: &mut AssetRepo,
scene: Scene,
) -> Result<(), String> {
let (r, g, b) = scene.background;
cvs.set_draw_color(Color::RGB(r, g, b));
cvs.clear();
for (tex_name, ScreenSprite(pos, align, sprite)) in scene.images {
if let Some(ref mut t) = assets.textures.get_mut(&tex_name) {
draw_sprite(pos, align, sprite, t, cvs)?;
}
}
for ScreenSprite(pos, align, sprite) in scene.sprites {
let mut texture = assets.texture.as_mut();
if let Some(ref mut t) = texture {
draw_sprite(pos, align, sprite, t, cvs)?;
}
}
for txt in scene.texts {
let font = assets.fonts[txt.font as usize].as_mut().unwrap();
font.draw(txt, cvs)?;
}
cvs.present();
Ok(())
}
pub fn init_ui(viewport: (i32, i32, u32, u32), pixel_ratio: u8) -> UI {
UI {
viewport,
pixel_ratio,
fps: 0,
frames: 0,
last_check: Instant::now(),
scrolling: None,
}
}
pub fn step_ui(mut ui: UI, g: &Game, i: &Option<UserInput>) -> UI {
ui = update_fps(ui);
ui = update_scrolling(ui, g, i);
ui
}
fn update_fps(ui: UI) -> UI {
if ui.frames == 50 {
let time_for_50_frames = ui.last_check.elapsed().as_nanos();
UI {
frames: 0,
fps: 50_000_000_000 / time_for_50_frames,
last_check: std::time::Instant::now(),
..ui
}
} else {
UI {
frames: ui.frames + 1,
..ui
}
}
}
fn update_scrolling(ui: UI, g: &Game, i: &Option<UserInput>) -> UI {
let scrolling = ui.scrolling;
let (_, _, w, h) = ui.viewport;
UI {
scrolling: match (scrolling, g, i) {
(Some(sd), Game::Combat(..), None) => Some(sd),
(None, Game::Combat(combat_data), _) => Some(ScrollData {
is_scrolling: false,
has_scrolled: false,
offset: combat_screen::init_scroll_offset(combat_data, (w, h)),
}),
(Some(sd), Game::Combat(..), Some(i)) => Some(get_scrolling(sd, i)),
_ => None,
},
..ui
}
// if ui.is_scrolling {
// return match i {
// UserInput::ScrollTo(dx, dy) => UI {
// scroll_offset: (ui.scroll_offset.0 + dx, ui.scroll_offset.1 + dy),
// has_scrolled: true,
// ..ui
// },
// _ => UI {
// is_scrolling: false,
// ..ui
// },
// };
// } else {
// if let UserInput::StartScrolling() = i {
// return UI {
// is_scrolling: true,
// has_scrolled: false,
// ..ui
// };
// }
// }
}
fn get_scrolling(sd: ScrollData, i: &UserInput) -> ScrollData {
if sd.is_scrolling {
return match i {
UserInput::ScrollTo(dx, dy) => ScrollData {
offset: (sd.offset.0 + dx, sd.offset.1 + dy),
has_scrolled: true,
..sd
},
_ => ScrollData {
is_scrolling: false,
..sd
},
};
} else {
if let UserInput::StartScrolling() = i {
return ScrollData {
is_scrolling: true,
has_scrolled: false,
..sd
};
}
}
sd
}
// fn get_scrolling(ui: &UI, game: &Game) -> Option<ScrollData> {
// if let Some
// }
fn draw_sprite(
pos: ScreenPos,
align: Align,
sprite: Sprite,
t: &mut Texture,
cvs: &mut WindowCanvas,
) -> Result<(), String> {
let (x, y) = sprite.source;
let (dx, dy) = sprite.offset;
let (w, h) = sprite.dim;
let tw = (w as f32 * sprite.scale).round() as u32;
let th = (h as f32 * sprite.scale).round() as u32;
let pos = pos.align(align, tw, th);
let from = Rect::new(x, y, w, h);
let to = Rect::new(pos.0 + dx, pos.1 + dy, tw, th);
t.set_alpha_mod(sprite.alpha);
cvs.copy(t, from, to)
}
|
draw_scene
|
identifier_name
|
mod.rs
|
mod asset;
mod combat_screen;
mod input;
mod start_screen;
mod teams_screen;
mod text;
mod types;
use sdl2::render::Texture;
pub use asset::*;
pub use input::*;
pub use text::*;
pub use types::*;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::render::WindowCanvas;
use std::time::Instant;
use crate::core::{DisplayStr, Game, UserInput, Sprite};
pub fn render(
cvs: &mut WindowCanvas,
ui: &UI,
game: &Game,
assets: &mut AssetRepo,
) -> Result<ClickAreas, String> {
// let now = Instant::now();
let (mut scene, click_areas) = match game {
Game::Start(..) => start_screen::render(ui.viewport),
Game::TeamSelection(_, _, game_objects) => {
let (_, _, w, h) = ui.viewport;
teams_screen::render((w, h), game_objects)
}
Game::Combat(combat_data) => {
let scroll_offset = ui.scrolling.as_ref().map(|s| s.offset).unwrap_or((0, 0));
combat_screen::render(ui.viewport, scroll_offset, combat_data)
}
};
scene.texts.push(
ScreenText::new(
DisplayStr::new(format!("FPS: {}", ui.fps)),
ScreenPos(10, ui.viewport.3 as i32 - 60),
)
.color((20, 150, 20, 255))
.padding(10)
.background((252, 251, 250, 255)),
|
);
// let time_create_scene = Instant::now() - now;
// let now = Instant::now();
draw_scene(cvs, assets, scene)?;
// let time_draw_scene = Instant::now() - now;
// cvs.present();
// println!("create scene: {}ms, draw scene {}ms", time_create_scene.as_millis(), time_draw_scene.as_millis());
Ok(click_areas)
}
fn draw_scene(
cvs: &mut WindowCanvas,
assets: &mut AssetRepo,
scene: Scene,
) -> Result<(), String> {
let (r, g, b) = scene.background;
cvs.set_draw_color(Color::RGB(r, g, b));
cvs.clear();
for (tex_name, ScreenSprite(pos, align, sprite)) in scene.images {
if let Some(ref mut t) = assets.textures.get_mut(&tex_name) {
draw_sprite(pos, align, sprite, t, cvs)?;
}
}
for ScreenSprite(pos, align, sprite) in scene.sprites {
let mut texture = assets.texture.as_mut();
if let Some(ref mut t) = texture {
draw_sprite(pos, align, sprite, t, cvs)?;
}
}
for txt in scene.texts {
let font = assets.fonts[txt.font as usize].as_mut().unwrap();
font.draw(txt, cvs)?;
}
cvs.present();
Ok(())
}
pub fn init_ui(viewport: (i32, i32, u32, u32), pixel_ratio: u8) -> UI {
UI {
viewport,
pixel_ratio,
fps: 0,
frames: 0,
last_check: Instant::now(),
scrolling: None,
}
}
pub fn step_ui(mut ui: UI, g: &Game, i: &Option<UserInput>) -> UI {
ui = update_fps(ui);
ui = update_scrolling(ui, g, i);
ui
}
fn update_fps(ui: UI) -> UI {
if ui.frames == 50 {
let time_for_50_frames = ui.last_check.elapsed().as_nanos();
UI {
frames: 0,
fps: 50_000_000_000 / time_for_50_frames,
last_check: std::time::Instant::now(),
..ui
}
} else {
UI {
frames: ui.frames + 1,
..ui
}
}
}
fn update_scrolling(ui: UI, g: &Game, i: &Option<UserInput>) -> UI {
let scrolling = ui.scrolling;
let (_, _, w, h) = ui.viewport;
UI {
scrolling: match (scrolling, g, i) {
(Some(sd), Game::Combat(..), None) => Some(sd),
(None, Game::Combat(combat_data), _) => Some(ScrollData {
is_scrolling: false,
has_scrolled: false,
offset: combat_screen::init_scroll_offset(combat_data, (w, h)),
}),
(Some(sd), Game::Combat(..), Some(i)) => Some(get_scrolling(sd, i)),
_ => None,
},
..ui
}
// if ui.is_scrolling {
// return match i {
// UserInput::ScrollTo(dx, dy) => UI {
// scroll_offset: (ui.scroll_offset.0 + dx, ui.scroll_offset.1 + dy),
// has_scrolled: true,
// ..ui
// },
// _ => UI {
// is_scrolling: false,
// ..ui
// },
// };
// } else {
// if let UserInput::StartScrolling() = i {
// return UI {
// is_scrolling: true,
// has_scrolled: false,
// ..ui
// };
// }
// }
}
fn get_scrolling(sd: ScrollData, i: &UserInput) -> ScrollData {
if sd.is_scrolling {
return match i {
UserInput::ScrollTo(dx, dy) => ScrollData {
offset: (sd.offset.0 + dx, sd.offset.1 + dy),
has_scrolled: true,
..sd
},
_ => ScrollData {
is_scrolling: false,
..sd
},
};
} else {
if let UserInput::StartScrolling() = i {
return ScrollData {
is_scrolling: true,
has_scrolled: false,
..sd
};
}
}
sd
}
// fn get_scrolling(ui: &UI, game: &Game) -> Option<ScrollData> {
// if let Some
// }
fn draw_sprite(
pos: ScreenPos,
align: Align,
sprite: Sprite,
t: &mut Texture,
cvs: &mut WindowCanvas,
) -> Result<(), String> {
let (x, y) = sprite.source;
let (dx, dy) = sprite.offset;
let (w, h) = sprite.dim;
let tw = (w as f32 * sprite.scale).round() as u32;
let th = (h as f32 * sprite.scale).round() as u32;
let pos = pos.align(align, tw, th);
let from = Rect::new(x, y, w, h);
let to = Rect::new(pos.0 + dx, pos.1 + dy, tw, th);
t.set_alpha_mod(sprite.alpha);
cvs.copy(t, from, to)
}
|
random_line_split
|
|
label.rs
|
use std::fmt::{Write, self};
use std::iter;
use std::str;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Label {
bits: u8, // amount of bits in tail (last element of vec). always between 0 and 7
buffer: Vec<u8> // backing buffer
}
impl Label {
/**
* Small layout intermezzo
* A label is basically a bitvec, but the internal representation
* means the first pushed bit will always occupy the MSB position
* in the backing buffer. This as to allow the conversion from bitvec
* to unicode to happen smoothly
*/
pub fn new() -> Label {
Label {bits: 0, buffer: Vec::new()}
}
pub fn len(&self) -> usize {
let len = self.buffer.len();
if len == 0 {
return 0;
}
return len * 8 - 8 + self.bits as usize
}
pub fn push(&mut self, val: bool) {
if self.buffer.len() == 0 {
self.buffer.push(0)
}
self.bits += 1;
let len = self.buffer.len();
self.buffer[len - 1] |= (val as u8) << (8 - self.bits);
if self.bits == 8 {
self.buffer.push(0);
self.bits = 0;
}
}
pub fn
|
(&mut self) -> Option<bool> {
if self.bits == 0 {
if self.buffer.len() > 1 {
self.buffer.pop();
self.bits = 8;
} else {
return None;
}
}
let len = self.buffer.len();
let tail = &mut self.buffer[len - 1];
let val: u8 = *tail & (1 << (8 - self.bits));
*tail ^= val;
self.bits -= 1;
return Some(val!= 0);
}
}
impl fmt::Display for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let len = self.buffer.len();
if self.bits == 0 && len > 0 {
if self.buffer[..len - 1].iter().all(|c| match *c {
b'a'..=b'z'
| b'A'..=b'Z'
| b'_' => true,
_ => false
}) {
// as the above characters are all ascii we can safely convert to utf-8
return f.write_str(str::from_utf8(&self.buffer[..len - 1]).unwrap());
}
}
f.write_char('_')?;
for bit in self.into_iter() {
f.write_char(if bit {'1'} else {'0'})?;
}
Ok(())
}
}
impl<'a> From<&'a [u8]> for Label {
fn from(buffer: &[u8]) -> Label {
let mut buffer = Vec::from(buffer);
buffer.push(0);
Label {bits: 0, buffer: buffer}
}
}
impl<'a> iter::IntoIterator for &'a Label {
type Item = bool;
type IntoIter = LabelIterator<'a>;
fn into_iter(self) -> LabelIterator<'a> {
LabelIterator {
label: self,
index: 0
}
}
}
#[derive(Debug, Clone)]
pub struct LabelIterator<'a> {
label: &'a Label,
index: usize,
}
impl<'a> iter::Iterator for LabelIterator<'a> {
type Item = bool;
fn next(&mut self) -> Option<bool> {
if self.index == self.label.len() {
return None
} else {
let byte = self.label.buffer[self.index / 8];
let bit = byte & (1 << (7 - self.index % 8));
self.index += 1;
Some(bit!= 0)
}
}
}
|
pop
|
identifier_name
|
label.rs
|
use std::fmt::{Write, self};
use std::iter;
use std::str;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Label {
bits: u8, // amount of bits in tail (last element of vec). always between 0 and 7
buffer: Vec<u8> // backing buffer
}
impl Label {
/**
* Small layout intermezzo
* A label is basically a bitvec, but the internal representation
* means the first pushed bit will always occupy the MSB position
* in the backing buffer. This as to allow the conversion from bitvec
* to unicode to happen smoothly
*/
pub fn new() -> Label
|
pub fn len(&self) -> usize {
let len = self.buffer.len();
if len == 0 {
return 0;
}
return len * 8 - 8 + self.bits as usize
}
pub fn push(&mut self, val: bool) {
if self.buffer.len() == 0 {
self.buffer.push(0)
}
self.bits += 1;
let len = self.buffer.len();
self.buffer[len - 1] |= (val as u8) << (8 - self.bits);
if self.bits == 8 {
self.buffer.push(0);
self.bits = 0;
}
}
pub fn pop(&mut self) -> Option<bool> {
if self.bits == 0 {
if self.buffer.len() > 1 {
self.buffer.pop();
self.bits = 8;
} else {
return None;
}
}
let len = self.buffer.len();
let tail = &mut self.buffer[len - 1];
let val: u8 = *tail & (1 << (8 - self.bits));
*tail ^= val;
self.bits -= 1;
return Some(val!= 0);
}
}
impl fmt::Display for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let len = self.buffer.len();
if self.bits == 0 && len > 0 {
if self.buffer[..len - 1].iter().all(|c| match *c {
b'a'..=b'z'
| b'A'..=b'Z'
| b'_' => true,
_ => false
}) {
// as the above characters are all ascii we can safely convert to utf-8
return f.write_str(str::from_utf8(&self.buffer[..len - 1]).unwrap());
}
}
f.write_char('_')?;
for bit in self.into_iter() {
f.write_char(if bit {'1'} else {'0'})?;
}
Ok(())
}
}
impl<'a> From<&'a [u8]> for Label {
fn from(buffer: &[u8]) -> Label {
let mut buffer = Vec::from(buffer);
buffer.push(0);
Label {bits: 0, buffer: buffer}
}
}
impl<'a> iter::IntoIterator for &'a Label {
type Item = bool;
type IntoIter = LabelIterator<'a>;
fn into_iter(self) -> LabelIterator<'a> {
LabelIterator {
label: self,
index: 0
}
}
}
#[derive(Debug, Clone)]
pub struct LabelIterator<'a> {
label: &'a Label,
index: usize,
}
impl<'a> iter::Iterator for LabelIterator<'a> {
type Item = bool;
fn next(&mut self) -> Option<bool> {
if self.index == self.label.len() {
return None
} else {
let byte = self.label.buffer[self.index / 8];
let bit = byte & (1 << (7 - self.index % 8));
self.index += 1;
Some(bit!= 0)
}
}
}
|
{
Label {bits: 0, buffer: Vec::new()}
}
|
identifier_body
|
label.rs
|
use std::fmt::{Write, self};
use std::iter;
use std::str;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Label {
bits: u8, // amount of bits in tail (last element of vec). always between 0 and 7
buffer: Vec<u8> // backing buffer
}
impl Label {
/**
* Small layout intermezzo
* A label is basically a bitvec, but the internal representation
* means the first pushed bit will always occupy the MSB position
* in the backing buffer. This as to allow the conversion from bitvec
* to unicode to happen smoothly
*/
pub fn new() -> Label {
Label {bits: 0, buffer: Vec::new()}
}
pub fn len(&self) -> usize {
let len = self.buffer.len();
if len == 0 {
return 0;
}
return len * 8 - 8 + self.bits as usize
}
pub fn push(&mut self, val: bool) {
if self.buffer.len() == 0 {
self.buffer.push(0)
}
self.bits += 1;
let len = self.buffer.len();
self.buffer[len - 1] |= (val as u8) << (8 - self.bits);
if self.bits == 8 {
self.buffer.push(0);
self.bits = 0;
}
}
pub fn pop(&mut self) -> Option<bool> {
if self.bits == 0 {
if self.buffer.len() > 1 {
self.buffer.pop();
self.bits = 8;
} else {
return None;
}
}
let len = self.buffer.len();
let tail = &mut self.buffer[len - 1];
|
*tail ^= val;
self.bits -= 1;
return Some(val!= 0);
}
}
impl fmt::Display for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let len = self.buffer.len();
if self.bits == 0 && len > 0 {
if self.buffer[..len - 1].iter().all(|c| match *c {
b'a'..=b'z'
| b'A'..=b'Z'
| b'_' => true,
_ => false
}) {
// as the above characters are all ascii we can safely convert to utf-8
return f.write_str(str::from_utf8(&self.buffer[..len - 1]).unwrap());
}
}
f.write_char('_')?;
for bit in self.into_iter() {
f.write_char(if bit {'1'} else {'0'})?;
}
Ok(())
}
}
impl<'a> From<&'a [u8]> for Label {
fn from(buffer: &[u8]) -> Label {
let mut buffer = Vec::from(buffer);
buffer.push(0);
Label {bits: 0, buffer: buffer}
}
}
impl<'a> iter::IntoIterator for &'a Label {
type Item = bool;
type IntoIter = LabelIterator<'a>;
fn into_iter(self) -> LabelIterator<'a> {
LabelIterator {
label: self,
index: 0
}
}
}
#[derive(Debug, Clone)]
pub struct LabelIterator<'a> {
label: &'a Label,
index: usize,
}
impl<'a> iter::Iterator for LabelIterator<'a> {
type Item = bool;
fn next(&mut self) -> Option<bool> {
if self.index == self.label.len() {
return None
} else {
let byte = self.label.buffer[self.index / 8];
let bit = byte & (1 << (7 - self.index % 8));
self.index += 1;
Some(bit!= 0)
}
}
}
|
let val: u8 = *tail & (1 << (8 - self.bits));
|
random_line_split
|
label.rs
|
use std::fmt::{Write, self};
use std::iter;
use std::str;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Label {
bits: u8, // amount of bits in tail (last element of vec). always between 0 and 7
buffer: Vec<u8> // backing buffer
}
impl Label {
/**
* Small layout intermezzo
* A label is basically a bitvec, but the internal representation
* means the first pushed bit will always occupy the MSB position
* in the backing buffer. This as to allow the conversion from bitvec
* to unicode to happen smoothly
*/
pub fn new() -> Label {
Label {bits: 0, buffer: Vec::new()}
}
pub fn len(&self) -> usize {
let len = self.buffer.len();
if len == 0 {
return 0;
}
return len * 8 - 8 + self.bits as usize
}
pub fn push(&mut self, val: bool) {
if self.buffer.len() == 0 {
self.buffer.push(0)
}
self.bits += 1;
let len = self.buffer.len();
self.buffer[len - 1] |= (val as u8) << (8 - self.bits);
if self.bits == 8
|
}
pub fn pop(&mut self) -> Option<bool> {
if self.bits == 0 {
if self.buffer.len() > 1 {
self.buffer.pop();
self.bits = 8;
} else {
return None;
}
}
let len = self.buffer.len();
let tail = &mut self.buffer[len - 1];
let val: u8 = *tail & (1 << (8 - self.bits));
*tail ^= val;
self.bits -= 1;
return Some(val!= 0);
}
}
impl fmt::Display for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let len = self.buffer.len();
if self.bits == 0 && len > 0 {
if self.buffer[..len - 1].iter().all(|c| match *c {
b'a'..=b'z'
| b'A'..=b'Z'
| b'_' => true,
_ => false
}) {
// as the above characters are all ascii we can safely convert to utf-8
return f.write_str(str::from_utf8(&self.buffer[..len - 1]).unwrap());
}
}
f.write_char('_')?;
for bit in self.into_iter() {
f.write_char(if bit {'1'} else {'0'})?;
}
Ok(())
}
}
impl<'a> From<&'a [u8]> for Label {
fn from(buffer: &[u8]) -> Label {
let mut buffer = Vec::from(buffer);
buffer.push(0);
Label {bits: 0, buffer: buffer}
}
}
impl<'a> iter::IntoIterator for &'a Label {
type Item = bool;
type IntoIter = LabelIterator<'a>;
fn into_iter(self) -> LabelIterator<'a> {
LabelIterator {
label: self,
index: 0
}
}
}
#[derive(Debug, Clone)]
pub struct LabelIterator<'a> {
label: &'a Label,
index: usize,
}
impl<'a> iter::Iterator for LabelIterator<'a> {
type Item = bool;
fn next(&mut self) -> Option<bool> {
if self.index == self.label.len() {
return None
} else {
let byte = self.label.buffer[self.index / 8];
let bit = byte & (1 << (7 - self.index % 8));
self.index += 1;
Some(bit!= 0)
}
}
}
|
{
self.buffer.push(0);
self.bits = 0;
}
|
conditional_block
|
main.rs
|
extern crate stopwatch;
use stopwatch::{Stopwatch};
fn main() {
let timer = Stopwatch::start_new();
//digits are stored in reverse order (least significant first) as an optimization
match expand(196, 100000) {
Ok(iter) => println!("converged after {} iterations", iter),
Err(max_iters) => println!("did not converge after {} iterations", max_iters)
}
//print status
println!("total execution time: {} ms", timer.elapsed_ms());
}
fn expand(number: u64, max_iters: i64) -> Result<i64, i64> {
println!("expand {}", number);
let mut digits = to_digits(number);
//iterations will continue until palindromicity improves
for iter in 1.. max_iters + 1 {
reverse_add(&mut digits);
if iter % 10000 == 0
|
if is_palindrome(&digits) {
return Ok(iter);
}
}
//return iteration count as error value
Err(max_iters)
}
fn to_digits(n:u64) -> Vec<u8> {
let mut rem = n;
let mut digits:Vec<u8> = Vec::new();
while rem > 0 {
let digit = (rem % 10) as u8;
rem /= 10;
digits.push(digit);
}
return digits;
}
/*
note that digits here are in reverse order (least significant digit first)
e.g. the number 349 should be represented as vec![9,4,3]
this lets us easily grow the array when a new digit is created by overflow, as we can simply
append it. Otherwise we'd have to prepend, which is slower.
*/
fn reverse_add(digits: &mut Vec<u8>) {
//first fold in half and sum, to cut down on additions
let digits_len = digits.len();
for i in 0.. (digits_len + 1) / 2 {
let j = digits_len - 1 - i;
let sum = digits[i] + digits[j];
digits[i] = sum;
digits[j] = sum;
}
//now propagate overflow left to right (least significant digit to most significant)
let mut overflow = 0;
for i in 0.. digits_len {
digits[i] += overflow;
if digits[i] >= 10 {
digits[i] -= 10;
overflow = 1;
} else {
overflow = 0;
}
}
//finally add extra digit if we need it
if overflow > 0 {
//digits are in reverse order, so this new most-significant digit can just be put on the end
digits.push(overflow);
}
}
fn is_palindrome(digits: &Vec<u8>) -> bool {
let digits_len = digits.len();
for i in 0.. digits_len/2 {
if digits[i]!= digits[digits_len - 1 - i] {
return false;
}
}
return true;
}
|
{
println!("{} {}", iter, digits.len());
}
|
conditional_block
|
main.rs
|
extern crate stopwatch;
use stopwatch::{Stopwatch};
fn main() {
let timer = Stopwatch::start_new();
//digits are stored in reverse order (least significant first) as an optimization
match expand(196, 100000) {
Ok(iter) => println!("converged after {} iterations", iter),
Err(max_iters) => println!("did not converge after {} iterations", max_iters)
}
//print status
println!("total execution time: {} ms", timer.elapsed_ms());
}
fn expand(number: u64, max_iters: i64) -> Result<i64, i64> {
println!("expand {}", number);
let mut digits = to_digits(number);
//iterations will continue until palindromicity improves
for iter in 1.. max_iters + 1 {
reverse_add(&mut digits);
if iter % 10000 == 0 {
println!("{} {}", iter, digits.len());
}
if is_palindrome(&digits) {
return Ok(iter);
}
}
//return iteration count as error value
Err(max_iters)
}
fn to_digits(n:u64) -> Vec<u8> {
let mut rem = n;
let mut digits:Vec<u8> = Vec::new();
while rem > 0 {
let digit = (rem % 10) as u8;
rem /= 10;
digits.push(digit);
}
return digits;
}
/*
note that digits here are in reverse order (least significant digit first)
e.g. the number 349 should be represented as vec![9,4,3]
this lets us easily grow the array when a new digit is created by overflow, as we can simply
append it. Otherwise we'd have to prepend, which is slower.
*/
fn
|
(digits: &mut Vec<u8>) {
//first fold in half and sum, to cut down on additions
let digits_len = digits.len();
for i in 0.. (digits_len + 1) / 2 {
let j = digits_len - 1 - i;
let sum = digits[i] + digits[j];
digits[i] = sum;
digits[j] = sum;
}
//now propagate overflow left to right (least significant digit to most significant)
let mut overflow = 0;
for i in 0.. digits_len {
digits[i] += overflow;
if digits[i] >= 10 {
digits[i] -= 10;
overflow = 1;
} else {
overflow = 0;
}
}
//finally add extra digit if we need it
if overflow > 0 {
//digits are in reverse order, so this new most-significant digit can just be put on the end
digits.push(overflow);
}
}
fn is_palindrome(digits: &Vec<u8>) -> bool {
let digits_len = digits.len();
for i in 0.. digits_len/2 {
if digits[i]!= digits[digits_len - 1 - i] {
return false;
}
}
return true;
}
|
reverse_add
|
identifier_name
|
main.rs
|
extern crate stopwatch;
use stopwatch::{Stopwatch};
fn main() {
let timer = Stopwatch::start_new();
//digits are stored in reverse order (least significant first) as an optimization
match expand(196, 100000) {
Ok(iter) => println!("converged after {} iterations", iter),
Err(max_iters) => println!("did not converge after {} iterations", max_iters)
}
//print status
println!("total execution time: {} ms", timer.elapsed_ms());
}
fn expand(number: u64, max_iters: i64) -> Result<i64, i64> {
println!("expand {}", number);
let mut digits = to_digits(number);
//iterations will continue until palindromicity improves
for iter in 1.. max_iters + 1 {
reverse_add(&mut digits);
if iter % 10000 == 0 {
println!("{} {}", iter, digits.len());
}
if is_palindrome(&digits) {
return Ok(iter);
}
}
//return iteration count as error value
Err(max_iters)
}
fn to_digits(n:u64) -> Vec<u8>
|
/*
note that digits here are in reverse order (least significant digit first)
e.g. the number 349 should be represented as vec![9,4,3]
this lets us easily grow the array when a new digit is created by overflow, as we can simply
append it. Otherwise we'd have to prepend, which is slower.
*/
fn reverse_add(digits: &mut Vec<u8>) {
//first fold in half and sum, to cut down on additions
let digits_len = digits.len();
for i in 0.. (digits_len + 1) / 2 {
let j = digits_len - 1 - i;
let sum = digits[i] + digits[j];
digits[i] = sum;
digits[j] = sum;
}
//now propagate overflow left to right (least significant digit to most significant)
let mut overflow = 0;
for i in 0.. digits_len {
digits[i] += overflow;
if digits[i] >= 10 {
digits[i] -= 10;
overflow = 1;
} else {
overflow = 0;
}
}
//finally add extra digit if we need it
if overflow > 0 {
//digits are in reverse order, so this new most-significant digit can just be put on the end
digits.push(overflow);
}
}
fn is_palindrome(digits: &Vec<u8>) -> bool {
let digits_len = digits.len();
for i in 0.. digits_len/2 {
if digits[i]!= digits[digits_len - 1 - i] {
return false;
}
}
return true;
}
|
{
let mut rem = n;
let mut digits:Vec<u8> = Vec::new();
while rem > 0 {
let digit = (rem % 10) as u8;
rem /= 10;
digits.push(digit);
}
return digits;
}
|
identifier_body
|
main.rs
|
extern crate stopwatch;
use stopwatch::{Stopwatch};
fn main() {
let timer = Stopwatch::start_new();
//digits are stored in reverse order (least significant first) as an optimization
match expand(196, 100000) {
Ok(iter) => println!("converged after {} iterations", iter),
Err(max_iters) => println!("did not converge after {} iterations", max_iters)
}
//print status
println!("total execution time: {} ms", timer.elapsed_ms());
}
fn expand(number: u64, max_iters: i64) -> Result<i64, i64> {
println!("expand {}", number);
let mut digits = to_digits(number);
//iterations will continue until palindromicity improves
for iter in 1.. max_iters + 1 {
reverse_add(&mut digits);
if iter % 10000 == 0 {
println!("{} {}", iter, digits.len());
}
if is_palindrome(&digits) {
return Ok(iter);
}
}
//return iteration count as error value
Err(max_iters)
}
fn to_digits(n:u64) -> Vec<u8> {
let mut rem = n;
let mut digits:Vec<u8> = Vec::new();
while rem > 0 {
let digit = (rem % 10) as u8;
rem /= 10;
digits.push(digit);
}
return digits;
}
/*
note that digits here are in reverse order (least significant digit first)
e.g. the number 349 should be represented as vec![9,4,3]
this lets us easily grow the array when a new digit is created by overflow, as we can simply
append it. Otherwise we'd have to prepend, which is slower.
*/
fn reverse_add(digits: &mut Vec<u8>) {
//first fold in half and sum, to cut down on additions
let digits_len = digits.len();
for i in 0.. (digits_len + 1) / 2 {
let j = digits_len - 1 - i;
let sum = digits[i] + digits[j];
digits[i] = sum;
digits[j] = sum;
}
//now propagate overflow left to right (least significant digit to most significant)
let mut overflow = 0;
for i in 0.. digits_len {
digits[i] += overflow;
if digits[i] >= 10 {
digits[i] -= 10;
overflow = 1;
} else {
overflow = 0;
}
}
//finally add extra digit if we need it
if overflow > 0 {
//digits are in reverse order, so this new most-significant digit can just be put on the end
digits.push(overflow);
}
}
fn is_palindrome(digits: &Vec<u8>) -> bool {
let digits_len = digits.len();
for i in 0.. digits_len/2 {
if digits[i]!= digits[digits_len - 1 - i] {
return false;
|
}
|
}
}
return true;
|
random_line_split
|
unbind_3pid.rs
|
//! [POST /_matrix/client/r0/account/3pid/unbind](https://matrix.org/docs/spec/client_server/r0.6.0#post-matrix-client-r0-account-3pid-unbind)
use ruma_api::ruma_api;
use super::ThirdPartyIdRemovalStatus;
use crate::r0::thirdparty::Medium;
ruma_api! {
metadata {
description: "Unbind a 3PID from a user's account on an identity server.",
method: POST,
name: "unbind_3pid",
path: "/_matrix/client/r0/account/3pid/unbind",
rate_limited: false,
requires_authentication: true,
}
request {
/// Identity server to unbind from.
#[serde(skip_serializing_if = "Option::is_none")]
pub id_server: Option<String>,
/// Medium of the 3PID to be removed.
pub medium: Medium,
/// Third-party address being removed.
pub address: String,
}
response {
/// Result of unbind operation.
pub id_server_unbind_result: ThirdPartyIdRemovalStatus,
}
|
error: crate::Error
}
|
random_line_split
|
|
motion.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified types for CSS values that are related to motion path.
use crate::parser::{Parse, ParserContext};
use crate::values::computed::motion::OffsetRotate as ComputedOffsetRotate;
use crate::values::computed::{Context, ToComputedValue};
use crate::values::generics::motion::{GenericOffsetPath, RayFunction, RaySize};
use crate::values::specified::{Angle, SVGPathData};
use crate::Zero;
use cssparser::Parser;
use style_traits::{ParseError, StyleParseErrorKind};
/// The specified value of `offset-path`.
pub type OffsetPath = GenericOffsetPath<Angle>;
impl Parse for RayFunction<Angle> {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let mut angle = None;
let mut size = None;
let mut contain = false;
loop {
if angle.is_none() {
angle = input.try(|i| Angle::parse(context, i)).ok();
}
if size.is_none() {
size = input.try(RaySize::parse).ok();
if size.is_some() {
continue;
}
}
if!contain {
contain = input.try(|i| i.expect_ident_matching("contain")).is_ok();
if contain {
continue;
}
}
break;
}
if angle.is_none() || size.is_none() {
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(RayFunction {
angle: angle.unwrap(),
size: size.unwrap(),
contain,
})
}
}
impl Parse for OffsetPath {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// Parse none.
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
return Ok(OffsetPath::none());
}
// Parse possible functions.
let location = input.current_source_location();
let function = input.expect_function()?.clone();
input.parse_nested_block(move |i| {
match_ignore_ascii_case! { &function,
// Bug 1186329: Implement the parser for <basic-shape>, <geometry-box>,
// and <url>.
"path" => SVGPathData::parse(context, i).map(GenericOffsetPath::Path),
"ray" => RayFunction::parse(context, i).map(GenericOffsetPath::Ray),
_ => {
Err(location.new_custom_error(
StyleParseErrorKind::UnexpectedFunction(function.clone())
))
},
}
})
}
}
/// The direction of offset-rotate.
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
#[repr(u8)]
pub enum OffsetRotateDirection {
/// Unspecified direction keyword.
#[css(skip)]
None,
/// 0deg offset (face forward).
Auto,
/// 180deg offset (face backward).
Reverse,
}
impl OffsetRotateDirection {
/// Returns true if it is none (i.e. the keyword is not specified).
#[inline]
fn is_none(&self) -> bool {
*self == OffsetRotateDirection::None
}
}
#[inline]
fn direction_specified_and_angle_is_zero(direction: &OffsetRotateDirection, angle: &Angle) -> bool {
!direction.is_none() && angle.is_zero()
}
/// The specified offset-rotate.
/// The syntax is: "[ auto | reverse ] || <angle>"
///
/// https://drafts.fxtf.org/motion-1/#offset-rotate-property
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub struct OffsetRotate {
/// [auto | reverse].
#[css(skip_if = "OffsetRotateDirection::is_none")]
direction: OffsetRotateDirection,
/// <angle>.
/// If direction is None, this is a fixed angle which indicates a
/// constant clockwise rotation transformation applied to it by this
/// specified rotation angle. Otherwise, the angle will be added to
/// the angle of the direction in layout.
#[css(contextual_skip_if = "direction_specified_and_angle_is_zero")]
angle: Angle,
}
impl OffsetRotate {
/// Returns the initial value, auto.
#[inline]
pub fn auto() -> Self {
OffsetRotate {
direction: OffsetRotateDirection::Auto,
angle: Angle::zero(),
}
}
/// Returns true if self is auto 0deg.
#[inline]
pub fn is_auto(&self) -> bool {
self.direction == OffsetRotateDirection::Auto && self.angle.is_zero()
}
}
impl Parse for OffsetRotate {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let location = input.current_source_location();
let mut direction = input.try(OffsetRotateDirection::parse);
let angle = input.try(|i| Angle::parse(context, i));
if direction.is_err() {
// The direction and angle could be any order, so give it a change to parse
// direction again.
direction = input.try(OffsetRotateDirection::parse);
}
if direction.is_err() && angle.is_err() {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(OffsetRotate {
direction: direction.unwrap_or(OffsetRotateDirection::None),
angle: angle.unwrap_or(Zero::zero()),
})
}
}
impl ToComputedValue for OffsetRotate {
type ComputedValue = ComputedOffsetRotate;
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
use crate::values::computed::Angle as ComputedAngle;
ComputedOffsetRotate {
auto:!self.direction.is_none(),
angle: if self.direction == OffsetRotateDirection::Reverse {
// The computed value should always convert "reverse" into "auto".
// e.g. "reverse calc(20deg + 10deg)" => "auto 210deg"
self.angle.to_computed_value(context) + ComputedAngle::from_degrees(180.0)
} else {
self.angle.to_computed_value(context)
},
}
}
#[inline]
fn
|
(computed: &Self::ComputedValue) -> Self {
OffsetRotate {
direction: if computed.auto {
OffsetRotateDirection::Auto
} else {
OffsetRotateDirection::None
},
angle: ToComputedValue::from_computed_value(&computed.angle),
}
}
}
|
from_computed_value
|
identifier_name
|
motion.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified types for CSS values that are related to motion path.
use crate::parser::{Parse, ParserContext};
use crate::values::computed::motion::OffsetRotate as ComputedOffsetRotate;
use crate::values::computed::{Context, ToComputedValue};
use crate::values::generics::motion::{GenericOffsetPath, RayFunction, RaySize};
use crate::values::specified::{Angle, SVGPathData};
use crate::Zero;
use cssparser::Parser;
use style_traits::{ParseError, StyleParseErrorKind};
/// The specified value of `offset-path`.
pub type OffsetPath = GenericOffsetPath<Angle>;
impl Parse for RayFunction<Angle> {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let mut angle = None;
let mut size = None;
let mut contain = false;
loop {
if angle.is_none() {
angle = input.try(|i| Angle::parse(context, i)).ok();
}
if size.is_none() {
size = input.try(RaySize::parse).ok();
if size.is_some() {
continue;
}
}
if!contain {
contain = input.try(|i| i.expect_ident_matching("contain")).is_ok();
if contain {
continue;
}
}
break;
}
if angle.is_none() || size.is_none() {
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(RayFunction {
angle: angle.unwrap(),
size: size.unwrap(),
contain,
})
}
}
impl Parse for OffsetPath {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// Parse none.
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
return Ok(OffsetPath::none());
}
// Parse possible functions.
let location = input.current_source_location();
let function = input.expect_function()?.clone();
input.parse_nested_block(move |i| {
match_ignore_ascii_case! { &function,
// Bug 1186329: Implement the parser for <basic-shape>, <geometry-box>,
// and <url>.
"path" => SVGPathData::parse(context, i).map(GenericOffsetPath::Path),
"ray" => RayFunction::parse(context, i).map(GenericOffsetPath::Ray),
_ => {
Err(location.new_custom_error(
StyleParseErrorKind::UnexpectedFunction(function.clone())
))
},
}
})
}
}
|
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
#[repr(u8)]
pub enum OffsetRotateDirection {
/// Unspecified direction keyword.
#[css(skip)]
None,
/// 0deg offset (face forward).
Auto,
/// 180deg offset (face backward).
Reverse,
}
impl OffsetRotateDirection {
/// Returns true if it is none (i.e. the keyword is not specified).
#[inline]
fn is_none(&self) -> bool {
*self == OffsetRotateDirection::None
}
}
#[inline]
fn direction_specified_and_angle_is_zero(direction: &OffsetRotateDirection, angle: &Angle) -> bool {
!direction.is_none() && angle.is_zero()
}
/// The specified offset-rotate.
/// The syntax is: "[ auto | reverse ] || <angle>"
///
/// https://drafts.fxtf.org/motion-1/#offset-rotate-property
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub struct OffsetRotate {
/// [auto | reverse].
#[css(skip_if = "OffsetRotateDirection::is_none")]
direction: OffsetRotateDirection,
/// <angle>.
/// If direction is None, this is a fixed angle which indicates a
/// constant clockwise rotation transformation applied to it by this
/// specified rotation angle. Otherwise, the angle will be added to
/// the angle of the direction in layout.
#[css(contextual_skip_if = "direction_specified_and_angle_is_zero")]
angle: Angle,
}
impl OffsetRotate {
/// Returns the initial value, auto.
#[inline]
pub fn auto() -> Self {
OffsetRotate {
direction: OffsetRotateDirection::Auto,
angle: Angle::zero(),
}
}
/// Returns true if self is auto 0deg.
#[inline]
pub fn is_auto(&self) -> bool {
self.direction == OffsetRotateDirection::Auto && self.angle.is_zero()
}
}
impl Parse for OffsetRotate {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let location = input.current_source_location();
let mut direction = input.try(OffsetRotateDirection::parse);
let angle = input.try(|i| Angle::parse(context, i));
if direction.is_err() {
// The direction and angle could be any order, so give it a change to parse
// direction again.
direction = input.try(OffsetRotateDirection::parse);
}
if direction.is_err() && angle.is_err() {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(OffsetRotate {
direction: direction.unwrap_or(OffsetRotateDirection::None),
angle: angle.unwrap_or(Zero::zero()),
})
}
}
impl ToComputedValue for OffsetRotate {
type ComputedValue = ComputedOffsetRotate;
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
use crate::values::computed::Angle as ComputedAngle;
ComputedOffsetRotate {
auto:!self.direction.is_none(),
angle: if self.direction == OffsetRotateDirection::Reverse {
// The computed value should always convert "reverse" into "auto".
// e.g. "reverse calc(20deg + 10deg)" => "auto 210deg"
self.angle.to_computed_value(context) + ComputedAngle::from_degrees(180.0)
} else {
self.angle.to_computed_value(context)
},
}
}
#[inline]
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
OffsetRotate {
direction: if computed.auto {
OffsetRotateDirection::Auto
} else {
OffsetRotateDirection::None
},
angle: ToComputedValue::from_computed_value(&computed.angle),
}
}
}
|
/// The direction of offset-rotate.
|
random_line_split
|
motion.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified types for CSS values that are related to motion path.
use crate::parser::{Parse, ParserContext};
use crate::values::computed::motion::OffsetRotate as ComputedOffsetRotate;
use crate::values::computed::{Context, ToComputedValue};
use crate::values::generics::motion::{GenericOffsetPath, RayFunction, RaySize};
use crate::values::specified::{Angle, SVGPathData};
use crate::Zero;
use cssparser::Parser;
use style_traits::{ParseError, StyleParseErrorKind};
/// The specified value of `offset-path`.
pub type OffsetPath = GenericOffsetPath<Angle>;
impl Parse for RayFunction<Angle> {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let mut angle = None;
let mut size = None;
let mut contain = false;
loop {
if angle.is_none() {
angle = input.try(|i| Angle::parse(context, i)).ok();
}
if size.is_none() {
size = input.try(RaySize::parse).ok();
if size.is_some() {
continue;
}
}
if!contain {
contain = input.try(|i| i.expect_ident_matching("contain")).is_ok();
if contain {
continue;
}
}
break;
}
if angle.is_none() || size.is_none() {
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(RayFunction {
angle: angle.unwrap(),
size: size.unwrap(),
contain,
})
}
}
impl Parse for OffsetPath {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// Parse none.
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
return Ok(OffsetPath::none());
}
// Parse possible functions.
let location = input.current_source_location();
let function = input.expect_function()?.clone();
input.parse_nested_block(move |i| {
match_ignore_ascii_case! { &function,
// Bug 1186329: Implement the parser for <basic-shape>, <geometry-box>,
// and <url>.
"path" => SVGPathData::parse(context, i).map(GenericOffsetPath::Path),
"ray" => RayFunction::parse(context, i).map(GenericOffsetPath::Ray),
_ => {
Err(location.new_custom_error(
StyleParseErrorKind::UnexpectedFunction(function.clone())
))
},
}
})
}
}
/// The direction of offset-rotate.
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
#[repr(u8)]
pub enum OffsetRotateDirection {
/// Unspecified direction keyword.
#[css(skip)]
None,
/// 0deg offset (face forward).
Auto,
/// 180deg offset (face backward).
Reverse,
}
impl OffsetRotateDirection {
/// Returns true if it is none (i.e. the keyword is not specified).
#[inline]
fn is_none(&self) -> bool {
*self == OffsetRotateDirection::None
}
}
#[inline]
fn direction_specified_and_angle_is_zero(direction: &OffsetRotateDirection, angle: &Angle) -> bool {
!direction.is_none() && angle.is_zero()
}
/// The specified offset-rotate.
/// The syntax is: "[ auto | reverse ] || <angle>"
///
/// https://drafts.fxtf.org/motion-1/#offset-rotate-property
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub struct OffsetRotate {
/// [auto | reverse].
#[css(skip_if = "OffsetRotateDirection::is_none")]
direction: OffsetRotateDirection,
/// <angle>.
/// If direction is None, this is a fixed angle which indicates a
/// constant clockwise rotation transformation applied to it by this
/// specified rotation angle. Otherwise, the angle will be added to
/// the angle of the direction in layout.
#[css(contextual_skip_if = "direction_specified_and_angle_is_zero")]
angle: Angle,
}
impl OffsetRotate {
/// Returns the initial value, auto.
#[inline]
pub fn auto() -> Self {
OffsetRotate {
direction: OffsetRotateDirection::Auto,
angle: Angle::zero(),
}
}
/// Returns true if self is auto 0deg.
#[inline]
pub fn is_auto(&self) -> bool {
self.direction == OffsetRotateDirection::Auto && self.angle.is_zero()
}
}
impl Parse for OffsetRotate {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let location = input.current_source_location();
let mut direction = input.try(OffsetRotateDirection::parse);
let angle = input.try(|i| Angle::parse(context, i));
if direction.is_err() {
// The direction and angle could be any order, so give it a change to parse
// direction again.
direction = input.try(OffsetRotateDirection::parse);
}
if direction.is_err() && angle.is_err() {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(OffsetRotate {
direction: direction.unwrap_or(OffsetRotateDirection::None),
angle: angle.unwrap_or(Zero::zero()),
})
}
}
impl ToComputedValue for OffsetRotate {
type ComputedValue = ComputedOffsetRotate;
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
use crate::values::computed::Angle as ComputedAngle;
ComputedOffsetRotate {
auto:!self.direction.is_none(),
angle: if self.direction == OffsetRotateDirection::Reverse {
// The computed value should always convert "reverse" into "auto".
// e.g. "reverse calc(20deg + 10deg)" => "auto 210deg"
self.angle.to_computed_value(context) + ComputedAngle::from_degrees(180.0)
} else {
self.angle.to_computed_value(context)
},
}
}
#[inline]
fn from_computed_value(computed: &Self::ComputedValue) -> Self
|
}
|
{
OffsetRotate {
direction: if computed.auto {
OffsetRotateDirection::Auto
} else {
OffsetRotateDirection::None
},
angle: ToComputedValue::from_computed_value(&computed.angle),
}
}
|
identifier_body
|
motion.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified types for CSS values that are related to motion path.
use crate::parser::{Parse, ParserContext};
use crate::values::computed::motion::OffsetRotate as ComputedOffsetRotate;
use crate::values::computed::{Context, ToComputedValue};
use crate::values::generics::motion::{GenericOffsetPath, RayFunction, RaySize};
use crate::values::specified::{Angle, SVGPathData};
use crate::Zero;
use cssparser::Parser;
use style_traits::{ParseError, StyleParseErrorKind};
/// The specified value of `offset-path`.
pub type OffsetPath = GenericOffsetPath<Angle>;
impl Parse for RayFunction<Angle> {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let mut angle = None;
let mut size = None;
let mut contain = false;
loop {
if angle.is_none() {
angle = input.try(|i| Angle::parse(context, i)).ok();
}
if size.is_none()
|
if!contain {
contain = input.try(|i| i.expect_ident_matching("contain")).is_ok();
if contain {
continue;
}
}
break;
}
if angle.is_none() || size.is_none() {
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(RayFunction {
angle: angle.unwrap(),
size: size.unwrap(),
contain,
})
}
}
impl Parse for OffsetPath {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// Parse none.
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
return Ok(OffsetPath::none());
}
// Parse possible functions.
let location = input.current_source_location();
let function = input.expect_function()?.clone();
input.parse_nested_block(move |i| {
match_ignore_ascii_case! { &function,
// Bug 1186329: Implement the parser for <basic-shape>, <geometry-box>,
// and <url>.
"path" => SVGPathData::parse(context, i).map(GenericOffsetPath::Path),
"ray" => RayFunction::parse(context, i).map(GenericOffsetPath::Ray),
_ => {
Err(location.new_custom_error(
StyleParseErrorKind::UnexpectedFunction(function.clone())
))
},
}
})
}
}
/// The direction of offset-rotate.
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
#[repr(u8)]
pub enum OffsetRotateDirection {
/// Unspecified direction keyword.
#[css(skip)]
None,
/// 0deg offset (face forward).
Auto,
/// 180deg offset (face backward).
Reverse,
}
impl OffsetRotateDirection {
/// Returns true if it is none (i.e. the keyword is not specified).
#[inline]
fn is_none(&self) -> bool {
*self == OffsetRotateDirection::None
}
}
#[inline]
fn direction_specified_and_angle_is_zero(direction: &OffsetRotateDirection, angle: &Angle) -> bool {
!direction.is_none() && angle.is_zero()
}
/// The specified offset-rotate.
/// The syntax is: "[ auto | reverse ] || <angle>"
///
/// https://drafts.fxtf.org/motion-1/#offset-rotate-property
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub struct OffsetRotate {
/// [auto | reverse].
#[css(skip_if = "OffsetRotateDirection::is_none")]
direction: OffsetRotateDirection,
/// <angle>.
/// If direction is None, this is a fixed angle which indicates a
/// constant clockwise rotation transformation applied to it by this
/// specified rotation angle. Otherwise, the angle will be added to
/// the angle of the direction in layout.
#[css(contextual_skip_if = "direction_specified_and_angle_is_zero")]
angle: Angle,
}
impl OffsetRotate {
/// Returns the initial value, auto.
#[inline]
pub fn auto() -> Self {
OffsetRotate {
direction: OffsetRotateDirection::Auto,
angle: Angle::zero(),
}
}
/// Returns true if self is auto 0deg.
#[inline]
pub fn is_auto(&self) -> bool {
self.direction == OffsetRotateDirection::Auto && self.angle.is_zero()
}
}
impl Parse for OffsetRotate {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let location = input.current_source_location();
let mut direction = input.try(OffsetRotateDirection::parse);
let angle = input.try(|i| Angle::parse(context, i));
if direction.is_err() {
// The direction and angle could be any order, so give it a change to parse
// direction again.
direction = input.try(OffsetRotateDirection::parse);
}
if direction.is_err() && angle.is_err() {
return Err(location.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(OffsetRotate {
direction: direction.unwrap_or(OffsetRotateDirection::None),
angle: angle.unwrap_or(Zero::zero()),
})
}
}
impl ToComputedValue for OffsetRotate {
type ComputedValue = ComputedOffsetRotate;
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
use crate::values::computed::Angle as ComputedAngle;
ComputedOffsetRotate {
auto:!self.direction.is_none(),
angle: if self.direction == OffsetRotateDirection::Reverse {
// The computed value should always convert "reverse" into "auto".
// e.g. "reverse calc(20deg + 10deg)" => "auto 210deg"
self.angle.to_computed_value(context) + ComputedAngle::from_degrees(180.0)
} else {
self.angle.to_computed_value(context)
},
}
}
#[inline]
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
OffsetRotate {
direction: if computed.auto {
OffsetRotateDirection::Auto
} else {
OffsetRotateDirection::None
},
angle: ToComputedValue::from_computed_value(&computed.angle),
}
}
}
|
{
size = input.try(RaySize::parse).ok();
if size.is_some() {
continue;
}
}
|
conditional_block
|
main.rs
|
// Powerlink Analyzer - Analyze Ethernet POWERLINK Network Traffic
// Copyright (C) 2016, Thomas Keh
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 3 of the License.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! The main module contains initialization tasks and user interaction.
extern crate pcap;
extern crate time;
#[macro_use] extern crate enum_primitive;
extern crate num;
#[macro_use] extern crate log;
extern crate simplelog;
extern crate rusqlite;
extern crate getopts;
extern crate regex;
mod plkan;
mod types;
mod database;
mod evaluation;
use pcap::*;
use std::path::Path;
use plkan::Plkan;
use database::*;
use evaluation::*;
use getopts::Options;
use std::env;
use simplelog::{SimpleLogger,LogLevelFilter};
use regex::Regex;
fn print_usage(program: &str, opts: Options) {
let brief = format!("Usage: {} [options] PCAPNG_FILE", program);
print!("{}", opts.usage(&brief));
}
fn main() {
let _ = SimpleLogger::init(LogLevelFilter::Info);
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let mut opts = Options::new();
opts.optflag("h", "help", "print this help menu");
opts.optflag("p", "pgftable", "prints master metrics as pgf table");
opts.optflag("c", "csv", "prints stats as csv");
opts.optflag("r", "raw", "prints raw response times as csv");
opts.optflag("s", "sort", "sort response times (in combination with --raw)");
opts.optopt("f", "filter", "EXPERT: filter response times (in combination with --raw)", "SQL_WHERE_CLAUSE");
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m }
Err(f) => { panic!(f.to_string()) }
};
if matches.opt_present("h") {
print_usage(&program, opts);
return;
}
if matches.free.is_empty() {
error!("No input file given.");
//warn!("No input file given. Using example capture.");
//Path::new(concat!(env!("CARGO_MANIFEST_DIR"),"/res/example.pcapng"))
return;
}
let filter = if matches.opt_present("f") {
matches.opt_str("f").unwrap()
} else {
"type=='pres'".to_string()
};
for file_path in &matches.free {
//info!("Loading PCAP file {}.",file_path);
let file_path = Path::new(&file_path);
let mut cap = Capture::from_file_with_precision(file_path,Precision::Nano).expect("Loading PCAP file failed");
let mut db = Database::new();
{
let mut plkan = Plkan::new(&mut db);
while let Ok(packet) = cap.next() {
plkan.process_packet(&packet);
}
}
let eval = Evaluation::new(&mut db);
if matches.opt_present("p") {
let filename = file_path.to_str().unwrap();
let table_name = file_path.file_stem().unwrap().to_str().unwrap();
let re = Regex::new(r"[0-9_]").unwrap();
let table_name = re.replace_all(table_name, "");
eval.print_pgftable(&filename, &table_name);
} else if matches.opt_present("c") {
|
eval.print_errors::<StdoutPrinter>();
eval.print_state_changes::<StdoutPrinter>();
eval.print_stats::<StdoutPrinter>();
}
}
}
|
eval.print_stats::<CsvPrinter>();
} else if matches.opt_present("r") {
eval.print_raw(&filter, matches.opt_present("s"));
} else {
eval.print_metadata::<StdoutPrinter>();
|
random_line_split
|
main.rs
|
// Powerlink Analyzer - Analyze Ethernet POWERLINK Network Traffic
// Copyright (C) 2016, Thomas Keh
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 3 of the License.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! The main module contains initialization tasks and user interaction.
extern crate pcap;
extern crate time;
#[macro_use] extern crate enum_primitive;
extern crate num;
#[macro_use] extern crate log;
extern crate simplelog;
extern crate rusqlite;
extern crate getopts;
extern crate regex;
mod plkan;
mod types;
mod database;
mod evaluation;
use pcap::*;
use std::path::Path;
use plkan::Plkan;
use database::*;
use evaluation::*;
use getopts::Options;
use std::env;
use simplelog::{SimpleLogger,LogLevelFilter};
use regex::Regex;
fn print_usage(program: &str, opts: Options) {
let brief = format!("Usage: {} [options] PCAPNG_FILE", program);
print!("{}", opts.usage(&brief));
}
fn
|
() {
let _ = SimpleLogger::init(LogLevelFilter::Info);
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let mut opts = Options::new();
opts.optflag("h", "help", "print this help menu");
opts.optflag("p", "pgftable", "prints master metrics as pgf table");
opts.optflag("c", "csv", "prints stats as csv");
opts.optflag("r", "raw", "prints raw response times as csv");
opts.optflag("s", "sort", "sort response times (in combination with --raw)");
opts.optopt("f", "filter", "EXPERT: filter response times (in combination with --raw)", "SQL_WHERE_CLAUSE");
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m }
Err(f) => { panic!(f.to_string()) }
};
if matches.opt_present("h") {
print_usage(&program, opts);
return;
}
if matches.free.is_empty() {
error!("No input file given.");
//warn!("No input file given. Using example capture.");
//Path::new(concat!(env!("CARGO_MANIFEST_DIR"),"/res/example.pcapng"))
return;
}
let filter = if matches.opt_present("f") {
matches.opt_str("f").unwrap()
} else {
"type=='pres'".to_string()
};
for file_path in &matches.free {
//info!("Loading PCAP file {}.",file_path);
let file_path = Path::new(&file_path);
let mut cap = Capture::from_file_with_precision(file_path,Precision::Nano).expect("Loading PCAP file failed");
let mut db = Database::new();
{
let mut plkan = Plkan::new(&mut db);
while let Ok(packet) = cap.next() {
plkan.process_packet(&packet);
}
}
let eval = Evaluation::new(&mut db);
if matches.opt_present("p") {
let filename = file_path.to_str().unwrap();
let table_name = file_path.file_stem().unwrap().to_str().unwrap();
let re = Regex::new(r"[0-9_]").unwrap();
let table_name = re.replace_all(table_name, "");
eval.print_pgftable(&filename, &table_name);
} else if matches.opt_present("c") {
eval.print_stats::<CsvPrinter>();
} else if matches.opt_present("r") {
eval.print_raw(&filter, matches.opt_present("s"));
} else {
eval.print_metadata::<StdoutPrinter>();
eval.print_errors::<StdoutPrinter>();
eval.print_state_changes::<StdoutPrinter>();
eval.print_stats::<StdoutPrinter>();
}
}
}
|
main
|
identifier_name
|
main.rs
|
// Powerlink Analyzer - Analyze Ethernet POWERLINK Network Traffic
// Copyright (C) 2016, Thomas Keh
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 3 of the License.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! The main module contains initialization tasks and user interaction.
extern crate pcap;
extern crate time;
#[macro_use] extern crate enum_primitive;
extern crate num;
#[macro_use] extern crate log;
extern crate simplelog;
extern crate rusqlite;
extern crate getopts;
extern crate regex;
mod plkan;
mod types;
mod database;
mod evaluation;
use pcap::*;
use std::path::Path;
use plkan::Plkan;
use database::*;
use evaluation::*;
use getopts::Options;
use std::env;
use simplelog::{SimpleLogger,LogLevelFilter};
use regex::Regex;
fn print_usage(program: &str, opts: Options)
|
fn main() {
let _ = SimpleLogger::init(LogLevelFilter::Info);
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let mut opts = Options::new();
opts.optflag("h", "help", "print this help menu");
opts.optflag("p", "pgftable", "prints master metrics as pgf table");
opts.optflag("c", "csv", "prints stats as csv");
opts.optflag("r", "raw", "prints raw response times as csv");
opts.optflag("s", "sort", "sort response times (in combination with --raw)");
opts.optopt("f", "filter", "EXPERT: filter response times (in combination with --raw)", "SQL_WHERE_CLAUSE");
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m }
Err(f) => { panic!(f.to_string()) }
};
if matches.opt_present("h") {
print_usage(&program, opts);
return;
}
if matches.free.is_empty() {
error!("No input file given.");
//warn!("No input file given. Using example capture.");
//Path::new(concat!(env!("CARGO_MANIFEST_DIR"),"/res/example.pcapng"))
return;
}
let filter = if matches.opt_present("f") {
matches.opt_str("f").unwrap()
} else {
"type=='pres'".to_string()
};
for file_path in &matches.free {
//info!("Loading PCAP file {}.",file_path);
let file_path = Path::new(&file_path);
let mut cap = Capture::from_file_with_precision(file_path,Precision::Nano).expect("Loading PCAP file failed");
let mut db = Database::new();
{
let mut plkan = Plkan::new(&mut db);
while let Ok(packet) = cap.next() {
plkan.process_packet(&packet);
}
}
let eval = Evaluation::new(&mut db);
if matches.opt_present("p") {
let filename = file_path.to_str().unwrap();
let table_name = file_path.file_stem().unwrap().to_str().unwrap();
let re = Regex::new(r"[0-9_]").unwrap();
let table_name = re.replace_all(table_name, "");
eval.print_pgftable(&filename, &table_name);
} else if matches.opt_present("c") {
eval.print_stats::<CsvPrinter>();
} else if matches.opt_present("r") {
eval.print_raw(&filter, matches.opt_present("s"));
} else {
eval.print_metadata::<StdoutPrinter>();
eval.print_errors::<StdoutPrinter>();
eval.print_state_changes::<StdoutPrinter>();
eval.print_stats::<StdoutPrinter>();
}
}
}
|
{
let brief = format!("Usage: {} [options] PCAPNG_FILE", program);
print!("{}", opts.usage(&brief));
}
|
identifier_body
|
main.rs
|
// Powerlink Analyzer - Analyze Ethernet POWERLINK Network Traffic
// Copyright (C) 2016, Thomas Keh
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 3 of the License.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! The main module contains initialization tasks and user interaction.
extern crate pcap;
extern crate time;
#[macro_use] extern crate enum_primitive;
extern crate num;
#[macro_use] extern crate log;
extern crate simplelog;
extern crate rusqlite;
extern crate getopts;
extern crate regex;
mod plkan;
mod types;
mod database;
mod evaluation;
use pcap::*;
use std::path::Path;
use plkan::Plkan;
use database::*;
use evaluation::*;
use getopts::Options;
use std::env;
use simplelog::{SimpleLogger,LogLevelFilter};
use regex::Regex;
fn print_usage(program: &str, opts: Options) {
let brief = format!("Usage: {} [options] PCAPNG_FILE", program);
print!("{}", opts.usage(&brief));
}
fn main() {
let _ = SimpleLogger::init(LogLevelFilter::Info);
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let mut opts = Options::new();
opts.optflag("h", "help", "print this help menu");
opts.optflag("p", "pgftable", "prints master metrics as pgf table");
opts.optflag("c", "csv", "prints stats as csv");
opts.optflag("r", "raw", "prints raw response times as csv");
opts.optflag("s", "sort", "sort response times (in combination with --raw)");
opts.optopt("f", "filter", "EXPERT: filter response times (in combination with --raw)", "SQL_WHERE_CLAUSE");
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m }
Err(f) => { panic!(f.to_string()) }
};
if matches.opt_present("h") {
print_usage(&program, opts);
return;
}
if matches.free.is_empty() {
error!("No input file given.");
//warn!("No input file given. Using example capture.");
//Path::new(concat!(env!("CARGO_MANIFEST_DIR"),"/res/example.pcapng"))
return;
}
let filter = if matches.opt_present("f")
|
else {
"type=='pres'".to_string()
};
for file_path in &matches.free {
//info!("Loading PCAP file {}.",file_path);
let file_path = Path::new(&file_path);
let mut cap = Capture::from_file_with_precision(file_path,Precision::Nano).expect("Loading PCAP file failed");
let mut db = Database::new();
{
let mut plkan = Plkan::new(&mut db);
while let Ok(packet) = cap.next() {
plkan.process_packet(&packet);
}
}
let eval = Evaluation::new(&mut db);
if matches.opt_present("p") {
let filename = file_path.to_str().unwrap();
let table_name = file_path.file_stem().unwrap().to_str().unwrap();
let re = Regex::new(r"[0-9_]").unwrap();
let table_name = re.replace_all(table_name, "");
eval.print_pgftable(&filename, &table_name);
} else if matches.opt_present("c") {
eval.print_stats::<CsvPrinter>();
} else if matches.opt_present("r") {
eval.print_raw(&filter, matches.opt_present("s"));
} else {
eval.print_metadata::<StdoutPrinter>();
eval.print_errors::<StdoutPrinter>();
eval.print_state_changes::<StdoutPrinter>();
eval.print_stats::<StdoutPrinter>();
}
}
}
|
{
matches.opt_str("f").unwrap()
}
|
conditional_block
|
recursive-struct.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// ignore-lldb
// compile-flags:-g
// gdb-command:run
// gdb-command:print stack_unique.value
// gdb-check:$1 = 0
// gdb-command:print stack_unique.next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$2 = 1
// gdb-command:print unique_unique->value
// gdb-check:$3 = 2
// gdb-command:print unique_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$4 = 3
// gdb-command:print vec_unique[0].value
// gdb-check:$5 = 6.5
// gdb-command:print vec_unique[0].next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$6 = 7.5
// gdb-command:print borrowed_unique->value
// gdb-check:$7 = 8.5
// gdb-command:print borrowed_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$8 = 9.5
// LONG CYCLE
// gdb-command:print long_cycle1.value
// gdb-check:$9 = 20
// gdb-command:print long_cycle1.next->value
// gdb-check:$10 = 21
// gdb-command:print long_cycle1.next->next->value
// gdb-check:$11 = 22
// gdb-command:print long_cycle1.next->next->next->value
// gdb-check:$12 = 23
// gdb-command:print long_cycle2.value
// gdb-check:$13 = 24
// gdb-command:print long_cycle2.next->value
// gdb-check:$14 = 25
// gdb-command:print long_cycle2.next->next->value
// gdb-check:$15 = 26
// gdb-command:print long_cycle3.value
// gdb-check:$16 = 27
// gdb-command:print long_cycle3.next->value
// gdb-check:$17 = 28
// gdb-command:print long_cycle4.value
// gdb-check:$18 = 29.5
// gdb-command:print (*****long_cycle_w_anonymous_types).value
// gdb-check:$19 = 30
// gdb-command:print (*****((*****long_cycle_w_anonymous_types).next.RUST$ENCODED$ENUM$0$Empty.val)).value
// gdb-check:$20 = 31
// gdb-command:continue
#![allow(unused_variables)]
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
use self::Opt::{Empty, Val};
enum Opt<T> {
Empty,
Val { val: T }
}
struct UniqueNode<T> {
next: Opt<Box<UniqueNode<T>>>,
value: T
}
struct LongCycle1<T> {
next: Box<LongCycle2<T>>,
value: T,
}
struct LongCycle2<T> {
next: Box<LongCycle3<T>>,
value: T,
}
struct LongCycle3<T> {
next: Box<LongCycle4<T>>,
value: T,
}
struct LongCycle4<T> {
next: Option<Box<LongCycle1<T>>>,
value: T,
}
struct LongCycleWithAnonymousTypes {
next: Opt<Box<Box<Box<Box<Box<LongCycleWithAnonymousTypes>>>>>>,
value: uint,
}
// This test case makes sure that recursive structs are properly described. The Node structs are
// generic so that we can have a new type (that newly needs to be described) for the different
// cases. The potential problem with recursive types is that the DI generation algorithm gets
// trapped in an endless loop. To make sure, we actually test this in the different cases, we have
// to operate on a new type each time, otherwise we would just hit the DI cache for all but the
// first case.
// The different cases below (stack_*, unique_*, box_*, etc) are set up so that the type description
// algorithm will enter the type reference cycle that is created by a recursive definition from a
// different context each time.
// The "long cycle" cases are constructed to span a longer, indirect recursion cycle between types.
// The different locals will cause the DI algorithm to enter the type reference cycle at different
// points.
fn main() {
let stack_unique: UniqueNode<u16> = UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 1,
}
},
value: 0,
};
let unique_unique: Box<UniqueNode<u32>> = box UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 3,
}
},
value: 2,
};
let vec_unique: [UniqueNode<f32>; 1] = [UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 7.5,
}
},
value: 6.5,
}];
let borrowed_unique: &UniqueNode<f64> = &UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 9.5,
}
},
value: 8.5,
};
// LONG CYCLE
let long_cycle1: LongCycle1<u16> = LongCycle1 {
next: box LongCycle2 {
next: box LongCycle3 {
next: box LongCycle4 {
next: None,
value: 23,
},
value: 22,
},
value: 21
},
value: 20
};
let long_cycle2: LongCycle2<u32> = LongCycle2 {
next: box LongCycle3 {
next: box LongCycle4 {
next: None,
value: 26,
},
value: 25,
},
value: 24
};
let long_cycle3: LongCycle3<u64> = LongCycle3 {
next: box LongCycle4 {
next: None,
value: 28,
},
value: 27,
};
let long_cycle4: LongCycle4<f32> = LongCycle4 {
|
};
// It's important that LongCycleWithAnonymousTypes is encountered only at the end of the
// `box` chain.
let long_cycle_w_anonymous_types = box box box box box LongCycleWithAnonymousTypes {
next: Val {
val: box box box box box LongCycleWithAnonymousTypes {
next: Empty,
value: 31,
}
},
value: 30
};
zzz(); // #break
}
fn zzz() {()}
|
next: None,
value: 29.5,
|
random_line_split
|
recursive-struct.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// ignore-lldb
// compile-flags:-g
// gdb-command:run
// gdb-command:print stack_unique.value
// gdb-check:$1 = 0
// gdb-command:print stack_unique.next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$2 = 1
// gdb-command:print unique_unique->value
// gdb-check:$3 = 2
// gdb-command:print unique_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$4 = 3
// gdb-command:print vec_unique[0].value
// gdb-check:$5 = 6.5
// gdb-command:print vec_unique[0].next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$6 = 7.5
// gdb-command:print borrowed_unique->value
// gdb-check:$7 = 8.5
// gdb-command:print borrowed_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
// gdb-check:$8 = 9.5
// LONG CYCLE
// gdb-command:print long_cycle1.value
// gdb-check:$9 = 20
// gdb-command:print long_cycle1.next->value
// gdb-check:$10 = 21
// gdb-command:print long_cycle1.next->next->value
// gdb-check:$11 = 22
// gdb-command:print long_cycle1.next->next->next->value
// gdb-check:$12 = 23
// gdb-command:print long_cycle2.value
// gdb-check:$13 = 24
// gdb-command:print long_cycle2.next->value
// gdb-check:$14 = 25
// gdb-command:print long_cycle2.next->next->value
// gdb-check:$15 = 26
// gdb-command:print long_cycle3.value
// gdb-check:$16 = 27
// gdb-command:print long_cycle3.next->value
// gdb-check:$17 = 28
// gdb-command:print long_cycle4.value
// gdb-check:$18 = 29.5
// gdb-command:print (*****long_cycle_w_anonymous_types).value
// gdb-check:$19 = 30
// gdb-command:print (*****((*****long_cycle_w_anonymous_types).next.RUST$ENCODED$ENUM$0$Empty.val)).value
// gdb-check:$20 = 31
// gdb-command:continue
#![allow(unused_variables)]
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
use self::Opt::{Empty, Val};
enum Opt<T> {
Empty,
Val { val: T }
}
struct UniqueNode<T> {
next: Opt<Box<UniqueNode<T>>>,
value: T
}
struct LongCycle1<T> {
next: Box<LongCycle2<T>>,
value: T,
}
struct
|
<T> {
next: Box<LongCycle3<T>>,
value: T,
}
struct LongCycle3<T> {
next: Box<LongCycle4<T>>,
value: T,
}
struct LongCycle4<T> {
next: Option<Box<LongCycle1<T>>>,
value: T,
}
struct LongCycleWithAnonymousTypes {
next: Opt<Box<Box<Box<Box<Box<LongCycleWithAnonymousTypes>>>>>>,
value: uint,
}
// This test case makes sure that recursive structs are properly described. The Node structs are
// generic so that we can have a new type (that newly needs to be described) for the different
// cases. The potential problem with recursive types is that the DI generation algorithm gets
// trapped in an endless loop. To make sure, we actually test this in the different cases, we have
// to operate on a new type each time, otherwise we would just hit the DI cache for all but the
// first case.
// The different cases below (stack_*, unique_*, box_*, etc) are set up so that the type description
// algorithm will enter the type reference cycle that is created by a recursive definition from a
// different context each time.
// The "long cycle" cases are constructed to span a longer, indirect recursion cycle between types.
// The different locals will cause the DI algorithm to enter the type reference cycle at different
// points.
fn main() {
let stack_unique: UniqueNode<u16> = UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 1,
}
},
value: 0,
};
let unique_unique: Box<UniqueNode<u32>> = box UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 3,
}
},
value: 2,
};
let vec_unique: [UniqueNode<f32>; 1] = [UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 7.5,
}
},
value: 6.5,
}];
let borrowed_unique: &UniqueNode<f64> = &UniqueNode {
next: Val {
val: box UniqueNode {
next: Empty,
value: 9.5,
}
},
value: 8.5,
};
// LONG CYCLE
let long_cycle1: LongCycle1<u16> = LongCycle1 {
next: box LongCycle2 {
next: box LongCycle3 {
next: box LongCycle4 {
next: None,
value: 23,
},
value: 22,
},
value: 21
},
value: 20
};
let long_cycle2: LongCycle2<u32> = LongCycle2 {
next: box LongCycle3 {
next: box LongCycle4 {
next: None,
value: 26,
},
value: 25,
},
value: 24
};
let long_cycle3: LongCycle3<u64> = LongCycle3 {
next: box LongCycle4 {
next: None,
value: 28,
},
value: 27,
};
let long_cycle4: LongCycle4<f32> = LongCycle4 {
next: None,
value: 29.5,
};
// It's important that LongCycleWithAnonymousTypes is encountered only at the end of the
// `box` chain.
let long_cycle_w_anonymous_types = box box box box box LongCycleWithAnonymousTypes {
next: Val {
val: box box box box box LongCycleWithAnonymousTypes {
next: Empty,
value: 31,
}
},
value: 30
};
zzz(); // #break
}
fn zzz() {()}
|
LongCycle2
|
identifier_name
|
nist-spce.rs
|
// Lumol, an extensible molecular simulation engine
// Copyright (C) 2015-2016 G. Fraux — BSD license
//! Testing energy computation for SPC/E water using data from
//! https://www.nist.gov/mml/csd/chemical-informatics-research-group/spce-water-reference-calculations-9%C3%A5-cutoff
//! https://www.nist.gov/mml/csd/chemical-informatics-research-group/spce-water-reference-calculations-10å-cutoff
extern crate lumol;
extern crate lumol_input as input;
use lumol::sys::{System, UnitCell};
use lumol::sys::Trajectory;
use lumol::energy::{PairInteraction, LennardJones, NullPotential};
use lumol::energy::{Ewald, PairRestriction, CoulombicPotential};
use lumol::consts::K_BOLTZMANN;
use std::path::Path;
use std::fs::File;
use std::io::prelude::*;
pub fn get_system(path: &str, cutoff: f64) -> System {
let path = Path::new(file!()).parent().unwrap()
.join("data")
.join("nist-spce")
.join(path);
let mut system = Trajectory::open(&path)
.and_then(|mut traj| traj.read())
.unwrap();
let mut file = File::open(path).unwrap();
let mut buffer = String::new();
file.read_to_string(&mut buffer).unwrap();
let line = buffer.lines().skip(1).next().unwrap();
let mut splited = line.split_whitespace();
assert_eq!(splited.next(), Some("cell:"));
let a: f64 = splited.next().expect("Missing 'a' cell parameter")
.parse().expect("'a' cell parameter is not a float");
let b: f64 = splited.next().expect("Missing 'b' cell parameter")
.parse().expect("'b' cell parameter is not a float");
let c: f64 = splited.next().expect("Missing 'c' cell parameter")
.parse().expect("'c' cell parameter is not a float");
system.set_cell(UnitCell::ortho(a, b, c));
for i in 0..system.size() {
if i % 3 == 0 {
system.add_bond(i, i + 1);
system.add_bond(i, i + 2);
}
}
for particle in &mut system {
particle.charge = match particle.name() {
"H" => 0.42380,
"O" => -2.0 * 0.42380,
other => panic!("Unknown particle name: {}", other)
}
}
let mut lj = PairInteraction::new(Box::new(LennardJones{
epsilon: 78.19743111 * K_BOLTZMANN,
sigma: 3.16555789
}), cutoff);
lj.enable_tail_corrections();
system.interactions_mut().add_pair("O", "O", lj);
system.interactions_mut().add_pair("O", "H",
PairInteraction::new(Box::new(NullPotential), cutoff)
);
system.interactions_mut().add_pair("H", "H",
PairInteraction::new(Box::new(NullPotential), cutoff)
);
let mut ewald = Ewald::new(cutoff, 5);
ewald.set_restriction(PairRestriction::InterMolecular);
ewald.set_alpha(5.6 / f64::min(f64::min(a, b), c));
system.interactions_mut().set_coulomb(Box::new(ewald));
return system;
}
mod cutoff_9 {
use super::*;
use lumol::consts::K_BOLTZMANN;
#[test]
fn nist1() {
let system = get_system("spce-1.xyz", 9.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -4.88608e5;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
#[test]
fn nist2() {
let system = get_system("spce-2.xyz", 9.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -1.06602e6;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
#[test]
fn nist3() {
let system = get_system("spce-3.xyz", 9.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -1.71488e6;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
#[test]
fn nist4() {
let system = get_system("spce-4.xyz", 9.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -3.08010e6;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
}
mod cutoff_10 {
use super::*;
use lumol::consts::K_BOLTZMANN;
#[test]
fn nis
|
{
let system = get_system("spce-1.xyz", 10.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -4.88604e5;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
#[test]
fn nist2() {
let system = get_system("spce-2.xyz", 10.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -1.06590e6;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
#[test]
fn nist3() {
let system = get_system("spce-3.xyz", 10.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -1.71488e6;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
#[test]
fn nist4() {
let system = get_system("spce-4.xyz", 10.0);
let energy = system.potential_energy() / K_BOLTZMANN;
let expected = -3.20501e6;
assert!(f64::abs((energy - expected)/expected) < 1e-3);
}
}
|
t1()
|
identifier_name
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.