| file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
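Each row in the table below is one fill-in-the-middle (FIM) sample: `prefix` and `suffix` are the code surrounding a masked span, `middle` is the masked span itself, and `fim_type` is one of the four classes that appear in the rows (`identifier_name`, `identifier_body`, `conditional_block`, `random_line_split`). As a minimal sketch of how a row maps back to its source file (the struct and field names are illustrative, chosen only to mirror the column names, and this is not part of any published loader API), the original file is recovered by splicing `middle` back between `prefix` and `suffix`:

```rust
/// Illustrative row type mirroring the columns above (hypothetical; not a published schema type).
struct FimRow {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    /// One of "identifier_name", "identifier_body", "conditional_block", "random_line_split".
    fim_type: String,
}

/// Rebuild the original source file from a FIM row by splicing the masked
/// `middle` span back between `prefix` and `suffix`.
fn reconstruct(row: &FimRow) -> String {
    format!("{}{}{}", row.prefix, row.middle, row.suffix)
}

fn main() {
    // Toy row in the spirit of the first sample below, where the masked span
    // is the identifier `main` in `fn main()` of 01a_quick_example.rs.
    let row = FimRow {
        file_name: "01a_quick_example.rs".to_string(),
        prefix: "extern crate clap;\nuse clap::{App, SubCommand};\nfn ".to_string(),
        middle: "main".to_string(),
        suffix: "() {\n    // ...\n}\n".to_string(),
        fim_type: "identifier_name".to_string(),
    };
    assert_eq!(row.fim_type, "identifier_name");
    let source = reconstruct(&row);
    assert!(source.contains("fn main()"));
    println!("{}\n{}", row.file_name, source);
}
```

The toy row above is constructed by hand to keep the sketch self-contained; in practice the pieces would come straight from the dataset rows that follow.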
01a_quick_example.rs
|
extern crate clap;
use clap::{App, SubCommand};
fn
|
() {
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose than that shown in 01b_quick_example.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// The example below is functionally identical to the 01b_quick_example.rs and 01c_quick_example.rs
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences, such as "-dd" (for varying levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'
<output> 'Sets an optional output file'
-d... 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("-l, --list 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("d") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
// Continued program logic goes here...
}
|
main
|
identifier_name
|
01a_quick_example.rs
|
extern crate clap;
use clap::{App, SubCommand};
fn main()
|
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'
<output> 'Sets an optional output file'
-d... 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("-l, --list 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("d") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
// Continued program logic goes here...
}
|
{
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose than that shown in 01b_quick_example.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// The example below is functionally identical to the 01b_quick_example.rs and 01c_quick_example.rs
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences, such as "-dd" (for varying levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
|
identifier_body
|
01a_quick_example.rs
|
extern crate clap;
use clap::{App, SubCommand};
fn main() {
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose than that shown in 01b_quick_example.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// The example below is functionally identical to the 01b_quick_example.rs and 01c_quick_example.rs
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences, such as "-dd" (for varying levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'
<output> 'Sets an optional output file'
-d... 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("-l, --list 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
|
// Note, only flags can have multiple occurrences
match matches.occurrences_of("d") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
// Continued program logic goes here...
}
|
// You can see how many times a particular flag or argument occurred
|
random_line_split
|
01a_quick_example.rs
|
extern crate clap;
use clap::{App, SubCommand};
fn main() {
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose than that shown in 01b_quick_example.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// The example below is functionally identical to the 01b_quick_example.rs and 01c_quick_example.rs
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences, such as "-dd" (for varying levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'
<output> 'Sets an optional output file'
-d... 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("-l, --list 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config")
|
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("d") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
// Continued program logic goes here...
}
|
{
println!("Value for config: {}", c);
}
|
conditional_block
|
parser.rs
|
use std::collections::{ HashMap };
use lexer::{ Lexer, Token };
type GrammarFunction = fn(&mut Parser);
struct Rule {
prefix: Option<GrammarFunction>,
infix: Option<GrammarFunction>,
postfix: Option<GrammarFunction>,
}
const LiteralRule: Rule = Rule { prefix: Some(Parser::literal), infix: None, postfix: None };
struct Function {
identifiers: HashMap<String, u32>,
}
pub struct Parser {
lexer: Lexer,
}
impl Parser {
pub fn new(lexer: Lexer) -> Parser {
Parser {
lexer: lexer,
}
}
pub fn parse(&mut self) {
}
fn get_rule(token: Token) -> Rule {
match token {
Token::Identifier(_) => LiteralRule,
Token::Number(_) => LiteralRule,
_ => panic!("Illegal token"),
}
}
fn
|
(&mut self) {
match self.lexer.current() {
_ => panic!("Expected token of literal type. Got {:?}", self.lexer.current()),
}
}
}
|
literal
|
identifier_name
|
parser.rs
|
use std::collections::{ HashMap };
use lexer::{ Lexer, Token };
type GrammarFunction = fn(&mut Parser);
struct Rule {
prefix: Option<GrammarFunction>,
infix: Option<GrammarFunction>,
|
postfix: Option<GrammarFunction>,
}
const LiteralRule: Rule = Rule { prefix: Some(Parser::literal), infix: None, postfix: None };
struct Function {
identifiers: HashMap<String, u32>,
}
pub struct Parser {
lexer: Lexer,
}
impl Parser {
pub fn new(lexer: Lexer) -> Parser {
Parser {
lexer: lexer,
}
}
pub fn parse(&mut self) {
}
fn get_rule(token: Token) -> Rule {
match token {
Token::Identifier(_) => LiteralRule,
Token::Number(_) => LiteralRule,
_ => panic!("Illegal token"),
}
}
fn literal(&mut self) {
match self.lexer.current() {
_ => panic!("Expected token of literal type. Got {:?}", self.lexer.current()),
}
}
}
|
random_line_split
|
|
misc_crypto.rs
|
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//! Miscellaneous cryptographic test data that isn't worth maintaining custom
//! scripts for.
/// A plaintext string.
pub const PLAIN_TEXT: &[u8] = b"I'm setting the alarm clock for July.";
/// The SHA-256 hash of `PLAIN_TEXT`.
///
/// Generate with:
/// ```text
/// echo -n "I'm setting the alarm clock for July." \
/// | openssl dgst -sha256 -binary \
/// | xxd -i -c 8
/// ```
#[rustfmt::skip]
pub const PLAIN_SHA256: &[u8] = &[
0xc9, 0xfd, 0xba, 0xae, 0x28, 0xe7, 0x49, 0x5c,
0xe1, 0x13, 0xbc, 0x87, 0xc8, 0x20, 0x6c, 0xba,
0xc2, 0xd1, 0x0c, 0x28, 0x17, 0xf0, 0x06, 0x11,
0xd0, 0xc6, 0x19, 0x2f, 0x47, 0x64, 0xdb, 0xba,
];
/// The SHA-256 HMAC of `PLAIN_TEXT`, using itself as the key.
///
/// Generate with:
/// ```text
/// echo -n "I'm setting the alarm clock for July." \
/// | openssl dgst -sha256 -binary \
/// -hmac "I'm setting the alarm clock for July." \
/// | xxd -i -c 8
/// ```
#[rustfmt::skip]
pub const PLAIN_HMAC256: &[u8] = &[
0x95, 0xcb, 0xa4, 0x97, 0x4d, 0x09, 0xa3, 0x9f,
0x2d, 0x97, 0xd0, 0x32, 0xa4, 0x0a, 0x3a, 0xd4,
0x04, 0xe7, 0x1b, 0x4f, 0x74, 0x35, 0xb0, 0xf5,
0x99, 0xe6, 0xc5, 0x9c, 0x01, 0x52, 0x40, 0x51,
];
/// An RSA signature for `PLAIN_TEXT`, generated thus:
///
/// ```text
/// echo -n "I'm setting the alarm clock for July." \
/// | openssl dgst -sha256 -keyform DER \
/// -sign testutil/src/data/keys/key1.rsa.pk8 \
/// | xxd -i -c 8
/// ```
///
/// The signature is in PKCS v1.5 format.
#[rustfmt::skip]
pub const KEY1_SHA256_SIG: &[u8] = &[
0x4d, 0xd8, 0x99, 0xbf, 0x42, 0xc0, 0xef, 0xf4,
0xd6, 0x5f, 0xb6, 0xa4, 0x9c, 0xeb, 0x63, 0xc3,
0x06, 0x00, 0xc3, 0xaa, 0x7e, 0xcb, 0x78, 0x8e,
0x13, 0xc6, 0xbb, 0xbc, 0x5a, 0x05, 0x34, 0xb8,
0xe8, 0xa9, 0xef, 0x43, 0xa8, 0x2d, 0x63, 0xe8,
0x64, 0xc4, 0x5d, 0x32, 0xaa, 0xed, 0x15, 0xf8,
0xf6, 0x1a, 0xeb, 0x95, 0xc3, 0x4d, 0x09, 0x91,
0x3b, 0xdd, 0x69, 0x94, 0x4f, 0xd6, 0x16, 0xca,
0x50, 0x88, 0x2d, 0xcf, 0xe7, 0x94, 0x43, 0x9c,
0xd8, 0xbd, 0x68, 0xdd, 0xdb, 0x48, 0xab, 0x60,
0xd5, 0xca, 0x34, 0xab, 0x18, 0x69, 0xb9, 0x34,
0xca, 0x5a, 0x3d, 0xdd, 0x65, 0xde, 0x51, 0x8d,
0x54, 0x67, 0x2b, 0xd1, 0x4e, 0xae, 0x8d, 0xcd,
0xa5, 0xaa, 0x62, 0x5d, 0xa0, 0x30, 0x97, 0xd9,
0x91, 0x38, 0xd4, 0x81, 0x83, 0x7c, 0xf9, 0xc5,
0xbe, 0xc5, 0xef, 0xfc, 0x34, 0x21, 0xce, 0x27,
0x81, 0xf2, 0x79, 0x51, 0x3a, 0x3b, 0x02, 0x2d,
0xe6, 0x1d, 0x0f, 0x38, 0x77, 0x63, 0xbd, 0x30,
0xce, 0x39, 0x63, 0x8a, 0x63, 0x7e, 0x1e, 0x0b,
0xb5, 0x39, 0xd5, 0xa7, 0x42, 0xb0, 0x1d, 0x69,
0x02, 0x81, 0x9a, 0x65, 0x4d, 0x51, 0xfd, 0x0b,
0xc5, 0x57, 0x20, 0xae, 0x2e, 0xf8, 0x62, 0x6b,
0xce, 0x35, 0xb6, 0xd4, 0x9b, 0x0a, 0x5e, 0x26,
0xfa, 0x10, 0x54, 0x5a, 0x95, 0x57, 0xe2, 0xd8,
0xf3, 0xa4, 0x1a, 0x11, 0x07, 0x40, 0xec, 0x3d,
0x84, 0x99, 0x56, 0xe1, 0x63, 0x7f, 0xec, 0x35,
0x5d, 0xf2, 0x3d, 0x21, 0xb2, 0x74, 0x42, 0x02,
0xad, 0xcb, 0x42, 0x7e, 0x45, 0x40, 0xef, 0x93,
0x23, 0xdd, 0x7d, 0xce, 0xcc, 0x6c, 0x63, 0x45,
0x9e, 0x26, 0x7b, 0x7c, 0x9a, 0xea, 0x07, 0x15,
0x33, 0x36, 0xcc, 0x3c, 0x96, 0x46, 0xbf, 0x79,
|
];
|
0x07, 0x3c, 0x3c, 0x9d, 0x8c, 0x72, 0x0c, 0x79,
|
random_line_split
|
greatest_common_divisor.rs
|
// http://rosettacode.org/wiki/Greatest_common_divisor
//! This task demonstrates three possible implementation strategies.
/// The num crate.
extern crate num;
use num::integer::gcd;
/// Iterative Euclid algorithm
fn iterative_euclid_gcd(mut m: i32, mut n: i32) -> i32 {
while m != 0 {
let old_m = m;
m = n % m;
n = old_m;
}
n.abs()
}
/// Recursive Euclid algorithm
fn recursive_euclid_gcd(m: i32, n: i32) -> i32
|
fn main() {
println!("gcd(399, -3999) = {}", gcd(399, -3999));
println!("gcd(0, 3999) = {}", iterative_euclid_gcd(0, 3999));
println!("gcd(13 * 13, 13 * 29) = {}",
recursive_euclid_gcd(13 * 13, 13 * 29));
}
#[cfg(test)]
mod tests {
use super::{iterative_euclid_gcd, recursive_euclid_gcd};
#[test]
fn iterative() {
assert_eq!(3, iterative_euclid_gcd(399, -3999));
assert_eq!(3999, iterative_euclid_gcd(0, 3999));
assert_eq!(13, iterative_euclid_gcd(13 * 13, 13 * 29));
}
#[test]
fn recursive() {
assert_eq!(3, recursive_euclid_gcd(399, -3999));
assert_eq!(3999, recursive_euclid_gcd(0, 3999));
assert_eq!(13, recursive_euclid_gcd(13 * 13, 13 * 29));
}
}
|
{
if m == 0 {
n.abs()
} else {
recursive_euclid_gcd(n % m, m)
}
}
|
identifier_body
|
greatest_common_divisor.rs
|
// http://rosettacode.org/wiki/Greatest_common_divisor
//! This task demonstrates three possible implementation strategies.
/// The num crate.
extern crate num;
use num::integer::gcd;
/// Iterative Euclid algorithm
fn iterative_euclid_gcd(mut m: i32, mut n: i32) -> i32 {
while m != 0 {
let old_m = m;
|
}
/// Recursive Euclid algorithm
fn recursive_euclid_gcd(m: i32, n: i32) -> i32 {
if m == 0 {
n.abs()
} else {
recursive_euclid_gcd(n % m, m)
}
}
fn main() {
println!("gcd(399, -3999) = {}", gcd(399, -3999));
println!("gcd(0, 3999) = {}", iterative_euclid_gcd(0, 3999));
println!("gcd(13 * 13, 13 * 29) = {}",
recursive_euclid_gcd(13 * 13, 13 * 29));
}
#[cfg(test)]
mod tests {
use super::{iterative_euclid_gcd, recursive_euclid_gcd};
#[test]
fn iterative() {
assert_eq!(3, iterative_euclid_gcd(399, -3999));
assert_eq!(3999, iterative_euclid_gcd(0, 3999));
assert_eq!(13, iterative_euclid_gcd(13 * 13, 13 * 29));
}
#[test]
fn recursive() {
assert_eq!(3, recursive_euclid_gcd(399, -3999));
assert_eq!(3999, recursive_euclid_gcd(0, 3999));
assert_eq!(13, recursive_euclid_gcd(13 * 13, 13 * 29));
}
}
|
m = n % m;
n = old_m;
}
n.abs()
|
random_line_split
|
greatest_common_divisor.rs
|
// http://rosettacode.org/wiki/Greatest_common_divisor
//! This task demonstrates three possible implementation strategies.
/// The num crate.
extern crate num;
use num::integer::gcd;
/// Iterative Euclid algorithm
fn iterative_euclid_gcd(mut m: i32, mut n: i32) -> i32 {
while m != 0 {
let old_m = m;
m = n % m;
n = old_m;
}
n.abs()
}
/// Recursive Euclid algorithm
fn recursive_euclid_gcd(m: i32, n: i32) -> i32 {
if m == 0 {
n.abs()
} else {
recursive_euclid_gcd(n % m, m)
}
}
fn main() {
println!("gcd(399, -3999) = {}", gcd(399, -3999));
println!("gcd(0, 3999) = {}", iterative_euclid_gcd(0, 3999));
println!("gcd(13 * 13, 13 * 29) = {}",
recursive_euclid_gcd(13 * 13, 13 * 29));
}
#[cfg(test)]
mod tests {
use super::{iterative_euclid_gcd, recursive_euclid_gcd};
#[test]
fn
|
() {
assert_eq!(3, iterative_euclid_gcd(399, -3999));
assert_eq!(3999, iterative_euclid_gcd(0, 3999));
assert_eq!(13, iterative_euclid_gcd(13 * 13, 13 * 29));
}
#[test]
fn recursive() {
assert_eq!(3, recursive_euclid_gcd(399, -3999));
assert_eq!(3999, recursive_euclid_gcd(0, 3999));
assert_eq!(13, recursive_euclid_gcd(13 * 13, 13 * 29));
}
}
|
iterative
|
identifier_name
|
greatest_common_divisor.rs
|
// http://rosettacode.org/wiki/Greatest_common_divisor
//! This task demonstrates three possible implementation strategies.
/// The num crate.
extern crate num;
use num::integer::gcd;
/// Iterative Euclid algorithm
fn iterative_euclid_gcd(mut m: i32, mut n: i32) -> i32 {
while m != 0 {
let old_m = m;
m = n % m;
n = old_m;
}
n.abs()
}
/// Recursive Euclid algorithm
fn recursive_euclid_gcd(m: i32, n: i32) -> i32 {
if m == 0
|
else {
recursive_euclid_gcd(n % m, m)
}
}
fn main() {
println!("gcd(399, -3999) = {}", gcd(399, -3999));
println!("gcd(0, 3999) = {}", iterative_euclid_gcd(0, 3999));
println!("gcd(13 * 13, 13 * 29) = {}",
recursive_euclid_gcd(13 * 13, 13 * 29));
}
#[cfg(test)]
mod tests {
use super::{iterative_euclid_gcd, recursive_euclid_gcd};
#[test]
fn iterative() {
assert_eq!(3, iterative_euclid_gcd(399, -3999));
assert_eq!(3999, iterative_euclid_gcd(0, 3999));
assert_eq!(13, iterative_euclid_gcd(13 * 13, 13 * 29));
}
#[test]
fn recursive() {
assert_eq!(3, recursive_euclid_gcd(399, -3999));
assert_eq!(3999, recursive_euclid_gcd(0, 3999));
assert_eq!(13, recursive_euclid_gcd(13 * 13, 13 * 29));
}
}
|
{
n.abs()
}
|
conditional_block
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use app_units::Au;
use style::attr::{parse_length, AttrValue, LengthOrPercentageOrAuto};
#[test]
fn test_parse_double() {
let value = String::from("432.5e2");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, 43250f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_parse_double_negative_prefix() {
let value = String::from("-5.6");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, -5.6f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_parse_double_positive_prefix() {
let value = String::from("+5.6");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, 5.6f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_from_limited_i32_should_be_default_when_less_than_0() {
let value = String::from("-1");
match AttrValue::from_limited_i32(value, 0) {
AttrValue::Int(_, 0) => (),
_ => panic!("expected an IndexSize error"),
}
}
#[test]
fn test_from_limited_i32_should_parse_a_uint_when_value_is_0_or_greater() {
match AttrValue::from_limited_i32(String::from("1"), 0) {
AttrValue::Int(_, 1) => (),
_ => panic!("expected an successful parsing"),
}
}
#[test]
fn test_from_limited_i32_should_keep_parsed_value_when_not_an_int() {
match AttrValue::from_limited_i32(String::from("parsed-value"), 0) {
AttrValue::Int(p, 0) => assert_eq!(p, String::from("parsed-value")),
_ => panic!("expected an successful parsing"),
}
}
#[test]
pub fn test_parse_length() {
fn check(input: &str, expected: LengthOrPercentageOrAuto)
|
check("0", LengthOrPercentageOrAuto::Length(Au::from_px(0)));
check("0.000%", LengthOrPercentageOrAuto::Percentage(0.0));
check("+5.82%", LengthOrPercentageOrAuto::Percentage(0.0582));
check(
"5.82",
LengthOrPercentageOrAuto::Length(Au::from_f64_px(5.82)),
);
check("invalid", LengthOrPercentageOrAuto::Auto);
check(
"12 followed by invalid",
LengthOrPercentageOrAuto::Length(Au::from_px(12)),
);
}
|
{
let parsed = parse_length(input);
assert_eq!(parsed, expected);
}
|
identifier_body
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use app_units::Au;
use style::attr::{parse_length, AttrValue, LengthOrPercentageOrAuto};
#[test]
fn test_parse_double() {
let value = String::from("432.5e2");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, 43250f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_parse_double_negative_prefix() {
let value = String::from("-5.6");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, -5.6f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_parse_double_positive_prefix() {
let value = String::from("+5.6");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, 5.6f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_from_limited_i32_should_be_default_when_less_than_0() {
let value = String::from("-1");
match AttrValue::from_limited_i32(value, 0) {
AttrValue::Int(_, 0) => (),
_ => panic!("expected an IndexSize error"),
}
}
#[test]
fn test_from_limited_i32_should_parse_a_uint_when_value_is_0_or_greater() {
match AttrValue::from_limited_i32(String::from("1"), 0) {
AttrValue::Int(_, 1) => (),
_ => panic!("expected an successful parsing"),
}
}
#[test]
fn test_from_limited_i32_should_keep_parsed_value_when_not_an_int() {
match AttrValue::from_limited_i32(String::from("parsed-value"), 0) {
AttrValue::Int(p, 0) => assert_eq!(p, String::from("parsed-value")),
_ => panic!("expected an successful parsing"),
}
}
#[test]
pub fn
|
() {
fn check(input: &str, expected: LengthOrPercentageOrAuto) {
let parsed = parse_length(input);
assert_eq!(parsed, expected);
}
check("0", LengthOrPercentageOrAuto::Length(Au::from_px(0)));
check("0.000%", LengthOrPercentageOrAuto::Percentage(0.0));
check("+5.82%", LengthOrPercentageOrAuto::Percentage(0.0582));
check(
"5.82",
LengthOrPercentageOrAuto::Length(Au::from_f64_px(5.82)),
);
check("invalid", LengthOrPercentageOrAuto::Auto);
check(
"12 followed by invalid",
LengthOrPercentageOrAuto::Length(Au::from_px(12)),
);
}
|
test_parse_length
|
identifier_name
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use app_units::Au;
use style::attr::{parse_length, AttrValue, LengthOrPercentageOrAuto};
#[test]
fn test_parse_double() {
let value = String::from("432.5e2");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, 43250f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_parse_double_negative_prefix() {
let value = String::from("-5.6");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, -5.6f64),
_ => panic!("expected a double value"),
}
}
#[test]
fn test_parse_double_positive_prefix() {
let value = String::from("+5.6");
match AttrValue::from_double(value, 0.0) {
AttrValue::Double(_, num) => assert_eq!(num, 5.6f64),
_ => panic!("expected a double value"),
}
}
#[test]
|
let value = String::from("-1");
match AttrValue::from_limited_i32(value, 0) {
AttrValue::Int(_, 0) => (),
_ => panic!("expected an IndexSize error"),
}
}
#[test]
fn test_from_limited_i32_should_parse_a_uint_when_value_is_0_or_greater() {
match AttrValue::from_limited_i32(String::from("1"), 0) {
AttrValue::Int(_, 1) => (),
_ => panic!("expected an successful parsing"),
}
}
#[test]
fn test_from_limited_i32_should_keep_parsed_value_when_not_an_int() {
match AttrValue::from_limited_i32(String::from("parsed-value"), 0) {
AttrValue::Int(p, 0) => assert_eq!(p, String::from("parsed-value")),
_ => panic!("expected an successful parsing"),
}
}
#[test]
pub fn test_parse_length() {
fn check(input: &str, expected: LengthOrPercentageOrAuto) {
let parsed = parse_length(input);
assert_eq!(parsed, expected);
}
check("0", LengthOrPercentageOrAuto::Length(Au::from_px(0)));
check("0.000%", LengthOrPercentageOrAuto::Percentage(0.0));
check("+5.82%", LengthOrPercentageOrAuto::Percentage(0.0582));
check(
"5.82",
LengthOrPercentageOrAuto::Length(Au::from_f64_px(5.82)),
);
check("invalid", LengthOrPercentageOrAuto::Auto);
check(
"12 followed by invalid",
LengthOrPercentageOrAuto::Length(Au::from_px(12)),
);
}
|
fn test_from_limited_i32_should_be_default_when_less_than_0() {
|
random_line_split
|
main.rs
|
//! A helper tool for generating urls and sha256 checksums of cargo-bazel binaries and writing them to a module.
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, fs};
use clap::Parser;
use hex::ToHex;
use sha2::{Digest, Sha256};
#[derive(Parser, Debug)]
struct Options {
/// The path to an artifacts directory expected to contain directories
/// named after platform triples with binaries inside.
#[clap(long)]
pub artifacts_dir: PathBuf,
/// A url prefix where the artifacts can be found
#[clap(long)]
pub url_prefix: String,
/// The path to a buildifier binary. If set, it will be run on the module
#[clap(long)]
pub buildifier: Option<PathBuf>,
}
struct Artifact {
pub url: String,
pub triple: String,
pub sha256: String,
}
fn calculate_sha256(file_path: &Path) -> String {
let file = fs::File::open(file_path).unwrap();
let mut reader = BufReader::new(file);
let mut hasher = Sha256::new();
loop {
let consummed = {
let buffer = reader.fill_buf().unwrap();
if buffer.is_empty() {
break;
}
hasher.update(buffer);
buffer.len()
};
reader.consume(consummed);
}
let digest = hasher.finalize();
digest.encode_hex::<String>()
}
fn locate_artifacts(artifacts_dir: &Path, url_prefix: &str) -> Vec<Artifact> {
let artifact_dirs: Vec<PathBuf> = artifacts_dir
.read_dir()
.unwrap()
.flatten()
.filter(|entry| entry.path().is_dir())
.map(|entry| entry.path())
.collect();
artifact_dirs
.iter()
.map(|path| {
let triple = path.file_name().unwrap().to_string_lossy();
let mut artifacts: Vec<Artifact> = path
.read_dir()
.unwrap()
.flatten()
.map(|f_entry| {
let f_path = f_entry.path();
let stem = f_path.file_stem().unwrap().to_string_lossy();
Artifact {
url: format!("{}/{}-{}", url_prefix, stem, triple),
triple: triple.to_string(),
sha256: calculate_sha256(&f_entry.path()),
}
})
.collect();
if artifacts.len() > 1 {
panic!("Too many artifacts given for {}", triple)
}
artifacts.pop().unwrap()
})
.collect()
}
const TEMPLATE: &str = r#""""A file containing urls and associated sha256 values for cargo-bazel binaries
This file is auto-generated for each release to match the urls and sha256s of
the binaries produced for it.
"""
# Example:
# {
# "x86_64-unknown-linux-gnu": "https://domain.com/downloads/cargo-bazel-x86_64-unknown-linux-gnu",
# "x86_64-apple-darwin": "https://domain.com/downloads/cargo-bazel-x86_64-apple-darwin",
# "x86_64-pc-windows-msvc": "https://domain.com/downloads/cargo-bazel-x86_64-pc-windows-msvc",
# }
CARGO_BAZEL_URLS = {}
# Example:
# {
# "x86_64-unknown-linux-gnu": "1d687fcc860dc8a1aa6198e531f0aee0637ed506d6a412fe2b9884ff5b2b17c0",
# "x86_64-apple-darwin": "0363e450125002f581d29cf632cc876225d738cfa433afa85ca557afb671eafa",
# "x86_64-pc-windows-msvc": "f5647261d989f63dafb2c3cb8e131b225338a790386c06cf7112e43dd9805882",
# }
CARGO_BAZEL_SHA256S = {}
# Example:
# Label("//crate_universe:cargo_bazel_bin")
CARGO_BAZEL_LABEL = Label("@cargo_bazel_bootstrap//:binary")
"#;
fn render_module(artifacts: &[Artifact]) -> String {
let urls: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.url))
.collect();
let sha256s: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.sha256))
.collect();
TEMPLATE
.replace(
"CARGO_BAZEL_URLS = {}",
&format!(
"CARGO_BAZEL_URLS = {}",
serde_json::to_string_pretty(&urls).unwrap()
),
)
.replace(
"CARGO_BAZEL_SHA256S = {}",
&format!(
"CARGO_BAZEL_SHA256S = {}",
serde_json::to_string_pretty(&sha256s).unwrap()
),
)
.replace(
"CARGO_BAZEL_LABEL = Label(\"@cargo_bazel_bootstrap//:binary\")",
"CARGO_BAZEL_LABEL = Label(\"//crate_universe:cargo_bazel_bin\")",
)
}
fn write_module(content: &str) -> PathBuf {
let dest = PathBuf::from(
env::var("BUILD_WORKSPACE_DIRECTORY").expect("This binary is required to run under Bazel"),
)
.join(env!("MODULE_ROOT_PATH"));
fs::write(&dest, content).unwrap();
dest
}
fn run_buildifier(buildifier_path: &Path, module: &Path) {
Command::new(buildifier_path)
.arg("-lint=fix")
.arg("-mode=fix")
.arg("-warnings=all")
.arg(module)
.output()
.unwrap();
}
fn main() {
let opt = Options::parse();
let artifacts = locate_artifacts(&opt.artifacts_dir, &opt.url_prefix);
let content = render_module(&artifacts);
let path = write_module(&content);
if let Some(buildifier_path) = opt.buildifier
|
}
|
{
run_buildifier(&buildifier_path, &path);
}
|
conditional_block
|
main.rs
|
//! A helper tool for generating urls and sha256 checksums of cargo-bazel binaries and writing them to a module.
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, fs};
use clap::Parser;
use hex::ToHex;
use sha2::{Digest, Sha256};
#[derive(Parser, Debug)]
struct Options {
/// The path to an artifacts directory expected to contain directories
/// named after platform triples with binaries inside.
#[clap(long)]
pub artifacts_dir: PathBuf,
/// A url prefix where the artifacts can be found
#[clap(long)]
pub url_prefix: String,
/// The path to a buildifier binary. If set, it will be run on the module
#[clap(long)]
pub buildifier: Option<PathBuf>,
}
struct Artifact {
pub url: String,
pub triple: String,
pub sha256: String,
}
fn calculate_sha256(file_path: &Path) -> String {
let file = fs::File::open(file_path).unwrap();
let mut reader = BufReader::new(file);
let mut hasher = Sha256::new();
loop {
let consummed = {
let buffer = reader.fill_buf().unwrap();
if buffer.is_empty() {
break;
}
hasher.update(buffer);
buffer.len()
};
reader.consume(consummed);
}
let digest = hasher.finalize();
digest.encode_hex::<String>()
}
fn locate_artifacts(artifacts_dir: &Path, url_prefix: &str) -> Vec<Artifact> {
let artifact_dirs: Vec<PathBuf> = artifacts_dir
.read_dir()
.unwrap()
.flatten()
.filter(|entry| entry.path().is_dir())
.map(|entry| entry.path())
.collect();
artifact_dirs
.iter()
.map(|path| {
let triple = path.file_name().unwrap().to_string_lossy();
let mut artifacts: Vec<Artifact> = path
.read_dir()
.unwrap()
.flatten()
.map(|f_entry| {
let f_path = f_entry.path();
let stem = f_path.file_stem().unwrap().to_string_lossy();
Artifact {
url: format!("{}/{}-{}", url_prefix, stem, triple),
triple: triple.to_string(),
sha256: calculate_sha256(&f_entry.path()),
}
})
.collect();
if artifacts.len() > 1 {
panic!("Too many artifacts given for {}", triple)
}
artifacts.pop().unwrap()
})
.collect()
}
const TEMPLATE: &str = r#""""A file containing urls and associated sha256 values for cargo-bazel binaries
This file is auto-generated for each release to match the urls and sha256s of
the binaries produced for it.
"""
# Example:
# {
# "x86_64-unknown-linux-gnu": "https://domain.com/downloads/cargo-bazel-x86_64-unknown-linux-gnu",
# "x86_64-apple-darwin": "https://domain.com/downloads/cargo-bazel-x86_64-apple-darwin",
# "x86_64-pc-windows-msvc": "https://domain.com/downloads/cargo-bazel-x86_64-pc-windows-msvc",
# }
CARGO_BAZEL_URLS = {}
# Example:
# {
# "x86_64-unknown-linux-gnu": "1d687fcc860dc8a1aa6198e531f0aee0637ed506d6a412fe2b9884ff5b2b17c0",
# "x86_64-apple-darwin": "0363e450125002f581d29cf632cc876225d738cfa433afa85ca557afb671eafa",
# "x86_64-pc-windows-msvc": "f5647261d989f63dafb2c3cb8e131b225338a790386c06cf7112e43dd9805882",
# }
CARGO_BAZEL_SHA256S = {}
# Example:
# Label("//crate_universe:cargo_bazel_bin")
CARGO_BAZEL_LABEL = Label("@cargo_bazel_bootstrap//:binary")
"#;
|
fn render_module(artifacts: &[Artifact]) -> String {
let urls: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.url))
.collect();
let sha256s: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.sha256))
.collect();
TEMPLATE
.replace(
"CARGO_BAZEL_URLS = {}",
&format!(
"CARGO_BAZEL_URLS = {}",
serde_json::to_string_pretty(&urls).unwrap()
),
)
.replace(
"CARGO_BAZEL_SHA256S = {}",
&format!(
"CARGO_BAZEL_SHA256S = {}",
serde_json::to_string_pretty(&sha256s).unwrap()
),
)
.replace(
"CARGO_BAZEL_LABEL = Label(\"@cargo_bazel_bootstrap//:binary\")",
"CARGO_BAZEL_LABEL = Label(\"//crate_universe:cargo_bazel_bin\")",
)
}
fn write_module(content: &str) -> PathBuf {
let dest = PathBuf::from(
env::var("BUILD_WORKSPACE_DIRECTORY").expect("This binary is required to run under Bazel"),
)
.join(env!("MODULE_ROOT_PATH"));
fs::write(&dest, content).unwrap();
dest
}
fn run_buildifier(buildifier_path: &Path, module: &Path) {
Command::new(buildifier_path)
.arg("-lint=fix")
.arg("-mode=fix")
.arg("-warnings=all")
.arg(module)
.output()
.unwrap();
}
fn main() {
let opt = Options::parse();
let artifacts = locate_artifacts(&opt.artifacts_dir, &opt.url_prefix);
let content = render_module(&artifacts);
let path = write_module(&content);
if let Some(buildifier_path) = opt.buildifier {
run_buildifier(&buildifier_path, &path);
}
}
|
random_line_split
|
|
main.rs
|
//! A helper tool for generating urls and sha256 checksums of cargo-bazel binaries and writing them to a module.
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, fs};
use clap::Parser;
use hex::ToHex;
use sha2::{Digest, Sha256};
#[derive(Parser, Debug)]
struct Options {
/// The path to an artifacts directory expected to contain directories
/// named after platform triples with binaries inside.
#[clap(long)]
pub artifacts_dir: PathBuf,
/// A url prefix where the artifacts can be found
#[clap(long)]
pub url_prefix: String,
/// The path to a buildifier binary. If set, it will be run on the module
#[clap(long)]
pub buildifier: Option<PathBuf>,
}
struct Artifact {
pub url: String,
pub triple: String,
pub sha256: String,
}
fn calculate_sha256(file_path: &Path) -> String {
let file = fs::File::open(file_path).unwrap();
let mut reader = BufReader::new(file);
let mut hasher = Sha256::new();
loop {
let consummed = {
let buffer = reader.fill_buf().unwrap();
if buffer.is_empty() {
break;
}
hasher.update(buffer);
buffer.len()
};
reader.consume(consummed);
}
let digest = hasher.finalize();
digest.encode_hex::<String>()
}
fn locate_artifacts(artifacts_dir: &Path, url_prefix: &str) -> Vec<Artifact> {
let artifact_dirs: Vec<PathBuf> = artifacts_dir
.read_dir()
.unwrap()
.flatten()
.filter(|entry| entry.path().is_dir())
.map(|entry| entry.path())
.collect();
artifact_dirs
.iter()
.map(|path| {
let triple = path.file_name().unwrap().to_string_lossy();
let mut artifacts: Vec<Artifact> = path
.read_dir()
.unwrap()
.flatten()
.map(|f_entry| {
let f_path = f_entry.path();
let stem = f_path.file_stem().unwrap().to_string_lossy();
Artifact {
url: format!("{}/{}-{}", url_prefix, stem, triple),
triple: triple.to_string(),
sha256: calculate_sha256(&f_entry.path()),
}
})
.collect();
if artifacts.len() > 1 {
panic!("Too many artifacts given for {}", triple)
}
artifacts.pop().unwrap()
})
.collect()
}
const TEMPLATE: &str = r#""""A file containing urls and associated sha256 values for cargo-bazel binaries
This file is auto-generated for each release to match the urls and sha256s of
the binaries produced for it.
"""
# Example:
# {
# "x86_64-unknown-linux-gnu": "https://domain.com/downloads/cargo-bazel-x86_64-unknown-linux-gnu",
# "x86_64-apple-darwin": "https://domain.com/downloads/cargo-bazel-x86_64-apple-darwin",
# "x86_64-pc-windows-msvc": "https://domain.com/downloads/cargo-bazel-x86_64-pc-windows-msvc",
# }
CARGO_BAZEL_URLS = {}
# Example:
# {
# "x86_64-unknown-linux-gnu": "1d687fcc860dc8a1aa6198e531f0aee0637ed506d6a412fe2b9884ff5b2b17c0",
# "x86_64-apple-darwin": "0363e450125002f581d29cf632cc876225d738cfa433afa85ca557afb671eafa",
# "x86_64-pc-windows-msvc": "f5647261d989f63dafb2c3cb8e131b225338a790386c06cf7112e43dd9805882",
# }
CARGO_BAZEL_SHA256S = {}
# Example:
# Label("//crate_universe:cargo_bazel_bin")
CARGO_BAZEL_LABEL = Label("@cargo_bazel_bootstrap//:binary")
"#;
fn render_module(artifacts: &[Artifact]) -> String {
let urls: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.url))
.collect();
let sha256s: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.sha256))
.collect();
TEMPLATE
.replace(
"CARGO_BAZEL_URLS = {}",
&format!(
"CARGO_BAZEL_URLS = {}",
serde_json::to_string_pretty(&urls).unwrap()
),
)
.replace(
"CARGO_BAZEL_SHA256S = {}",
&format!(
"CARGO_BAZEL_SHA256S = {}",
serde_json::to_string_pretty(&sha256s).unwrap()
),
)
.replace(
"CARGO_BAZEL_LABEL = Label(\"@cargo_bazel_bootstrap//:binary\")",
"CARGO_BAZEL_LABEL = Label(\"//crate_universe:cargo_bazel_bin\")",
)
}
fn
|
(content: &str) -> PathBuf {
let dest = PathBuf::from(
env::var("BUILD_WORKSPACE_DIRECTORY").expect("This binary is required to run under Bazel"),
)
.join(env!("MODULE_ROOT_PATH"));
fs::write(&dest, content).unwrap();
dest
}
fn run_buildifier(buildifier_path: &Path, module: &Path) {
Command::new(buildifier_path)
.arg("-lint=fix")
.arg("-mode=fix")
.arg("-warnings=all")
.arg(module)
.output()
.unwrap();
}
fn main() {
let opt = Options::parse();
let artifacts = locate_artifacts(&opt.artifacts_dir, &opt.url_prefix);
let content = render_module(&artifacts);
let path = write_module(&content);
if let Some(buildifier_path) = opt.buildifier {
run_buildifier(&buildifier_path, &path);
}
}
|
write_module
|
identifier_name
|
main.rs
|
//! A helper tool for generating urls and sha256 checksums of cargo-bazel binaries and writing them to a module.
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, fs};
use clap::Parser;
use hex::ToHex;
use sha2::{Digest, Sha256};
#[derive(Parser, Debug)]
struct Options {
/// The path to an artifacts directory expected to contain directories
/// named after platform triples with binaries inside.
#[clap(long)]
pub artifacts_dir: PathBuf,
/// A url prefix where the artifacts can be found
#[clap(long)]
pub url_prefix: String,
/// The path to a buildifier binary. If set, it will be run on the module
#[clap(long)]
pub buildifier: Option<PathBuf>,
}
struct Artifact {
pub url: String,
pub triple: String,
pub sha256: String,
}
fn calculate_sha256(file_path: &Path) -> String {
let file = fs::File::open(file_path).unwrap();
let mut reader = BufReader::new(file);
let mut hasher = Sha256::new();
loop {
let consummed = {
let buffer = reader.fill_buf().unwrap();
if buffer.is_empty() {
break;
}
hasher.update(buffer);
buffer.len()
};
reader.consume(consummed);
}
let digest = hasher.finalize();
digest.encode_hex::<String>()
}
fn locate_artifacts(artifacts_dir: &Path, url_prefix: &str) -> Vec<Artifact> {
let artifact_dirs: Vec<PathBuf> = artifacts_dir
.read_dir()
.unwrap()
.flatten()
.filter(|entry| entry.path().is_dir())
.map(|entry| entry.path())
.collect();
artifact_dirs
.iter()
.map(|path| {
let triple = path.file_name().unwrap().to_string_lossy();
let mut artifacts: Vec<Artifact> = path
.read_dir()
.unwrap()
.flatten()
.map(|f_entry| {
let f_path = f_entry.path();
let stem = f_path.file_stem().unwrap().to_string_lossy();
Artifact {
url: format!("{}/{}-{}", url_prefix, stem, triple),
triple: triple.to_string(),
sha256: calculate_sha256(&f_entry.path()),
}
})
.collect();
if artifacts.len() > 1 {
panic!("Too many artifacts given for {}", triple)
}
artifacts.pop().unwrap()
})
.collect()
}
const TEMPLATE: &str = r#""""A file containing urls and associated sha256 values for cargo-bazel binaries
This file is auto-generated for each release to match the urls and sha256s of
the binaries produced for it.
"""
# Example:
# {
# "x86_64-unknown-linux-gnu": "https://domain.com/downloads/cargo-bazel-x86_64-unknown-linux-gnu",
# "x86_64-apple-darwin": "https://domain.com/downloads/cargo-bazel-x86_64-apple-darwin",
# "x86_64-pc-windows-msvc": "https://domain.com/downloads/cargo-bazel-x86_64-pc-windows-msvc",
# }
CARGO_BAZEL_URLS = {}
# Example:
# {
# "x86_64-unknown-linux-gnu": "1d687fcc860dc8a1aa6198e531f0aee0637ed506d6a412fe2b9884ff5b2b17c0",
# "x86_64-apple-darwin": "0363e450125002f581d29cf632cc876225d738cfa433afa85ca557afb671eafa",
# "x86_64-pc-windows-msvc": "f5647261d989f63dafb2c3cb8e131b225338a790386c06cf7112e43dd9805882",
# }
CARGO_BAZEL_SHA256S = {}
# Example:
# Label("//crate_universe:cargo_bazel_bin")
CARGO_BAZEL_LABEL = Label("@cargo_bazel_bootstrap//:binary")
"#;
fn render_module(artifacts: &[Artifact]) -> String {
let urls: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.url))
.collect();
let sha256s: HashMap<&String, &String> = artifacts
.iter()
.map(|artifact| (&artifact.triple, &artifact.sha256))
.collect();
TEMPLATE
.replace(
"CARGO_BAZEL_URLS = {}",
&format!(
"CARGO_BAZEL_URLS = {}",
serde_json::to_string_pretty(&urls).unwrap()
),
)
.replace(
"CARGO_BAZEL_SHA256S = {}",
&format!(
"CARGO_BAZEL_SHA256S = {}",
serde_json::to_string_pretty(&sha256s).unwrap()
),
)
.replace(
"CARGO_BAZEL_LABEL = Label(\"@cargo_bazel_bootstrap//:binary\")",
"CARGO_BAZEL_LABEL = Label(\"//crate_universe:cargo_bazel_bin\")",
)
}
fn write_module(content: &str) -> PathBuf {
let dest = PathBuf::from(
env::var("BUILD_WORKSPACE_DIRECTORY").expect("This binary is required to run under Bazel"),
)
.join(env!("MODULE_ROOT_PATH"));
fs::write(&dest, content).unwrap();
dest
}
fn run_buildifier(buildifier_path: &Path, module: &Path)
|
fn main() {
let opt = Options::parse();
let artifacts = locate_artifacts(&opt.artifacts_dir, &opt.url_prefix);
let content = render_module(&artifacts);
let path = write_module(&content);
if let Some(buildifier_path) = opt.buildifier {
run_buildifier(&buildifier_path, &path);
}
}
|
{
Command::new(buildifier_path)
.arg("-lint=fix")
.arg("-mode=fix")
.arg("-warnings=all")
.arg(module)
.output()
.unwrap();
}
|
identifier_body
|
lib.rs
|
extern crate chrono;
#[macro_use]
extern crate nom;
#[cfg(target_os = "linux")]
extern crate alsa;
#[cfg(feature = "dbus")]
extern crate dbus;
extern crate libc;
#[cfg(feature = "systemstat")]
extern crate systemstat;
#[cfg(feature = "xkb")]
extern crate xcb;
#[macro_use]
extern crate chan;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
pub mod format;
pub mod widget;
pub use format::*;
use std::collections::BTreeMap;
pub use widget::*;
pub struct UnixBar<F: Formatter> {
formatter: F,
widgets: Vec<Box<Widget>>,
fns: BTreeMap<String, Box<FnMut()>>,
}
impl<F: Formatter> UnixBar<F> {
pub fn new(formatter: F) -> UnixBar<F> {
UnixBar {
formatter,
widgets: Vec::new(),
fns: BTreeMap::new(),
}
}
pub fn register_fn<Fn>(&mut self, name: &str, func: Fn) -> &mut UnixBar<F>
where
Fn: FnMut() + 'static,
|
pub fn add(&mut self, widget: Box<Widget>) -> &mut UnixBar<F> {
self.widgets.push(widget);
self
}
pub fn run(&mut self) {
let (wid_tx, wid_rx) = chan::async();
for widget in &mut self.widgets {
widget.spawn_notifier(wid_tx.clone());
}
self.show();
let (stdin_tx, stdin_rx) = chan::async();
std::thread::spawn(move || {
let stdin = std::io::stdin();
let mut line = String::new();
loop {
line.clear();
if stdin.read_line(&mut line).is_ok() {
stdin_tx.send(line.clone());
}
}
});
loop {
chan_select! {
wid_rx.recv() => self.show(),
stdin_rx.recv() -> line => self.formatter.handle_stdin(line, &mut self.fns),
}
}
}
fn show(&mut self) {
let vals: Vec<Format> = self.widgets.iter().map(|ref w| w.current_value()).collect();
let line = self.formatter.format_all(&vals);
println!("{}", line.replace("\n", ""));
}
}
|
{
self.fns.insert(name.to_owned(), Box::new(func));
self
}
|
identifier_body
|
lib.rs
|
extern crate chrono;
#[macro_use]
extern crate nom;
#[cfg(target_os = "linux")]
extern crate alsa;
#[cfg(feature = "dbus")]
extern crate dbus;
extern crate libc;
#[cfg(feature = "systemstat")]
extern crate systemstat;
#[cfg(feature = "xkb")]
extern crate xcb;
#[macro_use]
extern crate chan;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
pub mod format;
pub mod widget;
pub use format::*;
use std::collections::BTreeMap;
pub use widget::*;
pub struct UnixBar<F: Formatter> {
formatter: F,
widgets: Vec<Box<Widget>>,
fns: BTreeMap<String, Box<FnMut()>>,
}
impl<F: Formatter> UnixBar<F> {
pub fn new(formatter: F) -> UnixBar<F> {
UnixBar {
formatter,
widgets: Vec::new(),
fns: BTreeMap::new(),
}
}
pub fn register_fn<Fn>(&mut self, name: &str, func: Fn) -> &mut UnixBar<F>
where
Fn: FnMut() + 'static,
{
self.fns.insert(name.to_owned(), Box::new(func));
self
}
pub fn
|
(&mut self, widget: Box<Widget>) -> &mut UnixBar<F> {
self.widgets.push(widget);
self
}
pub fn run(&mut self) {
let (wid_tx, wid_rx) = chan::async();
for widget in &mut self.widgets {
widget.spawn_notifier(wid_tx.clone());
}
self.show();
let (stdin_tx, stdin_rx) = chan::async();
std::thread::spawn(move || {
let stdin = std::io::stdin();
let mut line = String::new();
loop {
line.clear();
if stdin.read_line(&mut line).is_ok() {
stdin_tx.send(line.clone());
}
}
});
loop {
chan_select! {
wid_rx.recv() => self.show(),
stdin_rx.recv() -> line => self.formatter.handle_stdin(line, &mut self.fns),
}
}
}
fn show(&mut self) {
let vals: Vec<Format> = self.widgets.iter().map(|ref w| w.current_value()).collect();
let line = self.formatter.format_all(&vals);
println!("{}", line.replace("\n", ""));
}
}
|
add
|
identifier_name
|
lib.rs
|
extern crate chrono;
#[macro_use]
extern crate nom;
#[cfg(target_os = "linux")]
extern crate alsa;
#[cfg(feature = "dbus")]
extern crate dbus;
extern crate libc;
#[cfg(feature = "systemstat")]
extern crate systemstat;
#[cfg(feature = "xkb")]
extern crate xcb;
#[macro_use]
extern crate chan;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
pub mod format;
pub mod widget;
pub use format::*;
use std::collections::BTreeMap;
pub use widget::*;
pub struct UnixBar<F: Formatter> {
formatter: F,
widgets: Vec<Box<Widget>>,
fns: BTreeMap<String, Box<FnMut()>>,
}
impl<F: Formatter> UnixBar<F> {
pub fn new(formatter: F) -> UnixBar<F> {
UnixBar {
formatter,
widgets: Vec::new(),
fns: BTreeMap::new(),
}
}
pub fn register_fn<Fn>(&mut self, name: &str, func: Fn) -> &mut UnixBar<F>
where
Fn: FnMut() + 'static,
{
self.fns.insert(name.to_owned(), Box::new(func));
self
}
pub fn add(&mut self, widget: Box<Widget>) -> &mut UnixBar<F> {
self.widgets.push(widget);
self
}
pub fn run(&mut self) {
let (wid_tx, wid_rx) = chan::async();
for widget in &mut self.widgets {
widget.spawn_notifier(wid_tx.clone());
}
self.show();
let (stdin_tx, stdin_rx) = chan::async();
std::thread::spawn(move || {
let stdin = std::io::stdin();
let mut line = String::new();
loop {
line.clear();
if stdin.read_line(&mut line).is_ok() {
stdin_tx.send(line.clone());
}
}
});
loop {
chan_select! {
|
stdin_rx.recv() -> line => self.formatter.handle_stdin(line, &mut self.fns),
}
}
}
fn show(&mut self) {
let vals: Vec<Format> = self.widgets.iter().map(|ref w| w.current_value()).collect();
let line = self.formatter.format_all(&vals);
println!("{}", line.replace("\n", ""));
}
}
|
wid_rx.recv() => self.show(),
|
random_line_split
|
lib.rs
|
extern crate chrono;
#[macro_use]
extern crate nom;
#[cfg(target_os = "linux")]
extern crate alsa;
#[cfg(feature = "dbus")]
extern crate dbus;
extern crate libc;
#[cfg(feature = "systemstat")]
extern crate systemstat;
#[cfg(feature = "xkb")]
extern crate xcb;
#[macro_use]
extern crate chan;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
pub mod format;
pub mod widget;
pub use format::*;
use std::collections::BTreeMap;
pub use widget::*;
pub struct UnixBar<F: Formatter> {
formatter: F,
widgets: Vec<Box<Widget>>,
fns: BTreeMap<String, Box<FnMut()>>,
}
impl<F: Formatter> UnixBar<F> {
pub fn new(formatter: F) -> UnixBar<F> {
UnixBar {
formatter,
widgets: Vec::new(),
fns: BTreeMap::new(),
}
}
pub fn register_fn<Fn>(&mut self, name: &str, func: Fn) -> &mut UnixBar<F>
where
Fn: FnMut() + 'static,
{
self.fns.insert(name.to_owned(), Box::new(func));
self
}
pub fn add(&mut self, widget: Box<Widget>) -> &mut UnixBar<F> {
self.widgets.push(widget);
self
}
pub fn run(&mut self) {
let (wid_tx, wid_rx) = chan::async();
for widget in &mut self.widgets {
widget.spawn_notifier(wid_tx.clone());
}
self.show();
let (stdin_tx, stdin_rx) = chan::async();
std::thread::spawn(move || {
let stdin = std::io::stdin();
let mut line = String::new();
loop {
line.clear();
if stdin.read_line(&mut line).is_ok()
|
}
});
loop {
chan_select! {
wid_rx.recv() => self.show(),
stdin_rx.recv() -> line => self.formatter.handle_stdin(line, &mut self.fns),
}
}
}
fn show(&mut self) {
let vals: Vec<Format> = self.widgets.iter().map(|ref w| w.current_value()).collect();
let line = self.formatter.format_all(&vals);
println!("{}", line.replace("\n", ""));
}
}
|
{
stdin_tx.send(line.clone());
}
|
conditional_block
|
test3.rs
|
extern crate blurz;
use std::error::Error;
use std::time::Duration;
use std::thread;
use blurz::bluetooth_adapter::BluetoothAdapter as Adapter;
use blurz::bluetooth_device::BluetoothDevice as Device;
use blurz::bluetooth_discovery_session::BluetoothDiscoverySession as DiscoverySession;
fn test3() -> Result<(), Box<Error>> {
let adapter: Adapter = try!(Adapter::init());
try!(adapter.set_powered(true));
loop {
let session = try!(DiscoverySession::create_session(adapter.get_id()));
thread::sleep(Duration::from_millis(200));
try!(session.start_discovery());
thread::sleep(Duration::from_millis(800));
let devices = try!(adapter.get_device_list());
println!("{} device(s) found", devices.len());
'device_loop: for d in devices {
let device = Device::new(d.clone());
println!("{} {:?} {:?}", device.get_id(), device.get_address(),device.get_rssi());
try!(adapter.remove_device(device.get_id()));
}
try!(session.stop_discovery());
}
}
fn main()
|
{
match test3() {
Ok(_) => (),
Err(e) => println!("{:?}", e),
}
}
|
identifier_body
|
|
test3.rs
|
extern crate blurz;
use std::error::Error;
use std::time::Duration;
use std::thread;
use blurz::bluetooth_adapter::BluetoothAdapter as Adapter;
use blurz::bluetooth_device::BluetoothDevice as Device;
use blurz::bluetooth_discovery_session::BluetoothDiscoverySession as DiscoverySession;
fn test3() -> Result<(), Box<Error>> {
let adapter: Adapter = try!(Adapter::init());
try!(adapter.set_powered(true));
loop {
let session = try!(DiscoverySession::create_session(adapter.get_id()));
thread::sleep(Duration::from_millis(200));
try!(session.start_discovery());
thread::sleep(Duration::from_millis(800));
let devices = try!(adapter.get_device_list());
println!("{} device(s) found", devices.len());
'device_loop: for d in devices {
|
}
}
fn main() {
match test3() {
Ok(_) => (),
Err(e) => println!("{:?}", e),
}
}
|
let device = Device::new(d.clone());
println!("{} {:?} {:?}", device.get_id(), device.get_address(),device.get_rssi());
try!(adapter.remove_device(device.get_id()));
}
try!(session.stop_discovery());
|
random_line_split
|
test3.rs
|
extern crate blurz;
use std::error::Error;
use std::time::Duration;
use std::thread;
use blurz::bluetooth_adapter::BluetoothAdapter as Adapter;
use blurz::bluetooth_device::BluetoothDevice as Device;
use blurz::bluetooth_discovery_session::BluetoothDiscoverySession as DiscoverySession;
fn
|
() -> Result<(), Box<Error>> {
let adapter: Adapter = try!(Adapter::init());
try!(adapter.set_powered(true));
loop {
let session = try!(DiscoverySession::create_session(adapter.get_id()));
thread::sleep(Duration::from_millis(200));
try!(session.start_discovery());
thread::sleep(Duration::from_millis(800));
let devices = try!(adapter.get_device_list());
println!("{} device(s) found", devices.len());
'device_loop: for d in devices {
let device = Device::new(d.clone());
println!("{} {:?} {:?}", device.get_id(), device.get_address(),device.get_rssi());
try!(adapter.remove_device(device.get_id()));
}
try!(session.stop_discovery());
}
}
fn main() {
match test3() {
Ok(_) => (),
Err(e) => println!("{:?}", e),
}
}
|
test3
|
identifier_name
|
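The test3 loop above predates the ? operator and leans on the deprecated try! macro. A hedged sketch of one discovery pass rewritten with ?, assuming the same blurz API the records above call (Adapter, Device, DiscoverySession and their methods); scan_once is a hypothetical helper name.

extern crate blurz;

use std::error::Error;
use std::thread;
use std::time::Duration;

use blurz::bluetooth_adapter::BluetoothAdapter as Adapter;
use blurz::bluetooth_device::BluetoothDevice as Device;
use blurz::bluetooth_discovery_session::BluetoothDiscoverySession as DiscoverySession;

// One discovery pass: start, wait, list and clean up, propagating errors with `?`.
fn scan_once(adapter: &Adapter) -> Result<(), Box<Error>> {
    let session = DiscoverySession::create_session(adapter.get_id())?;
    session.start_discovery()?;
    thread::sleep(Duration::from_millis(800));
    let devices = adapter.get_device_list()?;
    println!("{} device(s) found", devices.len());
    for d in devices {
        let device = Device::new(d.clone());
        println!("{} {:?} {:?}", device.get_id(), device.get_address(), device.get_rssi());
        adapter.remove_device(device.get_id())?;
    }
    session.stop_discovery()?;
    Ok(())
}

fn main() {
    let adapter: Adapter = Adapter::init().unwrap();
    adapter.set_powered(true).unwrap();
    if let Err(e) = scan_once(&adapter) {
        println!("{:?}", e);
    }
}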
build.rs
|
use std::{env, error::Error, path::PathBuf, process::Command, str};
use walkdir::WalkDir;
fn command_output(cmd: &mut Command) -> String {
str::from_utf8(&cmd.output().unwrap().stdout)
.unwrap()
.trim()
.to_string()
}
fn command_ok(cmd: &mut Command) -> bool {
cmd.status().ok().map_or(false, |s| s.success())
}
fn ghc(builder: &str, arg: &str) -> String {
command_output(Command::new(builder).args(&["exec", "--", "ghc", arg]))
}
// Each OS has a different extension for the dynamic libraries. This compiles for
// the correct ones.
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
const DYLIB_EXTENSION: &'static str = ".so";
#[cfg(target_os = "macos")]
const DYLIB_EXTENSION: &'static str = ".a";
#[cfg(target_os = "windows")]
const DYLIB_EXTENSION: &'static str = ".dll";
fn main() {
println!("cargo:rerun-if-changed=../haskell/htest");
// Traverse the directory to link all of the libs in ghc
// then tell cargo where to get htest for linking
match link_ghc_libs() {
Err(e) => panic!("Unable to link ghc_libs: {}", e),
Ok(_) => {
// Only link this if the build artifact exists
if PathBuf::from("../haskell/htest/dist-newstyle").exists() {
println!("cargo:rustc-link-search=native=../haskell/htest/dist-newstyle/build/x86_64-osx/ghc-8.8.3/htest-0.1.0.0/build/");
println!("cargo:rustc-link-lib=static=HShtest-0.1.0.0-inplace");
}
let bindings = bindgen::Builder::default()
|
dir.push("HsFFI.h");
dir.as_os_str().to_owned().to_string_lossy()
})
// Tell cargo to invalidate the built crate whenever any of the
// included header files changed.
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
// Finish the builder and generate the bindings.
.generate()
// Unwrap the Result and panic on failure.
.expect("Unable to generate bindings");
// Write the bindings to the $OUT_DIR/bindings.rs file.
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}
}
}
fn link_ghc_libs() -> Result<(), Box<dyn Error>> {
std::env::set_current_dir("../haskell")?;
let builder = if command_ok(Command::new("stack").arg("--version")) {
"stack"
} else {
"cabal"
};
// Go to the libdir for ghc then traverse all the entries
for entry in WalkDir::new(&ghc(builder, "--print-libdir"))
.min_depth(1)
.into_iter()
{
let entry = entry?;
match (entry.path().to_str(), entry.file_name().to_str()) {
(Some(e), Some(file_name)) => {
// This filters out every file shipped with GHC that isn't
// a static archive file and only one of every type (RTS and
// C FFI have a few different versions)
                if entry.path().extension().map(|y| y != "a").unwrap_or(true)
|| entry.path().is_dir()
|| e.ends_with("_p.a")
|| e.ends_with("_thr.a")
|| e.ends_with("_thr_l.a")
|| e.ends_with("_thr_debug.a")
|| e.ends_with("_l.a")
|| e.ends_with("_debug.a")
|| e.contains("_debug.a")
{
continue;
}
// Get the path without the file in the name
let lib_path = {
let mut path = entry.path().to_owned();
path.pop();
path
}
.to_str()
.unwrap()
.to_owned();
println!("cargo:rustc-link-search=native={}", lib_path);
// Get rid of lib from the file name
let temp = file_name.split_at(3).1;
                // Get rid of the .so from the file name
let trimmed = temp.split_at(temp.len() - DYLIB_EXTENSION.len()).0;
println!("cargo:rustc-link-lib=static={}", trimmed);
}
_ => panic!("Unable to link GHC libs at all"),
}
}
    // Mac-specific linking bugs: macOS is developer-hostile here and needs
    // iconv from /usr/lib linked explicitly.
#[cfg(target_os = "macos")]
println!("cargo:rustc-link-search=native=/usr/lib");
#[cfg(target_os = "macos")]
println!("cargo:rustc-link-lib=dylib=iconv");
Ok(())
}
|
// The input header we would like to generate
// bindings for.
.header({
let mut dir = PathBuf::from(ghc("cabal", "--print-libdir"));
dir.push("include");
|
random_line_split
|
build.rs
|
use std::{env, error::Error, path::PathBuf, process::Command, str};
use walkdir::WalkDir;
fn command_output(cmd: &mut Command) -> String {
str::from_utf8(&cmd.output().unwrap().stdout)
.unwrap()
.trim()
.to_string()
}
fn command_ok(cmd: &mut Command) -> bool {
cmd.status().ok().map_or(false, |s| s.success())
}
fn ghc(builder: &str, arg: &str) -> String {
command_output(Command::new(builder).args(&["exec", "--", "ghc", arg]))
}
// Each OS has a different extension for the dynamic libraries. This compiles for
// the correct ones.
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
const DYLIB_EXTENSION: &'static str = ".so";
#[cfg(target_os = "macos")]
const DYLIB_EXTENSION: &'static str = ".a";
#[cfg(target_os = "windows")]
const DYLIB_EXTENSION: &'static str = ".dll";
fn main() {
println!("cargo:rerun-if-changed=../haskell/htest");
// Traverse the directory to link all of the libs in ghc
// then tell cargo where to get htest for linking
match link_ghc_libs() {
Err(e) => panic!("Unable to link ghc_libs: {}", e),
Ok(_) => {
// Only link this if the build artifact exists
if PathBuf::from("../haskell/htest/dist-newstyle").exists() {
println!("cargo:rustc-link-search=native=../haskell/htest/dist-newstyle/build/x86_64-osx/ghc-8.8.3/htest-0.1.0.0/build/");
println!("cargo:rustc-link-lib=static=HShtest-0.1.0.0-inplace");
}
let bindings = bindgen::Builder::default()
// The input header we would like to generate
// bindings for.
.header({
let mut dir = PathBuf::from(ghc("cabal", "--print-libdir"));
dir.push("include");
dir.push("HsFFI.h");
dir.as_os_str().to_owned().to_string_lossy()
})
// Tell cargo to invalidate the built crate whenever any of the
// included header files changed.
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
// Finish the builder and generate the bindings.
.generate()
// Unwrap the Result and panic on failure.
.expect("Unable to generate bindings");
// Write the bindings to the $OUT_DIR/bindings.rs file.
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}
}
}
fn link_ghc_libs() -> Result<(), Box<dyn Error>>
|
|| entry.path().is_dir()
|| e.ends_with("_p.a")
|| e.ends_with("_thr.a")
|| e.ends_with("_thr_l.a")
|| e.ends_with("_thr_debug.a")
|| e.ends_with("_l.a")
|| e.ends_with("_debug.a")
|| e.contains("_debug.a")
{
continue;
}
// Get the path without the file in the name
let lib_path = {
let mut path = entry.path().to_owned();
path.pop();
path
}
.to_str()
.unwrap()
.to_owned();
println!("cargo:rustc-link-search=native={}", lib_path);
// Get rid of lib from the file name
let temp = file_name.split_at(3).1;
                // Get rid of the .so from the file name
let trimmed = temp.split_at(temp.len() - DYLIB_EXTENSION.len()).0;
println!("cargo:rustc-link-lib=static={}", trimmed);
}
_ => panic!("Unable to link GHC libs at all"),
}
}
    // Mac-specific linking bugs: macOS is developer-hostile here and needs
    // iconv from /usr/lib linked explicitly.
#[cfg(target_os = "macos")]
println!("cargo:rustc-link-search=native=/usr/lib");
#[cfg(target_os = "macos")]
println!("cargo:rustc-link-lib=dylib=iconv");
Ok(())
}
|
{
std::env::set_current_dir("../haskell")?;
let builder = if command_ok(Command::new("stack").arg("--version")) {
"stack"
} else {
"cabal"
};
// Go to the libdir for ghc then traverse all the entries
for entry in WalkDir::new(&ghc(builder, "--print-libdir"))
.min_depth(1)
.into_iter()
{
let entry = entry?;
match (entry.path().to_str(), entry.file_name().to_str()) {
(Some(e), Some(file_name)) => {
// This filters out every file shipped with GHC that isn't
// a static archive file and only one of every type (RTS and
// C FFI have a few different versions)
if entry.path().extension().map(|y| y != "a").unwrap_or(true)
|
identifier_body
|
build.rs
|
use std::{env, error::Error, path::PathBuf, process::Command, str};
use walkdir::WalkDir;
fn command_output(cmd: &mut Command) -> String {
str::from_utf8(&cmd.output().unwrap().stdout)
.unwrap()
.trim()
.to_string()
}
fn
|
(cmd: &mut Command) -> bool {
cmd.status().ok().map_or(false, |s| s.success())
}
fn ghc(builder: &str, arg: &str) -> String {
command_output(Command::new(builder).args(&["exec", "--", "ghc", arg]))
}
// Each OS has a different extension for the dynamic libraries. This compiles for
// the correct ones.
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
const DYLIB_EXTENSION: &'static str = ".so";
#[cfg(target_os = "macos")]
const DYLIB_EXTENSION: &'static str = ".a";
#[cfg(target_os = "windows")]
const DYLIB_EXTENSION: &'static str = ".dll";
fn main() {
println!("cargo:rerun-if-changed=../haskell/htest");
// Traverse the directory to link all of the libs in ghc
// then tell cargo where to get htest for linking
match link_ghc_libs() {
Err(e) => panic!("Unable to link ghc_libs: {}", e),
Ok(_) => {
// Only link this if the build artifact exists
if PathBuf::from("../haskell/htest/dist-newstyle").exists() {
println!("cargo:rustc-link-search=native=../haskell/htest/dist-newstyle/build/x86_64-osx/ghc-8.8.3/htest-0.1.0.0/build/");
println!("cargo:rustc-link-lib=static=HShtest-0.1.0.0-inplace");
}
let bindings = bindgen::Builder::default()
// The input header we would like to generate
// bindings for.
.header({
let mut dir = PathBuf::from(ghc("cabal", "--print-libdir"));
dir.push("include");
dir.push("HsFFI.h");
dir.as_os_str().to_owned().to_string_lossy()
})
// Tell cargo to invalidate the built crate whenever any of the
// included header files changed.
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
// Finish the builder and generate the bindings.
.generate()
// Unwrap the Result and panic on failure.
.expect("Unable to generate bindings");
// Write the bindings to the $OUT_DIR/bindings.rs file.
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}
}
}
fn link_ghc_libs() -> Result<(), Box<dyn Error>> {
std::env::set_current_dir("../haskell")?;
let builder = if command_ok(Command::new("stack").arg("--version")) {
"stack"
} else {
"cabal"
};
// Go to the libdir for ghc then traverse all the entries
for entry in WalkDir::new(&ghc(builder, "--print-libdir"))
.min_depth(1)
.into_iter()
{
let entry = entry?;
match (entry.path().to_str(), entry.file_name().to_str()) {
(Some(e), Some(file_name)) => {
// This filters out every file shipped with GHC that isn't
// a static archive file and only one of every type (RTS and
// C FFI have a few different versions)
                if entry.path().extension().map(|y| y != "a").unwrap_or(true)
|| entry.path().is_dir()
|| e.ends_with("_p.a")
|| e.ends_with("_thr.a")
|| e.ends_with("_thr_l.a")
|| e.ends_with("_thr_debug.a")
|| e.ends_with("_l.a")
|| e.ends_with("_debug.a")
|| e.contains("_debug.a")
{
continue;
}
// Get the path without the file in the name
let lib_path = {
let mut path = entry.path().to_owned();
path.pop();
path
}
.to_str()
.unwrap()
.to_owned();
println!("cargo:rustc-link-search=native={}", lib_path);
// Get rid of lib from the file name
let temp = file_name.split_at(3).1;
                // Get rid of the .so from the file name
let trimmed = temp.split_at(temp.len() - DYLIB_EXTENSION.len()).0;
println!("cargo:rustc-link-lib=static={}", trimmed);
}
_ => panic!("Unable to link GHC libs at all"),
}
}
    // Mac-specific linking bugs: macOS is developer-hostile here and needs
    // iconv from /usr/lib linked explicitly.
#[cfg(target_os = "macos")]
println!("cargo:rustc-link-search=native=/usr/lib");
#[cfg(target_os = "macos")]
println!("cargo:rustc-link-lib=dylib=iconv");
Ok(())
}
|
command_ok
|
identifier_name
|
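The build script above works by printing cargo link directives for every GHC archive it finds. A stripped-down sketch of just that mechanism, using a hypothetical hard-coded directory and library name instead of walking the GHC libdir.

// build.rs (sketch): emit the same kind of cargo directives for a single
// static archive, here a hypothetical native/libs/libfoo.a.
fn main() {
    let lib_dir = "native/libs";
    // Re-run this script when the library directory changes.
    println!("cargo:rerun-if-changed={}", lib_dir);
    // Tell rustc where to search and what to link, mirroring the directives
    // printed by link_ghc_libs() above.
    println!("cargo:rustc-link-search=native={}", lib_dir);
    println!("cargo:rustc-link-lib=static=foo");
}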
nth.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::ascii::AsciiExt;
use super::{Token, Parser, ParserInput, BasicParseError};
/// Parse the *An+B* notation, as found in the `:nth-child()` selector.
/// The input is typically the arguments of a function,
/// in which case the caller needs to check if the arguments’ parser is exhausted.
/// Return `Ok((A, B))`, or `Err(())` for a syntax error.
pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), BasicParseError<'i>> {
    // FIXME: remove .clone() when lifetimes are non-lexical.
match input.next()?.clone() {
Token::Number { int_value: Some(b),.. } => {
Ok((0, b))
}
Token::Dimension { int_value: Some(a), unit,.. } => {
match_ignore_ascii_case! {
&unit,
"n" => Ok(try!(parse_b(input, a))),
"n-" => Ok(try!(parse_signless_b(input, a, -1))),
_ => match parse_n_dash_digits(&*unit) {
Ok(b) => Ok((a, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(unit.clone())))
}
}
}
Token::Ident(value) => {
match_ignore_ascii_case! { &value,
"even" => Ok((2, 0)),
"odd" => Ok((2, 1)),
"n" => Ok(try!(parse_b(input, 1))),
"-n" => Ok(try!(parse_b(input, -1))),
"n-" => Ok(try!(parse_signless_b(input, 1, -1))),
"-n-" => Ok(try!(parse_signless_b(input, -1, -1))),
_ => {
let (slice, a) = if value.starts_with("-") {
(&value[1..], -1)
} else {
(&*value, 1)
};
match parse_n_dash_digits(slice) {
Ok(b) => Ok((a, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
}
}
}
}
        // FIXME: remove .clone() when lifetimes are non-lexical.
Token::Delim('+') => match input.next_including_whitespace()?.clone() {
Token::Ident(value) => {
match_ignore_ascii_case! { &value,
"n" => parse_b(input, 1),
"n-" => parse_signless_b(input, 1, -1),
_ => match parse_n_dash_digits(&*value) {
Ok(b) => Ok((1, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
}
}
}
token => Err(input.new_basic_unexpected_token_error(token)),
},
token => Err(input.new_basic_unexpected_token_error(token)),
}
}
fn parse_b<'i, 't>(input: &mut Parser<'i, 't>, a: i32) -> Result<(i32, i32), BasicParseError<'i>> {
let start = input.state();
match input.next() {
Ok(&Token::Delim('+')) => parse_signless_b(input, a, 1),
Ok(&Token::Delim('-')) => parse_signless_b(input, a, -1),
Ok(&Token::Number { has_sign: true, int_value: Some(b),.. }) => Ok((a, b)),
_ => {
input.reset(&start);
Ok((a, 0))
}
}
}
fn parse_signless_b<'i, 't>(input: &mut Parser<'i, 't>, a: i32, b_sign: i32) -> Result<(i32, i32), BasicParseError<'i>> {
    // FIXME: remove .clone() when lifetimes are non-lexical.
match input.next()?.clone() {
Token::Number { has_sign: false, int_value: Some(b),.. } => Ok((a, b_sign * b)),
token => Err(input.new_basic_unexpected_token_error(token))
}
}
fn parse_n_dash_digits(string: &str) -> Result<i32, ()> {
let bytes = string.as_bytes();
if bytes.len() >= 3
&& bytes[..2].eq_ignore_ascii_case(b"n-")
&& bytes[2..].iter().all(|&c| matches!(c, b'0'...b'9'))
|
} else {
Err(())
}
}
fn parse_number_saturate(string: &str) -> Result<i32, ()> {
let mut input = ParserInput::new(string);
let mut parser = Parser::new(&mut input);
let int = if let Ok(&Token::Number {int_value: Some(int),..})
= parser.next_including_whitespace_and_comments() {
int
} else {
return Err(())
};
    if !parser.is_exhausted() {
return Err(())
}
Ok(int)
}
|
{
Ok(parse_number_saturate(&string[1..]).unwrap()) // Include the minus sign
|
random_line_split
|
nth.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::ascii::AsciiExt;
use super::{Token, Parser, ParserInput, BasicParseError};
/// Parse the *An+B* notation, as found in the `:nth-child()` selector.
/// The input is typically the arguments of a function,
/// in which case the caller needs to check if the arguments’ parser is exhausted.
/// Return `Ok((A, B))`, or `Err(())` for a syntax error.
pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), BasicParseError<'i>> {
    // FIXME: remove .clone() when lifetimes are non-lexical.
match input.next()?.clone() {
Token::Number { int_value: Some(b),.. } => {
Ok((0, b))
}
Token::Dimension { int_value: Some(a), unit,.. } => {
match_ignore_ascii_case! {
&unit,
"n" => Ok(try!(parse_b(input, a))),
"n-" => Ok(try!(parse_signless_b(input, a, -1))),
_ => match parse_n_dash_digits(&*unit) {
Ok(b) => Ok((a, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(unit.clone())))
}
}
}
Token::Ident(value) => {
match_ignore_ascii_case! { &value,
"even" => Ok((2, 0)),
"odd" => Ok((2, 1)),
"n" => Ok(try!(parse_b(input, 1))),
"-n" => Ok(try!(parse_b(input, -1))),
"n-" => Ok(try!(parse_signless_b(input, 1, -1))),
"-n-" => Ok(try!(parse_signless_b(input, -1, -1))),
_ => {
let (slice, a) = if value.starts_with("-") {
(&value[1..], -1)
} else {
(&*value, 1)
};
match parse_n_dash_digits(slice) {
Ok(b) => Ok((a, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
}
}
}
}
        // FIXME: remove .clone() when lifetimes are non-lexical.
Token::Delim('+') => match input.next_including_whitespace()?.clone() {
Token::Ident(value) => {
match_ignore_ascii_case! { &value,
"n" => parse_b(input, 1),
"n-" => parse_signless_b(input, 1, -1),
_ => match parse_n_dash_digits(&*value) {
Ok(b) => Ok((1, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
}
}
}
token => Err(input.new_basic_unexpected_token_error(token)),
},
token => Err(input.new_basic_unexpected_token_error(token)),
}
}
fn pa
|
i, 't>(input: &mut Parser<'i, 't>, a: i32) -> Result<(i32, i32), BasicParseError<'i>> {
let start = input.state();
match input.next() {
Ok(&Token::Delim('+')) => parse_signless_b(input, a, 1),
Ok(&Token::Delim('-')) => parse_signless_b(input, a, -1),
Ok(&Token::Number { has_sign: true, int_value: Some(b),.. }) => Ok((a, b)),
_ => {
input.reset(&start);
Ok((a, 0))
}
}
}
fn parse_signless_b<'i, 't>(input: &mut Parser<'i, 't>, a: i32, b_sign: i32) -> Result<(i32, i32), BasicParseError<'i>> {
    // FIXME: remove .clone() when lifetimes are non-lexical.
match input.next()?.clone() {
Token::Number { has_sign: false, int_value: Some(b),.. } => Ok((a, b_sign * b)),
token => Err(input.new_basic_unexpected_token_error(token))
}
}
fn parse_n_dash_digits(string: &str) -> Result<i32, ()> {
let bytes = string.as_bytes();
if bytes.len() >= 3
&& bytes[..2].eq_ignore_ascii_case(b"n-")
&& bytes[2..].iter().all(|&c| matches!(c, b'0'...b'9'))
{
Ok(parse_number_saturate(&string[1..]).unwrap()) // Include the minus sign
} else {
Err(())
}
}
fn parse_number_saturate(string: &str) -> Result<i32, ()> {
let mut input = ParserInput::new(string);
let mut parser = Parser::new(&mut input);
let int = if let Ok(&Token::Number {int_value: Some(int),..})
= parser.next_including_whitespace_and_comments() {
int
} else {
return Err(())
};
    if !parser.is_exhausted() {
return Err(())
}
Ok(int)
}
|
rse_b<'
|
identifier_name
|
nth.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::ascii::AsciiExt;
use super::{Token, Parser, ParserInput, BasicParseError};
/// Parse the *An+B* notation, as found in the `:nth-child()` selector.
/// The input is typically the arguments of a function,
/// in which case the caller needs to check if the arguments’ parser is exhausted.
/// Return `Ok((A, B))`, or `Err(())` for a syntax error.
pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), BasicParseError<'i>> {
|
"n" => Ok(try!(parse_b(input, 1))),
"-n" => Ok(try!(parse_b(input, -1))),
"n-" => Ok(try!(parse_signless_b(input, 1, -1))),
"-n-" => Ok(try!(parse_signless_b(input, -1, -1))),
_ => {
let (slice, a) = if value.starts_with("-") {
(&value[1..], -1)
} else {
(&*value, 1)
};
match parse_n_dash_digits(slice) {
Ok(b) => Ok((a, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
}
}
}
}
        // FIXME: remove .clone() when lifetimes are non-lexical.
Token::Delim('+') => match input.next_including_whitespace()?.clone() {
Token::Ident(value) => {
match_ignore_ascii_case! { &value,
"n" => parse_b(input, 1),
"n-" => parse_signless_b(input, 1, -1),
_ => match parse_n_dash_digits(&*value) {
Ok(b) => Ok((1, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
}
}
}
token => Err(input.new_basic_unexpected_token_error(token)),
},
token => Err(input.new_basic_unexpected_token_error(token)),
}
}
fn parse_b<'i, 't>(input: &mut Parser<'i, 't>, a: i32) -> Result<(i32, i32), BasicParseError<'i>> {
let start = input.state();
match input.next() {
Ok(&Token::Delim('+')) => parse_signless_b(input, a, 1),
Ok(&Token::Delim('-')) => parse_signless_b(input, a, -1),
Ok(&Token::Number { has_sign: true, int_value: Some(b),.. }) => Ok((a, b)),
_ => {
input.reset(&start);
Ok((a, 0))
}
}
}
fn parse_signless_b<'i, 't>(input: &mut Parser<'i, 't>, a: i32, b_sign: i32) -> Result<(i32, i32), BasicParseError<'i>> {
    // FIXME: remove .clone() when lifetimes are non-lexical.
match input.next()?.clone() {
Token::Number { has_sign: false, int_value: Some(b),.. } => Ok((a, b_sign * b)),
token => Err(input.new_basic_unexpected_token_error(token))
}
}
fn parse_n_dash_digits(string: &str) -> Result<i32, ()> {
let bytes = string.as_bytes();
if bytes.len() >= 3
&& bytes[..2].eq_ignore_ascii_case(b"n-")
&& bytes[2..].iter().all(|&c| matches!(c, b'0'...b'9'))
{
Ok(parse_number_saturate(&string[1..]).unwrap()) // Include the minus sign
} else {
Err(())
}
}
fn parse_number_saturate(string: &str) -> Result<i32, ()> {
let mut input = ParserInput::new(string);
let mut parser = Parser::new(&mut input);
let int = if let Ok(&Token::Number {int_value: Some(int),..})
= parser.next_including_whitespace_and_comments() {
int
} else {
return Err(())
};
    if !parser.is_exhausted() {
return Err(())
}
Ok(int)
}
|
// FIXME: remove .clone() when lifetimes are non-lexical.
match input.next()?.clone() {
Token::Number { int_value: Some(b), .. } => {
Ok((0, b))
}
Token::Dimension { int_value: Some(a), unit, .. } => {
match_ignore_ascii_case! {
&unit,
"n" => Ok(try!(parse_b(input, a))),
"n-" => Ok(try!(parse_signless_b(input, a, -1))),
_ => match parse_n_dash_digits(&*unit) {
Ok(b) => Ok((a, b)),
Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(unit.clone())))
}
}
}
Token::Ident(value) => {
match_ignore_ascii_case! { &value,
"even" => Ok((2, 0)),
"odd" => Ok((2, 1)),
|
identifier_body
|
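parse_nth above returns the (A, B) pair of the An+B notation. A small test-style sketch of the expected outputs, reusing ParserInput and Parser the same way parse_number_saturate does; it assumes this module lives inside cssparser, where BasicParseError implements Debug so unwrap() is available, and nth_examples is a hypothetical test module name.

#[cfg(test)]
mod nth_examples {
    use super::*;

    // Helper: parse one An+B string and unwrap the (A, B) pair.
    fn nth(s: &str) -> (i32, i32) {
        let mut input = ParserInput::new(s);
        let mut parser = Parser::new(&mut input);
        parse_nth(&mut parser).unwrap()
    }

    #[test]
    fn an_plus_b_examples() {
        assert_eq!(nth("odd"), (2, 1));  // every odd child is 2n+1
        assert_eq!(nth("even"), (2, 0)); // every even child is 2n
        assert_eq!(nth("3n+2"), (3, 2)); // dimension followed by a signed number
        assert_eq!(nth("5"), (0, 5));    // a bare integer is (0, B)
    }
}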
error.rs
|
use front::stdlib::object::PROTOTYPE;
use front::stdlib::value::{Value, ResultValue, to_value};
use front::stdlib::function::Function;
/// Create a new error
pub fn make_error(args:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue {
if args.len() >= 1 {
this.set_field("message", args[0]);
}
Ok(Value::undefined())
}
/// Get the string representation of the error
pub fn
|
(_:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue {
let name = this.get_field("name");
let message = this.get_field("message");
Ok(to_value(format!("{}: {}", name, message).into_string()))
}
/// Create a new `Error` object
pub fn _create(global: Value) -> Value {
let prototype = js!(global, {
"message": "",
"name": "Error",
"toString": Function::make(to_string, [])
});
let error = Function::make(make_error, ["message"]);
error.set_field(PROTOTYPE, prototype);
error
}
/// Initialise the global object with the `Error` object
pub fn init(global:Value) {
js_extend!(global, {
"Error": _create(global)
});
}
|
to_string
|
identifier_name
|
error.rs
|
use front::stdlib::object::PROTOTYPE;
use front::stdlib::value::{Value, ResultValue, to_value};
use front::stdlib::function::Function;
/// Create a new error
pub fn make_error(args:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue {
if args.len() >= 1 {
this.set_field("message", args[0]);
}
Ok(Value::undefined())
}
/// Get the string representation of the error
pub fn to_string(_:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue
|
/// Create a new `Error` object
pub fn _create(global: Value) -> Value {
let prototype = js!(global, {
"message": "",
"name": "Error",
"toString": Function::make(to_string, [])
});
let error = Function::make(make_error, ["message"]);
error.set_field(PROTOTYPE, prototype);
error
}
/// Initialise the global object with the `Error` object
pub fn init(global:Value) {
js_extend!(global, {
"Error": _create(global)
});
}
|
{
let name = this.get_field("name");
let message = this.get_field("message");
Ok(to_value(format!("{}: {}", name, message).into_string()))
}
|
identifier_body
|
error.rs
|
use front::stdlib::object::PROTOTYPE;
use front::stdlib::value::{Value, ResultValue, to_value};
use front::stdlib::function::Function;
/// Create a new error
pub fn make_error(args:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue {
if args.len() >= 1
|
Ok(Value::undefined())
}
/// Get the string representation of the error
pub fn to_string(_:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue {
let name = this.get_field("name");
let message = this.get_field("message");
Ok(to_value(format!("{}: {}", name, message).into_string()))
}
/// Create a new `Error` object
pub fn _create(global: Value) -> Value {
let prototype = js!(global, {
"message": "",
"name": "Error",
"toString": Function::make(to_string, [])
});
let error = Function::make(make_error, ["message"]);
error.set_field(PROTOTYPE, prototype);
error
}
/// Initialise the global object with the `Error` object
pub fn init(global:Value) {
js_extend!(global, {
"Error": _create(global)
});
}
|
{
this.set_field("message", args[0]);
}
|
conditional_block
|
error.rs
|
use front::stdlib::object::PROTOTYPE;
use front::stdlib::value::{Value, ResultValue, to_value};
use front::stdlib::function::Function;
|
/// Create a new error
pub fn make_error(args:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue {
if args.len() >= 1 {
this.set_field("message", args[0]);
}
Ok(Value::undefined())
}
/// Get the string representation of the error
pub fn to_string(_:Vec<Value>, _:Value, _:Value, this:Value) -> ResultValue {
let name = this.get_field("name");
let message = this.get_field("message");
Ok(to_value(format!("{}: {}", name, message).into_string()))
}
/// Create a new `Error` object
pub fn _create(global: Value) -> Value {
let prototype = js!(global, {
"message": "",
"name": "Error",
"toString": Function::make(to_string, [])
});
let error = Function::make(make_error, ["message"]);
error.set_field(PROTOTYPE, prototype);
error
}
/// Initialise the global object with the `Error` object
pub fn init(global:Value) {
js_extend!(global, {
"Error": _create(global)
});
}
|
random_line_split
|
|
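to_string above always renders an error as "name: message". A tiny self-contained sketch of that formatting contract in plain Rust, without the engine's Value type; render_error is a hypothetical helper, not part of the module.

// Illustrative only: mirrors the format! call in to_string above.
fn render_error(name: &str, message: &str) -> String {
    format!("{}: {}", name, message)
}

fn main() {
    assert_eq!(render_error("Error", "something went wrong"),
               "Error: something went wrong");
    println!("{}", render_error("Error", "something went wrong"));
}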
main.rs
|
extern crate libc;
extern crate llvm_sys;
extern crate argparse;
extern crate typed_arena;
macro_rules! cstr {
($s:expr) => (
concat!($s, "\0").as_ptr() as *const ::libc::c_char
)
}
macro_rules! fl {
() => ((file!(), line!()));
    (line: $l:expr) => ((file!(), $l));
}
enum
|
<L, R> {
Left(L),
Right(R),
}
// TODO: unify parameter ordering
mod parse;
mod ast;
mod ty;
mod mir;
use parse::Lexer;
use ast::Ast;
fn main() {
use std::io::Read;
let mut name = "".to_owned();
let mut output = None;
let mut print_mir = false;
let mut print_llir = false;
let mut opt = false;
{
use argparse::{ArgumentParser, Store, StoreOption, StoreTrue};
let mut ap = ArgumentParser::new();
ap.set_description("The syavac compiler for the syava language.\n\
Written in Rust.");
ap.refer(&mut name).required().add_argument("name", Store, "The file to compile");
ap.refer(&mut output).add_option(&["-o", "--output"], StoreOption, "The file to output to");
ap.refer(&mut print_mir).add_option(&["--print-mir"],
StoreTrue,
"Pass if you would like to print the generated MIR");
ap.refer(&mut print_llir).add_option(&["--print-llir"],
StoreTrue,
"Pass if you would like to print the generated LLVM \
IR");
ap.refer(&mut opt).add_option(&["--opt", "-O"],
StoreTrue,
"Pass if you would like to optimize the generated LLVM IR");
ap.parse_args_or_exit();
}
let output = output.unwrap_or(get_output_from_name(&name));
let mut file = Vec::new();
std::fs::File::open(&name)
.expect(&name)
.read_to_end(&mut file)
.unwrap();
let file = String::from_utf8(file).unwrap();
let lexer = Lexer::new(&file);
let tyctxt = ty::TypeContext::new();
let ast = match Ast::create(lexer, &tyctxt) {
Ok(ast) => ast,
Err(e) => panic!("\n{:#?}", e),
};
let mir = match ast.typeck(opt) {
Ok(mir) => mir,
Err(e) => panic!("\n{:#?}", e),
};
if print_mir {
println!("{}", mir);
}
mir.build_and_write(&output, print_llir)
}
// TODO: take off the ".sva" of the input file
fn get_output_from_name(name: &str) -> String {
format!("{}.s", name)
}
|
Either
|
identifier_name
|
main.rs
|
extern crate libc;
extern crate llvm_sys;
extern crate argparse;
extern crate typed_arena;
macro_rules! cstr {
($s:expr) => (
concat!($s, "\0").as_ptr() as *const ::libc::c_char
)
}
macro_rules! fl {
() => ((file!(), line!()));
    (line: $l:expr) => ((file!(), $l));
}
enum Either<L, R> {
Left(L),
Right(R),
}
// TODO: unify parameter ordering
mod parse;
mod ast;
mod ty;
mod mir;
use parse::Lexer;
use ast::Ast;
fn main() {
use std::io::Read;
let mut name = "".to_owned();
let mut output = None;
let mut print_mir = false;
let mut print_llir = false;
let mut opt = false;
{
use argparse::{ArgumentParser, Store, StoreOption, StoreTrue};
let mut ap = ArgumentParser::new();
ap.set_description("The syavac compiler for the syava language.\n\
Written in Rust.");
ap.refer(&mut name).required().add_argument("name", Store, "The file to compile");
ap.refer(&mut output).add_option(&["-o", "--output"], StoreOption, "The file to output to");
ap.refer(&mut print_mir).add_option(&["--print-mir"],
StoreTrue,
"Pass if you would like to print the generated MIR");
ap.refer(&mut print_llir).add_option(&["--print-llir"],
StoreTrue,
"Pass if you would like to print the generated LLVM \
IR");
ap.refer(&mut opt).add_option(&["--opt", "-O"],
StoreTrue,
"Pass if you would like to optimize the generated LLVM IR");
ap.parse_args_or_exit();
}
let output = output.unwrap_or(get_output_from_name(&name));
let mut file = Vec::new();
std::fs::File::open(&name)
.expect(&name)
.read_to_end(&mut file)
.unwrap();
let file = String::from_utf8(file).unwrap();
let lexer = Lexer::new(&file);
let tyctxt = ty::TypeContext::new();
let ast = match Ast::create(lexer, &tyctxt) {
Ok(ast) => ast,
Err(e) => panic!("\n{:#?}", e),
};
let mir = match ast.typeck(opt) {
Ok(mir) => mir,
Err(e) => panic!("\n{:#?}", e),
};
if print_mir {
println!("{}", mir);
}
mir.build_and_write(&output, print_llir)
}
// TODO: take off the ".sva" of the input file
fn get_output_from_name(name: &str) -> String
|
{
format!("{}.s", name)
}
|
identifier_body
|
|
main.rs
|
extern crate libc;
extern crate llvm_sys;
extern crate argparse;
extern crate typed_arena;
macro_rules! cstr {
($s:expr) => (
concat!($s, "\0").as_ptr() as *const ::libc::c_char
)
}
macro_rules! fl {
() => ((file!(), line!()));
(line:$expr) => ((file!(), line));
}
enum Either<L, R> {
Left(L),
Right(R),
}
// TODO: unify parameter ordering
mod parse;
mod ast;
mod ty;
mod mir;
use parse::Lexer;
use ast::Ast;
fn main() {
use std::io::Read;
let mut name = "".to_owned();
let mut output = None;
let mut print_mir = false;
let mut print_llir = false;
let mut opt = false;
{
use argparse::{ArgumentParser, Store, StoreOption, StoreTrue};
let mut ap = ArgumentParser::new();
ap.set_description("The syavac compiler for the syava language.\n\
Written in Rust.");
ap.refer(&mut name).required().add_argument("name", Store, "The file to compile");
ap.refer(&mut output).add_option(&["-o", "--output"], StoreOption, "The file to output to");
ap.refer(&mut print_mir).add_option(&["--print-mir"],
StoreTrue,
"Pass if you would like to print the generated MIR");
ap.refer(&mut print_llir).add_option(&["--print-llir"],
StoreTrue,
"Pass if you would like to print the generated LLVM \
IR");
ap.refer(&mut opt).add_option(&["--opt", "-O"],
StoreTrue,
"Pass if you would like to optimize the generated LLVM IR");
ap.parse_args_or_exit();
}
let output = output.unwrap_or(get_output_from_name(&name));
let mut file = Vec::new();
std::fs::File::open(&name)
.expect(&name)
.read_to_end(&mut file)
.unwrap();
let file = String::from_utf8(file).unwrap();
let lexer = Lexer::new(&file);
let tyctxt = ty::TypeContext::new();
let ast = match Ast::create(lexer, &tyctxt) {
Ok(ast) => ast,
Err(e) => panic!("\n{:#?}", e),
};
let mir = match ast.typeck(opt) {
Ok(mir) => mir,
Err(e) => panic!("\n{:#?}", e),
};
if print_mir {
println!("{}", mir);
}
|
mir.build_and_write(&output, print_llir)
}
// TODO: take off the ".sva" of the input file
fn get_output_from_name(name: &str) -> String {
format!("{}.s", name)
}
|
random_line_split
|
|
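The compiler driver above wires its flags through the argparse crate. A minimal sketch of that argument-handling pattern, restricted to the argparse calls that already appear in main() above; the description string and flag set are illustrative.

extern crate argparse;

use argparse::{ArgumentParser, Store, StoreTrue};

fn main() {
    let mut name = "".to_owned();
    let mut print_mir = false;
    {
        // Scope the parser so its mutable borrows of `name` and `print_mir` end here.
        let mut ap = ArgumentParser::new();
        ap.set_description("Minimal argparse sketch.");
        ap.refer(&mut name)
            .required()
            .add_argument("name", Store, "The file to compile");
        ap.refer(&mut print_mir)
            .add_option(&["--print-mir"], StoreTrue, "Print the generated MIR");
        ap.parse_args_or_exit();
    }
    println!("input: {}, print_mir: {}", name, print_mir);
}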
true.rs
|
// * This file is part of the uutils coreutils package.
// *
// * (c) Jordi Boggiano <[email protected]>
// *
// * For the full copyright and license information, please view the LICENSE
// * file that was distributed with this source code.
use clap::{App, AppSettings, Arg};
use std::io::Write;
use uucore::error::{set_exit_code, UResult};
static ABOUT: &str = "\
Returns true, a successful exit status.
Immediately returns with the exit status `0`, except when invoked with one of the recognized
options. In those cases it will try to write the help or version text. Any IO error during this
operation causes the program to return `1` instead.
";
#[uucore::main]
pub fn
|
(args: impl uucore::Args) -> UResult<()> {
let mut app = uu_app();
if let Ok(matches) = app.try_get_matches_from_mut(args) {
let error = if matches.index_of("help").is_some() {
app.print_long_help()
} else if matches.index_of("version").is_some() {
writeln!(std::io::stdout(), "{}", app.render_version())
} else {
Ok(())
};
if let Err(print_fail) = error {
// Try to display this error.
let _ = writeln!(std::io::stderr(), "{}: {}", uucore::util_name(), print_fail);
            // Mirror GNU options. When we fail to print the help or version text, we exit
            // with FAIL. This avoids allocating some error information which may result in yet
            // other types of failure.
set_exit_code(1);
}
}
Ok(())
}
pub fn uu_app<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(clap::crate_version!())
.about(ABOUT)
// We provide our own help and version options, to ensure maximum compatibility with GNU.
.setting(AppSettings::DisableHelpFlag | AppSettings::DisableVersionFlag)
.arg(
Arg::new("help")
.long("help")
.help("Print help information")
.exclusive(true),
)
.arg(
Arg::new("version")
.long("version")
.help("Print version information"),
)
}
|
uumain
|
identifier_name
|
true.rs
|
// * This file is part of the uutils coreutils package.
// *
// * (c) Jordi Boggiano <[email protected]>
// *
// * For the full copyright and license information, please view the LICENSE
// * file that was distributed with this source code.
use clap::{App, AppSettings, Arg};
use std::io::Write;
use uucore::error::{set_exit_code, UResult};
static ABOUT: &str = "\
Returns true, a successful exit status.
Immediately returns with the exit status `0`, except when invoked with one of the recognized
options. In those cases it will try to write the help or version text. Any IO error during this
operation causes the program to return `1` instead.
";
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let mut app = uu_app();
if let Ok(matches) = app.try_get_matches_from_mut(args) {
let error = if matches.index_of("help").is_some() {
app.print_long_help()
} else if matches.index_of("version").is_some() {
writeln!(std::io::stdout(), "{}", app.render_version())
} else {
Ok(())
};
if let Err(print_fail) = error {
// Try to display this error.
let _ = writeln!(std::io::stderr(), "{}: {}", uucore::util_name(), print_fail);
            // Mirror GNU options. When we fail to print the help or version text, we exit
            // with FAIL. This avoids allocating some error information which may result in yet
            // other types of failure.
set_exit_code(1);
}
}
Ok(())
}
pub fn uu_app<'a>() -> App<'a>
|
{
App::new(uucore::util_name())
.version(clap::crate_version!())
.about(ABOUT)
// We provide our own help and version options, to ensure maximum compatibility with GNU.
.setting(AppSettings::DisableHelpFlag | AppSettings::DisableVersionFlag)
.arg(
Arg::new("help")
.long("help")
.help("Print help information")
.exclusive(true),
)
.arg(
Arg::new("version")
.long("version")
.help("Print version information"),
)
}
|
identifier_body
|
|
true.rs
|
// * This file is part of the uutils coreutils package.
// *
// * (c) Jordi Boggiano <[email protected]>
// *
// * For the full copyright and license information, please view the LICENSE
// * file that was distributed with this source code.
use clap::{App, AppSettings, Arg};
use std::io::Write;
use uucore::error::{set_exit_code, UResult};
static ABOUT: &str = "\
Returns true, a successful exit status.
Immediately returns with the exit status `0`, except when invoked with one of the recognized
options. In those cases it will try to write the help or version text. Any IO error during this
operation causes the program to return `1` instead.
";
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let mut app = uu_app();
if let Ok(matches) = app.try_get_matches_from_mut(args) {
let error = if matches.index_of("help").is_some() {
app.print_long_help()
} else if matches.index_of("version").is_some()
|
else {
Ok(())
};
if let Err(print_fail) = error {
// Try to display this error.
let _ = writeln!(std::io::stderr(), "{}: {}", uucore::util_name(), print_fail);
            // Mirror GNU options. When we fail to print the help or version text, we exit
            // with FAIL. This avoids allocating some error information which may result in yet
            // other types of failure.
set_exit_code(1);
}
}
Ok(())
}
pub fn uu_app<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(clap::crate_version!())
.about(ABOUT)
// We provide our own help and version options, to ensure maximum compatibility with GNU.
.setting(AppSettings::DisableHelpFlag | AppSettings::DisableVersionFlag)
.arg(
Arg::new("help")
.long("help")
.help("Print help information")
.exclusive(true),
)
.arg(
Arg::new("version")
.long("version")
.help("Print version information"),
)
}
|
{
writeln!(std::io::stdout(), "{}", app.render_version())
}
|
conditional_block
|
true.rs
|
// * This file is part of the uutils coreutils package.
// *
// * (c) Jordi Boggiano <[email protected]>
// *
// * For the full copyright and license information, please view the LICENSE
// * file that was distributed with this source code.
use clap::{App, AppSettings, Arg};
use std::io::Write;
use uucore::error::{set_exit_code, UResult};
static ABOUT: &str = "\
Returns true, a successful exit status.
Immediately returns with the exit status `0`, except when invoked with one of the recognized
options. In those cases it will try to write the help or version text. Any IO error during this
operation causes the program to return `1` instead.
";
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let mut app = uu_app();
|
if let Ok(matches) = app.try_get_matches_from_mut(args) {
let error = if matches.index_of("help").is_some() {
app.print_long_help()
} else if matches.index_of("version").is_some() {
writeln!(std::io::stdout(), "{}", app.render_version())
} else {
Ok(())
};
if let Err(print_fail) = error {
// Try to display this error.
let _ = writeln!(std::io::stderr(), "{}: {}", uucore::util_name(), print_fail);
            // Mirror GNU options. When we fail to print the help or version text, we exit
            // with FAIL. This avoids allocating some error information which may result in yet
            // other types of failure.
set_exit_code(1);
}
}
Ok(())
}
pub fn uu_app<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(clap::crate_version!())
.about(ABOUT)
// We provide our own help and version options, to ensure maximum compatibility with GNU.
.setting(AppSettings::DisableHelpFlag | AppSettings::DisableVersionFlag)
.arg(
Arg::new("help")
.long("help")
.help("Print help information")
.exclusive(true),
)
.arg(
Arg::new("version")
.long("version")
.help("Print version information"),
)
}
|
random_line_split
|
|
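The interesting part of uumain above is the error path: a failed attempt to print the help or version text becomes exit status 1. A self-contained sketch of that policy using only std; uucore's set_exit_code is replaced here by std::process::exit, which is an assumption of this sketch rather than what the utility itself does.

use std::io::Write;

fn main() {
    // Stand-in for the attempt to print help or version text.
    let print_result = writeln!(std::io::stdout(), "true (sketch)");
    if let Err(print_fail) = print_result {
        // Mirror the policy above: report the write failure and exit with 1.
        let _ = writeln!(std::io::stderr(), "true: {}", print_fail);
        std::process::exit(1);
    }
}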
x86.rs
|
#![allow(non_upper_case_globals)]
pub use self::x86_shared::*;
use core::mem::size_of;
mod x86_shared;
bitflags! {
flags GdtAccess: u8 {
const Accessed = 1 << 0,
const Writable = 1 << 1,
const Direction = 1 << 2,
const Executable = 1 << 3,
const NotTss = 1 << 4,
}
}
#[derive(Copy, Clone, Debug)]
#[repr(C, packed)]
pub struct GdtEntry {
limit: u16,
base1: u16,
base2: u8,
access: u8,
flags: u8,
base3: u8,
}
#[derive(Copy, Clone)]
#[repr(C, packed)]
pub struct IdtEntry {
offset1: u16,
selector: u16,
reserved: u8,
flags: u8,
offset2: u16
}
impl GdtEntry {
pub const NULL: GdtEntry = GdtEntry {
base1: 0,
base2: 0,
base3: 0,
access: 0,
limit: 0,
flags: 0
};
pub fn new(base: *const (), limit: usize, access: GdtAccess, dpl: PrivilegeLevel) -> GdtEntry {
let (limit, flags) = if limit < 0x100000
|
else {
if ((limit - 0xFFF) & 0xFFF) > 0 {
panic!("bad segment limit for GDT entry");
}
(((limit & 0xFFFF000) >> 12) as u16, ((limit & 0xF0000000) >> 28) as u8 | 0xC0u8)
};
GdtEntry {
base1: base as u16,
base2: ((base as usize & 0xFF0000) >> 16) as u8,
base3: ((base as usize & 0xFF000000) >> 24) as u8,
access: access.bits() | ((dpl as u8) << 5) | 0x80,
limit: limit,
flags: flags
}
}
}
impl IdtEntry {
pub const NULL: IdtEntry = IdtEntry {
offset1: 0,
selector: 0,
reserved: 0,
flags: 0,
offset2: 0
};
pub fn new(f: unsafe extern "C" fn(), dpl: PrivilegeLevel, block: bool) -> IdtEntry {
IdtEntry {
offset1: f as u16,
offset2: ((f as usize & 0xFFFF0000) >> 16) as u16,
selector: 8,
reserved: 0,
flags: if block { 0x8E } else { 0x8F } | ((dpl as u8) << 5)
}
}
}
#[derive(Copy, Clone, Debug)]
#[repr(C, packed)]
pub struct Tss {
pub link: u16,
reserved0: u16,
pub esp0: u32,
pub ss0: u16,
reserved1: u16,
pub esp1: u32,
pub ss1: u16,
reserved2: u16,
pub esp2: u32,
pub ss2: u16,
reserved3: u16,
pub cr3: u32,
pub eip: u32,
pub eflags: u32,
pub eax: u32,
pub ecx: u32,
pub edx: u32,
pub ebx: u32,
pub esp: u32,
pub ebp: u32,
pub esi: u32,
pub edi: u32,
pub es: u16,
reserved4: u16,
pub cs: u16,
reserved5: u16,
pub ss: u16,
reserved6: u16,
pub ds: u16,
reserved7: u16,
pub fs: u16,
reserved8: u16,
pub gs: u16,
reserved9: u16,
pub ldtr: u16,
reserved10: u32,
pub iobp_offset: u16
}
impl Tss {
pub fn new() -> Tss {
Tss {
link: 0,
reserved0: 0,
esp0: 0,
ss0: 0,
reserved1: 0,
esp1: 0,
ss1: 0,
reserved2: 0,
esp2: 0,
ss2: 0,
reserved3: 0,
cr3: 0,
eip: 0,
eflags: 0,
eax: 0,
ecx: 0,
edx: 0,
ebx: 0,
esp: 0,
ebp: 0,
esi: 0,
edi: 0,
es: 0,
reserved4: 0,
cs: 0,
reserved5: 0,
ss: 0,
reserved6: 0,
ds: 0,
reserved7: 0,
fs: 0,
reserved8: 0,
gs: 0,
reserved9: 0,
ldtr: 0,
reserved10: 0,
iobp_offset: size_of::<Tss>() as u16
}
}
}
#[inline(always)]
pub fn get_flags() -> Flags {
unsafe {
let mut r: usize;
asm!("pushfd; pop $0" : "=r"(r) ::: "intel");
Flags::from_bits_truncate(r)
}
}
#[inline(always)]
pub unsafe fn set_flags(val: Flags) {
asm!("push $0; popfd" :: "r"(val.bits()) : "flags" : "volatile", "intel");
}
#[inline(always)]
pub unsafe fn set_gdt(gdt: &[GdtEntry]) {
#[repr(C, packed)]
struct GDTR {
limit: u16,
ptr: *const GdtEntry,
}
asm!("lgdtl $0" :: "*m"(&GDTR { ptr: gdt.as_ptr(), limit: (gdt.len()*size_of::<GdtEntry>() - 1) as u16 }) :: "volatile");
}
#[inline(always)]
pub unsafe fn set_idt(idt: &[IdtEntry]) {
#[repr(C, packed)]
struct IDTR {
limit: u16,
ptr: *const IdtEntry,
}
asm!("lidtl $0" :: "*m"(&IDTR { ptr: idt.as_ptr(), limit: idt.len() as u16 * 8 }) :: "volatile");
}
#[inline(always)]
pub unsafe fn stack_jmp(stack: *mut (), ip: *const ()) -> ! {
asm!("mov esp, $0; jmp $1" :: "rg"(stack), "r"(ip) :: "volatile", "intel");
loop { }
}
|
{
((limit & 0xFFFF) as u16, ((limit & 0xF0000) >> 16) as u8 | 0x40u8)
}
|
conditional_block
|
x86.rs
|
#![allow(non_upper_case_globals)]
pub use self::x86_shared::*;
use core::mem::size_of;
mod x86_shared;
bitflags! {
flags GdtAccess: u8 {
const Accessed = 1 << 0,
const Writable = 1 << 1,
const Direction = 1 << 2,
const Executable = 1 << 3,
const NotTss = 1 << 4,
}
}
#[derive(Copy, Clone, Debug)]
#[repr(C, packed)]
pub struct GdtEntry {
limit: u16,
base1: u16,
base2: u8,
access: u8,
flags: u8,
base3: u8,
}
#[derive(Copy, Clone)]
#[repr(C, packed)]
pub struct IdtEntry {
offset1: u16,
selector: u16,
reserved: u8,
flags: u8,
offset2: u16
}
impl GdtEntry {
pub const NULL: GdtEntry = GdtEntry {
base1: 0,
base2: 0,
base3: 0,
access: 0,
limit: 0,
flags: 0
};
pub fn new(base: *const (), limit: usize, access: GdtAccess, dpl: PrivilegeLevel) -> GdtEntry {
let (limit, flags) = if limit < 0x100000 {
((limit & 0xFFFF) as u16, ((limit & 0xF0000) >> 16) as u8 | 0x40u8)
} else {
if ((limit - 0xFFF) & 0xFFF) > 0 {
panic!("bad segment limit for GDT entry");
}
(((limit & 0xFFFF000) >> 12) as u16, ((limit & 0xF0000000) >> 28) as u8 | 0xC0u8)
};
GdtEntry {
base1: base as u16,
base2: ((base as usize & 0xFF0000) >> 16) as u8,
base3: ((base as usize & 0xFF000000) >> 24) as u8,
access: access.bits() | ((dpl as u8) << 5) | 0x80,
limit: limit,
flags: flags
}
}
}
impl IdtEntry {
pub const NULL: IdtEntry = IdtEntry {
offset1: 0,
selector: 0,
reserved: 0,
flags: 0,
offset2: 0
};
pub fn new(f: unsafe extern "C" fn(), dpl: PrivilegeLevel, block: bool) -> IdtEntry {
IdtEntry {
offset1: f as u16,
offset2: ((f as usize & 0xFFFF0000) >> 16) as u16,
selector: 8,
reserved: 0,
flags: if block { 0x8E } else { 0x8F } | ((dpl as u8) << 5)
}
}
}
#[derive(Copy, Clone, Debug)]
#[repr(C, packed)]
pub struct Tss {
pub link: u16,
reserved0: u16,
pub esp0: u32,
pub ss0: u16,
reserved1: u16,
pub esp1: u32,
pub ss1: u16,
reserved2: u16,
pub esp2: u32,
pub ss2: u16,
|
pub eax: u32,
pub ecx: u32,
pub edx: u32,
pub ebx: u32,
pub esp: u32,
pub ebp: u32,
pub esi: u32,
pub edi: u32,
pub es: u16,
reserved4: u16,
pub cs: u16,
reserved5: u16,
pub ss: u16,
reserved6: u16,
pub ds: u16,
reserved7: u16,
pub fs: u16,
reserved8: u16,
pub gs: u16,
reserved9: u16,
pub ldtr: u16,
reserved10: u32,
pub iobp_offset: u16
}
impl Tss {
pub fn new() -> Tss {
Tss {
link: 0,
reserved0: 0,
esp0: 0,
ss0: 0,
reserved1: 0,
esp1: 0,
ss1: 0,
reserved2: 0,
esp2: 0,
ss2: 0,
reserved3: 0,
cr3: 0,
eip: 0,
eflags: 0,
eax: 0,
ecx: 0,
edx: 0,
ebx: 0,
esp: 0,
ebp: 0,
esi: 0,
edi: 0,
es: 0,
reserved4: 0,
cs: 0,
reserved5: 0,
ss: 0,
reserved6: 0,
ds: 0,
reserved7: 0,
fs: 0,
reserved8: 0,
gs: 0,
reserved9: 0,
ldtr: 0,
reserved10: 0,
iobp_offset: size_of::<Tss>() as u16
}
}
}
#[inline(always)]
pub fn get_flags() -> Flags {
unsafe {
let mut r: usize;
asm!("pushfd; pop $0" : "=r"(r) ::: "intel");
Flags::from_bits_truncate(r)
}
}
#[inline(always)]
pub unsafe fn set_flags(val: Flags) {
asm!("push $0; popfd" :: "r"(val.bits()) : "flags" : "volatile", "intel");
}
#[inline(always)]
pub unsafe fn set_gdt(gdt: &[GdtEntry]) {
#[repr(C, packed)]
struct GDTR {
limit: u16,
ptr: *const GdtEntry,
}
asm!("lgdtl $0" :: "*m"(&GDTR { ptr: gdt.as_ptr(), limit: (gdt.len()*size_of::<GdtEntry>() - 1) as u16 }) :: "volatile");
}
#[inline(always)]
pub unsafe fn set_idt(idt: &[IdtEntry]) {
#[repr(C, packed)]
struct IDTR {
limit: u16,
ptr: *const IdtEntry,
}
asm!("lidtl $0" :: "*m"(&IDTR { ptr: idt.as_ptr(), limit: idt.len() as u16 * 8 }) :: "volatile");
}
#[inline(always)]
pub unsafe fn stack_jmp(stack: *mut (), ip: *const ()) -> ! {
asm!("mov esp, $0; jmp $1" :: "rg"(stack), "r"(ip) :: "volatile", "intel");
loop { }
}
|
reserved3: u16,
pub cr3: u32,
pub eip: u32,
pub eflags: u32,
|
random_line_split
|
x86.rs
|
#![allow(non_upper_case_globals)]
pub use self::x86_shared::*;
use core::mem::size_of;
mod x86_shared;
bitflags! {
flags GdtAccess: u8 {
const Accessed = 1 << 0,
const Writable = 1 << 1,
const Direction = 1 << 2,
const Executable = 1 << 3,
const NotTss = 1 << 4,
}
}
#[derive(Copy, Clone, Debug)]
#[repr(C, packed)]
pub struct GdtEntry {
limit: u16,
base1: u16,
base2: u8,
access: u8,
flags: u8,
base3: u8,
}
#[derive(Copy, Clone)]
#[repr(C, packed)]
pub struct IdtEntry {
offset1: u16,
selector: u16,
reserved: u8,
flags: u8,
offset2: u16
}
impl GdtEntry {
pub const NULL: GdtEntry = GdtEntry {
base1: 0,
base2: 0,
base3: 0,
access: 0,
limit: 0,
flags: 0
};
pub fn new(base: *const (), limit: usize, access: GdtAccess, dpl: PrivilegeLevel) -> GdtEntry {
let (limit, flags) = if limit < 0x100000 {
((limit & 0xFFFF) as u16, ((limit & 0xF0000) >> 16) as u8 | 0x40u8)
} else {
if ((limit - 0xFFF) & 0xFFF) > 0 {
panic!("bad segment limit for GDT entry");
}
(((limit & 0xFFFF000) >> 12) as u16, ((limit & 0xF0000000) >> 28) as u8 | 0xC0u8)
};
GdtEntry {
base1: base as u16,
base2: ((base as usize & 0xFF0000) >> 16) as u8,
base3: ((base as usize & 0xFF000000) >> 24) as u8,
access: access.bits() | ((dpl as u8) << 5) | 0x80,
limit: limit,
flags: flags
}
}
}
impl IdtEntry {
pub const NULL: IdtEntry = IdtEntry {
offset1: 0,
selector: 0,
reserved: 0,
flags: 0,
offset2: 0
};
pub fn new(f: unsafe extern "C" fn(), dpl: PrivilegeLevel, block: bool) -> IdtEntry {
IdtEntry {
offset1: f as u16,
offset2: ((f as usize & 0xFFFF0000) >> 16) as u16,
selector: 8,
reserved: 0,
flags: if block { 0x8E } else { 0x8F } | ((dpl as u8) << 5)
}
}
}
#[derive(Copy, Clone, Debug)]
#[repr(C, packed)]
pub struct Tss {
pub link: u16,
reserved0: u16,
pub esp0: u32,
pub ss0: u16,
reserved1: u16,
pub esp1: u32,
pub ss1: u16,
reserved2: u16,
pub esp2: u32,
pub ss2: u16,
reserved3: u16,
pub cr3: u32,
pub eip: u32,
pub eflags: u32,
pub eax: u32,
pub ecx: u32,
pub edx: u32,
pub ebx: u32,
pub esp: u32,
pub ebp: u32,
pub esi: u32,
pub edi: u32,
pub es: u16,
reserved4: u16,
pub cs: u16,
reserved5: u16,
pub ss: u16,
reserved6: u16,
pub ds: u16,
reserved7: u16,
pub fs: u16,
reserved8: u16,
pub gs: u16,
reserved9: u16,
pub ldtr: u16,
reserved10: u32,
pub iobp_offset: u16
}
impl Tss {
pub fn new() -> Tss {
Tss {
link: 0,
reserved0: 0,
esp0: 0,
ss0: 0,
reserved1: 0,
esp1: 0,
ss1: 0,
reserved2: 0,
esp2: 0,
ss2: 0,
reserved3: 0,
cr3: 0,
eip: 0,
eflags: 0,
eax: 0,
ecx: 0,
edx: 0,
ebx: 0,
esp: 0,
ebp: 0,
esi: 0,
edi: 0,
es: 0,
reserved4: 0,
cs: 0,
reserved5: 0,
ss: 0,
reserved6: 0,
ds: 0,
reserved7: 0,
fs: 0,
reserved8: 0,
gs: 0,
reserved9: 0,
ldtr: 0,
reserved10: 0,
iobp_offset: size_of::<Tss>() as u16
}
}
}
#[inline(always)]
pub fn get_flags() -> Flags {
unsafe {
let mut r: usize;
asm!("pushfd; pop $0" : "=r"(r) ::: "intel");
Flags::from_bits_truncate(r)
}
}
#[inline(always)]
pub unsafe fn set_flags(val: Flags) {
asm!("push $0; popfd" :: "r"(val.bits()) : "flags" : "volatile", "intel");
}
#[inline(always)]
pub unsafe fn set_gdt(gdt: &[GdtEntry]) {
#[repr(C, packed)]
struct GDTR {
limit: u16,
ptr: *const GdtEntry,
}
asm!("lgdtl $0" :: "*m"(&GDTR { ptr: gdt.as_ptr(), limit: (gdt.len()*size_of::<GdtEntry>() - 1) as u16 }) :: "volatile");
}
#[inline(always)]
pub unsafe fn set_idt(idt: &[IdtEntry]) {
#[repr(C, packed)]
struct
|
{
limit: u16,
ptr: *const IdtEntry,
}
asm!("lidtl $0" :: "*m"(&IDTR { ptr: idt.as_ptr(), limit: idt.len() as u16 * 8 }) :: "volatile");
}
#[inline(always)]
pub unsafe fn stack_jmp(stack: *mut (), ip: *const ()) -> ! {
asm!("mov esp, $0; jmp $1" :: "rg"(stack), "r"(ip) :: "volatile", "intel");
loop { }
}
|
IDTR
|
identifier_name
|
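GdtEntry::new above switches from byte to 4 KiB page granularity once the limit no longer fits in 20 bits. A standalone sketch of just that limit/flags encoding, lifted from the branch in new(); encode_limit is a hypothetical helper used only for illustration.

// Sketch of the limit/flags encoding used by GdtEntry::new above.
// Returns (limit field, flags byte carrying limit[19:16] plus the G/D bits).
fn encode_limit(limit: usize) -> (u16, u8) {
    if limit < 0x100000 {
        // Byte granularity: 16 low bits in the limit field, bits 19:16 plus D (0x40).
        ((limit & 0xFFFF) as u16, ((limit & 0xF0000) >> 16) as u8 | 0x40u8)
    } else {
        // Page granularity: drop the low 12 bits, set G + D (0xC0).
        assert!(((limit - 0xFFF) & 0xFFF) == 0, "bad segment limit for GDT entry");
        (((limit & 0xFFFF000) >> 12) as u16, ((limit & 0xF0000000) >> 28) as u8 | 0xC0u8)
    }
}

fn main() {
    // A flat 4 GiB segment encodes as limit 0xFFFF with granularity and D set.
    assert_eq!(encode_limit(0xFFFF_FFFF), (0xFFFF, 0xCF));
    // A 64 KiB segment stays byte-granular.
    assert_eq!(encode_limit(0xFFFF), (0xFFFF, 0x40));
    println!("encodings ok");
}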
events_impl.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use exonum::blockchain::Schema;
use log::{info, trace, warn};
use std::mem;
use crate::{
events::{
Event, EventHandler, EventOutcome, InternalEvent, InternalEventInner, InternalRequest,
NetworkEvent,
},
ConnectListConfig, ExternalMessage, NodeHandler, NodeTimeout,
};
impl EventHandler for NodeHandler {
fn handle_event(&mut self, event: Event) -> EventOutcome
|
}
impl NodeHandler {
fn handle_internal_event(&mut self, event: InternalEvent) {
match event.0 {
InternalEventInner::Timeout(timeout) => self.handle_timeout(timeout),
InternalEventInner::JumpToRound(height, round) => self.handle_new_round(height, round),
InternalEventInner::MessageVerified(msg) => self.handle_message(*msg),
}
}
fn handle_network_event(&mut self, event: NetworkEvent) {
match event {
NetworkEvent::PeerConnected { addr, connect } => self.handle_connected(addr, *connect),
NetworkEvent::PeerDisconnected(peer) => self.handle_disconnected(peer),
NetworkEvent::UnableConnectToPeer(peer) => self.handle_unable_to_connect(peer),
NetworkEvent::MessageReceived(raw) => {
self.execute_later(InternalRequest::VerifyMessage(raw));
}
}
}
fn handle_api_event(&mut self, event: ExternalMessage) -> EventOutcome {
match event {
ExternalMessage::PeerAdd(info) => {
info!("Send Connect message to {}", info);
self.state.add_peer_to_connect_list(info.clone());
self.connect(info.public_key);
if let Some(ref mut config_manager) = self.config_manager {
let connect_list_config =
ConnectListConfig::from_connect_list(&self.state.connect_list());
config_manager.store_connect_list(connect_list_config);
}
}
ExternalMessage::Enable(value) => {
let s = if value { "enabled" } else { "disabled" };
if self.is_enabled == value {
info!("Node is already {}", s);
} else {
self.is_enabled = value;
self.api_state().set_enabled(value);
info!("The node is {} now", s);
if self.is_enabled {
self.request_next_block();
}
}
}
ExternalMessage::Shutdown => {
self.handle_shutdown();
return EventOutcome::Terminated;
}
}
EventOutcome::Ok
}
fn handle_timeout(&mut self, timeout: NodeTimeout) {
match timeout {
NodeTimeout::Round(epoch, round) => self.handle_round_timeout(epoch, round),
NodeTimeout::Request(data, peer) => self.handle_request_timeout(&data, peer),
NodeTimeout::Status(epoch) => self.handle_status_timeout(epoch),
NodeTimeout::PeerExchange => self.handle_peer_exchange_timeout(),
NodeTimeout::UpdateApiState => self.handle_update_api_state_timeout(),
NodeTimeout::Propose(epoch, round) => self.handle_propose_timeout(epoch, round),
NodeTimeout::FlushPool => {
self.flush_txs_into_pool();
self.maybe_add_flush_pool_timeout();
}
}
}
/// Schedule execution for later time.
pub(crate) fn execute_later(&mut self, event: InternalRequest) {
self.channel.internal_requests.send(event);
}
/// Shutdown current node.
pub(crate) fn handle_shutdown(&mut self) {
log::info!("Shutting down node handler");
// Flush transactions stored in tx_cache to persistent pool.
self.flush_txs_into_pool();
}
pub(crate) fn flush_txs_into_pool(&mut self) {
let tx_cache_size = self.state().tx_cache_len();
if tx_cache_size == 0 {
return;
}
trace!(
"Flushing {} transactions from cache to persistent pool",
tx_cache_size
);
let fork = self.blockchain.fork();
let mut schema = Schema::new(&fork);
for (_, tx) in mem::take(self.state.tx_cache_mut()) {
schema.add_transaction_into_pool(tx);
}
if self.blockchain.merge(fork.into_patch()).is_err() {
warn!("Failed to flush transactions from cache to persistent pool.");
}
}
}
|
{
match event {
Event::Network(network) => {
self.handle_network_event(network);
EventOutcome::Ok
}
Event::Transaction(tx) => {
self.handle_incoming_tx(tx);
EventOutcome::Ok
}
Event::Internal(internal) => {
self.handle_internal_event(internal);
EventOutcome::Ok
}
Event::Api(api) => self.handle_api_event(api),
}
}
|
identifier_body
|
events_impl.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use exonum::blockchain::Schema;
use log::{info, trace, warn};
use std::mem;
use crate::{
events::{
Event, EventHandler, EventOutcome, InternalEvent, InternalEventInner, InternalRequest,
NetworkEvent,
},
ConnectListConfig, ExternalMessage, NodeHandler, NodeTimeout,
};
impl EventHandler for NodeHandler {
fn
|
(&mut self, event: Event) -> EventOutcome {
match event {
Event::Network(network) => {
self.handle_network_event(network);
EventOutcome::Ok
}
Event::Transaction(tx) => {
self.handle_incoming_tx(tx);
EventOutcome::Ok
}
Event::Internal(internal) => {
self.handle_internal_event(internal);
EventOutcome::Ok
}
Event::Api(api) => self.handle_api_event(api),
}
}
}
impl NodeHandler {
fn handle_internal_event(&mut self, event: InternalEvent) {
match event.0 {
InternalEventInner::Timeout(timeout) => self.handle_timeout(timeout),
InternalEventInner::JumpToRound(height, round) => self.handle_new_round(height, round),
InternalEventInner::MessageVerified(msg) => self.handle_message(*msg),
}
}
fn handle_network_event(&mut self, event: NetworkEvent) {
match event {
NetworkEvent::PeerConnected { addr, connect } => self.handle_connected(addr, *connect),
NetworkEvent::PeerDisconnected(peer) => self.handle_disconnected(peer),
NetworkEvent::UnableConnectToPeer(peer) => self.handle_unable_to_connect(peer),
NetworkEvent::MessageReceived(raw) => {
self.execute_later(InternalRequest::VerifyMessage(raw));
}
}
}
fn handle_api_event(&mut self, event: ExternalMessage) -> EventOutcome {
match event {
ExternalMessage::PeerAdd(info) => {
info!("Send Connect message to {}", info);
self.state.add_peer_to_connect_list(info.clone());
self.connect(info.public_key);
if let Some(ref mut config_manager) = self.config_manager {
let connect_list_config =
ConnectListConfig::from_connect_list(&self.state.connect_list());
config_manager.store_connect_list(connect_list_config);
}
}
ExternalMessage::Enable(value) => {
let s = if value { "enabled" } else { "disabled" };
if self.is_enabled == value {
info!("Node is already {}", s);
} else {
self.is_enabled = value;
self.api_state().set_enabled(value);
info!("The node is {} now", s);
if self.is_enabled {
self.request_next_block();
}
}
}
ExternalMessage::Shutdown => {
self.handle_shutdown();
return EventOutcome::Terminated;
}
}
EventOutcome::Ok
}
fn handle_timeout(&mut self, timeout: NodeTimeout) {
match timeout {
NodeTimeout::Round(epoch, round) => self.handle_round_timeout(epoch, round),
NodeTimeout::Request(data, peer) => self.handle_request_timeout(&data, peer),
NodeTimeout::Status(epoch) => self.handle_status_timeout(epoch),
NodeTimeout::PeerExchange => self.handle_peer_exchange_timeout(),
NodeTimeout::UpdateApiState => self.handle_update_api_state_timeout(),
NodeTimeout::Propose(epoch, round) => self.handle_propose_timeout(epoch, round),
NodeTimeout::FlushPool => {
self.flush_txs_into_pool();
self.maybe_add_flush_pool_timeout();
}
}
}
/// Schedules execution for a later time.
pub(crate) fn execute_later(&mut self, event: InternalRequest) {
self.channel.internal_requests.send(event);
}
/// Shuts down the current node.
pub(crate) fn handle_shutdown(&mut self) {
log::info!("Shutting down node handler");
// Flush transactions stored in tx_cache to persistent pool.
self.flush_txs_into_pool();
}
pub(crate) fn flush_txs_into_pool(&mut self) {
let tx_cache_size = self.state().tx_cache_len();
if tx_cache_size == 0 {
return;
}
trace!(
"Flushing {} transactions from cache to persistent pool",
tx_cache_size
);
let fork = self.blockchain.fork();
let mut schema = Schema::new(&fork);
for (_, tx) in mem::take(self.state.tx_cache_mut()) {
schema.add_transaction_into_pool(tx);
}
if self.blockchain.merge(fork.into_patch()).is_err() {
warn!("Failed to flush transactions from cache to persistent pool.");
}
}
}
|
handle_event
|
identifier_name
|
events_impl.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use exonum::blockchain::Schema;
use log::{info, trace, warn};
use std::mem;
use crate::{
events::{
Event, EventHandler, EventOutcome, InternalEvent, InternalEventInner, InternalRequest,
NetworkEvent,
},
ConnectListConfig, ExternalMessage, NodeHandler, NodeTimeout,
};
impl EventHandler for NodeHandler {
fn handle_event(&mut self, event: Event) -> EventOutcome {
match event {
Event::Network(network) => {
self.handle_network_event(network);
EventOutcome::Ok
}
Event::Transaction(tx) => {
self.handle_incoming_tx(tx);
EventOutcome::Ok
}
Event::Internal(internal) => {
self.handle_internal_event(internal);
EventOutcome::Ok
}
Event::Api(api) => self.handle_api_event(api),
}
}
}
impl NodeHandler {
fn handle_internal_event(&mut self, event: InternalEvent) {
match event.0 {
InternalEventInner::Timeout(timeout) => self.handle_timeout(timeout),
InternalEventInner::JumpToRound(height, round) => self.handle_new_round(height, round),
InternalEventInner::MessageVerified(msg) => self.handle_message(*msg),
}
}
|
NetworkEvent::MessageReceived(raw) => {
self.execute_later(InternalRequest::VerifyMessage(raw));
}
}
}
fn handle_api_event(&mut self, event: ExternalMessage) -> EventOutcome {
match event {
ExternalMessage::PeerAdd(info) => {
info!("Send Connect message to {}", info);
self.state.add_peer_to_connect_list(info.clone());
self.connect(info.public_key);
if let Some(ref mut config_manager) = self.config_manager {
let connect_list_config =
ConnectListConfig::from_connect_list(&self.state.connect_list());
config_manager.store_connect_list(connect_list_config);
}
}
ExternalMessage::Enable(value) => {
let s = if value { "enabled" } else { "disabled" };
if self.is_enabled == value {
info!("Node is already {}", s);
} else {
self.is_enabled = value;
self.api_state().set_enabled(value);
info!("The node is {} now", s);
if self.is_enabled {
self.request_next_block();
}
}
}
ExternalMessage::Shutdown => {
self.handle_shutdown();
return EventOutcome::Terminated;
}
}
EventOutcome::Ok
}
fn handle_timeout(&mut self, timeout: NodeTimeout) {
match timeout {
NodeTimeout::Round(epoch, round) => self.handle_round_timeout(epoch, round),
NodeTimeout::Request(data, peer) => self.handle_request_timeout(&data, peer),
NodeTimeout::Status(epoch) => self.handle_status_timeout(epoch),
NodeTimeout::PeerExchange => self.handle_peer_exchange_timeout(),
NodeTimeout::UpdateApiState => self.handle_update_api_state_timeout(),
NodeTimeout::Propose(epoch, round) => self.handle_propose_timeout(epoch, round),
NodeTimeout::FlushPool => {
self.flush_txs_into_pool();
self.maybe_add_flush_pool_timeout();
}
}
}
/// Schedules execution for a later time.
pub(crate) fn execute_later(&mut self, event: InternalRequest) {
self.channel.internal_requests.send(event);
}
/// Shuts down the current node.
pub(crate) fn handle_shutdown(&mut self) {
log::info!("Shutting down node handler");
// Flush transactions stored in tx_cache to persistent pool.
self.flush_txs_into_pool();
}
pub(crate) fn flush_txs_into_pool(&mut self) {
let tx_cache_size = self.state().tx_cache_len();
if tx_cache_size == 0 {
return;
}
trace!(
"Flushing {} transactions from cache to persistent pool",
tx_cache_size
);
let fork = self.blockchain.fork();
let mut schema = Schema::new(&fork);
for (_, tx) in mem::take(self.state.tx_cache_mut()) {
schema.add_transaction_into_pool(tx);
}
if self.blockchain.merge(fork.into_patch()).is_err() {
warn!("Failed to flush transactions from cache to persistent pool.");
}
}
}
|
fn handle_network_event(&mut self, event: NetworkEvent) {
match event {
NetworkEvent::PeerConnected { addr, connect } => self.handle_connected(addr, *connect),
NetworkEvent::PeerDisconnected(peer) => self.handle_disconnected(peer),
NetworkEvent::UnableConnectToPeer(peer) => self.handle_unable_to_connect(peer),
|
random_line_split
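The events_impl.rs rows above all reduce to the same dispatch shape: one enum of event kinds, a single match that routes each kind to its handler, and an outcome value that tells the event loop whether to keep running. Below is a minimal, self-contained Rust sketch of that shape; the names (Event, EventOutcome, Node) are illustrative stand-ins, not Exonum's actual types.

#[derive(Debug)]
enum Event {
    Network(String),
    Transaction(u64),
    Shutdown,
}

#[derive(Debug, PartialEq)]
enum EventOutcome {
    Ok,
    Terminated,
}

struct Node {
    handled: usize,
}

impl Node {
    // One match routes every event kind to its handler; only Shutdown
    // terminates the loop.
    fn handle_event(&mut self, event: Event) -> EventOutcome {
        match event {
            Event::Network(peer) => {
                self.handle_network(&peer);
                EventOutcome::Ok
            }
            Event::Transaction(id) => {
                self.handle_tx(id);
                EventOutcome::Ok
            }
            Event::Shutdown => EventOutcome::Terminated,
        }
    }

    fn handle_network(&mut self, peer: &str) {
        println!("network event from {}", peer);
        self.handled += 1;
    }

    fn handle_tx(&mut self, id: u64) {
        println!("transaction {}", id);
        self.handled += 1;
    }
}

fn main() {
    let mut node = Node { handled: 0 };
    let events = vec![
        Event::Network("peer-1".into()),
        Event::Transaction(42),
        Event::Shutdown,
    ];
    for event in events {
        if node.handle_event(event) == EventOutcome::Terminated {
            break;
        }
    }
    assert_eq!(node.handled, 2);
}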
|
start_grammar.rs
|
use crate::grammar::ElemTypes;
use crate::grammar::{
build, Elem, Grammar, GrammarErrors, Prod, ProdElement, Rule,
};
use crate::utils::{take_only, ToDoc};
use std::marker::PhantomData;
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StreamTerminal<T> {
EndOfStream,
Term(T),
}
impl<T> StreamTerminal<T>
where
T: Eq,
{
pub fn has_kind(&self, kind: &T) -> bool {
match self {
StreamTerminal::Term(t) => t == kind,
StreamTerminal::EndOfStream => false,
}
}
pub fn is_eos(&self) -> bool {
matches!(self, StreamTerminal::EndOfStream)
}
}
impl<T> ToDoc for StreamTerminal<T>
where
T: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StreamTerminal::EndOfStream => da.text("<EOS>"),
StreamTerminal::Term(t) => t.to_doc(da),
}
}
}
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StartNonTerminal<NT> {
Start,
NTerm(NT),
}
impl<NT> ToDoc for StartNonTerminal<NT>
where
NT: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartNonTerminal::Start => da.text("<START>"),
StartNonTerminal::NTerm(nt) => nt.to_doc(da),
}
}
}
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StartActionKey<AK> {
Start,
ActionKey(AK),
}
impl<AK> StartActionKey<AK> {
pub fn as_base(&self) -> Option<&AK> {
match self {
StartActionKey::Start => None,
StartActionKey::ActionKey(ak) => Some(ak),
}
}
}
impl<AK> ToDoc for StartActionKey<AK>
where
AK: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartActionKey::Start => da.text("<START>"),
StartActionKey::ActionKey(ak) => ak.to_doc(da),
}
}
}
#[derive(Clone, Debug)]
pub enum StartActionValue<AV> {
Start,
ActionValue(AV),
}
impl<AV> ToDoc for StartActionValue<AV>
where
AV: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartActionValue::Start => da.text("<START>"),
StartActionValue::ActionValue(av) => av.to_doc(da),
}
}
}
#[derive(Derivative)]
#[derivative(Clone(bound = ""), Debug(bound = ""))]
pub struct StartElementTypes<E>(PhantomData<E>);
impl<E: ElemTypes> ElemTypes for StartElementTypes<E> {
type Term = StreamTerminal<E::Term>;
type NonTerm = StartNonTerminal<E::NonTerm>;
type ActionKey = StartActionKey<E::ActionKey>;
type ActionValue = StartActionValue<E::ActionValue>;
}
pub type StartGrammar<E> = Grammar<StartElementTypes<E>>;
impl<E: ElemTypes> StartGrammar<E> {
pub fn start_rule(&self) -> Rule<StartElementTypes<E>> {
self.get_rule(&StartNonTerminal::Start)
}
pub fn start_prod(&self) -> Prod<StartElementTypes<E>> {
take_only(self.start_rule().prods())
.expect("The start rule should only have a single production.")
}
}
fn
|
<E: ElemTypes>(
elem: Elem<E>,
) -> Elem<StartElementTypes<E>> {
match elem {
Elem::Term(t) => Elem::Term(StreamTerminal::Term(t)),
Elem::NonTerm(nt) => Elem::NonTerm(StartNonTerminal::NTerm(nt)),
}
}
pub fn wrap_grammar_with_start<E: ElemTypes>(
g: Grammar<E>,
) -> Result<Grammar<StartElementTypes<E>>, GrammarErrors<StartElementTypes<E>>>
{
build(StartNonTerminal::Start, |gb| {
gb.add_rule(StartNonTerminal::Start, |rb| {
rb.add_prod(StartActionKey::Start, StartActionValue::Start, |pb| {
pb.add_named_nonterm(
"start",
StartNonTerminal::NTerm(g.start_nt().clone()),
)
.add_term(StreamTerminal::EndOfStream);
});
});
for rule in g.rules() {
gb.add_rule(StartNonTerminal::NTerm(rule.head().clone()), |rb| {
for prod in rule.prods() {
rb.add_prod_with_elems(
StartActionKey::ActionKey(prod.action_key().clone()),
StartActionValue::ActionValue(prod.action_value().clone()),
prod
.prod_elements()
.iter()
.map(|e| {
ProdElement::new(
e.id().cloned(),
base_elem_to_start_elem(e.elem().clone()),
)
})
.collect::<Vec<_>>(),
);
}
});
}
})
}
|
base_elem_to_start_elem
|
identifier_name
|
start_grammar.rs
|
use crate::grammar::ElemTypes;
use crate::grammar::{
build, Elem, Grammar, GrammarErrors, Prod, ProdElement, Rule,
};
use crate::utils::{take_only, ToDoc};
use std::marker::PhantomData;
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StreamTerminal<T> {
EndOfStream,
Term(T),
}
impl<T> StreamTerminal<T>
where
T: Eq,
{
pub fn has_kind(&self, kind: &T) -> bool {
match self {
StreamTerminal::Term(t) => t == kind,
StreamTerminal::EndOfStream => false,
}
}
pub fn is_eos(&self) -> bool {
matches!(self, StreamTerminal::EndOfStream)
}
}
impl<T> ToDoc for StreamTerminal<T>
where
T: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StreamTerminal::EndOfStream => da.text("<EOS>"),
StreamTerminal::Term(t) => t.to_doc(da),
}
}
}
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StartNonTerminal<NT> {
Start,
NTerm(NT),
}
impl<NT> ToDoc for StartNonTerminal<NT>
where
NT: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartNonTerminal::Start => da.text("<START>"),
StartNonTerminal::NTerm(nt) => nt.to_doc(da),
}
}
}
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StartActionKey<AK> {
Start,
ActionKey(AK),
}
impl<AK> StartActionKey<AK> {
pub fn as_base(&self) -> Option<&AK>
|
}
impl<AK> ToDoc for StartActionKey<AK>
where
AK: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartActionKey::Start => da.text("<START>"),
StartActionKey::ActionKey(ak) => ak.to_doc(da),
}
}
}
#[derive(Clone, Debug)]
pub enum StartActionValue<AV> {
Start,
ActionValue(AV),
}
impl<AV> ToDoc for StartActionValue<AV>
where
AV: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartActionValue::Start => da.text("<START>"),
StartActionValue::ActionValue(av) => av.to_doc(da),
}
}
}
#[derive(Derivative)]
#[derivative(Clone(bound = ""), Debug(bound = ""))]
pub struct StartElementTypes<E>(PhantomData<E>);
impl<E: ElemTypes> ElemTypes for StartElementTypes<E> {
type Term = StreamTerminal<E::Term>;
type NonTerm = StartNonTerminal<E::NonTerm>;
type ActionKey = StartActionKey<E::ActionKey>;
type ActionValue = StartActionValue<E::ActionValue>;
}
pub type StartGrammar<E> = Grammar<StartElementTypes<E>>;
impl<E: ElemTypes> StartGrammar<E> {
pub fn start_rule(&self) -> Rule<StartElementTypes<E>> {
self.get_rule(&StartNonTerminal::Start)
}
pub fn start_prod(&self) -> Prod<StartElementTypes<E>> {
take_only(self.start_rule().prods())
.expect("The start rule should only have a single production.")
}
}
fn base_elem_to_start_elem<E: ElemTypes>(
elem: Elem<E>,
) -> Elem<StartElementTypes<E>> {
match elem {
Elem::Term(t) => Elem::Term(StreamTerminal::Term(t)),
Elem::NonTerm(nt) => Elem::NonTerm(StartNonTerminal::NTerm(nt)),
}
}
pub fn wrap_grammar_with_start<E: ElemTypes>(
g: Grammar<E>,
) -> Result<Grammar<StartElementTypes<E>>, GrammarErrors<StartElementTypes<E>>>
{
build(StartNonTerminal::Start, |gb| {
gb.add_rule(StartNonTerminal::Start, |rb| {
rb.add_prod(StartActionKey::Start, StartActionValue::Start, |pb| {
pb.add_named_nonterm(
"start",
StartNonTerminal::NTerm(g.start_nt().clone()),
)
.add_term(StreamTerminal::EndOfStream);
});
});
for rule in g.rules() {
gb.add_rule(StartNonTerminal::NTerm(rule.head().clone()), |rb| {
for prod in rule.prods() {
rb.add_prod_with_elems(
StartActionKey::ActionKey(prod.action_key().clone()),
StartActionValue::ActionValue(prod.action_value().clone()),
prod
.prod_elements()
.iter()
.map(|e| {
ProdElement::new(
e.id().cloned(),
base_elem_to_start_elem(e.elem().clone()),
)
})
.collect::<Vec<_>>(),
);
}
});
}
})
}
|
{
match self {
StartActionKey::Start => None,
StartActionKey::ActionKey(ak) => Some(ak),
}
}
|
identifier_body
|
start_grammar.rs
|
use crate::grammar::ElemTypes;
use crate::grammar::{
build, Elem, Grammar, GrammarErrors, Prod, ProdElement, Rule,
};
use crate::utils::{take_only, ToDoc};
use std::marker::PhantomData;
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StreamTerminal<T> {
EndOfStream,
Term(T),
}
impl<T> StreamTerminal<T>
where
T: Eq,
{
pub fn has_kind(&self, kind: &T) -> bool {
match self {
StreamTerminal::Term(t) => t == kind,
StreamTerminal::EndOfStream => false,
}
}
pub fn is_eos(&self) -> bool {
matches!(self, StreamTerminal::EndOfStream)
}
}
impl<T> ToDoc for StreamTerminal<T>
where
T: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StreamTerminal::EndOfStream => da.text("<EOS>"),
StreamTerminal::Term(t) => t.to_doc(da),
}
}
}
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StartNonTerminal<NT> {
Start,
NTerm(NT),
}
impl<NT> ToDoc for StartNonTerminal<NT>
where
NT: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartNonTerminal::Start => da.text("<START>"),
StartNonTerminal::NTerm(nt) => nt.to_doc(da),
}
}
}
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum StartActionKey<AK> {
Start,
ActionKey(AK),
}
impl<AK> StartActionKey<AK> {
pub fn as_base(&self) -> Option<&AK> {
match self {
StartActionKey::Start => None,
StartActionKey::ActionKey(ak) => Some(ak),
}
}
}
impl<AK> ToDoc for StartActionKey<AK>
where
AK: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartActionKey::Start => da.text("<START>"),
StartActionKey::ActionKey(ak) => ak.to_doc(da),
}
}
}
#[derive(Clone, Debug)]
pub enum StartActionValue<AV> {
Start,
ActionValue(AV),
}
|
where
AV: ToDoc,
{
fn to_doc<'a, DA: pretty::DocAllocator<'a>>(
&self,
da: &'a DA,
) -> pretty::DocBuilder<'a, DA>
where
DA::Doc: Clone,
{
match self {
StartActionValue::Start => da.text("<START>"),
StartActionValue::ActionValue(av) => av.to_doc(da),
}
}
}
#[derive(Derivative)]
#[derivative(Clone(bound = ""), Debug(bound = ""))]
pub struct StartElementTypes<E>(PhantomData<E>);
impl<E: ElemTypes> ElemTypes for StartElementTypes<E> {
type Term = StreamTerminal<E::Term>;
type NonTerm = StartNonTerminal<E::NonTerm>;
type ActionKey = StartActionKey<E::ActionKey>;
type ActionValue = StartActionValue<E::ActionValue>;
}
pub type StartGrammar<E> = Grammar<StartElementTypes<E>>;
impl<E: ElemTypes> StartGrammar<E> {
pub fn start_rule(&self) -> Rule<StartElementTypes<E>> {
self.get_rule(&StartNonTerminal::Start)
}
pub fn start_prod(&self) -> Prod<StartElementTypes<E>> {
take_only(self.start_rule().prods())
.expect("The start rule should only have a single production.")
}
}
fn base_elem_to_start_elem<E: ElemTypes>(
elem: Elem<E>,
) -> Elem<StartElementTypes<E>> {
match elem {
Elem::Term(t) => Elem::Term(StreamTerminal::Term(t)),
Elem::NonTerm(nt) => Elem::NonTerm(StartNonTerminal::NTerm(nt)),
}
}
pub fn wrap_grammar_with_start<E: ElemTypes>(
g: Grammar<E>,
) -> Result<Grammar<StartElementTypes<E>>, GrammarErrors<StartElementTypes<E>>>
{
build(StartNonTerminal::Start, |gb| {
gb.add_rule(StartNonTerminal::Start, |rb| {
rb.add_prod(StartActionKey::Start, StartActionValue::Start, |pb| {
pb.add_named_nonterm(
"start",
StartNonTerminal::NTerm(g.start_nt().clone()),
)
.add_term(StreamTerminal::EndOfStream);
});
});
for rule in g.rules() {
gb.add_rule(StartNonTerminal::NTerm(rule.head().clone()), |rb| {
for prod in rule.prods() {
rb.add_prod_with_elems(
StartActionKey::ActionKey(prod.action_key().clone()),
StartActionValue::ActionValue(prod.action_value().clone()),
prod
.prod_elements()
.iter()
.map(|e| {
ProdElement::new(
e.id().cloned(),
base_elem_to_start_elem(e.elem().clone()),
)
})
.collect::<Vec<_>>(),
);
}
});
}
})
}
|
impl<AV> ToDoc for StartActionValue<AV>
|
random_line_split
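The start_grammar.rs rows implement the usual augmented-grammar construction: wrap the terminal and non-terminal types in enums that add an end-of-stream terminal and a synthetic start symbol, then map every base element into the wrapped types. The sketch below shows only that wrapping step with hypothetical enum names; it is not the crate's API, just the same idea in miniature.

#[derive(Clone, Debug, PartialEq)]
enum StreamTerm<T> {
    EndOfStream,
    Term(T),
}

#[derive(Clone, Debug, PartialEq)]
enum StartNonTerm<NT> {
    Start,
    NTerm(NT),
}

#[derive(Clone, Debug, PartialEq)]
enum Elem<T, NT> {
    Term(T),
    NonTerm(NT),
}

// Map an element of the base grammar into the augmented element types.
fn wrap_elem<T, NT>(elem: Elem<T, NT>) -> Elem<StreamTerm<T>, StartNonTerm<NT>> {
    match elem {
        Elem::Term(t) => Elem::Term(StreamTerm::Term(t)),
        Elem::NonTerm(nt) => Elem::NonTerm(StartNonTerm::NTerm(nt)),
    }
}

fn main() {
    // The synthetic start rule is `<START> -> <base start nonterminal> <EOS>`.
    let head = StartNonTerm::<&str>::Start;
    let body: Vec<Elem<StreamTerm<char>, StartNonTerm<&str>>> = vec![
        wrap_elem(Elem::NonTerm("expr")),
        Elem::Term(StreamTerm::EndOfStream),
    ];
    println!("{:?} -> {:?}", head, body);
}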
|
spec.rs
|
extern crate stache;
extern crate tempdir;
extern crate yaml_rust;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process::Command;
use stache::ruby;
use stache::{Compile, Statement, Template};
use tempdir::TempDir;
use yaml_rust::{Yaml, YamlLoader};
#[test]
fn ruby() {
let build = TempDir::new("stache-build").unwrap();
let source = build.path().join("stache.c");
let script = "./tests/fixtures/test-ruby";
let templates = templates();
let program = ruby::link(&templates).unwrap();
program.write(source).unwrap();
let output = Command::new(script).arg(build.path()).output().unwrap();
if !output.status.success()
|
}
/// Parses templates provided by the Mustache specification suite.
fn templates() -> Vec<Template> {
let base = PathBuf::from("ext/spec/specs");
let files = vec!["comments", "interpolation", "inverted", "sections"];
files
.iter()
.flat_map(|name| {
let path = base.join(name).with_extension("yml");
let spec = document(&path);
let tests = spec["tests"].as_vec().unwrap();
tests
.iter()
.enumerate()
.map(|(index, test)| {
let template = test["template"].as_str().unwrap();
let tree = Statement::parse(template).unwrap();
let fake = path.with_file_name(format!("{}{}", name, index));
Template::new(&base, fake, tree)
})
.collect::<Vec<_>>()
})
.collect()
}
/// Parses the YAML document at the given path.
fn document(path: &Path) -> Yaml {
let mut file = File::open(path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let mut docs = YamlLoader::load_from_str(&contents).unwrap();
docs.pop().unwrap()
}
|
{
let out = String::from_utf8(output.stdout).unwrap();
let err = String::from_utf8(output.stderr).unwrap();
panic!("{}{}", out, err);
}
|
conditional_block
|
spec.rs
|
extern crate stache;
extern crate tempdir;
extern crate yaml_rust;
|
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process::Command;
use stache::ruby;
use stache::{Compile, Statement, Template};
use tempdir::TempDir;
use yaml_rust::{Yaml, YamlLoader};
#[test]
fn ruby() {
let build = TempDir::new("stache-build").unwrap();
let source = build.path().join("stache.c");
let script = "./tests/fixtures/test-ruby";
let templates = templates();
let program = ruby::link(&templates).unwrap();
program.write(source).unwrap();
let output = Command::new(script).arg(build.path()).output().unwrap();
if !output.status.success() {
let out = String::from_utf8(output.stdout).unwrap();
let err = String::from_utf8(output.stderr).unwrap();
panic!("{}{}", out, err);
}
}
/// Parses templates provided by the Mustache specification suite.
fn templates() -> Vec<Template> {
let base = PathBuf::from("ext/spec/specs");
let files = vec!["comments", "interpolation", "inverted", "sections"];
files
.iter()
.flat_map(|name| {
let path = base.join(name).with_extension("yml");
let spec = document(&path);
let tests = spec["tests"].as_vec().unwrap();
tests
.iter()
.enumerate()
.map(|(index, test)| {
let template = test["template"].as_str().unwrap();
let tree = Statement::parse(template).unwrap();
let fake = path.with_file_name(format!("{}{}", name, index));
Template::new(&base, fake, tree)
})
.collect::<Vec<_>>()
})
.collect()
}
/// Parses the YAML document at the given path.
fn document(path: &Path) -> Yaml {
let mut file = File::open(path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let mut docs = YamlLoader::load_from_str(&contents).unwrap();
docs.pop().unwrap()
}
|
use std::fs::File;
|
random_line_split
|
spec.rs
|
extern crate stache;
extern crate tempdir;
extern crate yaml_rust;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process::Command;
use stache::ruby;
use stache::{Compile, Statement, Template};
use tempdir::TempDir;
use yaml_rust::{Yaml, YamlLoader};
#[test]
fn
|
() {
let build = TempDir::new("stache-build").unwrap();
let source = build.path().join("stache.c");
let script = "./tests/fixtures/test-ruby";
let templates = templates();
let program = ruby::link(&templates).unwrap();
program.write(source).unwrap();
let output = Command::new(script).arg(build.path()).output().unwrap();
if !output.status.success() {
let out = String::from_utf8(output.stdout).unwrap();
let err = String::from_utf8(output.stderr).unwrap();
panic!("{}{}", out, err);
}
}
/// Parses templates provided by the Mustache specification suite.
fn templates() -> Vec<Template> {
let base = PathBuf::from("ext/spec/specs");
let files = vec!["comments", "interpolation", "inverted", "sections"];
files
.iter()
.flat_map(|name| {
let path = base.join(name).with_extension("yml");
let spec = document(&path);
let tests = spec["tests"].as_vec().unwrap();
tests
.iter()
.enumerate()
.map(|(index, test)| {
let template = test["template"].as_str().unwrap();
let tree = Statement::parse(template).unwrap();
let fake = path.with_file_name(format!("{}{}", name, index));
Template::new(&base, fake, tree)
})
.collect::<Vec<_>>()
})
.collect()
}
/// Parses the YAML document at the given path.
fn document(path: &Path) -> Yaml {
let mut file = File::open(path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let mut docs = YamlLoader::load_from_str(&contents).unwrap();
docs.pop().unwrap()
}
|
ruby
|
identifier_name
|
spec.rs
|
extern crate stache;
extern crate tempdir;
extern crate yaml_rust;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process::Command;
use stache::ruby;
use stache::{Compile, Statement, Template};
use tempdir::TempDir;
use yaml_rust::{Yaml, YamlLoader};
#[test]
fn ruby() {
let build = TempDir::new("stache-build").unwrap();
let source = build.path().join("stache.c");
let script = "./tests/fixtures/test-ruby";
let templates = templates();
let program = ruby::link(&templates).unwrap();
program.write(source).unwrap();
let output = Command::new(script).arg(build.path()).output().unwrap();
if !output.status.success() {
let out = String::from_utf8(output.stdout).unwrap();
let err = String::from_utf8(output.stderr).unwrap();
panic!("{}{}", out, err);
}
}
/// Parses templates provided by the Mustache specification suite.
fn templates() -> Vec<Template>
|
.collect()
}
/// Parses the YAML document at the given path.
fn document(path: &Path) -> Yaml {
let mut file = File::open(path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let mut docs = YamlLoader::load_from_str(&contents).unwrap();
docs.pop().unwrap()
}
|
{
let base = PathBuf::from("ext/spec/specs");
let files = vec!["comments", "interpolation", "inverted", "sections"];
files
.iter()
.flat_map(|name| {
let path = base.join(name).with_extension("yml");
let spec = document(&path);
let tests = spec["tests"].as_vec().unwrap();
tests
.iter()
.enumerate()
.map(|(index, test)| {
let template = test["template"].as_str().unwrap();
let tree = Statement::parse(template).unwrap();
let fake = path.with_file_name(format!("{}{}", name, index));
Template::new(&base, fake, tree)
})
.collect::<Vec<_>>()
})
|
identifier_body
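The spec.rs rows drive the Mustache suite from YAML fixtures through yaml-rust. The sketch below exercises the same calls used in document() and templates() (YamlLoader::load_from_str, indexing, as_vec, as_str) on an inline document instead of the files under ext/spec/specs, so it runs standalone; the sample YAML content is made up.

use yaml_rust::YamlLoader;

fn main() {
    // A tiny inline stand-in for one of the spec files.
    let spec = r#"
tests:
  - name: basic
    template: "Hello, {{name}}!"
  - name: comment
    template: "{{! ignored }}done"
"#;
    let docs = YamlLoader::load_from_str(spec).expect("valid YAML");
    let doc = &docs[0];
    // Walk the `tests` array the same way templates() does above.
    for (index, test) in doc["tests"].as_vec().expect("tests array").iter().enumerate() {
        let name = test["name"].as_str().unwrap_or("unnamed");
        let template = test["template"].as_str().expect("template string");
        println!("test {} ({}): {}", index, name, template);
    }
}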
|
pool.rs
|
//! Connection pooling for a single MongoDB server.
use error::Error::{self, ArgumentError, OperationError};
use error::Result;
use Client;
use coll::options::FindOptions;
use command_type::CommandType;
use connstring::Host;
use cursor::Cursor;
use stream::{Stream, StreamConnector};
use wire_protocol::flags::OpQueryFlags;
use bson::{bson, doc};
use bufstream::BufStream;
use std::fmt;
use std::sync::{Arc, Condvar, Mutex};
use std::sync::atomic::{AtomicUsize, Ordering};
pub static DEFAULT_POOL_SIZE: usize = 5;
/// Handles threaded connections to a MongoDB server.
#[derive(Clone)]
pub struct ConnectionPool {
/// The connection host.
pub host: Host,
// The socket pool.
inner: Arc<Mutex<Pool>>,
// A condition variable used for threads waiting for the pool
// to be repopulated with available connections.
wait_lock: Arc<Condvar>,
stream_connector: StreamConnector,
}
impl fmt::Debug for ConnectionPool {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("ConnectionPool")
.field("host", &self.host)
.finish()
}
}
struct Pool {
/// The maximum number of concurrent connections allowed.
pub size: usize,
// The current number of open connections.
pub len: Arc<AtomicUsize>,
// The idle socket pool.
sockets: Vec<BufStream<Stream>>,
// The pool iteration. When a server monitor fails to execute ismaster,
// the connection pool is cleared and the iteration is incremented.
iteration: usize,
}
/// Holds an available socket, with logic to return the socket
/// to the connection pool when dropped.
pub struct PooledStream {
// This socket option will always be Some(stream) until it is
// returned to the pool using take().
socket: Option<BufStream<Stream>>,
// A reference to the pool that the stream was taken from.
pool: Arc<Mutex<Pool>>,
// A reference to the waiting condvar associated with the pool.
wait_lock: Arc<Condvar>,
// The pool iteration at the moment of extraction.
iteration: usize,
// Whether the handshake occurred successfully.
successful_handshake: bool,
}
impl PooledStream {
/// Returns a mutable reference to the socket.
pub fn get_socket(&mut self) -> &mut BufStream<Stream> {
self.socket.as_mut().unwrap()
}
}
impl Drop for PooledStream {
fn drop(&mut self) {
// Don't add streams that couldn't successfully handshake to the pool.
if !self.successful_handshake
|
// Attempt to lock and return the socket to the pool,
// or give up if the pool lock has been poisoned.
if let Ok(mut locked) = self.pool.lock() {
if self.iteration == locked.iteration {
locked.sockets.push(self.socket.take().unwrap());
// Notify waiting threads that the pool has been repopulated.
self.wait_lock.notify_one();
}
}
}
}
impl ConnectionPool {
/// Returns a connection pool with a default size.
pub fn new(host: Host, connector: StreamConnector) -> ConnectionPool {
ConnectionPool::with_size(host, connector, DEFAULT_POOL_SIZE)
}
/// Returns a connection pool with a specified capped size.
pub fn with_size(host: Host, connector: StreamConnector, size: usize) -> ConnectionPool {
ConnectionPool {
host: host,
wait_lock: Arc::new(Condvar::new()),
inner: Arc::new(Mutex::new(Pool {
len: Arc::new(AtomicUsize::new(0)),
size: size,
sockets: Vec::with_capacity(size),
iteration: 0,
})),
stream_connector: connector,
}
}
/// Sets the maximum number of open connections.
pub fn set_size(&self, size: usize) -> Result<()> {
if size < 1 {
Err(ArgumentError(String::from(
"The connection pool size must be greater than zero.",
)))
} else {
let mut locked = self.inner.lock()?;
locked.size = size;
Ok(())
}
}
// Clear all open socket connections.
pub fn clear(&self) {
if let Ok(mut locked) = self.inner.lock() {
locked.iteration += 1;
locked.sockets.clear();
locked.len.store(0, Ordering::SeqCst);
}
}
/// Attempts to acquire a connected socket. If none are available and
/// the pool has not reached its maximum size, a new socket will connect.
/// Otherwise, the function will block until a socket is returned to the pool.
pub fn acquire_stream(&self, client: Client) -> Result<PooledStream> {
let mut locked = self.inner.lock()?;
if locked.size == 0 {
return Err(OperationError(String::from(
"The connection pool does not allow connections; increase the size of the pool.",
)));
}
loop {
// Acquire available existing socket
if let Some(stream) = locked.sockets.pop() {
return Ok(PooledStream {
socket: Some(stream),
pool: self.inner.clone(),
wait_lock: self.wait_lock.clone(),
iteration: locked.iteration,
successful_handshake: true,
});
}
// Attempt to make a new connection
let len = locked.len.load(Ordering::SeqCst);
if len < locked.size {
let socket = self.connect()?;
let mut stream = PooledStream {
socket: Some(socket),
pool: self.inner.clone(),
wait_lock: self.wait_lock.clone(),
iteration: locked.iteration,
successful_handshake: false,
};
self.handshake(client, &mut stream)?;
let _ = locked.len.fetch_add(1, Ordering::SeqCst);
return Ok(stream);
}
// Release lock and wait for pool to be repopulated
locked = self.wait_lock.wait(locked)?;
}
}
// Connects to a MongoDB server as defined by the initial configuration.
fn connect(&self) -> Result<BufStream<Stream>> {
match self.stream_connector.connect(
&self.host.host_name[..],
self.host.port,
) {
Ok(s) => Ok(BufStream::new(s)),
Err(e) => Err(Error::from(e)),
}
}
// This sends the client metadata to the server as described by the handshake spec.
//
// See https://github.com/mongodb/specifications/blob/master/source/mongodb-handshake/handshake.rst
fn handshake(&self, client: Client, stream: &mut PooledStream) -> Result<()> {
let mut options = FindOptions::new();
options.limit = Some(1);
options.batch_size = Some(1);
let flags = OpQueryFlags::with_find_options(&options);
Cursor::query_with_stream(
stream,
client,
String::from("local.$cmd"),
flags,
doc! {
"isMaster": 1i32,
"client": {
"driver": {
"name": ::DRIVER_NAME,
"version": env!("CARGO_PKG_VERSION"),
},
"os": {
"type": ::std::env::consts::OS,
"architecture": ::std::env::consts::ARCH
}
},
},
options,
CommandType::IsMaster,
false,
None,
)?;
stream.successful_handshake = true;
Ok(())
}
}
|
{
return;
}
|
conditional_block
|
pool.rs
|
//! Connection pooling for a single MongoDB server.
use error::Error::{self, ArgumentError, OperationError};
use error::Result;
use Client;
use coll::options::FindOptions;
use command_type::CommandType;
use connstring::Host;
use cursor::Cursor;
use stream::{Stream, StreamConnector};
use wire_protocol::flags::OpQueryFlags;
use bson::{bson, doc};
use bufstream::BufStream;
use std::fmt;
use std::sync::{Arc, Condvar, Mutex};
use std::sync::atomic::{AtomicUsize, Ordering};
pub static DEFAULT_POOL_SIZE: usize = 5;
/// Handles threaded connections to a MongoDB server.
#[derive(Clone)]
pub struct ConnectionPool {
/// The connection host.
pub host: Host,
// The socket pool.
inner: Arc<Mutex<Pool>>,
// A condition variable used for threads waiting for the pool
// to be repopulated with available connections.
wait_lock: Arc<Condvar>,
stream_connector: StreamConnector,
}
impl fmt::Debug for ConnectionPool {
fn
|
(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("ConnectionPool")
.field("host", &self.host)
.finish()
}
}
struct Pool {
/// The maximum number of concurrent connections allowed.
pub size: usize,
// The current number of open connections.
pub len: Arc<AtomicUsize>,
// The idle socket pool.
sockets: Vec<BufStream<Stream>>,
// The pool iteration. When a server monitor fails to execute ismaster,
// the connection pool is cleared and the iteration is incremented.
iteration: usize,
}
/// Holds an available socket, with logic to return the socket
/// to the connection pool when dropped.
pub struct PooledStream {
// This socket option will always be Some(stream) until it is
// returned to the pool using take().
socket: Option<BufStream<Stream>>,
// A reference to the pool that the stream was taken from.
pool: Arc<Mutex<Pool>>,
// A reference to the waiting condvar associated with the pool.
wait_lock: Arc<Condvar>,
// The pool iteration at the moment of extraction.
iteration: usize,
// Whether the handshake occurred successfully.
successful_handshake: bool,
}
impl PooledStream {
/// Returns a mutable reference to the socket.
pub fn get_socket(&mut self) -> &mut BufStream<Stream> {
self.socket.as_mut().unwrap()
}
}
impl Drop for PooledStream {
fn drop(&mut self) {
// Don't add streams that couldn't successfully handshake to the pool.
if !self.successful_handshake {
return;
}
// Attempt to lock and return the socket to the pool,
// or give up if the pool lock has been poisoned.
if let Ok(mut locked) = self.pool.lock() {
if self.iteration == locked.iteration {
locked.sockets.push(self.socket.take().unwrap());
// Notify waiting threads that the pool has been repopulated.
self.wait_lock.notify_one();
}
}
}
}
impl ConnectionPool {
/// Returns a connection pool with a default size.
pub fn new(host: Host, connector: StreamConnector) -> ConnectionPool {
ConnectionPool::with_size(host, connector, DEFAULT_POOL_SIZE)
}
/// Returns a connection pool with a specified capped size.
pub fn with_size(host: Host, connector: StreamConnector, size: usize) -> ConnectionPool {
ConnectionPool {
host: host,
wait_lock: Arc::new(Condvar::new()),
inner: Arc::new(Mutex::new(Pool {
len: Arc::new(AtomicUsize::new(0)),
size: size,
sockets: Vec::with_capacity(size),
iteration: 0,
})),
stream_connector: connector,
}
}
/// Sets the maximum number of open connections.
pub fn set_size(&self, size: usize) -> Result<()> {
if size < 1 {
Err(ArgumentError(String::from(
"The connection pool size must be greater than zero.",
)))
} else {
let mut locked = self.inner.lock()?;
locked.size = size;
Ok(())
}
}
// Clear all open socket connections.
pub fn clear(&self) {
if let Ok(mut locked) = self.inner.lock() {
locked.iteration += 1;
locked.sockets.clear();
locked.len.store(0, Ordering::SeqCst);
}
}
/// Attempts to acquire a connected socket. If none are available and
/// the pool has not reached its maximum size, a new socket will connect.
/// Otherwise, the function will block until a socket is returned to the pool.
pub fn acquire_stream(&self, client: Client) -> Result<PooledStream> {
let mut locked = self.inner.lock()?;
if locked.size == 0 {
return Err(OperationError(String::from(
"The connection pool does not allow connections; increase the size of the pool.",
)));
}
loop {
// Acquire available existing socket
if let Some(stream) = locked.sockets.pop() {
return Ok(PooledStream {
socket: Some(stream),
pool: self.inner.clone(),
wait_lock: self.wait_lock.clone(),
iteration: locked.iteration,
successful_handshake: true,
});
}
// Attempt to make a new connection
let len = locked.len.load(Ordering::SeqCst);
if len < locked.size {
let socket = self.connect()?;
let mut stream = PooledStream {
socket: Some(socket),
pool: self.inner.clone(),
wait_lock: self.wait_lock.clone(),
iteration: locked.iteration,
successful_handshake: false,
};
self.handshake(client, &mut stream)?;
let _ = locked.len.fetch_add(1, Ordering::SeqCst);
return Ok(stream);
}
// Release lock and wait for pool to be repopulated
locked = self.wait_lock.wait(locked)?;
}
}
// Connects to a MongoDB server as defined by the initial configuration.
fn connect(&self) -> Result<BufStream<Stream>> {
match self.stream_connector.connect(
&self.host.host_name[..],
self.host.port,
) {
Ok(s) => Ok(BufStream::new(s)),
Err(e) => Err(Error::from(e)),
}
}
// This sends the client metadata to the server as described by the handshake spec.
//
// See https://github.com/mongodb/specifications/blob/master/source/mongodb-handshake/handshake.rst
fn handshake(&self, client: Client, stream: &mut PooledStream) -> Result<()> {
let mut options = FindOptions::new();
options.limit = Some(1);
options.batch_size = Some(1);
let flags = OpQueryFlags::with_find_options(&options);
Cursor::query_with_stream(
stream,
client,
String::from("local.$cmd"),
flags,
doc! {
"isMaster": 1i32,
"client": {
"driver": {
"name": ::DRIVER_NAME,
"version": env!("CARGO_PKG_VERSION"),
},
"os": {
"type": ::std::env::consts::OS,
"architecture": ::std::env::consts::ARCH
}
},
},
options,
CommandType::IsMaster,
false,
None,
)?;
stream.successful_handshake = true;
Ok(())
}
}
|
fmt
|
identifier_name
|
pool.rs
|
//! Connection pooling for a single MongoDB server.
use error::Error::{self, ArgumentError, OperationError};
use error::Result;
use Client;
use coll::options::FindOptions;
use command_type::CommandType;
use connstring::Host;
use cursor::Cursor;
use stream::{Stream, StreamConnector};
use wire_protocol::flags::OpQueryFlags;
use bson::{bson, doc};
use bufstream::BufStream;
use std::fmt;
use std::sync::{Arc, Condvar, Mutex};
use std::sync::atomic::{AtomicUsize, Ordering};
pub static DEFAULT_POOL_SIZE: usize = 5;
/// Handles threaded connections to a MongoDB server.
#[derive(Clone)]
pub struct ConnectionPool {
/// The connection host.
pub host: Host,
// The socket pool.
inner: Arc<Mutex<Pool>>,
// A condition variable used for threads waiting for the pool
// to be repopulated with available connections.
wait_lock: Arc<Condvar>,
stream_connector: StreamConnector,
}
impl fmt::Debug for ConnectionPool {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("ConnectionPool")
.field("host", &self.host)
.finish()
}
}
struct Pool {
/// The maximum number of concurrent connections allowed.
pub size: usize,
// The current number of open connections.
pub len: Arc<AtomicUsize>,
// The idle socket pool.
sockets: Vec<BufStream<Stream>>,
// The pool iteration. When a server monitor fails to execute ismaster,
// the connection pool is cleared and the iteration is incremented.
iteration: usize,
}
/// Holds an available socket, with logic to return the socket
/// to the connection pool when dropped.
pub struct PooledStream {
// This socket option will always be Some(stream) until it is
// returned to the pool using take().
socket: Option<BufStream<Stream>>,
// A reference to the pool that the stream was taken from.
pool: Arc<Mutex<Pool>>,
// A reference to the waiting condvar associated with the pool.
wait_lock: Arc<Condvar>,
// The pool iteration at the moment of extraction.
iteration: usize,
// Whether the handshake occurred successfully.
successful_handshake: bool,
}
impl PooledStream {
/// Returns a mutable reference to the socket.
pub fn get_socket(&mut self) -> &mut BufStream<Stream> {
self.socket.as_mut().unwrap()
}
|
impl Drop for PooledStream {
fn drop(&mut self) {
// Don't add streams that couldn't successfully handshake to the pool.
if !self.successful_handshake {
return;
}
// Attempt to lock and return the socket to the pool,
// or give up if the pool lock has been poisoned.
if let Ok(mut locked) = self.pool.lock() {
if self.iteration == locked.iteration {
locked.sockets.push(self.socket.take().unwrap());
// Notify waiting threads that the pool has been repopulated.
self.wait_lock.notify_one();
}
}
}
}
impl ConnectionPool {
/// Returns a connection pool with a default size.
pub fn new(host: Host, connector: StreamConnector) -> ConnectionPool {
ConnectionPool::with_size(host, connector, DEFAULT_POOL_SIZE)
}
/// Returns a connection pool with a specified capped size.
pub fn with_size(host: Host, connector: StreamConnector, size: usize) -> ConnectionPool {
ConnectionPool {
host: host,
wait_lock: Arc::new(Condvar::new()),
inner: Arc::new(Mutex::new(Pool {
len: Arc::new(AtomicUsize::new(0)),
size: size,
sockets: Vec::with_capacity(size),
iteration: 0,
})),
stream_connector: connector,
}
}
/// Sets the maximum number of open connections.
pub fn set_size(&self, size: usize) -> Result<()> {
if size < 1 {
Err(ArgumentError(String::from(
"The connection pool size must be greater than zero.",
)))
} else {
let mut locked = self.inner.lock()?;
locked.size = size;
Ok(())
}
}
// Clear all open socket connections.
pub fn clear(&self) {
if let Ok(mut locked) = self.inner.lock() {
locked.iteration += 1;
locked.sockets.clear();
locked.len.store(0, Ordering::SeqCst);
}
}
/// Attempts to acquire a connected socket. If none are available and
/// the pool has not reached its maximum size, a new socket will connect.
/// Otherwise, the function will block until a socket is returned to the pool.
pub fn acquire_stream(&self, client: Client) -> Result<PooledStream> {
let mut locked = self.inner.lock()?;
if locked.size == 0 {
return Err(OperationError(String::from(
"The connection pool does not allow connections; increase the size of the pool.",
)));
}
loop {
// Acquire available existing socket
if let Some(stream) = locked.sockets.pop() {
return Ok(PooledStream {
socket: Some(stream),
pool: self.inner.clone(),
wait_lock: self.wait_lock.clone(),
iteration: locked.iteration,
successful_handshake: true,
});
}
// Attempt to make a new connection
let len = locked.len.load(Ordering::SeqCst);
if len < locked.size {
let socket = self.connect()?;
let mut stream = PooledStream {
socket: Some(socket),
pool: self.inner.clone(),
wait_lock: self.wait_lock.clone(),
iteration: locked.iteration,
successful_handshake: false,
};
self.handshake(client, &mut stream)?;
let _ = locked.len.fetch_add(1, Ordering::SeqCst);
return Ok(stream);
}
// Release lock and wait for pool to be repopulated
locked = self.wait_lock.wait(locked)?;
}
}
// Connects to a MongoDB server as defined by the initial configuration.
fn connect(&self) -> Result<BufStream<Stream>> {
match self.stream_connector.connect(
&self.host.host_name[..],
self.host.port,
) {
Ok(s) => Ok(BufStream::new(s)),
Err(e) => Err(Error::from(e)),
}
}
// This sends the client metadata to the server as described by the handshake spec.
//
// See https://github.com/mongodb/specifications/blob/master/source/mongodb-handshake/handshake.rst
fn handshake(&self, client: Client, stream: &mut PooledStream) -> Result<()> {
let mut options = FindOptions::new();
options.limit = Some(1);
options.batch_size = Some(1);
let flags = OpQueryFlags::with_find_options(&options);
Cursor::query_with_stream(
stream,
client,
String::from("local.$cmd"),
flags,
doc! {
"isMaster": 1i32,
"client": {
"driver": {
"name": ::DRIVER_NAME,
"version": env!("CARGO_PKG_VERSION"),
},
"os": {
"type": ::std::env::consts::OS,
"architecture": ::std::env::consts::ARCH
}
},
},
options,
CommandType::IsMaster,
false,
None,
)?;
stream.successful_handshake = true;
Ok(())
}
}
|
}
|
random_line_split
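The pool.rs rows hinge on one synchronization pattern: a Mutex guarding the idle list plus a Condvar that blocks acquirers once the size cap is reached and wakes one of them when a stream is returned. Here is a minimal, std-only sketch of that acquire/release shape with made-up types (a Pool handing out plain u32 items); it is not the driver's API.

use std::sync::{Arc, Condvar, Mutex};

struct Inner {
    idle: Vec<u32>,
    open: usize,
    cap: usize,
}

#[derive(Clone)]
struct Pool {
    inner: Arc<(Mutex<Inner>, Condvar)>,
}

impl Pool {
    fn new(cap: usize) -> Self {
        Pool {
            inner: Arc::new((
                Mutex::new(Inner { idle: Vec::new(), open: 0, cap }),
                Condvar::new(),
            )),
        }
    }

    // Reuse an idle item, open a new one while under the cap, or block on the
    // condvar until release() puts an item back.
    fn acquire(&self) -> u32 {
        let (lock, cvar) = &*self.inner;
        let mut state = lock.lock().unwrap();
        loop {
            if let Some(item) = state.idle.pop() {
                return item;
            }
            if state.open < state.cap {
                state.open += 1;
                return state.open as u32;
            }
            state = cvar.wait(state).unwrap();
        }
    }

    // Return an item to the idle list and wake one waiting acquirer.
    fn release(&self, item: u32) {
        let (lock, cvar) = &*self.inner;
        lock.lock().unwrap().idle.push(item);
        cvar.notify_one();
    }
}

fn main() {
    let pool = Pool::new(2);
    let a = pool.acquire(); // opens item 1
    let b = pool.acquire(); // opens item 2
    pool.release(a);
    let c = pool.acquire(); // reuses the released item instead of blocking
    println!("{} {} {}", a, b, c);
    pool.release(b);
    pool.release(c);
}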
|
crateC.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This tests the extra note reported when a type error deals with
// seemingly identical types.
// The main use case of this error is when there are two crates
// (generally different versions of the same crate) with the same name
// causing a type mismatch.
// The test is nearly the same as the one in
// compile-fail/type-mismatch-same-crate-name.rs
// but deals with the case where one of the crates
// is only introduced as an indirect dependency,
// and the type is accessed via a re-export.
// This is similar to how the error can be introduced
// when using cargo's automatic dependency resolution.
extern crate crateA;
fn main()
|
{
let foo2 = crateA::Foo;
let bar2 = crateA::bar();
{
extern crate crateB;
crateB::try_foo(foo2);
crateB::try_bar(bar2);
}
}
|
identifier_body
|
|
crateC.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This tests the extra note reported when a type error deals with
// seemingly identical types.
// The main use case of this error is when there are two crates
// (generally different versions of the same crate) with the same name
// causing a type mismatch.
// The test is nearly the same as the one in
// compile-fail/type-mismatch-same-crate-name.rs
// but deals with the case where one of the crates
// is only introduced as an indirect dependency,
// and the type is accessed via a re-export.
// This is similar to how the error can be introduced
// when using cargo's automatic dependency resolution.
extern crate crateA;
fn
|
() {
let foo2 = crateA::Foo;
let bar2 = crateA::bar();
{
extern crate crateB;
crateB::try_foo(foo2);
crateB::try_bar(bar2);
}
}
|
main
|
identifier_name
|
crateC.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This tests the extra note reported when a type error deals with
// seemingly identical types.
// The main use case of this error is when there are two crates
// (generally different versions of the same crate) with the same name
// causing a type mismatch.
// The test is nearly the same as the one in
// compile-fail/type-mismatch-same-crate-name.rs
// but deals with the case where one of the crates
|
// This is similar to how the error can be introduced
// when using cargo's automatic dependency resolution.
extern crate crateA;
fn main() {
let foo2 = crateA::Foo;
let bar2 = crateA::bar();
{
extern crate crateB;
crateB::try_foo(foo2);
crateB::try_bar(bar2);
}
}
|
// is only introduced as an indirect dependency,
// and the type is accessed via a re-export.
|
random_line_split
|
mod.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#[macro_use]
mod macros;
mod api;
mod blockdev;
mod connection;
mod consts;
mod filesystem;
mod pool;
mod tree;
mod types;
|
use std::{
collections::HashMap,
sync::{Condvar, Mutex},
};
use dbus::Path;
use crate::engine::PoolUuid;
pub use self::{
connection::DbusConnectionHandler, tree::DbusTreeHandler, types::DbusAction,
udev::DbusUdevHandler, util::create_dbus_handlers,
};
type CreatePoolState = Mutex<HashMap<PoolUuid, Option<(Path<'static>, Vec<Path<'static>>)>>>;
lazy_static! {
pub static ref POOL_CONDVAR: Condvar = Condvar::new();
pub static ref POOL_SETUP_STATE: CreatePoolState = Mutex::new(HashMap::new());
}
|
mod udev;
mod util;
|
random_line_split
|
main.rs
|
extern crate hyper;
extern crate iron;
extern crate rand;
extern crate urlencoded;
#[cfg(test)]
extern crate iron_test;
mod handlers;
mod storage;
use iron::prelude::*;
use std::env;
use self::handlers::UrlShortenerHandler;
use self::storage::persisted::PersistedKeyValueStore;
fn
|
() {
let port = env::var("PORT").unwrap_or("3000".to_string());
let short_url_prefix = env::var("SHORT_URL_PREFIX")
.unwrap_or(format!("http://localhost:{}/", port));
println!("Starting URL Shortener on port {}...", port);
let addr = format!("localhost:{}", port);
let key_value_store = PersistedKeyValueStore::new(env::var("DATA_FILE")
.unwrap_or("short.urls".to_owned()));
let handler = UrlShortenerHandler::new(short_url_prefix, key_value_store);
match Iron::new(handler).http(&addr[..]) {
Ok(_) => println!("Server started"),
Err(e) => {
println!("Error: {}", e.to_string());
std::process::exit(-1);
}
}
}
|
main
|
identifier_name
|
main.rs
|
extern crate hyper;
extern crate iron;
extern crate rand;
extern crate urlencoded;
#[cfg(test)]
extern crate iron_test;
mod handlers;
mod storage;
use iron::prelude::*;
use std::env;
use self::handlers::UrlShortenerHandler;
use self::storage::persisted::PersistedKeyValueStore;
fn main() {
let port = env::var("PORT").unwrap_or("3000".to_string());
let short_url_prefix = env::var("SHORT_URL_PREFIX")
.unwrap_or(format!("http://localhost:{}/", port));
println!("Starting URL Shortener on port {}...", port);
let addr = format!("localhost:{}", port);
let key_value_store = PersistedKeyValueStore::new(env::var("DATA_FILE")
.unwrap_or("short.urls".to_owned()));
let handler = UrlShortenerHandler::new(short_url_prefix, key_value_store);
match Iron::new(handler).http(&addr[..]) {
Ok(_) => println!("Server started"),
Err(e) =>
|
}
}
|
{
println!("Error: {}", e.to_string());
std::process::exit(-1);
}
|
conditional_block
|
main.rs
|
extern crate hyper;
extern crate iron;
extern crate rand;
extern crate urlencoded;
|
mod storage;
use iron::prelude::*;
use std::env;
use self::handlers::UrlShortenerHandler;
use self::storage::persisted::PersistedKeyValueStore;
fn main() {
let port = env::var("PORT").unwrap_or("3000".to_string());
let short_url_prefix = env::var("SHORT_URL_PREFIX")
.unwrap_or(format!("http://localhost:{}/", port));
println!("Starting URL Shortener on port {}...", port);
let addr = format!("localhost:{}", port);
let key_value_store = PersistedKeyValueStore::new(env::var("DATA_FILE")
.unwrap_or("short.urls".to_owned()));
let handler = UrlShortenerHandler::new(short_url_prefix, key_value_store);
match Iron::new(handler).http(&addr[..]) {
Ok(_) => println!("Server started"),
Err(e) => {
println!("Error: {}", e.to_string());
std::process::exit(-1);
}
}
}
|
#[cfg(test)]
extern crate iron_test;
mod handlers;
|
random_line_split
|
main.rs
|
extern crate hyper;
extern crate iron;
extern crate rand;
extern crate urlencoded;
#[cfg(test)]
extern crate iron_test;
mod handlers;
mod storage;
use iron::prelude::*;
use std::env;
use self::handlers::UrlShortenerHandler;
use self::storage::persisted::PersistedKeyValueStore;
fn main()
|
{
let port = env::var("PORT").unwrap_or("3000".to_string());
let short_url_prefix = env::var("SHORT_URL_PREFIX")
.unwrap_or(format!("http://localhost:{}/", port));
println!("Starting URL Shortener on port {}...", port);
let addr = format!("localhost:{}", port);
let key_value_store = PersistedKeyValueStore::new(env::var("DATA_FILE")
.unwrap_or("short.urls".to_owned()));
let handler = UrlShortenerHandler::new(short_url_prefix, key_value_store);
match Iron::new(handler).http(&addr[..]) {
Ok(_) => println!("Server started"),
Err(e) => {
println!("Error: {}", e.to_string());
std::process::exit(-1);
}
}
}
|
identifier_body
|
|
reusable_diagnostic.rs
|
use codespan_reporting::diagnostic::{Diagnostic, Label};
|
use codespan_reporting::term::termcolor::StandardStream;
use codespan_reporting::term::{self, ColorArg};
use std::ops::Range;
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
#[structopt(name = "emit")]
pub struct Opts {
#[structopt(long = "color",
parse(try_from_str),
default_value = "auto",
possible_values = ColorArg::VARIANTS,
case_insensitive = true
)]
color: ColorArg,
}
fn main() -> anyhow::Result<()> {
let file = SimpleFile::new(
"main.rs",
unindent::unindent(
r#"
fn main() {
let foo: i32 = "hello, world";
foo += 1;
}
"#,
),
);
let errors = [
Error::MismatchType(
Item::new(20..23, "i32"),
Item::new(31..45, "\"hello, world\""),
),
Error::MutatingImmutable(Item::new(20..23, "foo"), Item::new(51..59, "foo += 1")),
];
let opts = Opts::from_args();
let writer = StandardStream::stderr(opts.color.into());
let config = codespan_reporting::term::Config::default();
for diagnostic in errors.iter().map(Error::report) {
term::emit(&mut writer.lock(), &config, &file, &diagnostic)?;
}
Ok(())
}
/// An error enum that represents all possible errors within your program
enum Error {
MismatchType(Item, Item),
MutatingImmutable(Item, Item),
}
impl Error {
fn report(&self) -> Diagnostic<()> {
match self {
Error::MismatchType(left, right) => Diagnostic::error()
.with_code("E0308")
.with_message("mismatch types")
.with_labels(vec![
Label::primary((), right.range.clone()).with_message(format!(
"Expected `{}`, found: `{}`",
left.content, right.content,
)),
Label::secondary((), left.range.clone()).with_message("expected due to this"),
]),
Error::MutatingImmutable(original, mutating) => Diagnostic::error()
.with_code("E0384")
.with_message(format!(
"cannot mutate immutable variable `{}`",
original.content,
))
.with_labels(vec![
Label::secondary((), original.range.clone()).with_message(unindent::unindent(
&format!(
r#"
first assignment to `{0}`
help: make this binding mutable: `mut {0}`
"#,
original.content,
),
)),
Label::primary((), mutating.range.clone())
.with_message("cannot assign twice to immutable variable"),
]),
}
}
}
/// An item in the source code to be used in the `Error` enum.
/// In a more complex program it could also contain a `files::FileId` to handle errors that occur inside multiple files.
struct Item {
range: Range<usize>,
content: String,
}
impl Item {
fn new(range: Range<usize>, content: impl Into<String>) -> Item {
let content = content.into();
Item { range, content }
}
}
|
use codespan_reporting::files::SimpleFile;
|
random_line_split
|
reusable_diagnostic.rs
|
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::files::SimpleFile;
use codespan_reporting::term::termcolor::StandardStream;
use codespan_reporting::term::{self, ColorArg};
use std::ops::Range;
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
#[structopt(name = "emit")]
pub struct Opts {
#[structopt(long = "color",
parse(try_from_str),
default_value = "auto",
possible_values = ColorArg::VARIANTS,
case_insensitive = true
)]
color: ColorArg,
}
fn main() -> anyhow::Result<()> {
let file = SimpleFile::new(
"main.rs",
unindent::unindent(
r#"
fn main() {
let foo: i32 = "hello, world";
foo += 1;
}
"#,
),
);
let errors = [
Error::MismatchType(
Item::new(20..23, "i32"),
Item::new(31..45, "\"hello, world\""),
),
Error::MutatingImmutable(Item::new(20..23, "foo"), Item::new(51..59, "foo += 1")),
];
let opts = Opts::from_args();
let writer = StandardStream::stderr(opts.color.into());
let config = codespan_reporting::term::Config::default();
for diagnostic in errors.iter().map(Error::report) {
term::emit(&mut writer.lock(), &config, &file, &diagnostic)?;
}
Ok(())
}
/// An error enum that represents all possible errors within your program
enum Error {
MismatchType(Item, Item),
MutatingImmutable(Item, Item),
}
impl Error {
fn
|
(&self) -> Diagnostic<()> {
match self {
Error::MismatchType(left, right) => Diagnostic::error()
.with_code("E0308")
.with_message("mismatch types")
.with_labels(vec![
Label::primary((), right.range.clone()).with_message(format!(
"Expected `{}`, found: `{}`",
left.content, right.content,
)),
Label::secondary((), left.range.clone()).with_message("expected due to this"),
]),
Error::MutatingImmutable(original, mutating) => Diagnostic::error()
.with_code("E0384")
.with_message(format!(
"cannot mutate immutable variable `{}`",
original.content,
))
.with_labels(vec![
Label::secondary((), original.range.clone()).with_message(unindent::unindent(
&format!(
r#"
first assignment to `{0}`
help: make this binding mutable: `mut {0}`
"#,
original.content,
),
)),
Label::primary((), mutating.range.clone())
.with_message("cannot assign twice to immutable variable"),
]),
}
}
}
/// An item in the source code to be used in the `Error` enum.
/// In a more complex program it could also contain a `files::FileId` to handle errors that occur inside multiple files.
struct Item {
range: Range<usize>,
content: String,
}
impl Item {
fn new(range: Range<usize>, content: impl Into<String>) -> Item {
let content = content.into();
Item { range, content }
}
}
|
report
|
identifier_name
|
reusable_diagnostic.rs
|
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::files::SimpleFile;
use codespan_reporting::term::termcolor::StandardStream;
use codespan_reporting::term::{self, ColorArg};
use std::ops::Range;
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
#[structopt(name = "emit")]
pub struct Opts {
#[structopt(long = "color",
parse(try_from_str),
default_value = "auto",
possible_values = ColorArg::VARIANTS,
case_insensitive = true
)]
color: ColorArg,
}
fn main() -> anyhow::Result<()> {
let file = SimpleFile::new(
"main.rs",
unindent::unindent(
r#"
fn main() {
let foo: i32 = "hello, world";
foo += 1;
}
"#,
),
);
let errors = [
Error::MismatchType(
Item::new(20..23, "i32"),
Item::new(31..45, "\"hello, world\""),
),
Error::MutatingImmutable(Item::new(20..23, "foo"), Item::new(51..59, "foo += 1")),
];
let opts = Opts::from_args();
let writer = StandardStream::stderr(opts.color.into());
let config = codespan_reporting::term::Config::default();
for diagnostic in errors.iter().map(Error::report) {
term::emit(&mut writer.lock(), &config, &file, &diagnostic)?;
}
Ok(())
}
/// An error enum that represents all possible errors within your program
enum Error {
MismatchType(Item, Item),
MutatingImmutable(Item, Item),
}
impl Error {
fn report(&self) -> Diagnostic<()>
|
&format!(
r#"
first assignment to `{0}`
help: make this binding mutable: `mut {0}`
"#,
original.content,
),
)),
Label::primary((), mutating.range.clone())
.with_message("cannot assign twice to immutable variable"),
]),
}
}
}
/// An item in the source code to be used in the `Error` enum.
/// In a more complex program it could also contain a `files::FileId` to handle errors that occur inside multiple files.
struct Item {
range: Range<usize>,
content: String,
}
impl Item {
fn new(range: Range<usize>, content: impl Into<String>) -> Item {
let content = content.into();
Item { range, content }
}
}
|
{
match self {
Error::MismatchType(left, right) => Diagnostic::error()
.with_code("E0308")
.with_message("mismatch types")
.with_labels(vec![
Label::primary((), right.range.clone()).with_message(format!(
"Expected `{}`, found: `{}`",
left.content, right.content,
)),
Label::secondary((), left.range.clone()).with_message("expected due to this"),
]),
Error::MutatingImmutable(original, mutating) => Diagnostic::error()
.with_code("E0384")
.with_message(format!(
"cannot mutate immutable variable `{}`",
original.content,
))
.with_labels(vec![
Label::secondary((), original.range.clone()).with_message(unindent::unindent(
|
identifier_body
|
mod.rs
|
/// VFS and filesystem drivers
///
/// Virtual file system abstraction and filesystem drivers
pub mod fs;
/// ATA specific disk driver
|
pub mod ata;
/// A storage disk
///
/// This is a trait implemented by specific disk types (ATA, ATAPI, SATA) for transferring data
/// between a disk and memory.
pub trait Disk {
/// Read from disk
///
/// Read content from the disk at the specified block. To read multiple blocks/sectors, change the
/// `count` parameter to the chosen number of blocks. The `buffer` should be an array of `u8`s
/// of size `count * 512`. This is where the received data will be written.
fn read(&self, block: u64, count: u16, buffer: &mut [u8]);
/// Write to disk
///
/// Write content to the disk at the specified block. To write multiple blocks/sectors, change the
/// `count` parameter to the chosen number of blocks. The `buffer` should be an array of `u8`s
/// of size `count * 512`. This is where the data to be written will be located.
fn write(&self, block: u64, count: u16, buffer: &[u8]);
}
|
random_line_split
|
|
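The `Disk` trait above fixes the transfer contract at `count * 512` bytes per call. Below is a minimal caller sketch assuming only the trait as shown; the helper function and its name are illustrative and not part of the source.
// Read `count` 512-byte sectors starting at `block` into `buffer`.
// Hypothetical helper built only on the `Disk` trait defined above.
fn read_sectors<D: Disk>(disk: &D, block: u64, count: u16, buffer: &mut [u8]) {
    // The trait documents that the buffer must be exactly `count * 512` bytes long.
    assert_eq!(buffer.len(), count as usize * 512);
    disk.read(block, count, buffer);
}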
bool_filter.rs
|
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use libimagstore::store::Entry;
use filters::filter::Filter;
pub struct BoolFilter(bool);
impl BoolFilter {
pub fn new(b: bool) -> BoolFilter {
BoolFilter(b)
|
}
impl Filter<Entry> for BoolFilter {
fn filter(&self, _: &Entry) -> bool {
self.0
}
}
|
}
|
random_line_split
|
bool_filter.rs
|
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use libimagstore::store::Entry;
use filters::filter::Filter;
pub struct
|
(bool);
impl BoolFilter {
pub fn new(b: bool) -> BoolFilter {
BoolFilter(b)
}
}
impl Filter<Entry> for BoolFilter {
fn filter(&self, _: &Entry) -> bool {
self.0
}
}
|
BoolFilter
|
identifier_name
|
bool_filter.rs
|
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use libimagstore::store::Entry;
use filters::filter::Filter;
pub struct BoolFilter(bool);
impl BoolFilter {
pub fn new(b: bool) -> BoolFilter {
BoolFilter(b)
}
}
impl Filter<Entry> for BoolFilter {
fn filter(&self, _: &Entry) -> bool
|
}
|
{
self.0
}
|
identifier_body
|
driver.rs
|
// Copyright (c) 2016 P.Y. Laligand
use std::fs;
use std::io::Read;
use std::path::Path;
use google_drive3::Drive;
use hyper::Client;
use serde_json;
use yup_oauth2::{Authenticator, DefaultAuthenticatorDelegate, ApplicationSecret, ConsoleApplicationSecret, DiskTokenStorage, FlowType};
use super::error::DriveError;
/// Interface to the Google Drive service.
///
/// Takes care of authentication.
pub struct Driver {
hub: Drive<Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>,
}
impl Driver {
/// Reads oAuth application config from a given file.
fn read_secret<P: AsRef<Path>>(path: P) -> Result<ApplicationSecret, DriveError> {
let mut file = try!(fs::File::open(path));
let mut content = String::new();
try!(file.read_to_string(&mut content));
|
pub fn new(config_path: &String, credentials_path: &String) -> Result<Driver, DriveError> {
let secret = try!(Driver::read_secret(config_path));
let storage = try!(DiskTokenStorage::new(credentials_path));
let auth = Authenticator::new(&secret, DefaultAuthenticatorDelegate,
Client::new(),
storage,
Some(FlowType::InstalledInteractive));
Ok(Driver { hub: Drive::new(Client::new(), auth) })
}
pub fn get_user_details(&self) -> Result<String, DriveError> {
let response = try!(self.hub.about().get().param("fields", "user").doit());
Ok(format!("{:?}", response))
}
}
|
let secret: ConsoleApplicationSecret = try!(serde_json::from_str(content.as_str()));
secret.installed.ok_or(DriveError::Described(String::from("No params for installed flow")))
}
|
random_line_split
|
driver.rs
|
// Copyright (c) 2016 P.Y. Laligand
use std::fs;
use std::io::Read;
use std::path::Path;
use google_drive3::Drive;
use hyper::Client;
use serde_json;
use yup_oauth2::{Authenticator, DefaultAuthenticatorDelegate, ApplicationSecret, ConsoleApplicationSecret, DiskTokenStorage, FlowType};
use super::error::DriveError;
/// Interface to the Google Drive service.
///
/// Takes care of authentication.
pub struct
|
{
hub: Drive<Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>,
}
impl Driver {
/// Reads oAuth application config from a given file.
fn read_secret<P: AsRef<Path>>(path: P) -> Result<ApplicationSecret, DriveError> {
let mut file = try!(fs::File::open(path));
let mut content = String::new();
try!(file.read_to_string(&mut content));
let secret: ConsoleApplicationSecret = try!(serde_json::from_str(content.as_str()));
secret.installed.ok_or(DriveError::Described(String::from("No params for installed flow")))
}
pub fn new(config_path: &String, credentials_path: &String) -> Result<Driver, DriveError> {
let secret = try!(Driver::read_secret(config_path));
let storage = try!(DiskTokenStorage::new(credentials_path));
let auth = Authenticator::new(&secret, DefaultAuthenticatorDelegate,
Client::new(),
storage,
Some(FlowType::InstalledInteractive));
Ok(Driver { hub: Drive::new(Client::new(), auth) })
}
pub fn get_user_details(&self) -> Result<String, DriveError> {
let response = try!(self.hub.about().get().param("fields", "user").doit());
Ok(format!("{:?}", response))
}
}
|
Driver
|
identifier_name
|
driver.rs
|
// Copyright (c) 2016 P.Y. Laligand
use std::fs;
use std::io::Read;
use std::path::Path;
use google_drive3::Drive;
use hyper::Client;
use serde_json;
use yup_oauth2::{Authenticator, DefaultAuthenticatorDelegate, ApplicationSecret, ConsoleApplicationSecret, DiskTokenStorage, FlowType};
use super::error::DriveError;
/// Interface to the Google Drive service.
///
/// Takes care of authentication.
pub struct Driver {
hub: Drive<Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>,
}
impl Driver {
/// Reads oAuth application config from a given file.
fn read_secret<P: AsRef<Path>>(path: P) -> Result<ApplicationSecret, DriveError>
|
pub fn new(config_path: &String, credentials_path: &String) -> Result<Driver, DriveError> {
let secret = try!(Driver::read_secret(config_path));
let storage = try!(DiskTokenStorage::new(credentials_path));
let auth = Authenticator::new(&secret, DefaultAuthenticatorDelegate,
Client::new(),
storage,
Some(FlowType::InstalledInteractive));
Ok(Driver { hub: Drive::new(Client::new(), auth) })
}
pub fn get_user_details(&self) -> Result<String, DriveError> {
let response = try!(self.hub.about().get().param("fields", "user").doit());
Ok(format!("{:?}", response))
}
}
|
{
let mut file = try!(fs::File::open(path));
let mut content = String::new();
try!(file.read_to_string(&mut content));
let secret: ConsoleApplicationSecret = try!(serde_json::from_str(content.as_str()));
secret.installed.ok_or(DriveError::Described(String::from("No params for installed flow")))
}
|
identifier_body
|
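A hedged usage sketch for the `Driver` type above; the file paths are placeholders and the `?` operator stands in for the `try!` style used in the excerpt.
// Hypothetical caller; assumes `Driver` and `DriveError` from the module above are in scope.
fn print_drive_user() -> Result<(), DriveError> {
    let config = String::from("client_secret.json");     // placeholder path
    let credentials = String::from("token_cache.json");  // placeholder path
    let driver = Driver::new(&config, &credentials)?;
    println!("{}", driver.get_user_details()?);
    Ok(())
}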
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(arc_weak)]
#![cfg_attr(any(target_os = "linux", target_os = "android"), feature(box_raw))]
#![feature(box_syntax)]
#![feature(custom_attribute)]
#![feature(custom_derive)]
#![feature(hashmap_hasher)]
#![cfg_attr(any(target_os = "linux", target_os = "android"), feature(heap_api))]
#![feature(mpsc_select)]
#![feature(plugin)]
#![feature(str_char)]
#![feature(unique)]
#![feature(vec_push_all)]
#![plugin(plugins)]
#![plugin(serde_macros)]
#[macro_use]
extern crate log;
extern crate serde;
extern crate azure;
#[macro_use] extern crate bitflags;
extern crate fnv;
extern crate euclid;
extern crate ipc_channel;
#[macro_use]
extern crate lazy_static;
extern crate layers;
extern crate libc;
#[macro_use]
extern crate profile_traits;
extern crate script_traits;
extern crate rustc_serialize;
extern crate net_traits;
#[macro_use]
extern crate util;
extern crate msg;
extern crate rand;
extern crate smallvec;
extern crate string_cache;
extern crate style;
extern crate skia;
extern crate time;
extern crate url;
extern crate gfx_traits;
extern crate canvas_traits;
// Eventually we would like the shaper to be pluggable, as many operating systems have their own
// shapers. For now, however, this is a hard dependency.
extern crate harfbuzz;
// Linux and Android-specific library dependencies
#[cfg(any(target_os = "linux", target_os = "android"))]
extern crate fontconfig;
#[cfg(any(target_os = "linux", target_os = "android"))]
|
// Mac OS-specific library dependencies
#[cfg(target_os = "macos")] extern crate core_foundation;
#[cfg(target_os = "macos")] extern crate core_graphics;
#[cfg(target_os = "macos")] extern crate core_text;
pub use paint_context::PaintContext;
// Private painting modules
mod paint_context;
#[path = "display_list/mod.rs"]
pub mod display_list;
pub mod paint_task;
// Fonts
pub mod font;
pub mod font_context;
pub mod font_cache_task;
pub mod font_template;
// Misc.
mod filters;
// Platform-specific implementations.
#[path = "platform/mod.rs"]
pub mod platform;
// Text
#[path = "text/mod.rs"]
pub mod text;
|
extern crate freetype;
|
random_line_split
|
lib.rs
|
// The MIT License (MIT)
//
// Copyright (c) 2016 Skylor R. Schermer
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
////////////////////////////////////////////////////////////////////////////////
//!
//! Provides structured `Palette` objects for storing and generating colors.
//!
//! The palette is a tree-like structure that acts as a collection of
//! 'Cell's into which color elements are placed. Color elements will then
//! lazily generate a color when queried. This allows for the construction of
//! dynamic palette structures that can generate related colors based off of a
//! small subset of 'control' colors.
//!
//! More practically, `Cell`s are identified by `Address`, and each cell
//! contains a single `Expression`, which will generate a `Color` when either
//! the Cell's or Expression's `color` method is called. Expressions are
//! categorized by 'order', which denotes the number of dependencies needed to
//! generate a color. For example, a second order element is dependent upon two
//! other colors, while a zeroth order color element is simply a color. These
//! dependencies are expressed through references to other cells in the palette.
//!
////////////////////////////////////////////////////////////////////////////////
extern crate color;
extern crate interval;
// Submodules.
#[warn(missing_docs)]
pub mod address;
#[warn(missing_docs)]
pub mod cell;
#[warn(missing_docs)]
pub mod data;
#[warn(missing_docs)]
pub mod expression;
#[warn(missing_docs)]
pub mod format;
#[warn(missing_docs)]
pub mod operation;
#[warn(missing_docs)]
pub mod result;
#[warn(missing_docs)]
pub mod utilities;
// Non-local re-exports.
pub use color::Color;
// Submodule re-exports
pub use address::{
Address,
Reference,
};
pub use expression::Expression;
pub use format::Format;
// Local imports.
use data::Data;
use operation::{PaletteOperation, OperationHistory};
use result::Result;
// Standard imports.
use std::fmt;
////////////////////////////////////////////////////////////////////////////////
// Palette
////////////////////////////////////////////////////////////////////////////////
/// Encapsulates a single color palette.
#[derive(Debug)]
pub struct Palette {
/// The `Palette`'s operation-relevant data.
data: Data,
/// The operation undo and redo history.
operation_history: Option<OperationHistory>,
/// The palette format.
format: Format,
}
impl Palette {
/// Creates a new `Palette` with the given name.
pub fn new<S>(name: S, format: Format, history: bool)
-> Self where S: Into<String>
{
let mut pal = Palette {
data: Default::default(),
operation_history: if history
|
else {
None
},
format: format,
};
pal.data.set_name(Reference::all(), name.into());
format.initialize(&mut pal.data);
pal
}
/// Returns the number of color `Cell`s in the `Palette`.
pub fn len(&self) -> usize {
self.data.len()
}
/// Returns whether the `Palette` contains any color `Cell`s.
pub fn is_empty(&self) -> bool {
self.data.is_empty()
}
/// Returns the total number of history entries recorded.
pub fn history_len(&self) -> (usize, usize) {
if let Some(ref history) = self.operation_history {
(history.undo_entries.len(), history.redo_entries.len())
} else {
(0, 0)
}
}
/// Returns whether the `Palette` contains any history entries.
pub fn history_is_empty(&self) -> bool {
if let Some(ref history) = self.operation_history {
history.undo_entries.is_empty() && history.redo_entries.is_empty()
} else {
false
}
}
/// Returns the color at the given address, or None if the cell is empty.
pub fn color(&self, address: Address) -> Option<Color> {
self.data.cell(address).and_then(|cell| cell.color())
}
/// Applies the given operation to the `Palette`. Usually, this will just
/// defer to the `PaletteOperation`'s apply method, but this could also
/// provide extra functionality such as undo/redo and format-specific
/// checks.
#[allow(unused_variables)]
pub fn apply(
&mut self,
operation: Box<PaletteOperation>)
-> Result<()>
{
self.format.apply_operation(self, operation)
}
/// Reverses the most recently applied operation.
#[allow(unused_variables)]
pub fn undo(&mut self) -> Result<()> {
self.format.undo(self)
}
/// Reverses the most recently applied undo operation.
#[allow(unused_variables)]
pub fn redo(&mut self) -> Result<()> {
self.format.redo(self)
}
}
// Default is empty `Palette` with default format.
impl Default for Palette {
fn default() -> Self {
Palette {
data: Default::default(),
operation_history: None,
format: Format::Default,
}
}
}
// Display `Palette` in readable format.
impl fmt::Display for Palette {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Format: {:?}, History: {:?}\n{}",
self.format,
self.history_len(),
self.data)
}
}
|
{
Some(Default::default())
}
|
conditional_block
|
lib.rs
|
// The MIT License (MIT)
//
// Copyright (c) 2016 Skylor R. Schermer
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
////////////////////////////////////////////////////////////////////////////////
//!
//! Provides structured `Palette` objects for storing and generating colors.
//!
//! The palette is a tree-like structure that acts as a collection of
//! 'Cell's into which color elements are placed. Color elements will then
//! lazily generate a color when queried. This allows for the construction of
//! dynamic palette structures that can generate related colors based off of a
//! small subset of 'control' colors.
//!
//! More practically, `Cell`s are identified by `Address`, and each cell
//! contains a single `Expression`, which will generate a `Color` when either
//! the Cell's or Expression's `color` method is called. Expressions are
//! categorized by 'order', which denotes the number of dependencies needed to
//! generate a color. For example, a second order element is dependent upon two
//! other colors, while a zeroth order color element is simply a color. These
//! dependencies are expressed through references to other cells in the palette.
//!
////////////////////////////////////////////////////////////////////////////////
extern crate color;
extern crate interval;
// Submodules.
#[warn(missing_docs)]
pub mod address;
#[warn(missing_docs)]
pub mod cell;
#[warn(missing_docs)]
pub mod data;
#[warn(missing_docs)]
pub mod expression;
#[warn(missing_docs)]
pub mod format;
#[warn(missing_docs)]
pub mod operation;
#[warn(missing_docs)]
pub mod result;
#[warn(missing_docs)]
pub mod utilities;
// Non-local re-exports.
pub use color::Color;
// Submodule re-exports
pub use address::{
Address,
Reference,
};
pub use expression::Expression;
pub use format::Format;
// Local imports.
use data::Data;
use operation::{PaletteOperation, OperationHistory};
use result::Result;
// Standard imports.
use std::fmt;
////////////////////////////////////////////////////////////////////////////////
// Palette
////////////////////////////////////////////////////////////////////////////////
/// Encapsulates a single color palette.
#[derive(Debug)]
pub struct Palette {
/// The `Palette`'s operation-relevant data.
data: Data,
/// The operation undo and redo history.
operation_history: Option<OperationHistory>,
/// The palette format.
format: Format,
}
impl Palette {
/// Creates a new `Palette` with the given name.
pub fn new<S>(name: S, format: Format, history: bool)
-> Self where S: Into<String>
{
let mut pal = Palette {
data: Default::default(),
operation_history: if history {
Some(Default::default())
} else {
None
},
format: format,
};
pal.data.set_name(Reference::all(), name.into());
format.initialize(&mut pal.data);
pal
}
/// Returns the number of color `Cell`s in the `Palette`.
pub fn len(&self) -> usize {
self.data.len()
}
/// Returns whether the `Palette` contains any color `Cell`s.
pub fn is_empty(&self) -> bool {
self.data.is_empty()
}
/// Returns the total number of history entries recorded.
pub fn history_len(&self) -> (usize, usize) {
if let Some(ref history) = self.operation_history {
(history.undo_entries.len(), history.redo_entries.len())
} else {
(0, 0)
}
}
/// Returns whether the `Palette` contains any history entries.
pub fn history_is_empty(&self) -> bool {
if let Some(ref history) = self.operation_history {
history.undo_entries.is_empty() && history.redo_entries.is_empty()
} else {
false
}
}
/// Returns the color at the given address, or None if the cell is empty.
pub fn color(&self, address: Address) -> Option<Color> {
self.data.cell(address).and_then(|cell| cell.color())
}
/// Applies the given operation to the `Palette`. Usually, this will just
/// defer to the `PaletteOperation`'s apply method, but this could also
/// provide extra functionality such as undo/redo and format-specific
/// checks.
#[allow(unused_variables)]
pub fn apply(
&mut self,
operation: Box<PaletteOperation>)
-> Result<()>
{
self.format.apply_operation(self, operation)
}
/// Reverses the most recently applied operation.
#[allow(unused_variables)]
pub fn undo(&mut self) -> Result<()> {
self.format.undo(self)
}
/// Reverses the most recently applied undo operation.
#[allow(unused_variables)]
pub fn redo(&mut self) -> Result<()> {
self.format.redo(self)
}
}
// Default is empty `Palette` with default format.
impl Default for Palette {
fn
|
() -> Self {
Palette {
data: Default::default(),
operation_history: None,
format: Format::Default,
}
}
}
// Display `Palette` in readable format.
impl fmt::Display for Palette {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Format: {:?}, History: {:?}\n{}",
self.format,
self.history_len(),
self.data)
}
}
|
default
|
identifier_name
|
lib.rs
|
// The MIT License (MIT)
//
// Copyright (c) 2016 Skylor R. Schermer
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
////////////////////////////////////////////////////////////////////////////////
//!
//! Provides structured `Palette` objects for storing and generating colors.
//!
//! The palette is a tree-like structure that acts as a collection of
//! 'Cell's into which color elements are placed. Color elements will then
//! lazily generate a color when queried. This allows for the construction of
//! dynamic palette structures that can generate related colors based off of a
//! small subset of 'control' colors.
//!
//! More practically, `Cell`s are identified by `Address`, and each cell
//! contains a single `Expression`, which will generate a `Color` when either
//! the Cell's or Expression's `color` method is called. Expressions are
//! categorized by 'order', which denotes the number of dependencies needed to
//! generate a color. For example, a second order element is dependent upon two
//! other colors, while a zeroth order color element is simply a color. These
//! dependencies are expressed through references to other cells in the palette.
//!
////////////////////////////////////////////////////////////////////////////////
extern crate color;
extern crate interval;
// Submodules.
#[warn(missing_docs)]
pub mod address;
#[warn(missing_docs)]
pub mod cell;
#[warn(missing_docs)]
pub mod data;
#[warn(missing_docs)]
pub mod expression;
#[warn(missing_docs)]
pub mod format;
#[warn(missing_docs)]
pub mod operation;
#[warn(missing_docs)]
pub mod result;
#[warn(missing_docs)]
pub mod utilities;
// Non-local re-exports.
pub use color::Color;
// Submodule re-exports
pub use address::{
Address,
Reference,
};
pub use expression::Expression;
pub use format::Format;
// Local imports.
use data::Data;
use operation::{PaletteOperation, OperationHistory};
use result::Result;
|
// Standard imports.
use std::fmt;
////////////////////////////////////////////////////////////////////////////////
// Palette
////////////////////////////////////////////////////////////////////////////////
/// Encapsulates a single color palette.
#[derive(Debug)]
pub struct Palette {
/// The `Palette`'s operation-relevant data.
data: Data,
/// The operation undo and redo history.
operation_history: Option<OperationHistory>,
/// The palette format.
format: Format,
}
impl Palette {
/// Creates a new `Palette` with the given name.
pub fn new<S>(name: S, format: Format, history: bool)
-> Self where S: Into<String>
{
let mut pal = Palette {
data: Default::default(),
operation_history: if history {
Some(Default::default())
} else {
None
},
format: format,
};
pal.data.set_name(Reference::all(), name.into());
format.initialize(&mut pal.data);
pal
}
/// Returns the number of color `Cell`s in the `Palette`.
pub fn len(&self) -> usize {
self.data.len()
}
/// Returns whether the `Palette` contains any color `Cell`s.
pub fn is_empty(&self) -> bool {
self.data.is_empty()
}
/// Returns the total number of history entries recorded.
pub fn history_len(&self) -> (usize, usize) {
if let Some(ref history) = self.operation_history {
(history.undo_entries.len(), history.redo_entries.len())
} else {
(0, 0)
}
}
/// Returns whether the `Palette` contains any history entries.
pub fn history_is_empty(&self) -> bool {
if let Some(ref history) = self.operation_history {
history.undo_entries.is_empty() && history.redo_entries.is_empty()
} else {
false
}
}
/// Returns the color at the given address, or None if the cell is empty.
pub fn color(&self, address: Address) -> Option<Color> {
self.data.cell(address).and_then(|cell| cell.color())
}
/// Applies the given operation to the `Palette`. Usually, this will just
/// defer to the `PaletteOperation`'s apply method, but this could also
/// provide extra functionality such as undo/redo and format-specific
/// checks.
#[allow(unused_variables)]
pub fn apply(
&mut self,
operation: Box<PaletteOperation>)
-> Result<()>
{
self.format.apply_operation(self, operation)
}
/// Reverses the most recently applied operation.
#[allow(unused_variables)]
pub fn undo(&mut self) -> Result<()> {
self.format.undo(self)
}
/// Reverses the most recently applied undo operation.
#[allow(unused_variables)]
pub fn redo(&mut self) -> Result<()> {
self.format.redo(self)
}
}
// Default is empty `Palette` with default format.
impl Default for Palette {
fn default() -> Self {
Palette {
data: Default::default(),
operation_history: None,
format: Format::Default,
}
}
}
// Display `Palette` in readable format.
impl fmt::Display for Palette {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Format: {:?}, History: {:?}\n{}",
self.format,
self.history_len(),
self.data)
}
}
|
random_line_split
|
|
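A minimal sketch of driving the `Palette` API shown above, using only methods visible in the excerpt; the call site and the choice of `Format::Default` are assumptions, not taken from the source.
// Hypothetical usage of the `Palette` constructor and accessors from the excerpt.
fn palette_demo() {
    // `&str` satisfies the `S: Into<String>` bound on `Palette::new`.
    let pal = Palette::new("demo", Format::Default, true);
    println!("cells: {}, empty: {}", pal.len(), pal.is_empty());
    println!("history (undo, redo): {:?}", pal.history_len());
}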
zoodefs.rs
|
pub mod perms {
pub const READ: i32 = 1;
pub const WRITE: i32 = 1 << 1;
pub const CREATE: i32 = 1 << 2;
pub const DELETE: i32 = 1 << 3;
pub const ADMIN: i32 = 1 << 4;
pub const ALL: i32 = READ | WRITE | CREATE | DELETE | ADMIN;
}
pub mod acls {
use perms;
use proto::Acl;
fn acl(perm: i32, scheme: &str, id: &str) -> Vec<Acl>
|
lazy_static!{
pub static ref CREATOR_ALL_ACL: Vec<Acl> = acl(perms::ALL, "auth", "");
pub static ref OPEN_ACL_UNSAFE: Vec<Acl> = acl(perms::ALL, "world", "anyone");
pub static ref READ_ACL_UNSAFE: Vec<Acl> = acl(perms::READ, "world", "anyone");
}
}
|
{
vec![Acl {
perms: perm,
scheme: scheme.to_owned(),
id: id.to_owned(),
}]
}
|
identifier_body
|
zoodefs.rs
|
pub mod perms {
pub const READ: i32 = 1;
pub const WRITE: i32 = 1 << 1;
pub const CREATE: i32 = 1 << 2;
pub const DELETE: i32 = 1 << 3;
pub const ADMIN: i32 = 1 << 4;
pub const ALL: i32 = READ | WRITE | CREATE | DELETE | ADMIN;
}
pub mod acls {
use perms;
use proto::Acl;
fn
|
(perm: i32, scheme: &str, id: &str) -> Vec<Acl> {
vec![Acl {
perms: perm,
scheme: scheme.to_owned(),
id: id.to_owned(),
}]
}
lazy_static!{
pub static ref CREATOR_ALL_ACL: Vec<Acl> = acl(perms::ALL, "auth", "");
pub static ref OPEN_ACL_UNSAFE: Vec<Acl> = acl(perms::ALL, "world", "anyone");
pub static ref READ_ACL_UNSAFE: Vec<Acl> = acl(perms::READ, "world", "anyone");
}
}
|
acl
|
identifier_name
|
zoodefs.rs
|
pub mod perms {
pub const READ: i32 = 1;
pub const WRITE: i32 = 1 << 1;
pub const CREATE: i32 = 1 << 2;
pub const DELETE: i32 = 1 << 3;
pub const ADMIN: i32 = 1 << 4;
pub const ALL: i32 = READ | WRITE | CREATE | DELETE | ADMIN;
}
pub mod acls {
use perms;
use proto::Acl;
fn acl(perm: i32, scheme: &str, id: &str) -> Vec<Acl> {
vec![Acl {
|
}
lazy_static!{
pub static ref CREATOR_ALL_ACL: Vec<Acl> = acl(perms::ALL, "auth", "");
pub static ref OPEN_ACL_UNSAFE: Vec<Acl> = acl(perms::ALL, "world", "anyone");
pub static ref READ_ACL_UNSAFE: Vec<Acl> = acl(perms::READ, "world", "anyone");
}
}
|
perms: perm,
scheme: scheme.to_owned(),
id: id.to_owned(),
}]
|
random_line_split
|
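Since the `perms` constants above are plain bit flags, permission checks reduce to a bitwise AND. A small sketch, assuming it lives alongside the `perms` module; the helper name is illustrative.
// Check whether a permission mask grants write access.
fn grants_write(mask: i32) -> bool {
    mask & perms::WRITE != 0
}
// grants_write(perms::READ | perms::WRITE) is true; grants_write(perms::READ) is false.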
issue-5884.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-pass
#![allow(dead_code)]
// pretty-expanded FIXME #23616
#![feature(box_syntax)]
pub struct Foo {
a: isize,
}
struct
|
<'a> {
a: Box<Option<isize>>,
b: &'a Foo,
}
fn check(a: Box<Foo>) {
let _ic = Bar{ b: &*a, a: box None };
}
pub fn main(){}
|
Bar
|
identifier_name
|
issue-5884.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-pass
#![allow(dead_code)]
// pretty-expanded FIXME #23616
#![feature(box_syntax)]
pub struct Foo {
a: isize,
}
struct Bar<'a> {
a: Box<Option<isize>>,
b: &'a Foo,
}
fn check(a: Box<Foo>) {
let _ic = Bar{ b: &*a, a: box None };
|
}
pub fn main(){}
|
random_line_split
|
|
main.rs
|
#![feature(plugin, custom_derive)]
#![plugin(rocket_codegen)]
extern crate rocket;
extern crate serde_json;
#[macro_use] extern crate lazy_static;
extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
extern crate serde;
extern crate derbyjson;
extern crate handlebars;
extern crate chrono;
use rocket_contrib::Json;
use std::collections::HashMap;
use std::time::Duration;
use rocket::request::Form;
use rocket::response::Redirect;
use rocket::http::RawStr;
mod gamestate;
mod roster;
mod staticpages;
mod guard;
mod timetoderby;
use gamestate::{Penalty, ActiveClock};
use gamestate::jamstate::{Team,TeamJamState};
use guard::{Game, MutGame};
use timetoderby::*;
#[derive(Deserialize)]
struct PenaltyCmd {
skater: String,
code: char,
}
#[post("/penalties/<team>", format = "application/json", data = "<cmd>")]
fn add_penalty(mut game: MutGame, team: Team, cmd: Json<PenaltyCmd>)
-> Json<HashMap<String, Vec<Penalty>>>
{
game.penalty(team, cmd.skater.as_str(), cmd.code);
Json(game.team_penalties(team))
}
#[get("/penalties/<team>")]
fn get_penalties(game: Game, team: Team) -> Json<HashMap<String, Vec<Penalty>>>
{
Json(game.team_penalties(team))
}
#[derive(Serialize)]
struct ScoreUpdate {
score: (u32, u32),
jamscore: (u32, u32),
gameclock: (u8, Duration),
activeclock: ActiveClock,
timeouts: (u8, u8),
reviews: (u8, u8),
}
#[get("/score/update")]
fn scoreupdate(game: Game) -> Json<ScoreUpdate> {
let cur_jam = game.cur_jam();
let jamscore = if cur_jam.starttime.is_some() {
cur_jam.jam_score()
} else {
match game.prev_jam() {
Some(ref prev_jam) => prev_jam.jam_score(),
None => (0, 0)
}
};
Json(ScoreUpdate {
score: game.total_score(), jamscore: jamscore,
gameclock: game.get_time(), activeclock: game.get_active_clock(),
reviews: game.reviews(), timeouts: game.timeouts(),
})
}
#[allow(non_camel_case_types)]
#[derive(Deserialize)]
enum UpdateCommand {
score_adj(i8, i8),
//score_set(i8, i8),
set_time(u16),
start_jam,
stop_jam,
team_timeout(Team),
star_pass(Team),
official_timeout,
official_review(Team),
review_lost(Team),
review_retained(Team),
}
#[post("/score/update", format = "application/json", data = "<cmd>")]
fn post_score(mut game: MutGame, cmd: Json<UpdateCommand>) -> &'static str
{
match cmd.0 {
UpdateCommand::score_adj(a1, a2) =>
game.cur_jam_mut().adj_score(a1, a2),
UpdateCommand::start_jam => game.start_jam(),
UpdateCommand::stop_jam => game.stop_jam(),
UpdateCommand::official_timeout => game.official_timeout(),
UpdateCommand::team_timeout(team) => { game.team_timeout(team); },
UpdateCommand::official_review(team) => { game.official_review(team); }
UpdateCommand::star_pass(team) =>
game.cur_jam_mut()[team].set_starpass(true),
UpdateCommand::set_time(secs) =>
game.set_time(Duration::new(secs as u64, 0)),
UpdateCommand::review_lost(team) => game.review_lost(team),
// don't need to actually do anything for this case.
UpdateCommand::review_retained(_) => (),
};
"success"
}
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
enum JamCommand {
Lead(bool),
Lost(bool),
Call(bool),
Starpass(bool),
ScoringTrip { trip: u8, points: u8 },
}
#[post("/jam/<jam>/<team>/command", format = "application/json", data = "<cmd>")]
fn jam_command(mut game: MutGame, jam: usize, team: Team, cmd: Json<JamCommand>) -> &'static str
{
let ref mut teamjam = game.get_jam_mut(jam)[team];
match cmd.0 {
JamCommand::Lead(yesno) => teamjam.set_lead(yesno),
JamCommand::Call(yesno) => teamjam.set_call(yesno),
JamCommand::Lost(yesno) => teamjam.set_lost(yesno),
JamCommand::Starpass(yesno) => teamjam.set_starpass(yesno),
JamCommand::ScoringTrip { trip, points } => teamjam.set_score(trip, points),
};
"success"
}
#[get("/scoresheet/update")]
fn
|
(game: Game) -> Json<Vec<(TeamJamState, TeamJamState)>> {
let stuff = game.jams().iter().map(|jamstate| {
(jamstate[Team::Home].clone(), jamstate[Team::Away].clone())
}).collect::<Vec<_>>();
Json(stuff)
}
#[derive(FromForm)]
struct StartGameCommand<'a> {
hometeam: &'a RawStr,
awayteam: &'a RawStr,
timetype: TimeType,
at_hrs: Option<u8>,
at_mins: Option<u8>,
at_ampm: TimeAMPM,
ttd_hrs: Option<u8>,
ttd_mins: Option<u8>,
ttd_secs: Option<u8>,
}
#[post("/startgame", data = "<form>")]
fn startgame<'a>(form: Form<'a, StartGameCommand<'a>>) -> Redirect
{
let cmd = form.get();
let team1 = roster::get_team(cmd.hometeam, String::from("Home")).unwrap(); // XXX
let team2 = roster::get_team(cmd.awayteam, String::from("Away")).unwrap(); // XXX
let time = match cmd.timetype {
TimeType::TimeToDerby => Duration::new((cmd.ttd_hrs.unwrap_or_default() as u64) * 3600
+ (cmd.ttd_mins.unwrap_or_default() as u64) * 60
+ (cmd.ttd_secs.unwrap_or_default() as u64), 0),
TimeType::StartAt => start_at_time(cmd.at_hrs.unwrap_or_default(),
cmd.at_mins.unwrap_or_default(),
cmd.at_ampm).unwrap(),// XXX
};
guard::start_game(team1, team2, time);
Redirect::to("/")
}
#[get("/gameroster/<team>")]
fn gameroster(game: Game, team: Team) -> Json<roster::Team> {
let skaters = game.roster(team);
Json(skaters.clone()) // ew. Why can't we serialize a ref?
}
fn main() {
rocket::ignite().mount(
"/",
routes![staticpages::index, gameroster, startgame,
staticpages::penalties, staticpages::penaltiesjs, get_penalties,
staticpages::scoreboard, staticpages::scoreboardjs,
staticpages::mobilejt, staticpages::mobilejtjs,
staticpages::scoresheet, staticpages::scoresheetjs,
get_scoresheet, jam_command,
scoreupdate, post_score, add_penalty]
).launch();
}
|
get_scoresheet
|
identifier_name
|
main.rs
|
#![feature(plugin, custom_derive)]
#![plugin(rocket_codegen)]
extern crate rocket;
extern crate serde_json;
#[macro_use] extern crate lazy_static;
extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
extern crate serde;
extern crate derbyjson;
extern crate handlebars;
extern crate chrono;
use rocket_contrib::Json;
use std::collections::HashMap;
use std::time::Duration;
use rocket::request::Form;
use rocket::response::Redirect;
use rocket::http::RawStr;
mod gamestate;
mod roster;
mod staticpages;
mod guard;
mod timetoderby;
use gamestate::{Penalty, ActiveClock};
use gamestate::jamstate::{Team,TeamJamState};
use guard::{Game, MutGame};
use timetoderby::*;
#[derive(Deserialize)]
struct PenaltyCmd {
skater: String,
code: char,
}
#[post("/penalties/<team>", format = "application/json", data = "<cmd>")]
fn add_penalty(mut game: MutGame, team: Team, cmd: Json<PenaltyCmd>)
-> Json<HashMap<String, Vec<Penalty>>>
{
game.penalty(team, cmd.skater.as_str(), cmd.code);
Json(game.team_penalties(team))
}
#[get("/penalties/<team>")]
fn get_penalties(game: Game, team: Team) -> Json<HashMap<String, Vec<Penalty>>>
{
Json(game.team_penalties(team))
}
#[derive(Serialize)]
struct ScoreUpdate {
score: (u32, u32),
jamscore: (u32, u32),
gameclock: (u8, Duration),
activeclock: ActiveClock,
timeouts: (u8, u8),
reviews: (u8, u8),
}
#[get("/score/update")]
fn scoreupdate(game: Game) -> Json<ScoreUpdate> {
let cur_jam = game.cur_jam();
let jamscore = if cur_jam.starttime.is_some() {
cur_jam.jam_score()
} else {
match game.prev_jam() {
Some(ref prev_jam) => prev_jam.jam_score(),
None => (0, 0)
}
};
Json(ScoreUpdate {
score: game.total_score(), jamscore: jamscore,
gameclock: game.get_time(), activeclock: game.get_active_clock(),
reviews: game.reviews(), timeouts: game.timeouts(),
})
}
#[allow(non_camel_case_types)]
#[derive(Deserialize)]
enum UpdateCommand {
score_adj(i8, i8),
//score_set(i8, i8),
set_time(u16),
start_jam,
stop_jam,
team_timeout(Team),
star_pass(Team),
official_timeout,
official_review(Team),
review_lost(Team),
review_retained(Team),
}
#[post("/score/update", format = "application/json", data = "<cmd>")]
fn post_score(mut game: MutGame, cmd: Json<UpdateCommand>) -> &'static str
{
match cmd.0 {
UpdateCommand::score_adj(a1, a2) =>
game.cur_jam_mut().adj_score(a1, a2),
UpdateCommand::start_jam => game.start_jam(),
UpdateCommand::stop_jam => game.stop_jam(),
UpdateCommand::official_timeout => game.official_timeout(),
UpdateCommand::team_timeout(team) => { game.team_timeout(team); },
UpdateCommand::official_review(team) => { game.official_review(team); }
UpdateCommand::star_pass(team) =>
game.cur_jam_mut()[team].set_starpass(true),
UpdateCommand::set_time(secs) =>
game.set_time(Duration::new(secs as u64, 0)),
UpdateCommand::review_lost(team) => game.review_lost(team),
// don't need to actually do anything for this case.
UpdateCommand::review_retained(_) => (),
};
"success"
}
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
enum JamCommand {
Lead(bool),
Lost(bool),
Call(bool),
Starpass(bool),
ScoringTrip { trip: u8, points: u8 },
}
#[post("/jam/<jam>/<team>/command", format = "application/json", data = "<cmd>")]
fn jam_command(mut game: MutGame, jam: usize, team: Team, cmd: Json<JamCommand>) -> &'static str
{
let ref mut teamjam = game.get_jam_mut(jam)[team];
match cmd.0 {
JamCommand::Lead(yesno) => teamjam.set_lead(yesno),
JamCommand::Call(yesno) => teamjam.set_call(yesno),
JamCommand::Lost(yesno) => teamjam.set_lost(yesno),
JamCommand::Starpass(yesno) => teamjam.set_starpass(yesno),
JamCommand::ScoringTrip { trip, points } => teamjam.set_score(trip, points),
};
"success"
}
#[get("/scoresheet/update")]
fn get_scoresheet(game: Game) -> Json<Vec<(TeamJamState, TeamJamState)>> {
let stuff = game.jams().iter().map(|jamstate| {
(jamstate[Team::Home].clone(), jamstate[Team::Away].clone())
}).collect::<Vec<_>>();
Json(stuff)
}
#[derive(FromForm)]
|
struct StartGameCommand<'a> {
hometeam: &'a RawStr,
awayteam: &'a RawStr,
timetype: TimeType,
at_hrs: Option<u8>,
at_mins: Option<u8>,
at_ampm: TimeAMPM,
ttd_hrs: Option<u8>,
ttd_mins: Option<u8>,
ttd_secs: Option<u8>,
}
#[post("/startgame", data = "<form>")]
fn startgame<'a>(form: Form<'a, StartGameCommand<'a>>) -> Redirect
{
let cmd = form.get();
let team1 = roster::get_team(cmd.hometeam, String::from("Home")).unwrap(); // XXX
let team2 = roster::get_team(cmd.awayteam, String::from("Away")).unwrap(); // XXX
let time = match cmd.timetype {
TimeType::TimeToDerby => Duration::new((cmd.ttd_hrs.unwrap_or_default() as u64) * 3600
+ (cmd.ttd_mins.unwrap_or_default() as u64) * 60
+ (cmd.ttd_secs.unwrap_or_default() as u64), 0),
TimeType::StartAt => start_at_time(cmd.at_hrs.unwrap_or_default(),
cmd.at_mins.unwrap_or_default(),
cmd.at_ampm).unwrap(),// XXX
};
guard::start_game(team1, team2, time);
Redirect::to("/")
}
#[get("/gameroster/<team>")]
fn gameroster(game: Game, team: Team) -> Json<roster::Team> {
let skaters = game.roster(team);
Json(skaters.clone()) // ew. Why can't we serialize a ref?
}
fn main() {
rocket::ignite().mount(
"/",
routes![staticpages::index, gameroster, startgame,
staticpages::penalties, staticpages::penaltiesjs, get_penalties,
staticpages::scoreboard, staticpages::scoreboardjs,
staticpages::mobilejt, staticpages::mobilejtjs,
staticpages::scoresheet, staticpages::scoresheetjs,
get_scoresheet, jam_command,
scoreupdate, post_score, add_penalty]
).launch();
}
|
random_line_split
|
|
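The request bodies accepted by the `/score/update` and jam-command routes above follow from serde's default externally tagged enum encoding, plus the kebab-case rename on `JamCommand`. The payloads below are inferred from the derive attributes rather than stated in the source; `serde_json` and the two enums are assumed to be in scope as in the excerpt.
// Hypothetical decoding check of the implied wire format.
fn decode_examples() {
    let _adjust: UpdateCommand =
        serde_json::from_str(r#"{"score_adj":[4,0]}"#).unwrap();
    let _start: UpdateCommand = serde_json::from_str(r#""start_jam""#).unwrap();
    let _trip: JamCommand =
        serde_json::from_str(r#"{"scoring-trip":{"trip":2,"points":4}}"#).unwrap();
}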
build.rs
|
// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of Tuna. |
|
// | |
// | Tuna is free software: you can redistribute it and/or modify it under |
// | the terms of the GNU General Public License as published by the Free |
// | Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | Tuna is distributed in the hope that it will be useful, but WITHOUT ANY |
// | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
// | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
// | for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with Tuna. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
fn main() {
let target = std::env::var("TARGET").unwrap();
if target.ends_with("-apple-darwin") {
println!("cargo:rustc-link-search=framework=/Library/Frameworks");
}
}
|
random_line_split
|
|
build.rs
|
// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of Tuna. |
// | |
// | Tuna is free software: you can redistribute it and/or modify it under |
// | the terms of the GNU General Public License as published by the Free |
// | Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | Tuna is distributed in the hope that it will be useful, but WITHOUT ANY |
// | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
// | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
// | for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with Tuna. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
fn main() {
let target = std::env::var("TARGET").unwrap();
if target.ends_with("-apple-darwin")
|
}
|
{
println!("cargo:rustc-link-search=framework=/Library/Frameworks");
}
|
conditional_block
|
build.rs
|
// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of Tuna. |
// | |
// | Tuna is free software: you can redistribute it and/or modify it under |
// | the terms of the GNU General Public License as published by the Free |
// | Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | Tuna is distributed in the hope that it will be useful, but WITHOUT ANY |
// | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
// | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
// | for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with Tuna. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
fn
|
() {
let target = std::env::var("TARGET").unwrap();
if target.ends_with("-apple-darwin") {
println!("cargo:rustc-link-search=framework=/Library/Frameworks");
}
}
|
main
|
identifier_name
|
build.rs
|
// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of Tuna. |
// | |
// | Tuna is free software: you can redistribute it and/or modify it under |
// | the terms of the GNU General Public License as published by the Free |
// | Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | Tuna is distributed in the hope that it will be useful, but WITHOUT ANY |
// | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
// | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
// | for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with Tuna. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
fn main()
|
{
let target = std::env::var("TARGET").unwrap();
if target.ends_with("-apple-darwin") {
println!("cargo:rustc-link-search=framework=/Library/Frameworks");
}
}
|
identifier_body
|
|
2_5_fluid_resistance.rs
|
// The Nature of Code
// Daniel Shiffman
// http://natureofcode.com
//
// Example 2-5: Forces (Gravity and Fluid Resistance) with Vectors
//
// Demonstration of multiple forces acting on bodies (Mover type)
// Bodies experience gravity continuously
// Bodies experience fluid resistance when in *water*
use nannou::prelude::*;
fn main() {
nannou::app(model).update(update).run();
}
struct Model {
movers: Vec<Mover>,
liquid: Liquid,
}
struct Mover {
position: Point2,
velocity: Vector2,
acceleration: Vector2,
mass: f32,
}
// Liquid type
struct Liquid {
// Liquid is a rectangle
rect: Rect,
// Coefficient of drag
c: f32,
}
impl Liquid {
fn new(rect: Rect, c: f32) -> Self {
let rect = rect;
let c = c;
Liquid { rect, c }
}
// Is the Mover in the liquid?
fn contains(&self, m: &Mover) -> bool {
self.rect.contains(m.position)
}
// Calculate drag force
fn drag(&self, m: &Mover) -> Vector2 {
// Magnitude is coefficient * speed squared
let speed = m.velocity.magnitude();
let drag_magnitude = self.c * speed * speed;
// Direction is inverse of velocity
let mut drag_force = m.velocity;
drag_force *= -1.0;
// Scale according to magnitude
drag_force = drag_force.normalize();
drag_force *= drag_magnitude;
drag_force
}
fn display(&self, draw: &Draw) {
draw.rect().xy(self.rect.xy()).wh(self.rect.wh()).gray(0.1);
}
}
impl Mover {
fn new(m: f32, x: f32, y: f32) -> Self {
// Mass is tied to size
let mass = m;
let position = pt2(x, y);
let velocity = vec2(0.0, 0.0);
let acceleration = vec2(0.0, 0.0);
Mover {
position,
velocity,
acceleration,
mass,
}
}
fn new_random(rect: &Rect) -> Self {
Mover::new(
random_range(0.5f32, 4.0),
random_range(rect.left(), rect.right()),
rect.top(),
)
}
// Newton's 2nd law: F = M * A
// or A = F / M
fn apply_force(&mut self, force: Vector2) {
// Divide by mass
let f = force / self.mass;
// Accumulate all forces in acceleration
self.acceleration += f;
}
fn update(&mut self) {
// Velocity changes according to acceleration
self.velocity += self.acceleration;
// Position changes by velocity
self.position += self.velocity;
// We must clear acceleration each frame
self.acceleration *= 0.0;
}
// Draw Mover
fn display(&self, draw: &Draw) {
draw.ellipse()
.xy(self.position)
.w_h(self.mass * 16.0, self.mass * 16.0)
.rgba(0.0, 0.0, 0.0, 0.5)
.stroke(BLACK)
.stroke_weight(2.0);
}
// Bounce off bottom of window
fn check_edges(&mut self, rect: Rect) {
if self.position.y < rect.bottom() {
self.velocity.y *= -0.9; // A little dampening when hitting the bottom
self.position.y = rect.bottom();
}
}
}
fn model(app: &App) -> Model {
let rect = Rect::from_w_h(640.0, 360.0);
app.new_window()
.size(rect.w() as u32, rect.h() as u32)
.mouse_pressed(mouse_pressed)
.view(view)
.build()
.unwrap();
// Nine moving bodies
let movers = (0..9)
.map(|_| Mover::new_random(&app.window_rect()))
.collect();
// Create an instance of our Liquid type
let rect = Rect::from_w_h(rect.w(), rect.h() * 0.5).align_bottom_of(rect);
let liquid = Liquid::new(rect, 0.1);
Model { movers, liquid }
}
fn mouse_pressed(app: &App, m: &mut Model, _button: MouseButton) {
// Restart all the Mover objects randomly
for mover in &mut m.movers {
*mover = Mover::new_random(&app.window_rect());
}
}
fn update(app: &App, m: &mut Model, _update: Update) {
for i in 0..m.movers.len() {
// Is the Mover in the liquid?
if m.liquid.contains(&m.movers[i]) {
let drag_force = m.liquid.drag(&m.movers[i]);
// Apply drag force to Mover
m.movers[i].apply_force(drag_force);
}
// Gravity is scaled by mass here!
let gravity = vec2(0.0, -0.1 * m.movers[i].mass);
// Apply gravity
m.movers[i].apply_force(gravity);
m.movers[i].update();
m.movers[i].check_edges(app.window_rect());
}
}
fn view(app: &App, m: &Model, frame: Frame) {
// Begin drawing
let draw = app.draw();
draw.background().color(WHITE);
// Draw water
m.liquid.display(&draw);
|
// Draw movers
for mover in &m.movers {
mover.display(&draw);
}
// Write the result of our drawing to the window's frame.
draw.to_frame(app, &frame).unwrap();
}
|
random_line_split
|
|
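The drag computation in `Liquid::drag` above is the usual quadratic drag model: with drag coefficient $c$ and mover velocity $\mathbf{v}$,
$$\mathbf{F}_d = -\,c\,\lVert \mathbf{v} \rVert^{2}\,\hat{\mathbf{v}},$$
i.e. a magnitude of $c\lVert\mathbf{v}\rVert^{2}$ pointed opposite the velocity, exactly as the `drag_magnitude` and `drag_force` variables compute it.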
2_5_fluid_resistance.rs
|
// The Nature of Code
// Daniel Shiffman
// http://natureofcode.com
//
// Example 2-5: Forces (Gravity and Fluid Resistance) with Vectors
//
// Demonstration of multiple forces acting on bodies (Mover type)
// Bodies experience gravity continuously
// Bodies experience fluid resistance when in *water*
use nannou::prelude::*;
fn main() {
nannou::app(model).update(update).run();
}
struct Model {
movers: Vec<Mover>,
liquid: Liquid,
}
struct Mover {
position: Point2,
velocity: Vector2,
acceleration: Vector2,
mass: f32,
}
// Liquid type
struct Liquid {
// Liquid is a rectangle
rect: Rect,
// Coefficient of drag
c: f32,
}
impl Liquid {
fn new(rect: Rect, c: f32) -> Self {
let rect = rect;
let c = c;
Liquid { rect, c }
}
// Is the Mover in the liquid?
fn contains(&self, m: &Mover) -> bool {
self.rect.contains(m.position)
}
// Calculate drag force
fn drag(&self, m: &Mover) -> Vector2 {
// Magnitude is coefficient * speed squared
let speed = m.velocity.magnitude();
let drag_magnitude = self.c * speed * speed;
// Direction is inverse of velocity
let mut drag_force = m.velocity;
drag_force *= -1.0;
// Scale according to magnitude
drag_force = drag_force.normalize();
drag_force *= drag_magnitude;
drag_force
}
fn
|
(&self, draw: &Draw) {
draw.rect().xy(self.rect.xy()).wh(self.rect.wh()).gray(0.1);
}
}
impl Mover {
fn new(m: f32, x: f32, y: f32) -> Self {
// Mass is tied to size
let mass = m;
let position = pt2(x, y);
let velocity = vec2(0.0, 0.0);
let acceleration = vec2(0.0, 0.0);
Mover {
position,
velocity,
acceleration,
mass,
}
}
fn new_random(rect: &Rect) -> Self {
Mover::new(
random_range(0.5f32, 4.0),
random_range(rect.left(), rect.right()),
rect.top(),
)
}
// Newton's 2nd law: F = M * A
// or A = F / M
fn apply_force(&mut self, force: Vector2) {
// Divide by mass
let f = force / self.mass;
// Accumulate all forces in acceleration
self.acceleration += f;
}
fn update(&mut self) {
// Velocity changes according to acceleration
self.velocity += self.acceleration;
// Position changes by velocity
self.position += self.velocity;
// We must clear acceleration each frame
self.acceleration *= 0.0;
}
// Draw Mover
fn display(&self, draw: &Draw) {
draw.ellipse()
.xy(self.position)
.w_h(self.mass * 16.0, self.mass * 16.0)
.rgba(0.0, 0.0, 0.0, 0.5)
.stroke(BLACK)
.stroke_weight(2.0);
}
// Bounce off bottom of window
fn check_edges(&mut self, rect: Rect) {
if self.position.y < rect.bottom() {
self.velocity.y *= -0.9; // A little damping when hitting the bottom
self.position.y = rect.bottom();
}
}
}
fn model(app: &App) -> Model {
let rect = Rect::from_w_h(640.0, 360.0);
app.new_window()
.size(rect.w() as u32, rect.h() as u32)
.mouse_pressed(mouse_pressed)
.view(view)
.build()
.unwrap();
// Nine moving bodies
let movers = (0..9)
.map(|_| Mover::new_random(&app.window_rect()))
.collect();
// Create an instance of our Liquid type
let rect = Rect::from_w_h(rect.w(), rect.h() * 0.5).align_bottom_of(rect);
let liquid = Liquid::new(rect, 0.1);
Model { movers, liquid }
}
fn mouse_pressed(app: &App, m: &mut Model, _button: MouseButton) {
// Restart all the Mover objects randomly
for mover in &mut m.movers {
*mover = Mover::new_random(&app.window_rect());
}
}
fn update(app: &App, m: &mut Model, _update: Update) {
for i in 0..m.movers.len() {
// Is the Mover in the liquid?
if m.liquid.contains(&m.movers[i]) {
let drag_force = m.liquid.drag(&m.movers[i]);
// Apply drag force to Mover
m.movers[i].apply_force(drag_force);
}
// Gravity is scaled by mass here, so after apply_force divides by mass every body falls at the same rate!
let gravity = vec2(0.0, -0.1 * m.movers[i].mass);
// Apply gravity
m.movers[i].apply_force(gravity);
m.movers[i].update();
m.movers[i].check_edges(app.window_rect());
}
}
fn view(app: &App, m: &Model, frame: Frame) {
// Begin drawing
let draw = app.draw();
draw.background().color(WHITE);
// Draw water
m.liquid.display(&draw);
// Draw movers
for mover in &m.movers {
mover.display(&draw);
}
// Write the result of our drawing to the window's frame.
draw.to_frame(app, &frame).unwrap();
}
|
display
|
identifier_name
|
2_5_fluid_resistance.rs
|
// The Nature of Code
// Daniel Shiffman
// http://natureofcode.com
//
// Example 2-5: Forces (Gravity and Fluid Resistance) with Vectors
//
// Demonstration of multiple forces acting on bodies (Mover type)
// Bodies experience gravity continuously
// Bodies experience fluid resistance when in *water*
use nannou::prelude::*;
fn main() {
nannou::app(model).update(update).run();
}
struct Model {
movers: Vec<Mover>,
liquid: Liquid,
}
struct Mover {
position: Point2,
velocity: Vector2,
acceleration: Vector2,
mass: f32,
}
// Liquid type
struct Liquid {
// Liquid is a rectangle
rect: Rect,
// Coefficient of drag
c: f32,
}
impl Liquid {
fn new(rect: Rect, c: f32) -> Self {
Liquid { rect, c }
}
// Is the Mover in the liquid?
fn contains(&self, m: &Mover) -> bool {
self.rect.contains(m.position)
}
// Calculate drag force
fn drag(&self, m: &Mover) -> Vector2 {
// Magnitude is coefficient * speed squared
let speed = m.velocity.magnitude();
let drag_magnitude = self.c * speed * speed;
// Direction is inverse of velocity
let mut drag_force = m.velocity;
drag_force *= -1.0;
// Scale according to magnitude
drag_force = drag_force.normalize();
drag_force *= drag_magnitude;
drag_force
}
fn display(&self, draw: &Draw) {
draw.rect().xy(self.rect.xy()).wh(self.rect.wh()).gray(0.1);
}
}
impl Mover {
fn new(m: f32, x: f32, y: f32) -> Self {
// Mass is tied to size
let mass = m;
let position = pt2(x, y);
let velocity = vec2(0.0, 0.0);
let acceleration = vec2(0.0, 0.0);
Mover {
position,
velocity,
acceleration,
mass,
}
}
fn new_random(rect: &Rect) -> Self
|
// Newton's 2nd law: F = M * A
// or A = F / M
fn apply_force(&mut self, force: Vector2) {
// Divide by mass
let f = force / self.mass;
// Accumulate all forces in acceleration
self.acceleration += f;
}
fn update(&mut self) {
// Velocity changes according to acceleration
self.velocity += self.acceleration;
// Position changes by velocity
self.position += self.velocity;
// We must clear acceleration each frame
self.acceleration *= 0.0;
}
// Draw Mover
fn display(&self, draw: &Draw) {
draw.ellipse()
.xy(self.position)
.w_h(self.mass * 16.0, self.mass * 16.0)
.rgba(0.0, 0.0, 0.0, 0.5)
.stroke(BLACK)
.stroke_weight(2.0);
}
// Bounce off bottom of window
fn check_edges(&mut self, rect: Rect) {
if self.position.y < rect.bottom() {
self.velocity.y *= -0.9; // A little damping when hitting the bottom
self.position.y = rect.bottom();
}
}
}
fn model(app: &App) -> Model {
let rect = Rect::from_w_h(640.0, 360.0);
app.new_window()
.size(rect.w() as u32, rect.h() as u32)
.mouse_pressed(mouse_pressed)
.view(view)
.build()
.unwrap();
// Nine moving bodies
let movers = (0..9)
.map(|_| Mover::new_random(&app.window_rect()))
.collect();
// Create an instance of our Liquid type
let rect = Rect::from_w_h(rect.w(), rect.h() * 0.5).align_bottom_of(rect);
let liquid = Liquid::new(rect, 0.1);
Model { movers, liquid }
}
fn mouse_pressed(app: &App, m: &mut Model, _button: MouseButton) {
// Restart all the Mover objects randomly
for mover in &mut m.movers {
*mover = Mover::new_random(&app.window_rect());
}
}
fn update(app: &App, m: &mut Model, _update: Update) {
for i in 0..m.movers.len() {
// Is the Mover in the liquid?
if m.liquid.contains(&m.movers[i]) {
let drag_force = m.liquid.drag(&m.movers[i]);
// Apply drag force to Mover
m.movers[i].apply_force(drag_force);
}
// Gravity is scaled by mass here, so after apply_force divides by mass every body falls at the same rate!
let gravity = vec2(0.0, -0.1 * m.movers[i].mass);
// Apply gravity
m.movers[i].apply_force(gravity);
m.movers[i].update();
m.movers[i].check_edges(app.window_rect());
}
}
fn view(app: &App, m: &Model, frame: Frame) {
// Begin drawing
let draw = app.draw();
draw.background().color(WHITE);
// Draw water
m.liquid.display(&draw);
// Draw movers
for mover in &m.movers {
mover.display(&draw);
}
// Write the result of our drawing to the window's frame.
draw.to_frame(app, &frame).unwrap();
}
|
{
Mover::new(
random_range(0.5f32, 4.0),
random_range(rect.left(), rect.right()),
rect.top(),
)
}
|
identifier_body
|
2_5_fluid_resistance.rs
|
// The Nature of Code
// Daniel Shiffman
// http://natureofcode.com
//
// Example 2-5: Forces (Gravity and Fluid Resistance) with Vectors
//
// Demonstration of multiple forces acting on bodies (Mover type)
// Bodies experience gravity continuously
// Bodies experience fluid resistance when in *water*
use nannou::prelude::*;
fn main() {
nannou::app(model).update(update).run();
}
struct Model {
movers: Vec<Mover>,
liquid: Liquid,
}
struct Mover {
position: Point2,
velocity: Vector2,
acceleration: Vector2,
mass: f32,
}
// Liquid type
struct Liquid {
// Liquid is a rectangle
rect: Rect,
// Coefficient of drag
c: f32,
}
impl Liquid {
fn new(rect: Rect, c: f32) -> Self {
Liquid { rect, c }
}
// Is the Mover in the liquid?
fn contains(&self, m: &Mover) -> bool {
self.rect.contains(m.position)
}
// Calculate drag force
fn drag(&self, m: &Mover) -> Vector2 {
// Magnitude is coefficient * speed squared
let speed = m.velocity.magnitude();
let drag_magnitude = self.c * speed * speed;
// Direction is inverse of velocity
let mut drag_force = m.velocity;
drag_force *= -1.0;
// Scale according to magnitude
drag_force = drag_force.normalize();
drag_force *= drag_magnitude;
drag_force
}
fn display(&self, draw: &Draw) {
draw.rect().xy(self.rect.xy()).wh(self.rect.wh()).gray(0.1);
}
}
impl Mover {
fn new(m: f32, x: f32, y: f32) -> Self {
// Mass is tied to size
let mass = m;
let position = pt2(x, y);
let velocity = vec2(0.0, 0.0);
let acceleration = vec2(0.0, 0.0);
Mover {
position,
velocity,
acceleration,
mass,
}
}
fn new_random(rect: &Rect) -> Self {
Mover::new(
random_range(0.5f32, 4.0),
random_range(rect.left(), rect.right()),
rect.top(),
)
}
// Newton's 2nd law: F = M * A
// or A = F / M
fn apply_force(&mut self, force: Vector2) {
// Divide by mass
let f = force / self.mass;
// Accumulate all forces in acceleration
self.acceleration += f;
}
fn update(&mut self) {
// Velocity changes according to acceleration
self.velocity += self.acceleration;
// Position changes by velocity
self.position += self.velocity;
// We must clear acceleration each frame
self.acceleration *= 0.0;
}
// Draw Mover
fn display(&self, draw: &Draw) {
draw.ellipse()
.xy(self.position)
.w_h(self.mass * 16.0, self.mass * 16.0)
.rgba(0.0, 0.0, 0.0, 0.5)
.stroke(BLACK)
.stroke_weight(2.0);
}
// Bounce off bottom of window
fn check_edges(&mut self, rect: Rect) {
if self.position.y < rect.bottom() {
self.velocity.y *= -0.9; // A little damping when hitting the bottom
self.position.y = rect.bottom();
}
}
}
fn model(app: &App) -> Model {
let rect = Rect::from_w_h(640.0, 360.0);
app.new_window()
.size(rect.w() as u32, rect.h() as u32)
.mouse_pressed(mouse_pressed)
.view(view)
.build()
.unwrap();
// Nine moving bodies
let movers = (0..9)
.map(|_| Mover::new_random(&app.window_rect()))
.collect();
// Create an instance of our Liquid type
let rect = Rect::from_w_h(rect.w(), rect.h() * 0.5).align_bottom_of(rect);
let liquid = Liquid::new(rect, 0.1);
Model { movers, liquid }
}
fn mouse_pressed(app: &App, m: &mut Model, _button: MouseButton) {
// Restart all the Mover objects randomly
for mover in &mut m.movers {
*mover = Mover::new_random(&app.window_rect());
}
}
fn update(app: &App, m: &mut Model, _update: Update) {
for i in 0..m.movers.len() {
// Is the Mover in the liquid?
if m.liquid.contains(&m.movers[i])
|
// Gravity is scaled by mass here, so after apply_force divides by mass every body falls at the same rate!
let gravity = vec2(0.0, -0.1 * m.movers[i].mass);
// Apply gravity
m.movers[i].apply_force(gravity);
m.movers[i].update();
m.movers[i].check_edges(app.window_rect());
}
}
fn view(app: &App, m: &Model, frame: Frame) {
// Begin drawing
let draw = app.draw();
draw.background().color(WHITE);
// Draw water
m.liquid.display(&draw);
// Draw movers
for mover in &m.movers {
mover.display(&draw);
}
// Write the result of our drawing to the window's frame.
draw.to_frame(app, &frame).unwrap();
}
|
{
let drag_force = m.liquid.drag(&m.movers[i]);
// Apply drag force to Mover
m.movers[i].apply_force(drag_force);
}
|
conditional_block
|
edsl.rs
|
//! Holmes EDSL
//!
//! This module provides a set of macros for more easily writing Holmes
//! programs, avoiding manual construction of all the types required.
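//!
//! As a rough end-to-end sketch of how the macros below fit together (it
//! assumes a `holmes` engine value is in scope and that the `string` base
//! type is registered; the predicate and rule names are illustrative only):
//!
//! ```c
//! holmes_exec!(holmes, {
//!   predicate!(parent(string, string));
//!   predicate!(grandparent(string, string));
//!   fact!(parent("alice", "bob"));
//!   fact!(parent("bob", "carol"));
//!   rule!(gp_rule: grandparent(x, z) <= parent(x, y) & parent(y, z))
//! });
//! ```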
/// Converts an EDSL type specification into a Holmes type object
/// Takes the name of a variable containing a holmes object as the first
/// parameter, and a type description as the second.
///
/// [type] -> list of that type
/// (type0, type1, type2) -> tuple of those types
/// type -> look up type by name in the registry
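///
/// A small sketch of the notation (assuming `holmes` is an engine in scope
/// and `uint64`/`string` are types already registered with it):
///
/// ```c
/// let list_ty = htype!(holmes, [uint64]); // list of uint64
/// let pair_ty = htype!(holmes, (string, uint64)); // (string, uint64) tuple
/// ```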
#[macro_export]
macro_rules! htype {
($holmes:ident, [$t:tt]) => {
::holmes::pg::dyn::types::List::new(htype!($holmes, $t))
};
($holmes:ident, ($($t:tt),*)) => {
::holmes::pg::dyn::types::Tuple::new(vec![$(htype!($holmes, $t)),*])
};
($holmes:ident, $i:ident) => {
$holmes.get_type(stringify!($i))
.expect(&format!("Type not present in database: {}", stringify!($i)))
};
}
/// Shorthand notation for performing many actions with the same holmes context
/// Analogous to a weaker version of the `Reader` monad which cannot return
/// values.
///
/// The first parameter is the holmes object to be used, and the second is
/// a list of the actions to be performed, e.g.
///
/// ```c
/// holmes_exec!(holmes, {
/// predicate!(foo(string, uint64));
/// fact!(foo("bar", 3));
/// });
/// ```
///
/// If any of the actions would error, the *enclosing function* will error out.
/// This is due to a limitation in how the `try!` macro works. (It uses return
/// to error out, rather than a bind-like mechanism).
///
/// This only works because the other macros have both an explicit ident form,
/// and one which generates a function taking a `holmes` parameter instead.
#[macro_export]
macro_rules! holmes_exec {
($holmes:ident, { $( $action:expr );* }) => {{
$( try!($action($holmes)); );*
$holmes.nop()
}};
}
#[macro_export]
macro_rules! field {
($holmes:ident, [$name:ident $t:tt $descr:expr]) => {{::holmes::engine::types::Field {
name: Some(stringify!($name).to_string()),
description: Some($descr.to_string()),
type_: htype!($holmes, $t)
}}};
($holmes:ident, [$name:ident $t:tt]) => {{::holmes::engine::types::Field {
name: Some(stringify!($name).to_string()),
description: None,
type_: htype!($holmes, $t)
}}};
($holmes:ident, $t:tt) => {{::holmes::engine::types::Field {
name: None,
description: None,
type_: htype!($holmes, $t)
}}};
}
/// Registers a predicate with the `Holmes` context.
///
/// ```c
/// predicate!(holmes, foo(string, uint64))
/// ```
///
/// will register a predicate named foo, with a `string` slot and a `uint64`
/// slot, to the provided `holmes` context object.
///
/// If the `holmes` parameter is omitted, it will generate a function taking
/// a `holmes` parameter in its stead.
#[macro_export]
macro_rules! predicate {
($holmes:ident, $pred_name:ident($($t:tt),*), $descr:expr) => {{
let fields = vec![$(field!($holmes, $t),)*];
$holmes.new_predicate(&::holmes::engine::types::Predicate {
name: stringify!($pred_name).to_string(),
description: Some($descr.to_string()),
fields: fields
})
}};
($holmes:ident, $pred_name:ident($($t:tt),*)) => {{
let fields = vec![$(field!($holmes, $t),)*];
$holmes.new_predicate(&::holmes::engine::types::Predicate {
name: stringify!($pred_name).to_string(),
description: None,
fields: fields
})
}};
($pred_name:ident($($t:tt),*) : $descr:expr) => { |holmes: &mut ::holmes::Engine| {
predicate!(holmes, $pred_name($($t),*), $descr)
}};
($pred_name:ident($($t:tt),*)) => { |holmes: &mut ::holmes::Engine| {
predicate!(holmes, $pred_name($($t),*))
}};
}
/// Stores a fact with the `Holmes` context.
///
/// ```c
/// fact!(holmes, foo("bar", 3))
/// ```
///
/// will store a true instance of the predicate foo with "bar" in the first
/// slot and 3 in the second.
///
/// If the `holmes` parameter is omitted, it will generate a function taking
/// a `holmes` parameter in its stead.
#[macro_export]
macro_rules! fact {
($holmes:ident, $pred_name:ident($($a:expr),*)) => {
$holmes.new_fact(&::holmes::engine::types::Fact {
pred_name : stringify!($pred_name).to_string(),
args : vec![$(::holmes::pg::dyn::values::ToValue::to_value($a)),*]
})
};
($pred_name:ident($($a:expr),*)) => { |holmes: &mut ::holmes::Engine| {
fact!(holmes, $pred_name($($a),*))
}};
}
#[macro_export]
macro_rules! clause {
($holmes:ident, $vars:ident, $next:ident, $pred_name:ident($($m:tt),*)) => {{
::holmes::engine::types::Clause {
pred_name: stringify!($pred_name).to_string(),
args: vec![$(clause_match!($vars, $next, $m)),*]
}
}};
($holmes:ident, $vars:ident, $next:ident, $pred_name:ident{$($field:ident = $m:tt),*}) => {{
use std::collections::HashMap;
let pred_name = stringify!($pred_name).to_string();
let pred = $holmes.get_predicate(&pred_name).unwrap().unwrap();
let mut matches = HashMap::new();
let _ = {
$(matches.insert(stringify!($field).to_string(), clause_match!($vars, $next, $m)));*
};
let args: Vec<_> = pred.fields.iter().map(|field| {
match field.name {
Some(ref name) => match matches.remove(name) {
Some(cm) => cm,
None => ::holmes::engine::types::MatchExpr::Unbound
},
None => ::holmes::engine::types::MatchExpr::Unbound,
}
}).collect();
::holmes::engine::types::Clause {
pred_name: pred_name,
args: args
}
}};
}
/// Runs a datalog query against the `Holmes` context
///
/// Matches as per the right hand side of a datalog rule, then returns
/// a list of possible assignments to variables.
///
/// Clauses are separated by `&`, slots follow the rules in `match_expr!`
///
/// ```c
/// query!(holmes, foo((3), [_]) & bar([_], x))
/// ```
#[macro_export]
macro_rules! query {
($holmes:ident, $($pred_name:ident $inner:tt)&*) => {{
use std::collections::HashMap;
let mut _vars : HashMap<String, ::holmes::engine::types::Var> = HashMap::new();
let mut _n : ::holmes::engine::types::Var = 0;
let query = vec![$(clause!($holmes, _vars, _n, $pred_name $inner)),*];
$holmes.derive(&query)
}}
}
/// Adds a Holmes rule to the system
///
/// # Datalog Rules
///
/// ```c
/// rule!(holmes, baz([x], (7)) <= foo((3), [_]) & bar([_], x))
/// ```
///
/// will work as per a normal datalog rule, matching on foo and bar, and
/// generating a baz using any solutions found.
///
/// # Extended Rules
///
/// Holmes rules can also have "where clauses" which call out to native code
/// in the event of a match. For example,
///
/// ```c
/// rule!(holmes, baz([y], (8)) <= foo((3), [_]) & bar([_], x), {
/// let y = {f(x)}
/// })
/// ```
///
/// would call the Holmes registered function `f` on each output of `x`, bind
/// the result to `y`, and output it in the first slot of `baz`.
///
/// For more information on the expression and bind syntax, see the `hexpr!`
/// and `bind_match!` macro docs.
#[macro_export]
macro_rules! rule {
($holmes:ident, $rule_name:ident : $head_name:ident $head_inner:tt <= $($body_name:ident $body_inner:tt)&*,
{$(let $bind:tt = $hexpr:tt);*}) => {{
use std::collections::HashMap;
let mut _vars : HashMap<String, ::holmes::engine::types::Var> = HashMap::new();
let mut _n : ::holmes::engine::types::Var = 0;
let body = vec![$(clause!($holmes, _vars, _n, $body_name $body_inner)),*];
let wheres = vec![$(::holmes::engine::types::WhereClause {
lhs: bind_match!(_vars, _n, $bind),
rhs: hexpr!(_vars, _n, $hexpr)
}),*];
let head = clause!($holmes, _vars, _n, $head_name $head_inner);
$holmes.new_rule(&::holmes::engine::types::Rule {
name: stringify!($rule_name).to_string(),
body: body,
head: head,
wheres: wheres,
})
}};
($holmes:ident, $rule_name:ident : $($head_name:ident $head_inner:tt),* <= $($body_name:ident $inner:tt)&*) => {
rule!($holmes, $rule_name : $($head_name $head_inner),* <= $($body_name $inner)&*, {})
};
($rule_name:ident : $($head_name:ident $head_inner:tt),* <= $($body_name:ident $inner:tt)&*) => {
|holmes: &mut ::holmes::Engine| {
rule!(holmes, $rule_name : $($head_name $head_inner),* <= $($body_name $inner)&*, {})
}
};
($rule_name:ident : $($head_name:ident $head_inner:tt),* <=
$($body_name:ident $inner:tt)&*, {$(let $bind:tt = $hexpr:tt);*}) => {
|holmes: &mut ::holmes::Engine| {
rule!(holmes, $rule_name : $($head_name $head_inner),* <=
$($body_name $inner)&*, {$(let $bind = $hexpr);*})
}
};
}
/// Registers a native rust function with the `Holmes` object for use in rules.
///
/// ```c
/// func!(holmes, let f : uint64 -> string = |x : &u64| {
/// format!("{}", x)
/// })
/// ```
///
/// If your function input has more than one parameter, they will be tupled
/// and packed into a value.
/// To describe such a function, just use a tuple type on the left of the
/// arrow.
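///
/// A hedged sketch of a two-parameter function (it assumes the `string` type
/// unpacks to `&String` and `uint64` to `&u64`, mirroring the example above;
/// the name `label` is illustrative):
///
/// ```c
/// func!(holmes, let label : (string, uint64) -> string = |(s, n): (&String, &u64)| {
///     format!("{}-{}", s, n)
/// })
/// ```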
#[macro_export]
macro_rules! func {
($holmes:ident, let $name:ident : $src:tt -> $dst:tt = $body:expr) => {{
let src = htype!($holmes, $src);
let dst = htype!($holmes, $dst);
$holmes.reg_func(stringify!($name).to_string(),
|
::holmes::engine::types::Func {
input_type: src,
output_type: dst,
run: Box::new(move |v : ::holmes::pg::dyn::Value| {
::holmes::pg::dyn::values::ToValue::to_value($body(typed_unpack!(v, $src)))
})})
}};
(let $name:ident : $src:tt -> $dst:tt = $body:expr) => {
|holmes: &mut ::holmes::Engine| {
func!(holmes, let $name : $src -> $dst = $body)
}
};
}
pub mod internal {
//! EDSL Support Code
//! This module contains support code for the other macros which is not
//! intended to be user facing, but which must be exported for the macros
//! to work properly.
//!
//! Until more complete example code is provided at the top of the module,
//! the documentation in here may be useful for understanding the EDSL
//! structure.
/// Given a value and a type it is believed to be, unpack it to the greatest
/// extent possible (e.g. unpack through tupling and lists)
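///
/// A minimal sketch (it assumes `v` is a `Value` that actually holds a
/// `(uint64, string)` tuple; the bindings are references to the unpacked
/// parts):
///
/// ```c
/// let (n, s) = typed_unpack!(v, (uint64, string));
/// ```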
#[macro_export]
macro_rules! typed_unpack {
($val:expr, [$typ:tt]) => {
$val.get().downcast_ref::<Vec<::holmes::pg::dyn::Value>>()
.expect("Dynamic list unpack failed")
.into_iter().map(|v| {
typed_unpack!(v, $typ)
}).collect::<Vec<_>>()
};
($val:expr, ($($typ:tt),*)) => {{
let mut pack = $val.get().downcast_ref::<Vec<::holmes::pg::dyn::Value>>()
.expect("Dynamic tuple unpack failed").into_iter();
($(typed_unpack!(pack.next().expect("Dynamic tuple too short"), $typ)),*)
}};
($val:expr, $name:ident) => {
$val.get().downcast_ref()
.expect(concat!("Dynamic base type unpack failed for ",
stringify!($name)))
};
}
/// Constructs a bind match outer object.
///
/// Args:
///
/// * `$vars:ident` is a mutable `HashMap` from variable name to
/// variable number, to be updated as more variables are created, or
/// referenced to re-use existing variable numberings.
/// * `$n:ident` is a mutable Var, intended to be used as an allocator for
/// the next unused variable. It should have a value equal to the next
/// unallocated variable
/// * The last parameter is the bind expression, it can be structured as:
/// * `[bind_expression]` -> do a list destructure/iteration, similar to
/// the list monad
/// * {bind_expression0, bind_expression1} -> do a tuple destructure
/// * a `clause_match!` compatible expression (see `clause_match` docs)
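///
/// A minimal sketch (assuming `vars` and `n` are the bookkeeping values that
/// `rule!` normally threads through):
///
/// ```c
/// // Iterate over a list-valued result, destructuring each element into x, y.
/// let bind = bind_match!(vars, n, [ {x, y} ]);
/// ```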
#[macro_export]
macro_rules! bind_match {
($vars:ident, $n:ident, [ $bm:tt ]) => {
::holmes::engine::types::BindExpr::Iterate(
Box::new(bind_match!($vars, $n, $bm)))
};
($vars:ident, $n:ident, {$($bm:tt),*}) => {
::holmes::engine::types::BindExpr::Destructure(
vec![$(bind_match!($vars, $n, $bm)),*])
};
($vars:ident, $n:ident, $cm:tt) => {{
::holmes::engine::types::BindExpr::Normal(
clause_match!($vars, $n, $cm))
}};
}
/// Generates an expression structure
///
/// Args:
///
/// * `$vars:ident` is a mutable `HashMap` from variable name to
/// variable number, to be updated as more variables are created, or
/// referenced to re-use existing variable numberings.
/// * `$n:ident` is a mutable Var, intended to be used as an allocator for
/// the next unused variable. It should have a value equal to the next
/// unallocated variable
/// * the expression to convert
/// * `[var]`
/// * `(val)`
/// * `{f(expr, expr, expr)}`
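///
/// A minimal sketch (assuming the same `vars`/`n` bookkeeping and that `f`
/// was registered via `func!`):
///
/// ```c
/// let e = hexpr!(vars, n, {f([x], (42u64))}); // f applied to var x and constant 42
/// ```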
#[macro_export]
macro_rules! hexpr {
($vars:ident, $n:ident, [$hexpr_name:ident]) => {{
match clause_match!($vars, $n, $hexpr_name) {
::holmes::engine::types::MatchExpr::Var(var_no) =>
::holmes::engine::types::Expr::Var(var_no),
_ => panic!("clause_match! returned non-var for var input")
}
}};
($vars:ident, $n:ident, ($hexpr:expr)) => {
::holmes::engine::types::Expr::Val(
::holmes::pg::dyn::values::ToValue::to_value($hexpr))
};
($vars:ident, $n:ident, {$hexpr_func:ident($($hexpr_arg:tt),*)}) => {
::holmes::engine::types::Expr::App(
stringify!($hexpr_func).to_string(),
vec![$(hexpr!($vars, $n, $hexpr_arg)),*])
};
}
/// Generates a `MatchExpr` from a representation
///
/// Args:
///
/// * `$vars:ident` is a mutable `HashMap` from variable name to
/// variable number, to be updated as more variables are created, or
/// referenced to re-use existing variable numberings.
/// * `$n:ident` is a mutable Var, intended to be used as an allocator for
/// the next unused variable. It should have a value equal to the next
/// unallocated variable
/// * Clause representation:
/// * `[_]` -> unbound
/// * `(val)` -> constant match
/// * `x` -> variable bind
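///
/// A minimal sketch of the three forms (again assuming the `vars`/`n`
/// bookkeeping):
///
/// ```c
/// let unbound = clause_match!(vars, n, [_]); // MatchExpr::Unbound
/// let constant = clause_match!(vars, n, (3u64)); // MatchExpr::Const(3)
/// let bound = clause_match!(vars, n, x); // MatchExpr::Var(..)
/// ```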
#[macro_export]
macro_rules! clause_match {
($vars:ident, $n:ident, [_]) => {{
::holmes::engine::types::MatchExpr::Unbound
}};
($vars:ident, $n:ident, ($v:expr)) => {{
::holmes::engine::types::MatchExpr::Const(
::holmes::pg::dyn::values::ToValue::to_value($v))
}};
($vars:ident, $n:ident, $m:ident) => {{
use std::collections::hash_map::Entry::*;
use ::holmes::engine::types::MatchExpr::*;
match $vars.entry(stringify!($m).to_string()) {
Occupied(entry) => Var(*entry.get()),
Vacant(entry) => {
$n = $n + 1;
entry.insert($n - 1);
Var($n - 1)
}
}
}};
}
}
|
random_line_split
|
|
main.rs
|
use rand::Rng;
|
let secret_number = rand::thread_rng().gen_range(1..101);
// ANCHOR: here
// --snip--
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
// --snip--
// ANCHOR_END: here
let mut guess = String::new();
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line");
let guess: u32 = guess.trim().parse().expect("Please type a number!");
println!("You guessed: {}", guess);
// ANCHOR: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => println!("You win!"),
}
}
}
// ANCHOR_END: here
|
use std::cmp::Ordering;
use std::io;
fn main() {
println!("Guess the number!");
|
random_line_split
|
main.rs
|
use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn
|
() {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
// ANCHOR: here
// --snip--
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
// --snip--
// ANCHOR_END: here
let mut guess = String::new();
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line");
let guess: u32 = guess.trim().parse().expect("Please type a number!");
println!("You guessed: {}", guess);
// ANCHOR: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => println!("You win!"),
}
}
}
// ANCHOR_END: here
|
main
|
identifier_name
|
main.rs
|
use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn main()
|
.read_line(&mut guess)
.expect("Failed to read line");
let guess: u32 = guess.trim().parse().expect("Please type a number!");
println!("You guessed: {}", guess);
// ANCHOR: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => println!("You win!"),
}
}
}
// ANCHOR_END: here
|
{
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
// ANCHOR: here
// --snip--
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
// --snip--
// ANCHOR_END: here
let mut guess = String::new();
io::stdin()
|
identifier_body
|
default_ty_param_default_dependent_associated_type.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
#![feature(default_type_parameter_fallback)]
use std::marker::PhantomData;
trait Id {
type This;
|
struct Foo<X: Default = usize, Y = <X as Id>::This> {
data: PhantomData<(X, Y)>
}
impl<X: Default, Y> Foo<X, Y> {
fn new() -> Foo<X, Y> {
Foo { data: PhantomData }
}
}
fn main() {
let foo = Foo::new();
}
|
}
impl<A> Id for A {
type This = A;
}
|
random_line_split
|