file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
main.rs
|
#[macro_use]
extern crate lazy_static;
extern crate md5;
use md5::compute;
fn next_pair(input: &[u8], pepper: &mut u64) -> Option<(u8, u8)> {
let mut preimage = Vec::new();
// Ignore edge case where maximum pepper yields the digit.
while *pepper!= u64::max_value() {
preimage.clear();
preimage.extend_from_slice(input);
preimage.extend(pepper.to_string().bytes());
let digest = compute(&preimage);
let mut occurrence =
digest.iter().enumerate().skip_while(|&(_, &b)| b == 0);
*pepper = pepper.saturating_add(1);
if let Some((index, byte)) = occurrence.next() {
if (index > 2) || (index == 2 && (byte & 0xf0) == 0) {
let d6 = digest[2] & 0xf;
let d7 = (digest[3] >> 4) & 0xf;
return Some((d6, d7));
}
}
}
None
}
fn next_digit(input: &[u8], pepper: &mut u64) -> Option<u8> {
next_pair(input, pepper).map(|(x, _)| x)
}
fn solve_one(input: &[u8]) -> Option<String> {
let mut password = Vec::new();
let mut pepper = 0u64;
for _ in 0..8 {
match next_digit(input, &mut pepper) {
Some(digit) => password.push(format!("{:x}", digit)),
_ => return None,
}
}
Some(password.join(""))
}
fn solve_two(input: &[u8]) -> Option<String>
|
.join(""))
}
fn main() {
lazy_static! {
static ref INPUT: Vec<u8> = "ugkcyxxp".bytes().collect();
}
match solve_one(&INPUT) {
Some(password) => println!("[1] Found the password: {}.", password),
_ => println!("[1] Could not find the password."),
}
match solve_two(&INPUT) {
Some(password) => println!("[2] Found the password: {}.", password),
_ => println!("[2] Could not find the password."),
}
}
|
{
let (mut free, mut pepper) = (8, 0u64);
let mut password = [!0u8; 8];
while free != 0 {
match next_pair(input, &mut pepper) {
Some((position, digit)) => {
let position = position as usize;
if position < 8 && password[position] == !0u8 {
free -= 1;
password[position] = digit;
}
}
_ => return None,
}
}
Some(password.iter()
.map(|x| format!("{:x}", x))
.collect::<Vec<_>>()
|
identifier_body
|
lib.rs
|
#![feature(alloc_layout_extra)]
#![feature(array_chunks)]
#![feature(array_methods)]
#![feature(array_windows)]
#![feature(bool_to_option)]
#![feature(box_syntax)]
#![feature(cell_update)]
#![feature(cfg_panic)]
#![feature(cfg_target_has_atomic)]
#![feature(const_assume)]
#![feature(const_cell_into_inner)]
#![feature(const_maybe_uninit_assume_init)]
#![feature(const_ptr_read)]
#![feature(const_ptr_write)]
#![feature(const_ptr_offset)]
#![feature(const_trait_impl)]
#![feature(const_num_from_num)]
#![feature(core_intrinsics)]
#![feature(core_private_bignum)]
#![feature(core_private_diy_float)]
#![feature(dec2flt)]
#![feature(div_duration)]
#![feature(duration_consts_2)]
#![feature(duration_constants)]
#![feature(exact_size_is_empty)]
#![feature(extern_types)]
#![feature(flt2dec)]
#![feature(fmt_internals)]
#![feature(hashmap_internals)]
#![feature(try_find)]
#![feature(is_sorted)]
#![feature(pattern)]
#![feature(sort_internals)]
#![feature(slice_partition_at_index)]
#![feature(maybe_uninit_uninit_array)]
|
#![feature(min_specialization)]
#![feature(numfmt)]
#![feature(step_trait)]
#![feature(str_internals)]
#![feature(test)]
#![feature(trusted_len)]
#![feature(try_trait_v2)]
#![feature(slice_internals)]
#![feature(slice_partition_dedup)]
#![feature(int_log)]
#![feature(iter_advance_by)]
#![feature(iter_partition_in_place)]
#![feature(iter_is_partitioned)]
#![feature(iter_order_by)]
#![feature(iter_map_while)]
#![feature(const_mut_refs)]
#![feature(const_pin)]
#![feature(const_slice_from_raw_parts)]
#![feature(const_raw_ptr_deref)]
#![feature(never_type)]
#![feature(unwrap_infallible)]
#![feature(option_result_unwrap_unchecked)]
#![feature(result_into_ok_or_err)]
#![feature(ptr_metadata)]
#![feature(once_cell)]
#![feature(unsized_tuple_coercion)]
#![feature(const_option)]
#![feature(integer_atomics)]
#![feature(int_roundings)]
#![feature(slice_group_by)]
#![feature(trusted_random_access)]
#![feature(unsize)]
#![feature(unzip_option)]
#![deny(unsafe_op_in_unsafe_fn)]
extern crate test;
mod alloc;
mod any;
mod array;
mod ascii;
mod atomic;
mod bool;
mod cell;
mod char;
mod clone;
mod cmp;
mod const_ptr;
mod fmt;
mod hash;
mod intrinsics;
mod iter;
mod lazy;
mod macros;
mod manually_drop;
mod mem;
mod nonzero;
mod num;
mod ops;
mod option;
mod pattern;
mod pin;
mod ptr;
mod result;
mod slice;
mod str;
mod str_lossy;
mod task;
mod time;
mod tuple;
mod unicode;
|
#![feature(maybe_uninit_array_assume_init)]
#![feature(maybe_uninit_extra)]
#![feature(maybe_uninit_write_slice)]
|
random_line_split
|
import.rs
|
use gpgme;
use structopt;
use gpgme::{data, Context, Data, ImportFlags};
use std::{error::Error, fs::File, path::PathBuf};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
struct
|
{
#[structopt(long)]
/// Import from given URLs
url: bool,
#[structopt(short = "0")]
/// URLS are delimited by a null
nul: bool,
#[structopt(min_values(1), parse(from_os_str))]
filenames: Vec<PathBuf>,
}
fn main() -> Result<(), Box<dyn Error>> {
let args = Cli::from_args();
let mode = if args.url {
if args.nul {
Some(data::Encoding::Url0)
} else {
Some(data::Encoding::Url)
}
} else {
None
};
let mut ctx = Context::from_protocol(gpgme::Protocol::OpenPgp)?;
for file in args.filenames {
println!("reading file `{}'", &file.display());
let input = File::open(file)?;
let mut data = Data::from_seekable_stream(input)?;
mode.map(|m| data.set_encoding(m));
print_import_result(
ctx.import(&mut data)
.map_err(|e| format!("import failed {:?}", e))?,
);
}
Ok(())
}
fn print_import_result(result: gpgme::ImportResult) {
for import in result.imports() {
print!(
" fpr: {} err: {:?} status:",
import.fingerprint().unwrap_or("[none]"),
import.result().err()
);
let status = import.status();
if status.contains(ImportFlags::NEW) {
print!(" new");
}
if status.contains(ImportFlags::UID) {
print!(" uid");
}
if status.contains(ImportFlags::SIG) {
print!(" sig");
}
if status.contains(ImportFlags::SUBKEY) {
print!(" subkey");
}
if status.contains(ImportFlags::SECRET) {
print!(" secret");
}
println!("");
}
println!("key import summary:");
println!(" considered: {}", result.considered());
println!(" no user id: {}", result.without_user_id());
println!(" imported: {}", result.imported());
println!(" imported rsa: {}", result.imported_rsa());
println!(" unchanged: {}", result.unchanged());
println!(" new user ids: {}", result.new_user_ids());
println!(" new subkeys: {}", result.new_subkeys());
println!(" new signatures: {}", result.new_signatures());
println!(" new revocations: {}", result.new_revocations());
println!(" secret read: {}", result.secret_considered());
println!(" secret imported: {}", result.secret_imported());
println!(" secret unchanged: {}", result.secret_unchanged());
println!(" not imported: {}", result.not_imported());
}
|
Cli
|
identifier_name
|
import.rs
|
use gpgme;
use structopt;
use gpgme::{data, Context, Data, ImportFlags};
use std::{error::Error, fs::File, path::PathBuf};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
struct Cli {
#[structopt(long)]
/// Import from given URLs
url: bool,
#[structopt(short = "0")]
/// URLS are delimited by a null
nul: bool,
#[structopt(min_values(1), parse(from_os_str))]
filenames: Vec<PathBuf>,
}
fn main() -> Result<(), Box<dyn Error>> {
let args = Cli::from_args();
let mode = if args.url {
if args.nul {
Some(data::Encoding::Url0)
} else {
Some(data::Encoding::Url)
}
} else {
None
};
let mut ctx = Context::from_protocol(gpgme::Protocol::OpenPgp)?;
for file in args.filenames {
println!("reading file `{}'", &file.display());
let input = File::open(file)?;
let mut data = Data::from_seekable_stream(input)?;
mode.map(|m| data.set_encoding(m));
|
);
}
Ok(())
}
fn print_import_result(result: gpgme::ImportResult) {
for import in result.imports() {
print!(
" fpr: {} err: {:?} status:",
import.fingerprint().unwrap_or("[none]"),
import.result().err()
);
let status = import.status();
if status.contains(ImportFlags::NEW) {
print!(" new");
}
if status.contains(ImportFlags::UID) {
print!(" uid");
}
if status.contains(ImportFlags::SIG) {
print!(" sig");
}
if status.contains(ImportFlags::SUBKEY) {
print!(" subkey");
}
if status.contains(ImportFlags::SECRET) {
print!(" secret");
}
println!("");
}
println!("key import summary:");
println!(" considered: {}", result.considered());
println!(" no user id: {}", result.without_user_id());
println!(" imported: {}", result.imported());
println!(" imported rsa: {}", result.imported_rsa());
println!(" unchanged: {}", result.unchanged());
println!(" new user ids: {}", result.new_user_ids());
println!(" new subkeys: {}", result.new_subkeys());
println!(" new signatures: {}", result.new_signatures());
println!(" new revocations: {}", result.new_revocations());
println!(" secret read: {}", result.secret_considered());
println!(" secret imported: {}", result.secret_imported());
println!(" secret unchanged: {}", result.secret_unchanged());
println!(" not imported: {}", result.not_imported());
}
|
print_import_result(
ctx.import(&mut data)
.map_err(|e| format!("import failed {:?}", e))?,
|
random_line_split
|
import.rs
|
use gpgme;
use structopt;
use gpgme::{data, Context, Data, ImportFlags};
use std::{error::Error, fs::File, path::PathBuf};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
struct Cli {
#[structopt(long)]
/// Import from given URLs
url: bool,
#[structopt(short = "0")]
/// URLS are delimited by a null
nul: bool,
#[structopt(min_values(1), parse(from_os_str))]
filenames: Vec<PathBuf>,
}
fn main() -> Result<(), Box<dyn Error>> {
let args = Cli::from_args();
let mode = if args.url {
if args.nul {
Some(data::Encoding::Url0)
} else {
Some(data::Encoding::Url)
}
} else {
None
};
let mut ctx = Context::from_protocol(gpgme::Protocol::OpenPgp)?;
for file in args.filenames {
println!("reading file `{}'", &file.display());
let input = File::open(file)?;
let mut data = Data::from_seekable_stream(input)?;
mode.map(|m| data.set_encoding(m));
print_import_result(
ctx.import(&mut data)
.map_err(|e| format!("import failed {:?}", e))?,
);
}
Ok(())
}
fn print_import_result(result: gpgme::ImportResult) {
for import in result.imports() {
print!(
" fpr: {} err: {:?} status:",
import.fingerprint().unwrap_or("[none]"),
import.result().err()
);
let status = import.status();
if status.contains(ImportFlags::NEW) {
print!(" new");
}
if status.contains(ImportFlags::UID) {
print!(" uid");
}
if status.contains(ImportFlags::SIG) {
print!(" sig");
}
if status.contains(ImportFlags::SUBKEY)
|
if status.contains(ImportFlags::SECRET) {
print!(" secret");
}
println!("");
}
println!("key import summary:");
println!(" considered: {}", result.considered());
println!(" no user id: {}", result.without_user_id());
println!(" imported: {}", result.imported());
println!(" imported rsa: {}", result.imported_rsa());
println!(" unchanged: {}", result.unchanged());
println!(" new user ids: {}", result.new_user_ids());
println!(" new subkeys: {}", result.new_subkeys());
println!(" new signatures: {}", result.new_signatures());
println!(" new revocations: {}", result.new_revocations());
println!(" secret read: {}", result.secret_considered());
println!(" secret imported: {}", result.secret_imported());
println!(" secret unchanged: {}", result.secret_unchanged());
println!(" not imported: {}", result.not_imported());
}
|
{
print!(" subkey");
}
|
conditional_block
|
uutils.rs
|
#![crate_name = "uutils"]
#![feature(exit_status, rustc_private)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
@CRATES@
use std::env;
use std::collections::hash_map::HashMap;
use std::path::Path;
static NAME: &'static str = "uutils";
static VERSION: &'static str = "1.0.0";
fn util_map() -> HashMap<&'static str, fn(Vec<String>) -> i32> {
let mut map = HashMap::new();
@UTIL_MAP@
map
}
fn usage(cmap: &HashMap<&'static str, fn(Vec<String>) -> i32>)
|
fn main() {
let umap = util_map();
let mut args : Vec<String> = env::args().collect();
// try binary name as util name.
let args0 = args[0].clone();
let binary = Path::new(&args0[..]);
let binary_as_util = binary.file_name().unwrap().to_str().unwrap();
match umap.get(binary_as_util) {
Some(&uumain) => {
env::set_exit_status(uumain(args));
return
}
None => (),
}
if binary_as_util.ends_with("uutils") || binary_as_util.starts_with("uutils") ||
binary_as_util.ends_with("busybox") || binary_as_util.starts_with("busybox") {
// uutils can be called as either "uutils", "busybox"
// "uutils-suffix" or "busybox-suffix". Not sure
// what busybox uses the -suffix pattern for.
} else {
println!("{}: applet not found", binary_as_util);
env::set_exit_status(1);
return
}
// try first arg as util name.
if args.len() >= 2 {
args.remove(0);
let util = &args[0][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(args.clone()));
return
}
None => {
if &args[0][..] == "--help" {
// see if they want help on a specific util
if args.len() >= 2 {
let util = &args[1][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(vec![util.to_string(), "--help".to_string()]));
return
}
None => {
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
usage(&umap);
env::set_exit_status(0);
return
} else {
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
} else {
// no arguments provided
usage(&umap);
env::set_exit_status(0);
return
}
}
|
{
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [util [arguments...]]\n", NAME);
println!("Currently defined functions:");
let mut utils: Vec<&str> = cmap.keys().map(|&s| s).collect();
utils.sort();
for util in utils.iter() {
println!("\t{}", util);
}
}
|
identifier_body
|
uutils.rs
|
#![crate_name = "uutils"]
#![feature(exit_status, rustc_private)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
@CRATES@
use std::env;
use std::collections::hash_map::HashMap;
use std::path::Path;
static NAME: &'static str = "uutils";
static VERSION: &'static str = "1.0.0";
fn util_map() -> HashMap<&'static str, fn(Vec<String>) -> i32> {
let mut map = HashMap::new();
@UTIL_MAP@
map
}
fn usage(cmap: &HashMap<&'static str, fn(Vec<String>) -> i32>) {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [util [arguments...]]\n", NAME);
println!("Currently defined functions:");
let mut utils: Vec<&str> = cmap.keys().map(|&s| s).collect();
utils.sort();
for util in utils.iter() {
println!("\t{}", util);
}
}
fn main() {
let umap = util_map();
let mut args : Vec<String> = env::args().collect();
// try binary name as util name.
let args0 = args[0].clone();
let binary = Path::new(&args0[..]);
let binary_as_util = binary.file_name().unwrap().to_str().unwrap();
match umap.get(binary_as_util) {
Some(&uumain) => {
env::set_exit_status(uumain(args));
return
|
}
if binary_as_util.ends_with("uutils") || binary_as_util.starts_with("uutils") ||
binary_as_util.ends_with("busybox") || binary_as_util.starts_with("busybox") {
// uutils can be called as either "uutils", "busybox"
// "uutils-suffix" or "busybox-suffix". Not sure
// what busybox uses the -suffix pattern for.
} else {
println!("{}: applet not found", binary_as_util);
env::set_exit_status(1);
return
}
// try first arg as util name.
if args.len() >= 2 {
args.remove(0);
let util = &args[0][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(args.clone()));
return
}
None => {
if &args[0][..] == "--help" {
// see if they want help on a specific util
if args.len() >= 2 {
let util = &args[1][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(vec![util.to_string(), "--help".to_string()]));
return
}
None => {
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
usage(&umap);
env::set_exit_status(0);
return
} else {
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
} else {
// no arguments provided
usage(&umap);
env::set_exit_status(0);
return
}
}
|
}
None => (),
|
random_line_split
|
uutils.rs
|
#![crate_name = "uutils"]
#![feature(exit_status, rustc_private)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
@CRATES@
use std::env;
use std::collections::hash_map::HashMap;
use std::path::Path;
static NAME: &'static str = "uutils";
static VERSION: &'static str = "1.0.0";
fn util_map() -> HashMap<&'static str, fn(Vec<String>) -> i32> {
let mut map = HashMap::new();
@UTIL_MAP@
map
}
fn usage(cmap: &HashMap<&'static str, fn(Vec<String>) -> i32>) {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [util [arguments...]]\n", NAME);
println!("Currently defined functions:");
let mut utils: Vec<&str> = cmap.keys().map(|&s| s).collect();
utils.sort();
for util in utils.iter() {
println!("\t{}", util);
}
}
fn
|
() {
let umap = util_map();
let mut args : Vec<String> = env::args().collect();
// try binary name as util name.
let args0 = args[0].clone();
let binary = Path::new(&args0[..]);
let binary_as_util = binary.file_name().unwrap().to_str().unwrap();
match umap.get(binary_as_util) {
Some(&uumain) => {
env::set_exit_status(uumain(args));
return
}
None => (),
}
if binary_as_util.ends_with("uutils") || binary_as_util.starts_with("uutils") ||
binary_as_util.ends_with("busybox") || binary_as_util.starts_with("busybox") {
// uutils can be called as either "uutils", "busybox"
// "uutils-suffix" or "busybox-suffix". Not sure
// what busybox uses the -suffix pattern for.
} else {
println!("{}: applet not found", binary_as_util);
env::set_exit_status(1);
return
}
// try first arg as util name.
if args.len() >= 2 {
args.remove(0);
let util = &args[0][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(args.clone()));
return
}
None => {
if &args[0][..] == "--help" {
// see if they want help on a specific util
if args.len() >= 2 {
let util = &args[1][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(vec![util.to_string(), "--help".to_string()]));
return
}
None => {
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
usage(&umap);
env::set_exit_status(0);
return
} else {
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
} else {
// no arguments provided
usage(&umap);
env::set_exit_status(0);
return
}
}
|
main
|
identifier_name
|
uutils.rs
|
#![crate_name = "uutils"]
#![feature(exit_status, rustc_private)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
@CRATES@
use std::env;
use std::collections::hash_map::HashMap;
use std::path::Path;
static NAME: &'static str = "uutils";
static VERSION: &'static str = "1.0.0";
fn util_map() -> HashMap<&'static str, fn(Vec<String>) -> i32> {
let mut map = HashMap::new();
@UTIL_MAP@
map
}
fn usage(cmap: &HashMap<&'static str, fn(Vec<String>) -> i32>) {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [util [arguments...]]\n", NAME);
println!("Currently defined functions:");
let mut utils: Vec<&str> = cmap.keys().map(|&s| s).collect();
utils.sort();
for util in utils.iter() {
println!("\t{}", util);
}
}
fn main() {
let umap = util_map();
let mut args : Vec<String> = env::args().collect();
// try binary name as util name.
let args0 = args[0].clone();
let binary = Path::new(&args0[..]);
let binary_as_util = binary.file_name().unwrap().to_str().unwrap();
match umap.get(binary_as_util) {
Some(&uumain) => {
env::set_exit_status(uumain(args));
return
}
None => (),
}
if binary_as_util.ends_with("uutils") || binary_as_util.starts_with("uutils") ||
binary_as_util.ends_with("busybox") || binary_as_util.starts_with("busybox") {
// uutils can be called as either "uutils", "busybox"
// "uutils-suffix" or "busybox-suffix". Not sure
// what busybox uses the -suffix pattern for.
} else {
println!("{}: applet not found", binary_as_util);
env::set_exit_status(1);
return
}
// try first arg as util name.
if args.len() >= 2
|
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
usage(&umap);
env::set_exit_status(0);
return
} else {
println!("{}: applet not found", util);
env::set_exit_status(1);
return
}
}
}
}
else {
// no arguments provided
usage(&umap);
env::set_exit_status(0);
return
}
}
|
{
args.remove(0);
let util = &args[0][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(args.clone()));
return
}
None => {
if &args[0][..] == "--help" {
// see if they want help on a specific util
if args.len() >= 2 {
let util = &args[1][..];
match umap.get(util) {
Some(&uumain) => {
env::set_exit_status(uumain(vec![util.to_string(), "--help".to_string()]));
return
}
None => {
|
conditional_block
|
support_map_against_support_map.rs
|
use num::Zero;
use na::{Translation, Translate};
use na;
use geometry::algorithms::gjk;
use geometry::algorithms::simplex::Simplex;
use geometry::algorithms::johnson_simplex::JohnsonSimplex;
use entities::support_map::SupportMap;
use entities::support_map;
use math::{Scalar, Point, Vect};
/// Distance between support-mapped shapes.
pub fn support_map_against_support_map<P, M, G1:?Sized, G2:?Sized>(m1: &M, g1: &G1,
m2: &M, g2: &G2)
-> <P::Vect as Vect>::Scalar
where P: Point,
P::Vect: Translate<P>,
M: Translation<P::Vect>,
G1: SupportMap<P, M>,
G2: SupportMap<P, M> {
support_map_against_support_map_with_params(m1, g1, m2, g2, &mut JohnsonSimplex::new_w_tls(), None)
}
/// Distance between support-mapped shapes.
///
/// This allows a more fine grained control other the underlying GJK algorigtm.
pub fn support_map_against_support_map_with_params<P, M, S, G1:?Sized, G2:?Sized>(
m1: &M,
g1: &G1,
m2: &M,
g2: &G2,
simplex: &mut S,
init_dir: Option<P::Vect>)
-> <P::Vect as Vect>::Scalar
where P: Point,
P::Vect: Translate<P>,
M: Translation<P::Vect>,
S: Simplex<P>,
G1: SupportMap<P, M>,
G2: SupportMap<P, M>
|
{
let mut dir =
match init_dir {
None => m1.translation() - m2.translation(), // FIXME: or m2.translation - m1.translation ?
Some(dir) => dir
};
if dir.is_zero() {
dir[0] = na::one();
}
simplex.reset(support_map::cso_support_point(m1, g1, m2, g2, dir).point().clone());
gjk::distance(m1, g1, m2, g2, simplex)
}
|
identifier_body
|
|
support_map_against_support_map.rs
|
use num::Zero;
use na::{Translation, Translate};
use na;
use geometry::algorithms::gjk;
use geometry::algorithms::simplex::Simplex;
use geometry::algorithms::johnson_simplex::JohnsonSimplex;
use entities::support_map::SupportMap;
use entities::support_map;
use math::{Scalar, Point, Vect};
/// Distance between support-mapped shapes.
pub fn support_map_against_support_map<P, M, G1:?Sized, G2:?Sized>(m1: &M, g1: &G1,
m2: &M, g2: &G2)
-> <P::Vect as Vect>::Scalar
where P: Point,
P::Vect: Translate<P>,
M: Translation<P::Vect>,
G1: SupportMap<P, M>,
G2: SupportMap<P, M> {
support_map_against_support_map_with_params(m1, g1, m2, g2, &mut JohnsonSimplex::new_w_tls(), None)
}
/// Distance between support-mapped shapes.
///
/// This allows a more fine grained control other the underlying GJK algorigtm.
pub fn
|
<P, M, S, G1:?Sized, G2:?Sized>(
m1: &M,
g1: &G1,
m2: &M,
g2: &G2,
simplex: &mut S,
init_dir: Option<P::Vect>)
-> <P::Vect as Vect>::Scalar
where P: Point,
P::Vect: Translate<P>,
M: Translation<P::Vect>,
S: Simplex<P>,
G1: SupportMap<P, M>,
G2: SupportMap<P, M> {
let mut dir =
match init_dir {
None => m1.translation() - m2.translation(), // FIXME: or m2.translation - m1.translation?
Some(dir) => dir
};
if dir.is_zero() {
dir[0] = na::one();
}
simplex.reset(support_map::cso_support_point(m1, g1, m2, g2, dir).point().clone());
gjk::distance(m1, g1, m2, g2, simplex)
}
|
support_map_against_support_map_with_params
|
identifier_name
|
support_map_against_support_map.rs
|
use num::Zero;
use na::{Translation, Translate};
use na;
use geometry::algorithms::gjk;
use geometry::algorithms::simplex::Simplex;
use geometry::algorithms::johnson_simplex::JohnsonSimplex;
use entities::support_map::SupportMap;
use entities::support_map;
use math::{Scalar, Point, Vect};
/// Distance between support-mapped shapes.
pub fn support_map_against_support_map<P, M, G1:?Sized, G2:?Sized>(m1: &M, g1: &G1,
m2: &M, g2: &G2)
-> <P::Vect as Vect>::Scalar
where P: Point,
P::Vect: Translate<P>,
M: Translation<P::Vect>,
G1: SupportMap<P, M>,
G2: SupportMap<P, M> {
support_map_against_support_map_with_params(m1, g1, m2, g2, &mut JohnsonSimplex::new_w_tls(), None)
|
/// Distance between support-mapped shapes.
///
/// This allows a more fine grained control other the underlying GJK algorigtm.
pub fn support_map_against_support_map_with_params<P, M, S, G1:?Sized, G2:?Sized>(
m1: &M,
g1: &G1,
m2: &M,
g2: &G2,
simplex: &mut S,
init_dir: Option<P::Vect>)
-> <P::Vect as Vect>::Scalar
where P: Point,
P::Vect: Translate<P>,
M: Translation<P::Vect>,
S: Simplex<P>,
G1: SupportMap<P, M>,
G2: SupportMap<P, M> {
let mut dir =
match init_dir {
None => m1.translation() - m2.translation(), // FIXME: or m2.translation - m1.translation?
Some(dir) => dir
};
if dir.is_zero() {
dir[0] = na::one();
}
simplex.reset(support_map::cso_support_point(m1, g1, m2, g2, dir).point().clone());
gjk::distance(m1, g1, m2, g2, simplex)
}
|
}
|
random_line_split
|
stream.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::pin::Pin;
use anyhow::Error;
use async_compression::tokio::bufread::{GzipEncoder, ZstdEncoder};
use bytes::Bytes;
use futures::{
future::Either,
stream::{BoxStream, Stream, StreamExt, TryStreamExt},
task::{Context, Poll},
};
use pin_project::pin_project;
use tokio_util::io::{ReaderStream, StreamReader};
use crate::content_encoding::{ContentCompression, ContentEncoding};
/// Create a response stream using the specified Content-Encoding.
///
/// The resulting stream may or may not be compressed depending on the chosen encoding. Optionally,
/// the caller can specify the value for the `Content-Length` header. This is only useful in cases
/// where the response isn't compressed (i.e., the encoding is set to `ContentEncoding::Identity`)
/// because otherwise, we would need to send the post-compression size of the content, which cannot
/// be known in advance.
pub fn encode_stream<S>(
stream: S,
encoding: ContentEncoding,
length: Option<u64>,
) -> Either<ResponseStream<S>, CompressedResponseStream<'static>>
where
S: Stream<Item = Result<Bytes, Error>> + Send +'static,
{
match (encoding, length) {
(ContentEncoding::Identity, Some(size)) => ResponseStream::new(stream)
.set_content_length(size)
.left_stream(),
(ContentEncoding::Identity, None) => ResponseStream::new(stream).left_stream(),
(ContentEncoding::Compressed(c), _) => {
CompressedResponseStream::new(stream, c).right_stream()
}
}
}
#[pin_project]
pub struct CompressedResponseStream<'a> {
inner: BoxStream<'a, Result<Bytes, Error>>,
content_compression: ContentCompression,
}
impl<'a> CompressedResponseStream<'a> {
pub fn new<S>(inner: S, content_compression: ContentCompression) -> Self
where
S: Stream<Item = Result<Bytes, Error>> + Send + 'a,
{
use std::io;
// 2MiB, for LFS that's at least once every content chunk.
const YIELD_EVERY: usize = 2 * 1024 * 1024;
let inner = inner.map_err(|e| io::Error::new(io::ErrorKind::Other, e));
let inner = YieldStream::new(inner, YIELD_EVERY);
let inner = StreamReader::new(inner);
let inner = match content_compression {
ContentCompression::Zstd => ReaderStream::new(ZstdEncoder::new(inner))
.map_err(Error::from)
.boxed(),
ContentCompression::Gzip => ReaderStream::new(GzipEncoder::new(inner))
.map_err(Error::from)
.boxed(),
};
Self {
inner,
content_compression,
}
}
pub fn content_compression(&self) -> ContentCompression {
self.content_compression
}
}
impl Stream for CompressedResponseStream<'_> {
type Item = Result<Bytes, Error>;
fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.project().inner.poll_next_unpin(ctx)
}
}
#[pin_project]
pub struct ResponseStream<S> {
#[pin]
inner: S,
content_length: Option<u64>,
}
impl<S> ResponseStream<S> {
pub fn new(inner: S) -> Self {
Self {
inner,
content_length: None,
}
}
/// Set a Content-Length for this stream. This *must* match the exact size of the uncompressed
/// content that will be sent, since that is what the client will expect.
pub fn set_content_length(self, content_length: u64) -> Self
|
pub fn content_length(&self) -> Option<u64> {
self.content_length
}
}
impl<S> Stream for ResponseStream<S>
where
S: Stream,
{
type Item = S::Item;
fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.project().inner.poll_next(ctx)
}
}
/// This is a helper that forces the underlying stream to yield (i.e. return Pending) periodically.
/// This is useful with compression, because our compression library will try to compress as much
/// as it can. If the data is always ready (which it often is with e.g. LFS, where we have
/// everything in cache most of the time), then it'll compress the entire stream before returning,
/// which is good for compression performance, but terrible for time-to-first-byte. So, we force
/// our compression to periodically stop compresing (every YIELD_EVERY).
#[pin_project]
pub struct YieldStream<S> {
read: usize,
yield_every: usize,
#[pin]
inner: S,
}
impl<S> YieldStream<S> {
pub fn new(inner: S, yield_every: usize) -> Self {
Self {
read: 0,
yield_every,
inner,
}
}
}
impl<S, E> Stream for YieldStream<S>
where
S: Stream<Item = Result<Bytes, E>>,
{
type Item = Result<Bytes, E>;
fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut projection = self.project();
if *projection.read >= *projection.yield_every {
*projection.read %= *projection.yield_every;
ctx.waker().wake_by_ref();
return Poll::Pending;
}
let ret = futures::ready!(projection.inner.poll_next_unpin(ctx));
if let Some(Ok(ref bytes)) = ret {
*projection.read += bytes.len();
}
Poll::Ready(ret)
}
}
#[cfg(test)]
mod test {
use super::*;
use futures::stream;
#[tokio::test]
async fn test_yield_stream() {
// NOTE: This tests that the yield probably wakes up but assumes it yields.
let data = &[b"foo".as_ref(), b"bar2".as_ref()];
let data = stream::iter(
data.iter()
.map(|d| Result::<_, ()>::Ok(Bytes::copy_from_slice(d))),
);
let mut stream = YieldStream::new(data, 1);
assert_eq!(
stream.next().await,
Some(Ok(Bytes::copy_from_slice(b"foo")))
);
assert!(stream.read > stream.yield_every);
assert_eq!(
stream.next().await,
Some(Ok(Bytes::copy_from_slice(b"bar2")))
);
assert_eq!(stream.next().await, None,);
}
}
|
{
Self {
content_length: Some(content_length),
..self
}
}
|
identifier_body
|
stream.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::pin::Pin;
|
use async_compression::tokio::bufread::{GzipEncoder, ZstdEncoder};
use bytes::Bytes;
use futures::{
future::Either,
stream::{BoxStream, Stream, StreamExt, TryStreamExt},
task::{Context, Poll},
};
use pin_project::pin_project;
use tokio_util::io::{ReaderStream, StreamReader};
use crate::content_encoding::{ContentCompression, ContentEncoding};
/// Create a response stream using the specified Content-Encoding.
///
/// The resulting stream may or may not be compressed depending on the chosen encoding. Optionally,
/// the caller can specify the value for the `Content-Length` header. This is only useful in cases
/// where the response isn't compressed (i.e., the encoding is set to `ContentEncoding::Identity`)
/// because otherwise, we would need to send the post-compression size of the content, which cannot
/// be known in advance.
pub fn encode_stream<S>(
stream: S,
encoding: ContentEncoding,
length: Option<u64>,
) -> Either<ResponseStream<S>, CompressedResponseStream<'static>>
where
S: Stream<Item = Result<Bytes, Error>> + Send +'static,
{
match (encoding, length) {
(ContentEncoding::Identity, Some(size)) => ResponseStream::new(stream)
.set_content_length(size)
.left_stream(),
(ContentEncoding::Identity, None) => ResponseStream::new(stream).left_stream(),
(ContentEncoding::Compressed(c), _) => {
CompressedResponseStream::new(stream, c).right_stream()
}
}
}
/// A response stream whose bytes are compressed with the chosen scheme.
#[pin_project]
pub struct CompressedResponseStream<'a> {
    // Boxed stream of already-encoded bytes (built in `new`).
    inner: BoxStream<'a, Result<Bytes, Error>>,
    // The scheme `inner` was built with; reported via `content_compression()`.
    content_compression: ContentCompression,
}
impl<'a> CompressedResponseStream<'a> {
    /// Build a compressing wrapper around `inner` using `content_compression`.
    ///
    /// The source stream is forced to yield periodically so the encoder cannot
    /// monopolize the task and ruin time-to-first-byte (see `YieldStream`).
    pub fn new<S>(inner: S, content_compression: ContentCompression) -> Self
    where
        S: Stream<Item = Result<Bytes, Error>> + Send + 'a,
    {
        use std::io;

        // 2MiB, for LFS that's at least once every content chunk.
        const YIELD_EVERY: usize = 2 * 1024 * 1024;

        // StreamReader wants io::Error items, so convert first, then insert
        // the periodic-yield layer before handing the reader to the encoder.
        let source = inner.map_err(|e| io::Error::new(io::ErrorKind::Other, e));
        let reader = StreamReader::new(YieldStream::new(source, YIELD_EVERY));

        let inner = match content_compression {
            ContentCompression::Zstd => ReaderStream::new(ZstdEncoder::new(reader))
                .map_err(Error::from)
                .boxed(),
            ContentCompression::Gzip => ReaderStream::new(GzipEncoder::new(reader))
                .map_err(Error::from)
                .boxed(),
        };

        Self {
            inner,
            content_compression,
        }
    }

    /// The compression scheme this stream applies.
    pub fn content_compression(&self) -> ContentCompression {
        self.content_compression
    }
}
impl Stream for CompressedResponseStream<'_> {
    type Item = Result<Bytes, Error>;
    // Pure delegation: the boxed inner stream already does all the work.
    fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.project().inner.poll_next_unpin(ctx)
    }
}
/// An uncompressed (identity-encoded) response stream with an optional
/// pre-declared Content-Length.
#[pin_project]
pub struct ResponseStream<S> {
    #[pin]
    inner: S,
    // Exact byte length of the content if known; None means unknown/chunked.
    content_length: Option<u64>,
}
impl<S> ResponseStream<S> {
    /// Wrap `inner` with no Content-Length declared.
    pub fn new(inner: S) -> Self {
        Self {
            content_length: None,
            inner,
        }
    }

    /// Set a Content-Length for this stream. This *must* match the exact size of the uncompressed
    /// content that will be sent, since that is what the client will expect.
    pub fn set_content_length(mut self, content_length: u64) -> Self {
        self.content_length = Some(content_length);
        self
    }

    /// The declared Content-Length, if one was set.
    pub fn content_length(&self) -> Option<u64> {
        self.content_length
    }
}
impl<S> Stream for ResponseStream<S>
where
    S: Stream,
{
    type Item = S::Item;
    // Pass-through: project the pinned inner stream and poll it directly.
    fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.project().inner.poll_next(ctx)
    }
}
/// This is a helper that forces the underlying stream to yield (i.e. return Pending) periodically.
/// This is useful with compression, because our compression library will try to compress as much
/// as it can. If the data is always ready (which it often is with e.g. LFS, where we have
/// everything in cache most of the time), then it'll compress the entire stream before returning,
/// which is good for compression performance, but terrible for time-to-first-byte. So, we force
/// our compression to periodically stop compressing (every YIELD_EVERY).
#[pin_project]
pub struct YieldStream<S> {
    // Bytes seen since the last forced yield.
    read: usize,
    // Threshold at which poll_next returns Pending once (with a self-wake).
    yield_every: usize,
    #[pin]
    inner: S,
}
impl<S> YieldStream<S> {
    /// Wrap `inner`, forcing a yield roughly every `yield_every` bytes read.
    pub fn new(inner: S, yield_every: usize) -> Self {
        Self {
            read: 0,
            yield_every,
            inner,
        }
    }
}
impl<S, E> Stream for YieldStream<S>
where
    S: Stream<Item = Result<Bytes, E>>,
{
    type Item = Result<Bytes, E>;
    /// Polls the inner stream, but forces a `Pending` once `read` reaches
    /// `yield_every`, waking ourselves immediately so we are re-polled.
    fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut projection = self.project();
        if *projection.read >= *projection.yield_every {
            // Keep the remainder so partial progress toward the next yield counts.
            *projection.read %= *projection.yield_every;
            // Self-wake: data is ready; we only want to give the executor a turn.
            ctx.waker().wake_by_ref();
            return Poll::Pending;
        }
        let ret = futures::ready!(projection.inner.poll_next_unpin(ctx));
        // Only successful chunks advance the byte counter.
        if let Some(Ok(ref bytes)) = ret {
            *projection.read += bytes.len();
        }
        Poll::Ready(ret)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use futures::stream;
    #[tokio::test]
    async fn test_yield_stream() {
        // NOTE: This tests that the yield probably wakes up but assumes it yields.
        // With yield_every = 1, the first 3-byte chunk pushes `read` past the
        // threshold, so the second poll must go through the self-wake path.
        let data = &[b"foo".as_ref(), b"bar2".as_ref()];
        let data = stream::iter(
            data.iter()
                .map(|d| Result::<_, ()>::Ok(Bytes::copy_from_slice(d))),
        );
        let mut stream = YieldStream::new(data, 1);
        assert_eq!(
            stream.next().await,
            Some(Ok(Bytes::copy_from_slice(b"foo")))
        );
        // Confirms the counter exceeded the threshold, i.e. a yield is due.
        assert!(stream.read > stream.yield_every);
        assert_eq!(
            stream.next().await,
            Some(Ok(Bytes::copy_from_slice(b"bar2")))
        );
        assert_eq!(stream.next().await, None,);
    }
}
|
use anyhow::Error;
|
random_line_split
|
stream.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::pin::Pin;
use anyhow::Error;
use async_compression::tokio::bufread::{GzipEncoder, ZstdEncoder};
use bytes::Bytes;
use futures::{
future::Either,
stream::{BoxStream, Stream, StreamExt, TryStreamExt},
task::{Context, Poll},
};
use pin_project::pin_project;
use tokio_util::io::{ReaderStream, StreamReader};
use crate::content_encoding::{ContentCompression, ContentEncoding};
/// Create a response stream using the specified Content-Encoding.
///
/// The resulting stream may or may not be compressed depending on the chosen encoding. Optionally,
/// the caller can specify the value for the `Content-Length` header. This is only useful in cases
/// where the response isn't compressed (i.e., the encoding is set to `ContentEncoding::Identity`)
/// because otherwise, we would need to send the post-compression size of the content, which cannot
/// be known in advance.
pub fn encode_stream<S>(
stream: S,
encoding: ContentEncoding,
length: Option<u64>,
) -> Either<ResponseStream<S>, CompressedResponseStream<'static>>
where
S: Stream<Item = Result<Bytes, Error>> + Send +'static,
{
match (encoding, length) {
(ContentEncoding::Identity, Some(size)) => ResponseStream::new(stream)
.set_content_length(size)
.left_stream(),
(ContentEncoding::Identity, None) => ResponseStream::new(stream).left_stream(),
(ContentEncoding::Compressed(c), _) => {
CompressedResponseStream::new(stream, c).right_stream()
}
}
}
#[pin_project]
pub struct CompressedResponseStream<'a> {
inner: BoxStream<'a, Result<Bytes, Error>>,
content_compression: ContentCompression,
}
impl<'a> CompressedResponseStream<'a> {
pub fn new<S>(inner: S, content_compression: ContentCompression) -> Self
where
S: Stream<Item = Result<Bytes, Error>> + Send + 'a,
{
use std::io;
// 2MiB, for LFS that's at least once every content chunk.
const YIELD_EVERY: usize = 2 * 1024 * 1024;
let inner = inner.map_err(|e| io::Error::new(io::ErrorKind::Other, e));
let inner = YieldStream::new(inner, YIELD_EVERY);
let inner = StreamReader::new(inner);
let inner = match content_compression {
ContentCompression::Zstd => ReaderStream::new(ZstdEncoder::new(inner))
.map_err(Error::from)
.boxed(),
ContentCompression::Gzip => ReaderStream::new(GzipEncoder::new(inner))
.map_err(Error::from)
.boxed(),
};
Self {
inner,
content_compression,
}
}
pub fn content_compression(&self) -> ContentCompression {
self.content_compression
}
}
impl Stream for CompressedResponseStream<'_> {
type Item = Result<Bytes, Error>;
fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.project().inner.poll_next_unpin(ctx)
}
}
#[pin_project]
pub struct ResponseStream<S> {
#[pin]
inner: S,
content_length: Option<u64>,
}
impl<S> ResponseStream<S> {
pub fn new(inner: S) -> Self {
Self {
inner,
content_length: None,
}
}
/// Set a Content-Length for this stream. This *must* match the exact size of the uncompressed
/// content that will be sent, since that is what the client will expect.
pub fn set_content_length(self, content_length: u64) -> Self {
Self {
content_length: Some(content_length),
..self
}
}
pub fn content_length(&self) -> Option<u64> {
self.content_length
}
}
impl<S> Stream for ResponseStream<S>
where
S: Stream,
{
type Item = S::Item;
fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.project().inner.poll_next(ctx)
}
}
/// This is a helper that forces the underlying stream to yield (i.e. return Pending) periodically.
/// This is useful with compression, because our compression library will try to compress as much
/// as it can. If the data is always ready (which it often is with e.g. LFS, where we have
/// everything in cache most of the time), then it'll compress the entire stream before returning,
/// which is good for compression performance, but terrible for time-to-first-byte. So, we force
/// our compression to periodically stop compresing (every YIELD_EVERY).
#[pin_project]
pub struct
|
<S> {
read: usize,
yield_every: usize,
#[pin]
inner: S,
}
impl<S> YieldStream<S> {
pub fn new(inner: S, yield_every: usize) -> Self {
Self {
read: 0,
yield_every,
inner,
}
}
}
impl<S, E> Stream for YieldStream<S>
where
S: Stream<Item = Result<Bytes, E>>,
{
type Item = Result<Bytes, E>;
fn poll_next(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut projection = self.project();
if *projection.read >= *projection.yield_every {
*projection.read %= *projection.yield_every;
ctx.waker().wake_by_ref();
return Poll::Pending;
}
let ret = futures::ready!(projection.inner.poll_next_unpin(ctx));
if let Some(Ok(ref bytes)) = ret {
*projection.read += bytes.len();
}
Poll::Ready(ret)
}
}
#[cfg(test)]
mod test {
use super::*;
use futures::stream;
#[tokio::test]
async fn test_yield_stream() {
// NOTE: This tests that the yield probably wakes up but assumes it yields.
let data = &[b"foo".as_ref(), b"bar2".as_ref()];
let data = stream::iter(
data.iter()
.map(|d| Result::<_, ()>::Ok(Bytes::copy_from_slice(d))),
);
let mut stream = YieldStream::new(data, 1);
assert_eq!(
stream.next().await,
Some(Ok(Bytes::copy_from_slice(b"foo")))
);
assert!(stream.read > stream.yield_every);
assert_eq!(
stream.next().await,
Some(Ok(Bytes::copy_from_slice(b"bar2")))
);
assert_eq!(stream.next().await, None,);
}
}
|
YieldStream
|
identifier_name
|
svg.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
${helpers.single_keyword("dominant-baseline",
"""auto use-script no-change reset-size ideographic alphabetic hanging
mathematical central middle text-after-edge text-before-edge""",
products="gecko")}
${helpers.single_keyword("vector-effect", "none non-scaling-stroke", products="gecko")}
// Section 13 - Gradients and Patterns
${helpers.predefined_type("stop-opacity", "Opacity", "1.0", products="gecko")}
// Section 15 - Filter Effects
${helpers.predefined_type("flood-opacity", "Opacity", "1.0", products="gecko")}
// CSS Masking Module Level 1
// https://www.w3.org/TR/css-masking-1/
${helpers.single_keyword("mask-type", "luminance alpha", products="gecko")}
|
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("SVG", inherited=False, gecko_name="SVGReset") %>
|
random_line_split
|
lib.rs
|
extern crate clap;
extern crate imageflow_helpers;
extern crate imageflow_types as s;
extern crate imageflow_core as fc;
extern crate serde_json;
use imageflow_helpers as hlp;
use std::path::{Path,PathBuf};
mod cmd_build;
pub mod self_test;
use clap::{App, Arg, SubCommand, AppSettings};
fn artifact_source() -> hlp::process_capture::IncludeBinary{
hlp::process_capture::IncludeBinary::UrlOrCopy(s::version::get_build_env_value("ESTIMATED_ARTIFACT_URL").map(|v| v.to_owned()))
}
pub fn
|
() -> i32 {
imageflow_helpers::debug::set_panic_hook_once();
let version = s::version::one_line_version();
let app = App::new("imageflow_tool").version(version.as_ref())
.arg( Arg::with_name("capture-to").long("capture-to").takes_value(true)
.help("Run whatever you're doing in a sub-process, capturing output, input, and version detail")
).setting(AppSettings::SubcommandRequiredElseHelp).setting(AppSettings::VersionlessSubcommands)
.subcommand(
SubCommand::with_name("diagnose").setting(AppSettings::ArgRequiredElseHelp)
.about("Diagnostic utilities")
.arg(
Arg::with_name("show-compilation-info").long("show-compilation-info")
.help("Show all the information stored in this executable about the environment in which it was compiled.")
).arg(
Arg::with_name("self-test").long("self-test")
.help("Creates a'self_tests' directory and runs self-tests"))
.arg(
Arg::with_name("wait").long("wait")
.help("Process stays in memory until you press the enter key.")
)
.arg(
Arg::with_name("call-panic").long("call-panic")
.help("Triggers a Rust panic (so you can observe failure/backtrace behavior)")
)
)
.subcommand(
SubCommand::with_name("examples")
.about("Generate usage examples")
.arg(
Arg::with_name("generate").long("generate").required(true)
.help("Create an 'examples' directory")
)
)
// --json [path]
// --response [response_json_path]
// --demo [name]
// --in 0 a.png b.png
// --out a.png
//Eventually:
// --local-only (prevent remote URL requests)
// --no-io-ids (Disables interpretation of numbers in --in and --out as io_id assignment).
// --no-clobber
// --debug (verbose, graph export, frame export?)
// --debug-package
// file.json --in a.png a.png --out s.png
// file.json --in 0 a.png 1 b.png --out 3 base64
.subcommand(SubCommand::with_name("v1/build").alias("v0.1/build")
.about("Runs the given operation file")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true)
.help("Replace/add inputs for the operation file")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1)
.help("Replace/add outputs for the operation file"))
//.arg(Arg::with_name("demo").long("demo").takes_value(true).possible_values(&["example:200x200_png"]))
.arg(Arg::with_name("json").long("json").takes_value(true).required(true).help("The JSON operation file."))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
)
.subcommand(SubCommand::with_name("v1/querystring").aliases(&["v0.1/ir4","v1/ir4"])
.about("Run an command querystring")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true).required(true)
.help("Input image")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1).required(true)
.help("Output image"))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("command").long("command").takes_value(true).required(true).help("w=200&h=200&mode=crop&format=png&rotate=90&flip=v - querystring style command"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
);
let matches = app.get_matches();
if matches.is_present("capture-to"){
let mut filtered_args = std::env::args().collect::<Vec<String>>();
for ix in 0..filtered_args.len() {
if filtered_args[ix] == "--capture-to"{
//Remove this and the next arg
filtered_args.remove(ix);
if ix < filtered_args.len() - 1{
filtered_args.remove(ix);
}
break;
}
}
filtered_args.remove(0); //Remove the tool executable itself
let cap = hlp::process_capture::CaptureTo::create(matches.value_of("capture-to").unwrap(), None, filtered_args, artifact_source());
cap.run();
return cap.exit_code();
}
let build_triple = if let Some(m) = matches.subcommand_matches("v1/build") {
let source = if m.is_present("demo") {
cmd_build::JobSource::NamedDemo(m.value_of("demo").unwrap().to_owned())
} else {
cmd_build::JobSource::JsonFile(m.value_of("json").unwrap().to_owned())
};
Some((m, source, "v1/build"))
}else if let Some(m) = matches.subcommand_matches("v1/querystring"){
Some((m,cmd_build::JobSource::Ir4QueryString(m.value_of("command").unwrap().to_owned()), "v1/querystring"))
}else{ None };
if let Some((m, source, subcommand_name)) = build_triple{
let builder =
cmd_build::CmdBuild::parse(source, m.values_of_lossy("in"), m.values_of_lossy("out"))
.build_maybe();
if let Some(dir_str) = m.value_of("debug-package").and_then(|v| Some(v.to_owned())){
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir_str);
builder.bundle_to(dir);
let curdir = std::env::current_dir().unwrap();
std::env::set_current_dir(&dir).unwrap();
let cap = hlp::process_capture::CaptureTo::create("recipe", None, vec![subcommand_name.to_owned(), "--json".to_owned(), "recipe.json".to_owned()], artifact_source());
cap.run();
//Restore current directory
std::env::set_current_dir(&curdir).unwrap();
let archive_name = PathBuf::from(format!("{}.zip", &dir_str));
hlp::filesystem::zip_directory_nonrecursive(&dir,&archive_name.as_path()).unwrap();
return cap.exit_code();
} else if let Some(dir) = m.value_of("bundle-to").and_then(|v| Some(v.to_owned())) {
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir);
return builder.bundle_to(dir);
} else {
builder.write_response_maybe(m.value_of("response"),!m.is_present("quiet"))
.expect("IO error writing JSON output file. Does the directory exist?");
builder.write_errors_maybe().expect("Writing to stderr failed!");
return builder.get_exit_code().unwrap();
}
}
if let Some(matches) = matches.subcommand_matches("diagnose") {
let m: &clap::ArgMatches = matches;
if m.is_present("show-compilation-info") {
println!("{}\n{}\n",
s::version::one_line_version(),
s::version::all_build_info_pairs());
return 0;
}
if m.is_present("self-test") {
return self_test::run(None);
}
if m.is_present("wait") {
let mut input_buf = String::new();
let input = std::io::stdin().read_line(&mut input_buf).expect("Failed to read from stdin. Are you using --wait in a non-interactive shell?");
println!("{}", input);
return 0;
}
if m.is_present("call-panic") {
panic!("Panicking on command");
}
}
if let Some(matches) = matches.subcommand_matches("examples") {
let m: &clap::ArgMatches = matches;
if m.is_present("generate") {
self_test::export_examples(None);
return 0;
}
}
64
}
// Guards that this crate's sources live under the expected directory prefix,
// so `file!()`-derived paths stay stable — presumably relied on by build
// tooling; confirm before changing the crate layout.
#[test]
fn test_file_macro_for_this_build(){
    assert!(file!().starts_with("imageflow_tool"))
}
|
main_with_exit_code
|
identifier_name
|
lib.rs
|
extern crate clap;
extern crate imageflow_helpers;
extern crate imageflow_types as s;
extern crate imageflow_core as fc;
extern crate serde_json;
use imageflow_helpers as hlp;
use std::path::{Path,PathBuf};
mod cmd_build;
pub mod self_test;
use clap::{App, Arg, SubCommand, AppSettings};
/// Where to source this binary for capture bundles: the artifact URL baked in
/// at build time (ESTIMATED_ARTIFACT_URL), when available. The `UrlOrCopy`
/// variant presumably falls back to copying the executable when the URL is
/// None — confirm in `process_capture`.
fn artifact_source() -> hlp::process_capture::IncludeBinary{
    hlp::process_capture::IncludeBinary::UrlOrCopy(s::version::get_build_env_value("ESTIMATED_ARTIFACT_URL").map(|v| v.to_owned()))
}
pub fn main_with_exit_code() -> i32 {
imageflow_helpers::debug::set_panic_hook_once();
let version = s::version::one_line_version();
let app = App::new("imageflow_tool").version(version.as_ref())
.arg( Arg::with_name("capture-to").long("capture-to").takes_value(true)
.help("Run whatever you're doing in a sub-process, capturing output, input, and version detail")
).setting(AppSettings::SubcommandRequiredElseHelp).setting(AppSettings::VersionlessSubcommands)
.subcommand(
SubCommand::with_name("diagnose").setting(AppSettings::ArgRequiredElseHelp)
.about("Diagnostic utilities")
.arg(
Arg::with_name("show-compilation-info").long("show-compilation-info")
.help("Show all the information stored in this executable about the environment in which it was compiled.")
).arg(
Arg::with_name("self-test").long("self-test")
.help("Creates a'self_tests' directory and runs self-tests"))
.arg(
Arg::with_name("wait").long("wait")
.help("Process stays in memory until you press the enter key.")
)
.arg(
Arg::with_name("call-panic").long("call-panic")
.help("Triggers a Rust panic (so you can observe failure/backtrace behavior)")
)
)
.subcommand(
SubCommand::with_name("examples")
.about("Generate usage examples")
.arg(
Arg::with_name("generate").long("generate").required(true)
.help("Create an 'examples' directory")
)
)
// --json [path]
// --response [response_json_path]
// --demo [name]
// --in 0 a.png b.png
// --out a.png
//Eventually:
// --local-only (prevent remote URL requests)
// --no-io-ids (Disables interpretation of numbers in --in and --out as io_id assignment).
// --no-clobber
// --debug (verbose, graph export, frame export?)
// --debug-package
// file.json --in a.png a.png --out s.png
// file.json --in 0 a.png 1 b.png --out 3 base64
.subcommand(SubCommand::with_name("v1/build").alias("v0.1/build")
.about("Runs the given operation file")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true)
.help("Replace/add inputs for the operation file")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1)
.help("Replace/add outputs for the operation file"))
//.arg(Arg::with_name("demo").long("demo").takes_value(true).possible_values(&["example:200x200_png"]))
.arg(Arg::with_name("json").long("json").takes_value(true).required(true).help("The JSON operation file."))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
)
.subcommand(SubCommand::with_name("v1/querystring").aliases(&["v0.1/ir4","v1/ir4"])
.about("Run an command querystring")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true).required(true)
.help("Input image")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1).required(true)
.help("Output image"))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("command").long("command").takes_value(true).required(true).help("w=200&h=200&mode=crop&format=png&rotate=90&flip=v - querystring style command"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
);
let matches = app.get_matches();
if matches.is_present("capture-to"){
let mut filtered_args = std::env::args().collect::<Vec<String>>();
for ix in 0..filtered_args.len() {
if filtered_args[ix] == "--capture-to"{
//Remove this and the next arg
filtered_args.remove(ix);
if ix < filtered_args.len() - 1{
filtered_args.remove(ix);
}
break;
}
}
filtered_args.remove(0); //Remove the tool executable itself
let cap = hlp::process_capture::CaptureTo::create(matches.value_of("capture-to").unwrap(), None, filtered_args, artifact_source());
cap.run();
return cap.exit_code();
}
let build_triple = if let Some(m) = matches.subcommand_matches("v1/build") {
let source = if m.is_present("demo") {
cmd_build::JobSource::NamedDemo(m.value_of("demo").unwrap().to_owned())
} else {
cmd_build::JobSource::JsonFile(m.value_of("json").unwrap().to_owned())
};
Some((m, source, "v1/build"))
}else if let Some(m) = matches.subcommand_matches("v1/querystring"){
Some((m,cmd_build::JobSource::Ir4QueryString(m.value_of("command").unwrap().to_owned()), "v1/querystring"))
}else{ None };
if let Some((m, source, subcommand_name)) = build_triple{
let builder =
cmd_build::CmdBuild::parse(source, m.values_of_lossy("in"), m.values_of_lossy("out"))
.build_maybe();
if let Some(dir_str) = m.value_of("debug-package").and_then(|v| Some(v.to_owned())){
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir_str);
builder.bundle_to(dir);
let curdir = std::env::current_dir().unwrap();
std::env::set_current_dir(&dir).unwrap();
let cap = hlp::process_capture::CaptureTo::create("recipe", None, vec![subcommand_name.to_owned(), "--json".to_owned(), "recipe.json".to_owned()], artifact_source());
cap.run();
//Restore current directory
std::env::set_current_dir(&curdir).unwrap();
let archive_name = PathBuf::from(format!("{}.zip", &dir_str));
hlp::filesystem::zip_directory_nonrecursive(&dir,&archive_name.as_path()).unwrap();
return cap.exit_code();
} else if let Some(dir) = m.value_of("bundle-to").and_then(|v| Some(v.to_owned())) {
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir);
return builder.bundle_to(dir);
} else {
builder.write_response_maybe(m.value_of("response"),!m.is_present("quiet"))
.expect("IO error writing JSON output file. Does the directory exist?");
builder.write_errors_maybe().expect("Writing to stderr failed!");
return builder.get_exit_code().unwrap();
}
}
if let Some(matches) = matches.subcommand_matches("diagnose") {
let m: &clap::ArgMatches = matches;
if m.is_present("show-compilation-info") {
println!("{}\n{}\n",
s::version::one_line_version(),
s::version::all_build_info_pairs());
return 0;
}
if m.is_present("self-test") {
return self_test::run(None);
}
if m.is_present("wait") {
let mut input_buf = String::new();
let input = std::io::stdin().read_line(&mut input_buf).expect("Failed to read from stdin. Are you using --wait in a non-interactive shell?");
println!("{}", input);
return 0;
}
if m.is_present("call-panic") {
panic!("Panicking on command");
}
}
if let Some(matches) = matches.subcommand_matches("examples") {
let m: &clap::ArgMatches = matches;
if m.is_present("generate") {
self_test::export_examples(None);
return 0;
}
}
64
}
#[test]
fn test_file_macro_for_this_build()
|
{
assert!(file!().starts_with("imageflow_tool"))
}
|
identifier_body
|
|
lib.rs
|
extern crate clap;
extern crate imageflow_helpers;
extern crate imageflow_types as s;
extern crate imageflow_core as fc;
extern crate serde_json;
use imageflow_helpers as hlp;
use std::path::{Path,PathBuf};
mod cmd_build;
pub mod self_test;
use clap::{App, Arg, SubCommand, AppSettings};
fn artifact_source() -> hlp::process_capture::IncludeBinary{
hlp::process_capture::IncludeBinary::UrlOrCopy(s::version::get_build_env_value("ESTIMATED_ARTIFACT_URL").map(|v| v.to_owned()))
}
pub fn main_with_exit_code() -> i32 {
imageflow_helpers::debug::set_panic_hook_once();
let version = s::version::one_line_version();
let app = App::new("imageflow_tool").version(version.as_ref())
.arg( Arg::with_name("capture-to").long("capture-to").takes_value(true)
.help("Run whatever you're doing in a sub-process, capturing output, input, and version detail")
).setting(AppSettings::SubcommandRequiredElseHelp).setting(AppSettings::VersionlessSubcommands)
.subcommand(
SubCommand::with_name("diagnose").setting(AppSettings::ArgRequiredElseHelp)
.about("Diagnostic utilities")
.arg(
Arg::with_name("show-compilation-info").long("show-compilation-info")
.help("Show all the information stored in this executable about the environment in which it was compiled.")
).arg(
Arg::with_name("self-test").long("self-test")
.help("Creates a'self_tests' directory and runs self-tests"))
.arg(
Arg::with_name("wait").long("wait")
.help("Process stays in memory until you press the enter key.")
)
.arg(
Arg::with_name("call-panic").long("call-panic")
.help("Triggers a Rust panic (so you can observe failure/backtrace behavior)")
)
)
.subcommand(
SubCommand::with_name("examples")
.about("Generate usage examples")
.arg(
Arg::with_name("generate").long("generate").required(true)
.help("Create an 'examples' directory")
)
)
// --json [path]
// --response [response_json_path]
// --demo [name]
// --in 0 a.png b.png
// --out a.png
//Eventually:
// --local-only (prevent remote URL requests)
// --no-io-ids (Disables interpretation of numbers in --in and --out as io_id assignment).
// --no-clobber
// --debug (verbose, graph export, frame export?)
// --debug-package
// file.json --in a.png a.png --out s.png
// file.json --in 0 a.png 1 b.png --out 3 base64
.subcommand(SubCommand::with_name("v1/build").alias("v0.1/build")
.about("Runs the given operation file")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true)
.help("Replace/add inputs for the operation file")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1)
.help("Replace/add outputs for the operation file"))
//.arg(Arg::with_name("demo").long("demo").takes_value(true).possible_values(&["example:200x200_png"]))
.arg(Arg::with_name("json").long("json").takes_value(true).required(true).help("The JSON operation file."))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
)
.subcommand(SubCommand::with_name("v1/querystring").aliases(&["v0.1/ir4","v1/ir4"])
.about("Run an command querystring")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true).required(true)
.help("Input image")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1).required(true)
.help("Output image"))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("command").long("command").takes_value(true).required(true).help("w=200&h=200&mode=crop&format=png&rotate=90&flip=v - querystring style command"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
);
let matches = app.get_matches();
if matches.is_present("capture-to"){
let mut filtered_args = std::env::args().collect::<Vec<String>>();
for ix in 0..filtered_args.len() {
if filtered_args[ix] == "--capture-to"{
//Remove this and the next arg
filtered_args.remove(ix);
if ix < filtered_args.len() - 1{
filtered_args.remove(ix);
}
break;
}
}
filtered_args.remove(0); //Remove the tool executable itself
let cap = hlp::process_capture::CaptureTo::create(matches.value_of("capture-to").unwrap(), None, filtered_args, artifact_source());
cap.run();
return cap.exit_code();
}
let build_triple = if let Some(m) = matches.subcommand_matches("v1/build")
|
else if let Some(m) = matches.subcommand_matches("v1/querystring"){
Some((m,cmd_build::JobSource::Ir4QueryString(m.value_of("command").unwrap().to_owned()), "v1/querystring"))
}else{ None };
if let Some((m, source, subcommand_name)) = build_triple{
let builder =
cmd_build::CmdBuild::parse(source, m.values_of_lossy("in"), m.values_of_lossy("out"))
.build_maybe();
if let Some(dir_str) = m.value_of("debug-package").and_then(|v| Some(v.to_owned())){
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir_str);
builder.bundle_to(dir);
let curdir = std::env::current_dir().unwrap();
std::env::set_current_dir(&dir).unwrap();
let cap = hlp::process_capture::CaptureTo::create("recipe", None, vec![subcommand_name.to_owned(), "--json".to_owned(), "recipe.json".to_owned()], artifact_source());
cap.run();
//Restore current directory
std::env::set_current_dir(&curdir).unwrap();
let archive_name = PathBuf::from(format!("{}.zip", &dir_str));
hlp::filesystem::zip_directory_nonrecursive(&dir,&archive_name.as_path()).unwrap();
return cap.exit_code();
} else if let Some(dir) = m.value_of("bundle-to").and_then(|v| Some(v.to_owned())) {
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir);
return builder.bundle_to(dir);
} else {
builder.write_response_maybe(m.value_of("response"),!m.is_present("quiet"))
.expect("IO error writing JSON output file. Does the directory exist?");
builder.write_errors_maybe().expect("Writing to stderr failed!");
return builder.get_exit_code().unwrap();
}
}
if let Some(matches) = matches.subcommand_matches("diagnose") {
let m: &clap::ArgMatches = matches;
if m.is_present("show-compilation-info") {
println!("{}\n{}\n",
s::version::one_line_version(),
s::version::all_build_info_pairs());
return 0;
}
if m.is_present("self-test") {
return self_test::run(None);
}
if m.is_present("wait") {
let mut input_buf = String::new();
let input = std::io::stdin().read_line(&mut input_buf).expect("Failed to read from stdin. Are you using --wait in a non-interactive shell?");
println!("{}", input);
return 0;
}
if m.is_present("call-panic") {
panic!("Panicking on command");
}
}
if let Some(matches) = matches.subcommand_matches("examples") {
let m: &clap::ArgMatches = matches;
if m.is_present("generate") {
self_test::export_examples(None);
return 0;
}
}
64
}
#[test]
fn test_file_macro_for_this_build(){
assert!(file!().starts_with("imageflow_tool"))
}
|
{
let source = if m.is_present("demo") {
cmd_build::JobSource::NamedDemo(m.value_of("demo").unwrap().to_owned())
} else {
cmd_build::JobSource::JsonFile(m.value_of("json").unwrap().to_owned())
};
Some((m, source, "v1/build"))
}
|
conditional_block
|
lib.rs
|
extern crate clap;
extern crate imageflow_helpers;
extern crate imageflow_types as s;
extern crate imageflow_core as fc;
extern crate serde_json;
use imageflow_helpers as hlp;
use std::path::{Path,PathBuf};
mod cmd_build;
pub mod self_test;
use clap::{App, Arg, SubCommand, AppSettings};
fn artifact_source() -> hlp::process_capture::IncludeBinary{
hlp::process_capture::IncludeBinary::UrlOrCopy(s::version::get_build_env_value("ESTIMATED_ARTIFACT_URL").map(|v| v.to_owned()))
}
pub fn main_with_exit_code() -> i32 {
imageflow_helpers::debug::set_panic_hook_once();
let version = s::version::one_line_version();
let app = App::new("imageflow_tool").version(version.as_ref())
.arg( Arg::with_name("capture-to").long("capture-to").takes_value(true)
.help("Run whatever you're doing in a sub-process, capturing output, input, and version detail")
).setting(AppSettings::SubcommandRequiredElseHelp).setting(AppSettings::VersionlessSubcommands)
.subcommand(
SubCommand::with_name("diagnose").setting(AppSettings::ArgRequiredElseHelp)
.about("Diagnostic utilities")
.arg(
Arg::with_name("show-compilation-info").long("show-compilation-info")
.help("Show all the information stored in this executable about the environment in which it was compiled.")
).arg(
Arg::with_name("self-test").long("self-test")
.help("Creates a'self_tests' directory and runs self-tests"))
.arg(
Arg::with_name("wait").long("wait")
.help("Process stays in memory until you press the enter key.")
)
.arg(
Arg::with_name("call-panic").long("call-panic")
.help("Triggers a Rust panic (so you can observe failure/backtrace behavior)")
)
)
.subcommand(
SubCommand::with_name("examples")
.about("Generate usage examples")
.arg(
Arg::with_name("generate").long("generate").required(true)
.help("Create an 'examples' directory")
)
)
// --json [path]
// --response [response_json_path]
// --demo [name]
// --in 0 a.png b.png
// --out a.png
//Eventually:
// --local-only (prevent remote URL requests)
// --no-io-ids (Disables interpretation of numbers in --in and --out as io_id assignment).
// --no-clobber
// --debug (verbose, graph export, frame export?)
// --debug-package
// file.json --in a.png a.png --out s.png
// file.json --in 0 a.png 1 b.png --out 3 base64
.subcommand(SubCommand::with_name("v1/build").alias("v0.1/build")
.about("Runs the given operation file")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true)
.help("Replace/add inputs for the operation file")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1)
.help("Replace/add outputs for the operation file"))
//.arg(Arg::with_name("demo").long("demo").takes_value(true).possible_values(&["example:200x200_png"]))
.arg(Arg::with_name("json").long("json").takes_value(true).required(true).help("The JSON operation file."))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
)
.subcommand(SubCommand::with_name("v1/querystring").aliases(&["v0.1/ir4","v1/ir4"])
.about("Run an command querystring")
.arg(
Arg::with_name("in").long("in").min_values(1)
.multiple(true).required(true)
.help("Input image")
)
.arg(Arg::with_name("out").long("out").multiple(true).min_values(1).required(true)
.help("Output image"))
.arg(Arg::with_name("quiet").long("quiet").takes_value(false).help("Don't write the JSON response to stdout"))
.arg(Arg::with_name("response").long("response").takes_value(true).help("Write the JSON job result to file instead of stdout"))
.arg(Arg::with_name("command").long("command").takes_value(true).required(true).help("w=200&h=200&mode=crop&format=png&rotate=90&flip=v - querystring style command"))
.arg(Arg::with_name("bundle-to").long("bundle-to").takes_value(true).help("Copies the recipe and all dependencies into the given folder, simplifying it."))
.arg(Arg::with_name("debug-package").long("debug-package").takes_value(true).help("Creates a debug package in the given folder so others can reproduce the behavior you are seeing"))
);
let matches = app.get_matches();
if matches.is_present("capture-to"){
let mut filtered_args = std::env::args().collect::<Vec<String>>();
for ix in 0..filtered_args.len() {
if filtered_args[ix] == "--capture-to"{
//Remove this and the next arg
filtered_args.remove(ix);
if ix < filtered_args.len() - 1{
filtered_args.remove(ix);
}
break;
}
}
filtered_args.remove(0); //Remove the tool executable itself
let cap = hlp::process_capture::CaptureTo::create(matches.value_of("capture-to").unwrap(), None, filtered_args, artifact_source());
cap.run();
return cap.exit_code();
}
let build_triple = if let Some(m) = matches.subcommand_matches("v1/build") {
let source = if m.is_present("demo") {
cmd_build::JobSource::NamedDemo(m.value_of("demo").unwrap().to_owned())
} else {
cmd_build::JobSource::JsonFile(m.value_of("json").unwrap().to_owned())
};
Some((m, source, "v1/build"))
}else if let Some(m) = matches.subcommand_matches("v1/querystring"){
Some((m,cmd_build::JobSource::Ir4QueryString(m.value_of("command").unwrap().to_owned()), "v1/querystring"))
}else{ None };
if let Some((m, source, subcommand_name)) = build_triple{
let builder =
cmd_build::CmdBuild::parse(source, m.values_of_lossy("in"), m.values_of_lossy("out"))
.build_maybe();
if let Some(dir_str) = m.value_of("debug-package").and_then(|v| Some(v.to_owned())){
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir_str);
builder.bundle_to(dir);
let curdir = std::env::current_dir().unwrap();
std::env::set_current_dir(&dir).unwrap();
let cap = hlp::process_capture::CaptureTo::create("recipe", None, vec![subcommand_name.to_owned(), "--json".to_owned(), "recipe.json".to_owned()], artifact_source());
cap.run();
//Restore current directory
std::env::set_current_dir(&curdir).unwrap();
let archive_name = PathBuf::from(format!("{}.zip", &dir_str));
hlp::filesystem::zip_directory_nonrecursive(&dir,&archive_name.as_path()).unwrap();
return cap.exit_code();
} else if let Some(dir) = m.value_of("bundle-to").and_then(|v| Some(v.to_owned())) {
builder.write_errors_maybe().unwrap();
let dir = Path::new(&dir);
return builder.bundle_to(dir);
} else {
builder.write_response_maybe(m.value_of("response"),!m.is_present("quiet"))
.expect("IO error writing JSON output file. Does the directory exist?");
builder.write_errors_maybe().expect("Writing to stderr failed!");
return builder.get_exit_code().unwrap();
}
}
if let Some(matches) = matches.subcommand_matches("diagnose") {
let m: &clap::ArgMatches = matches;
if m.is_present("show-compilation-info") {
println!("{}\n{}\n",
s::version::one_line_version(),
s::version::all_build_info_pairs());
return 0;
}
if m.is_present("self-test") {
return self_test::run(None);
}
if m.is_present("wait") {
let mut input_buf = String::new();
let input = std::io::stdin().read_line(&mut input_buf).expect("Failed to read from stdin. Are you using --wait in a non-interactive shell?");
println!("{}", input);
return 0;
}
if m.is_present("call-panic") {
panic!("Panicking on command");
}
}
if let Some(matches) = matches.subcommand_matches("examples") {
let m: &clap::ArgMatches = matches;
if m.is_present("generate") {
self_test::export_examples(None);
return 0;
}
}
64
}
#[test]
fn test_file_macro_for_this_build(){
assert!(file!().starts_with("imageflow_tool"))
|
}
|
random_line_split
|
|
primitive.rs
|
//! Allows to setup a scene with scenes in pyramidal layout, along with traits
//! to help shooting rays to check for intersections
use super::vec::{Vector, RFloat};
use std::default::Default;
use std::f32;
#[derive(Default, PartialEq, Clone, Copy, Debug)]
pub struct Ray {
pub pos: Vector,
pub dir: Vector,
}
#[derive(Clone, Copy)]
pub struct Hit {
pub distance: RFloat,
pub pos: Vector,
}
impl Hit {
pub fn missed() -> Hit {
Hit {
distance: f32::INFINITY,
pos: Default::default(),
}
}
pub fn has_missed(&self) -> bool {
self.distance == f32::INFINITY
}
pub fn set_missed(&mut self) {
self.distance = f32::INFINITY;
}
}
#[derive(Clone, Copy)]
pub struct Sphere {
pub center: Vector,
pub radius: RFloat,
}
impl Default for Sphere {
fn default() -> Sphere {
Sphere {
center: Default::default(),
radius: 1.0,
}
}
}
impl DistanceMeasure for Sphere {
#[inline(always)]
fn distance_from_ray(&self, r: &Ray) -> RFloat {
let v = self.center - r.pos;
let b = v.dot(&r.dir);
let disc = b * b - v.dot(&v) + self.radius * self.radius;
if disc < 0.0 {
return f32::INFINITY;
}
let d = disc.sqrt();
let t2 = b + d;
if t2 < 0.0 {
return f32::INFINITY;
}
let t1 = b - d;
if t1 > 0.0 { t1 } else { t2 }
}
}
impl Intersectable for Sphere {
#[inline(always)]
fn intersect(&self, hit: &mut Hit, ray: &Ray) {
let distance = self.distance_from_ray(ray);
if distance >= hit.distance {
return;
}
hit.distance = distance;
hit.pos = (ray.pos + (ray.dir.mulfed(distance) - self.center)).normalized();
}
}
pub trait Intersectable {
/// Return intersection point of ray with item (relative to the Ray!!)
fn intersect(&self, &mut Hit, ray: &Ray);
}
pub trait DistanceMeasure {
fn distance_from_ray(&self, r: &Ray) -> RFloat;
}
#[cfg(test)]
mod primitive_tests {
use super::Ray;
use std::default::Default;
#[test]
fn ray_defaults() {
let r1: Ray = Ray {
pos: Default::default(),
dir: Default::default(),
};
let r2: Ray = Default::default();
assert_eq!(r1, r2);
}
}
#[cfg(test)]
mod sphere {
extern crate test;
use super::*;
use std::default::Default;
use super::super::vec::Vector;
use std::f32;
fn setup_scene() -> (Ray, Ray, Sphere) {
let s = Sphere {
center: Default::default(),
radius: 1.0,
};
let mut dir: Vector = Default::default();
dir.x = -1.0;
let r1 = Ray {
pos: Vector {
x: 2.0,
y: 0.0,
z: 0.0,
},
dir: dir,
};
let mut r2 = r1;
r2.dir.x = -r2.dir.x; // invert direction
(r1, r2, s)
}
#[test]
fn intersect() {
let (r1, r2, s) = setup_scene();
{
let dfr = s.distance_from_ray(&r1);
assert_eq!(dfr, 1.0);
let dfr = s.distance_from_ray(&r2);
assert_eq!(dfr, f32::INFINITY);
}
{
let mut h = Hit {
distance: 2.0,
pos: Default::default(),
};
s.intersect(&mut h, &r1);
assert_eq!(h.distance, 1.0);
assert_eq!(h.pos.x, 1.0);
h.distance = 0.5;
s.intersect(&mut h, &r1);
assert!(h.distance == 0.5, "Max Distance too short");
h.distance = 10.0;
s.intersect(&mut h, &r2);
assert!(h.distance == 10.0, "r2 is shot the wrong way");
}
}
#[test]
fn defaultdefault() {
let s: Sphere = Default::default();
|
const NUM_ITERATIONS: usize = 10000;
#[bench]
fn bench_ray_sphere(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
test::black_box(s.distance_from_ray(&r1));
test::black_box(s.distance_from_ray(&r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
#[bench]
fn bench_intersect(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
let mut h = Hit::missed();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
h.set_missed();
test::black_box(s.intersect(&mut h, &r1));
h.set_missed();
test::black_box(s.intersect(&mut h, &r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
}
|
assert!(s.radius != 0.0);
}
|
random_line_split
|
primitive.rs
|
//! Allows to setup a scene with scenes in pyramidal layout, along with traits
//! to help shooting rays to check for intersections
use super::vec::{Vector, RFloat};
use std::default::Default;
use std::f32;
#[derive(Default, PartialEq, Clone, Copy, Debug)]
pub struct Ray {
pub pos: Vector,
pub dir: Vector,
}
#[derive(Clone, Copy)]
pub struct Hit {
pub distance: RFloat,
pub pos: Vector,
}
impl Hit {
pub fn missed() -> Hit {
Hit {
distance: f32::INFINITY,
pos: Default::default(),
}
}
pub fn has_missed(&self) -> bool {
self.distance == f32::INFINITY
}
pub fn set_missed(&mut self) {
self.distance = f32::INFINITY;
}
}
#[derive(Clone, Copy)]
pub struct Sphere {
pub center: Vector,
pub radius: RFloat,
}
impl Default for Sphere {
fn
|
() -> Sphere {
Sphere {
center: Default::default(),
radius: 1.0,
}
}
}
impl DistanceMeasure for Sphere {
#[inline(always)]
fn distance_from_ray(&self, r: &Ray) -> RFloat {
let v = self.center - r.pos;
let b = v.dot(&r.dir);
let disc = b * b - v.dot(&v) + self.radius * self.radius;
if disc < 0.0 {
return f32::INFINITY;
}
let d = disc.sqrt();
let t2 = b + d;
if t2 < 0.0 {
return f32::INFINITY;
}
let t1 = b - d;
if t1 > 0.0 { t1 } else { t2 }
}
}
impl Intersectable for Sphere {
#[inline(always)]
fn intersect(&self, hit: &mut Hit, ray: &Ray) {
let distance = self.distance_from_ray(ray);
if distance >= hit.distance {
return;
}
hit.distance = distance;
hit.pos = (ray.pos + (ray.dir.mulfed(distance) - self.center)).normalized();
}
}
pub trait Intersectable {
/// Return intersection point of ray with item (relative to the Ray!!)
fn intersect(&self, &mut Hit, ray: &Ray);
}
pub trait DistanceMeasure {
fn distance_from_ray(&self, r: &Ray) -> RFloat;
}
#[cfg(test)]
mod primitive_tests {
use super::Ray;
use std::default::Default;
#[test]
fn ray_defaults() {
let r1: Ray = Ray {
pos: Default::default(),
dir: Default::default(),
};
let r2: Ray = Default::default();
assert_eq!(r1, r2);
}
}
#[cfg(test)]
mod sphere {
extern crate test;
use super::*;
use std::default::Default;
use super::super::vec::Vector;
use std::f32;
fn setup_scene() -> (Ray, Ray, Sphere) {
let s = Sphere {
center: Default::default(),
radius: 1.0,
};
let mut dir: Vector = Default::default();
dir.x = -1.0;
let r1 = Ray {
pos: Vector {
x: 2.0,
y: 0.0,
z: 0.0,
},
dir: dir,
};
let mut r2 = r1;
r2.dir.x = -r2.dir.x; // invert direction
(r1, r2, s)
}
#[test]
fn intersect() {
let (r1, r2, s) = setup_scene();
{
let dfr = s.distance_from_ray(&r1);
assert_eq!(dfr, 1.0);
let dfr = s.distance_from_ray(&r2);
assert_eq!(dfr, f32::INFINITY);
}
{
let mut h = Hit {
distance: 2.0,
pos: Default::default(),
};
s.intersect(&mut h, &r1);
assert_eq!(h.distance, 1.0);
assert_eq!(h.pos.x, 1.0);
h.distance = 0.5;
s.intersect(&mut h, &r1);
assert!(h.distance == 0.5, "Max Distance too short");
h.distance = 10.0;
s.intersect(&mut h, &r2);
assert!(h.distance == 10.0, "r2 is shot the wrong way");
}
}
#[test]
fn defaultdefault() {
let s: Sphere = Default::default();
assert!(s.radius!= 0.0);
}
const NUM_ITERATIONS: usize = 10000;
#[bench]
fn bench_ray_sphere(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
test::black_box(s.distance_from_ray(&r1));
test::black_box(s.distance_from_ray(&r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
#[bench]
fn bench_intersect(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
let mut h = Hit::missed();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
h.set_missed();
test::black_box(s.intersect(&mut h, &r1));
h.set_missed();
test::black_box(s.intersect(&mut h, &r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
}
|
default
|
identifier_name
|
primitive.rs
|
//! Allows to setup a scene with scenes in pyramidal layout, along with traits
//! to help shooting rays to check for intersections
use super::vec::{Vector, RFloat};
use std::default::Default;
use std::f32;
#[derive(Default, PartialEq, Clone, Copy, Debug)]
pub struct Ray {
pub pos: Vector,
pub dir: Vector,
}
#[derive(Clone, Copy)]
pub struct Hit {
pub distance: RFloat,
pub pos: Vector,
}
impl Hit {
pub fn missed() -> Hit {
Hit {
distance: f32::INFINITY,
pos: Default::default(),
}
}
pub fn has_missed(&self) -> bool {
self.distance == f32::INFINITY
}
pub fn set_missed(&mut self) {
self.distance = f32::INFINITY;
}
}
#[derive(Clone, Copy)]
pub struct Sphere {
pub center: Vector,
pub radius: RFloat,
}
impl Default for Sphere {
fn default() -> Sphere {
Sphere {
center: Default::default(),
radius: 1.0,
}
}
}
impl DistanceMeasure for Sphere {
#[inline(always)]
fn distance_from_ray(&self, r: &Ray) -> RFloat {
let v = self.center - r.pos;
let b = v.dot(&r.dir);
let disc = b * b - v.dot(&v) + self.radius * self.radius;
if disc < 0.0 {
return f32::INFINITY;
}
let d = disc.sqrt();
let t2 = b + d;
if t2 < 0.0 {
return f32::INFINITY;
}
let t1 = b - d;
if t1 > 0.0 { t1 } else { t2 }
}
}
impl Intersectable for Sphere {
#[inline(always)]
fn intersect(&self, hit: &mut Hit, ray: &Ray) {
let distance = self.distance_from_ray(ray);
if distance >= hit.distance
|
hit.distance = distance;
hit.pos = (ray.pos + (ray.dir.mulfed(distance) - self.center)).normalized();
}
}
pub trait Intersectable {
/// Return intersection point of ray with item (relative to the Ray!!)
fn intersect(&self, &mut Hit, ray: &Ray);
}
pub trait DistanceMeasure {
fn distance_from_ray(&self, r: &Ray) -> RFloat;
}
#[cfg(test)]
mod primitive_tests {
use super::Ray;
use std::default::Default;
#[test]
fn ray_defaults() {
let r1: Ray = Ray {
pos: Default::default(),
dir: Default::default(),
};
let r2: Ray = Default::default();
assert_eq!(r1, r2);
}
}
#[cfg(test)]
mod sphere {
extern crate test;
use super::*;
use std::default::Default;
use super::super::vec::Vector;
use std::f32;
fn setup_scene() -> (Ray, Ray, Sphere) {
let s = Sphere {
center: Default::default(),
radius: 1.0,
};
let mut dir: Vector = Default::default();
dir.x = -1.0;
let r1 = Ray {
pos: Vector {
x: 2.0,
y: 0.0,
z: 0.0,
},
dir: dir,
};
let mut r2 = r1;
r2.dir.x = -r2.dir.x; // invert direction
(r1, r2, s)
}
#[test]
fn intersect() {
let (r1, r2, s) = setup_scene();
{
let dfr = s.distance_from_ray(&r1);
assert_eq!(dfr, 1.0);
let dfr = s.distance_from_ray(&r2);
assert_eq!(dfr, f32::INFINITY);
}
{
let mut h = Hit {
distance: 2.0,
pos: Default::default(),
};
s.intersect(&mut h, &r1);
assert_eq!(h.distance, 1.0);
assert_eq!(h.pos.x, 1.0);
h.distance = 0.5;
s.intersect(&mut h, &r1);
assert!(h.distance == 0.5, "Max Distance too short");
h.distance = 10.0;
s.intersect(&mut h, &r2);
assert!(h.distance == 10.0, "r2 is shot the wrong way");
}
}
#[test]
fn defaultdefault() {
let s: Sphere = Default::default();
assert!(s.radius!= 0.0);
}
const NUM_ITERATIONS: usize = 10000;
#[bench]
fn bench_ray_sphere(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
test::black_box(s.distance_from_ray(&r1));
test::black_box(s.distance_from_ray(&r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
#[bench]
fn bench_intersect(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
let mut h = Hit::missed();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
h.set_missed();
test::black_box(s.intersect(&mut h, &r1));
h.set_missed();
test::black_box(s.intersect(&mut h, &r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
}
|
{
return;
}
|
conditional_block
|
blockdev.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Code to handle a single block device.
use std::{fs::OpenOptions, path::Path};
use chrono::{DateTime, TimeZone, Utc};
use serde_json::Value;
use devicemapper::{Device, Sectors};
use crate::{
engine::{
engine::BlockDev,
strat_engine::{
backstore::{
crypt::CryptHandle,
range_alloc::{PerDevSegments, RangeAllocator},
},
metadata::{disown_device, BDAExtendedSize, BlockdevSize, MDADataSize, BDA},
serde_structs::{BaseBlockDevSave, Recordable},
},
types::{DevUuid, DevicePath, EncryptionInfo, KeyDescription, PoolUuid},
},
stratis::{StratisError, StratisResult},
};
#[derive(Debug)]
pub enum UnderlyingDevice {
Encrypted(CryptHandle),
Unencrypted(DevicePath),
}
impl UnderlyingDevice {
pub fn physical_path(&self) -> &Path {
match self {
UnderlyingDevice::Encrypted(handle) => handle.luks2_device_path(),
UnderlyingDevice::Unencrypted(path) => &*path,
}
}
pub fn metadata_path(&self) -> &Path {
match self {
UnderlyingDevice::Encrypted(handle) => handle.activated_device_path(),
|
match self {
UnderlyingDevice::Encrypted(handle) => Some(handle),
UnderlyingDevice::Unencrypted(_) => None,
}
}
pub fn crypt_handle_mut(&mut self) -> Option<&mut CryptHandle> {
match self {
UnderlyingDevice::Encrypted(handle) => Some(handle),
UnderlyingDevice::Unencrypted(_) => None,
}
}
}
#[derive(Debug)]
pub struct StratBlockDev {
dev: Device,
bda: BDA,
used: RangeAllocator,
user_info: Option<String>,
hardware_info: Option<String>,
underlying_device: UnderlyingDevice,
}
impl StratBlockDev {
/// Make a new BlockDev from the parameters.
/// Allocate space for the Stratis metadata on the device.
/// - dev: the device, identified by number
/// - devnode: for encrypted devices, the logical and physical
/// paths; for unencrypted devices, the physical path
/// - bda: the device's BDA
/// - other_segments: segments allocated outside Stratis metadata region
/// - user_info: user settable identifying information
/// - hardware_info: identifying information in the hardware
/// - key_description: optional argument enabling encryption using
/// the specified key in the kernel keyring
/// Returns an error if it is impossible to allocate all segments on the
/// device.
/// NOTE: It is possible that the actual device size is greater than
/// the recorded device size. In that case, the additional space available
/// on the device is simply invisible to the blockdev. Consequently, it
/// is invisible to the engine, and is not part of the total size value
/// reported on the D-Bus.
///
/// Precondition: segments in other_segments do not overlap with Stratis
/// metadata region.
pub fn new(
dev: Device,
bda: BDA,
other_segments: &[(Sectors, Sectors)],
user_info: Option<String>,
hardware_info: Option<String>,
underlying_device: UnderlyingDevice,
) -> StratisResult<StratBlockDev> {
let mut segments = vec![(Sectors(0), bda.extended_size().sectors())];
segments.extend(other_segments);
let allocator = RangeAllocator::new(bda.dev_size(), &segments)?;
Ok(StratBlockDev {
dev,
bda,
used: allocator,
user_info,
hardware_info,
underlying_device,
})
}
/// Returns the blockdev's Device
pub fn device(&self) -> &Device {
&self.dev
}
/// Returns the physical path of the block device structure.
pub fn physical_path(&self) -> &Path {
self.devnode()
}
/// Returns the path to the unencrypted metadata stored on the block device structure.
/// On encrypted devices, this will point to a devicemapper device set up by libcryptsetup.
/// On unencrypted devices, this will be the same as the physical device.
pub fn metadata_path(&self) -> &Path {
self.underlying_device.metadata_path()
}
/// Remove information that identifies this device as belonging to Stratis
///
/// If self.is_encrypted() is true, destroy all keyslots and wipe the LUKS2 header.
/// This will render all Stratis and LUKS2 metadata unreadable and unrecoverable
/// from the given device.
///
/// If self.is_encrypted() is false, wipe the Stratis metadata on the device.
/// This will make the Stratis data and metadata invisible to all standard blkid
/// and stratisd operations.
///
/// Precondition: if self.is_encrypted() == true, the data on
/// self.devnode.physical_path() has been encrypted with
/// aes-xts-plain64 encryption.
pub fn disown(&mut self) -> StratisResult<()> {
if let Some(ref mut handle) = self.underlying_device.crypt_handle_mut() {
handle.wipe()?;
} else {
disown_device(
&mut OpenOptions::new()
.write(true)
.open(self.underlying_device.physical_path())?,
)?;
}
Ok(())
}
pub fn save_state(&mut self, time: &DateTime<Utc>, metadata: &[u8]) -> StratisResult<()> {
let mut f = OpenOptions::new()
.write(true)
.open(self.underlying_device.metadata_path())?;
self.bda.save_state(time, metadata, &mut f)
}
/// The pool's UUID.
pub fn pool_uuid(&self) -> PoolUuid {
self.bda.pool_uuid()
}
/// The device's UUID.
pub fn uuid(&self) -> DevUuid {
self.bda.dev_uuid()
}
/// Find some sector ranges that could be allocated. If more
/// sectors are needed than are available, return partial results.
/// If all available sectors are desired, don't use this function.
/// Define a request_all() function here and have it invoke the
/// RangeAllocator::request_all() function.
pub fn request_space(&mut self, size: Sectors) -> PerDevSegments {
self.used.request(size)
}
// ALL SIZE METHODS (except size(), which is in BlockDev impl.)
/// The number of Sectors on this device used by Stratis for metadata
pub fn metadata_size(&self) -> BDAExtendedSize {
self.bda.extended_size()
}
/// The number of Sectors on this device not allocated for any purpose.
/// self.total_allocated_size() - self.metadata_size() >= self.available()
pub fn available(&self) -> Sectors {
self.used.available()
}
/// The total size of the Stratis block device.
pub fn total_size(&self) -> BlockdevSize {
self.bda.dev_size()
}
/// The total size of the allocated portions of the Stratis block device.
pub fn total_allocated_size(&self) -> BlockdevSize {
self.used.size()
}
/// The maximum size of variable length metadata that can be accommodated.
/// self.max_metadata_size() < self.metadata_size()
pub fn max_metadata_size(&self) -> MDADataSize {
self.bda.max_data_size()
}
/// Set the user info on this blockdev.
/// The user_info may be None, which unsets user info.
/// Returns true if the user info was changed, otherwise false.
pub fn set_user_info(&mut self, user_info: Option<&str>) -> bool {
set_blockdev_user_info!(self; user_info)
}
/// Get the physical path for a block device.
pub fn devnode(&self) -> &Path {
self.underlying_device.physical_path()
}
/// Get the encryption_info stored on the given encrypted blockdev.
///
/// The `Cow` return type is required due to the optional `CryptHandle` type.
/// If the device is not encrypted, it must return an owned `EncryptionInfo`
/// structure.
pub fn encryption_info(&self) -> Option<&EncryptionInfo> {
self.underlying_device
.crypt_handle()
.map(|ch| ch.encryption_info())
}
/// Bind encrypted device using the given clevis configuration.
pub fn bind_clevis(&mut self, pin: &str, clevis_info: &Value) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.clevis_bind(pin, clevis_info)
}
/// Unbind encrypted device using the given clevis configuration.
pub fn unbind_clevis(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.clevis_unbind()
}
/// Bind a block device to a passphrase represented by a key description
/// in the kernel keyring.
pub fn bind_keyring(&mut self, key_desc: &KeyDescription) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.bind_keyring(key_desc)
}
/// Unbind a block device from a passphrase represented by a key description
/// in the kernel keyring.
pub fn unbind_keyring(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.unbind_keyring()
}
/// Change the passphrase for a block device to a passphrase represented by a
/// key description in the kernel keyring.
pub fn rebind_keyring(&mut self, key_desc: &KeyDescription) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.rebind_keyring(key_desc)
}
/// Regenerate the Clevis bindings for a block device.
pub fn rebind_clevis(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.rebind_clevis()
}
}
impl<'a> Into<Value> for &'a StratBlockDev {
fn into(self) -> Value {
let mut json = json!({
"path": self.underlying_device.physical_path(),
"uuid": self.bda.dev_uuid().to_string(),
});
let map = json.as_object_mut().expect("just created above");
if let Some(encryption_info) = self
.underlying_device
.crypt_handle()
.map(|ch| ch.encryption_info())
{
if let Value::Object(enc_map) = <&EncryptionInfo as Into<Value>>::into(encryption_info)
{
map.extend(enc_map);
} else {
unreachable!("EncryptionInfo conversion returns a JSON object");
};
}
json
}
}
impl BlockDev for StratBlockDev {
fn devnode(&self) -> &Path {
self.devnode()
}
fn metadata_path(&self) -> &Path {
self.metadata_path()
}
fn user_info(&self) -> Option<&str> {
self.user_info.as_deref()
}
fn hardware_info(&self) -> Option<&str> {
self.hardware_info.as_deref()
}
fn initialization_time(&self) -> DateTime<Utc> {
// This cast will result in an incorrect, negative value starting in
// the year 292,277,026,596. :-)
Utc.timestamp(self.bda.initialization_time() as i64, 0)
}
fn size(&self) -> Sectors {
self.total_size().sectors()
}
fn is_encrypted(&self) -> bool {
self.encryption_info().is_some()
}
}
impl Recordable<BaseBlockDevSave> for StratBlockDev {
fn record(&self) -> BaseBlockDevSave {
BaseBlockDevSave {
uuid: self.uuid(),
user_info: self.user_info.clone(),
hardware_info: self.hardware_info.clone(),
}
}
}
|
UnderlyingDevice::Unencrypted(path) => &*path,
}
}
pub fn crypt_handle(&self) -> Option<&CryptHandle> {
|
random_line_split
|
blockdev.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Code to handle a single block device.
use std::{fs::OpenOptions, path::Path};
use chrono::{DateTime, TimeZone, Utc};
use serde_json::Value;
use devicemapper::{Device, Sectors};
use crate::{
engine::{
engine::BlockDev,
strat_engine::{
backstore::{
crypt::CryptHandle,
range_alloc::{PerDevSegments, RangeAllocator},
},
metadata::{disown_device, BDAExtendedSize, BlockdevSize, MDADataSize, BDA},
serde_structs::{BaseBlockDevSave, Recordable},
},
types::{DevUuid, DevicePath, EncryptionInfo, KeyDescription, PoolUuid},
},
stratis::{StratisError, StratisResult},
};
#[derive(Debug)]
pub enum UnderlyingDevice {
Encrypted(CryptHandle),
Unencrypted(DevicePath),
}
impl UnderlyingDevice {
pub fn physical_path(&self) -> &Path {
match self {
UnderlyingDevice::Encrypted(handle) => handle.luks2_device_path(),
UnderlyingDevice::Unencrypted(path) => &*path,
}
}
pub fn metadata_path(&self) -> &Path {
match self {
UnderlyingDevice::Encrypted(handle) => handle.activated_device_path(),
UnderlyingDevice::Unencrypted(path) => &*path,
}
}
pub fn crypt_handle(&self) -> Option<&CryptHandle> {
match self {
UnderlyingDevice::Encrypted(handle) => Some(handle),
UnderlyingDevice::Unencrypted(_) => None,
}
}
pub fn crypt_handle_mut(&mut self) -> Option<&mut CryptHandle> {
match self {
UnderlyingDevice::Encrypted(handle) => Some(handle),
UnderlyingDevice::Unencrypted(_) => None,
}
}
}
#[derive(Debug)]
pub struct StratBlockDev {
dev: Device,
bda: BDA,
used: RangeAllocator,
user_info: Option<String>,
hardware_info: Option<String>,
underlying_device: UnderlyingDevice,
}
impl StratBlockDev {
/// Make a new BlockDev from the parameters.
/// Allocate space for the Stratis metadata on the device.
/// - dev: the device, identified by number
/// - devnode: for encrypted devices, the logical and physical
/// paths; for unencrypted devices, the physical path
/// - bda: the device's BDA
/// - other_segments: segments allocated outside Stratis metadata region
/// - user_info: user settable identifying information
/// - hardware_info: identifying information in the hardware
/// - key_description: optional argument enabling encryption using
/// the specified key in the kernel keyring
/// Returns an error if it is impossible to allocate all segments on the
/// device.
/// NOTE: It is possible that the actual device size is greater than
/// the recorded device size. In that case, the additional space available
/// on the device is simply invisible to the blockdev. Consequently, it
/// is invisible to the engine, and is not part of the total size value
/// reported on the D-Bus.
///
/// Precondition: segments in other_segments do not overlap with Stratis
/// metadata region.
pub fn new(
dev: Device,
bda: BDA,
other_segments: &[(Sectors, Sectors)],
user_info: Option<String>,
hardware_info: Option<String>,
underlying_device: UnderlyingDevice,
) -> StratisResult<StratBlockDev> {
let mut segments = vec![(Sectors(0), bda.extended_size().sectors())];
segments.extend(other_segments);
let allocator = RangeAllocator::new(bda.dev_size(), &segments)?;
Ok(StratBlockDev {
dev,
bda,
used: allocator,
user_info,
hardware_info,
underlying_device,
})
}
/// Returns the blockdev's Device
pub fn device(&self) -> &Device {
&self.dev
}
/// Returns the physical path of the block device structure.
pub fn physical_path(&self) -> &Path {
self.devnode()
}
/// Returns the path to the unencrypted metadata stored on the block device structure.
/// On encrypted devices, this will point to a devicemapper device set up by libcryptsetup.
/// On unencrypted devices, this will be the same as the physical device.
pub fn metadata_path(&self) -> &Path {
self.underlying_device.metadata_path()
}
/// Remove information that identifies this device as belonging to Stratis
///
/// If self.is_encrypted() is true, destroy all keyslots and wipe the LUKS2 header.
/// This will render all Stratis and LUKS2 metadata unreadable and unrecoverable
/// from the given device.
///
/// If self.is_encrypted() is false, wipe the Stratis metadata on the device.
/// This will make the Stratis data and metadata invisible to all standard blkid
/// and stratisd operations.
///
/// Precondition: if self.is_encrypted() == true, the data on
/// self.devnode.physical_path() has been encrypted with
/// aes-xts-plain64 encryption.
pub fn disown(&mut self) -> StratisResult<()> {
if let Some(ref mut handle) = self.underlying_device.crypt_handle_mut() {
handle.wipe()?;
} else {
disown_device(
&mut OpenOptions::new()
.write(true)
.open(self.underlying_device.physical_path())?,
)?;
}
Ok(())
}
pub fn save_state(&mut self, time: &DateTime<Utc>, metadata: &[u8]) -> StratisResult<()> {
let mut f = OpenOptions::new()
.write(true)
.open(self.underlying_device.metadata_path())?;
self.bda.save_state(time, metadata, &mut f)
}
/// The pool's UUID.
pub fn pool_uuid(&self) -> PoolUuid {
self.bda.pool_uuid()
}
/// The device's UUID.
pub fn uuid(&self) -> DevUuid {
self.bda.dev_uuid()
}
/// Find some sector ranges that could be allocated. If more
/// sectors are needed than are available, return partial results.
/// If all available sectors are desired, don't use this function.
/// Define a request_all() function here and have it invoke the
/// RangeAllocator::request_all() function.
pub fn request_space(&mut self, size: Sectors) -> PerDevSegments {
self.used.request(size)
}
// ALL SIZE METHODS (except size(), which is in BlockDev impl.)
/// The number of Sectors on this device used by Stratis for metadata
pub fn metadata_size(&self) -> BDAExtendedSize {
self.bda.extended_size()
}
/// The number of Sectors on this device not allocated for any purpose.
/// self.total_allocated_size() - self.metadata_size() >= self.available()
pub fn available(&self) -> Sectors {
self.used.available()
}
/// The total size of the Stratis block device.
pub fn total_size(&self) -> BlockdevSize {
self.bda.dev_size()
}
/// The total size of the allocated portions of the Stratis block device.
pub fn total_allocated_size(&self) -> BlockdevSize {
self.used.size()
}
/// The maximum size of variable length metadata that can be accommodated.
/// self.max_metadata_size() < self.metadata_size()
pub fn max_metadata_size(&self) -> MDADataSize {
self.bda.max_data_size()
}
/// Set the user info on this blockdev.
/// The user_info may be None, which unsets user info.
/// Returns true if the user info was changed, otherwise false.
pub fn set_user_info(&mut self, user_info: Option<&str>) -> bool {
set_blockdev_user_info!(self; user_info)
}
/// Get the physical path for a block device.
pub fn devnode(&self) -> &Path {
self.underlying_device.physical_path()
}
/// Get the encryption_info stored on the given encrypted blockdev.
///
/// The `Cow` return type is required due to the optional `CryptHandle` type.
/// If the device is not encrypted, it must return an owned `EncryptionInfo`
/// structure.
pub fn encryption_info(&self) -> Option<&EncryptionInfo> {
self.underlying_device
.crypt_handle()
.map(|ch| ch.encryption_info())
}
/// Bind encrypted device using the given clevis configuration.
pub fn bind_clevis(&mut self, pin: &str, clevis_info: &Value) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.clevis_bind(pin, clevis_info)
}
/// Unbind encrypted device using the given clevis configuration.
pub fn unbind_clevis(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.clevis_unbind()
}
/// Bind a block device to a passphrase represented by a key description
/// in the kernel keyring.
pub fn bind_keyring(&mut self, key_desc: &KeyDescription) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.bind_keyring(key_desc)
}
/// Unbind a block device from a passphrase represented by a key description
/// in the kernel keyring.
pub fn unbind_keyring(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.unbind_keyring()
}
/// Change the passphrase for a block device to a passphrase represented by a
/// key description in the kernel keyring.
pub fn rebind_keyring(&mut self, key_desc: &KeyDescription) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.rebind_keyring(key_desc)
}
/// Regenerate the Clevis bindings for a block device.
pub fn rebind_clevis(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.rebind_clevis()
}
}
impl<'a> Into<Value> for &'a StratBlockDev {
fn into(self) -> Value {
let mut json = json!({
"path": self.underlying_device.physical_path(),
"uuid": self.bda.dev_uuid().to_string(),
});
let map = json.as_object_mut().expect("just created above");
if let Some(encryption_info) = self
.underlying_device
.crypt_handle()
.map(|ch| ch.encryption_info())
{
if let Value::Object(enc_map) = <&EncryptionInfo as Into<Value>>::into(encryption_info)
{
map.extend(enc_map);
} else {
unreachable!("EncryptionInfo conversion returns a JSON object");
};
}
json
}
}
impl BlockDev for StratBlockDev {
fn devnode(&self) -> &Path {
self.devnode()
}
fn metadata_path(&self) -> &Path {
self.metadata_path()
}
fn user_info(&self) -> Option<&str> {
self.user_info.as_deref()
}
fn hardware_info(&self) -> Option<&str> {
self.hardware_info.as_deref()
}
fn initialization_time(&self) -> DateTime<Utc>
|
fn size(&self) -> Sectors {
self.total_size().sectors()
}
fn is_encrypted(&self) -> bool {
self.encryption_info().is_some()
}
}
impl Recordable<BaseBlockDevSave> for StratBlockDev {
fn record(&self) -> BaseBlockDevSave {
BaseBlockDevSave {
uuid: self.uuid(),
user_info: self.user_info.clone(),
hardware_info: self.hardware_info.clone(),
}
}
}
|
{
// This cast will result in an incorrect, negative value starting in
// the year 292,277,026,596. :-)
Utc.timestamp(self.bda.initialization_time() as i64, 0)
}
|
identifier_body
|
blockdev.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Code to handle a single block device.
use std::{fs::OpenOptions, path::Path};
use chrono::{DateTime, TimeZone, Utc};
use serde_json::Value;
use devicemapper::{Device, Sectors};
use crate::{
engine::{
engine::BlockDev,
strat_engine::{
backstore::{
crypt::CryptHandle,
range_alloc::{PerDevSegments, RangeAllocator},
},
metadata::{disown_device, BDAExtendedSize, BlockdevSize, MDADataSize, BDA},
serde_structs::{BaseBlockDevSave, Recordable},
},
types::{DevUuid, DevicePath, EncryptionInfo, KeyDescription, PoolUuid},
},
stratis::{StratisError, StratisResult},
};
#[derive(Debug)]
pub enum UnderlyingDevice {
Encrypted(CryptHandle),
Unencrypted(DevicePath),
}
impl UnderlyingDevice {
pub fn physical_path(&self) -> &Path {
match self {
UnderlyingDevice::Encrypted(handle) => handle.luks2_device_path(),
UnderlyingDevice::Unencrypted(path) => &*path,
}
}
pub fn metadata_path(&self) -> &Path {
match self {
UnderlyingDevice::Encrypted(handle) => handle.activated_device_path(),
UnderlyingDevice::Unencrypted(path) => &*path,
}
}
pub fn crypt_handle(&self) -> Option<&CryptHandle> {
match self {
UnderlyingDevice::Encrypted(handle) => Some(handle),
UnderlyingDevice::Unencrypted(_) => None,
}
}
pub fn crypt_handle_mut(&mut self) -> Option<&mut CryptHandle> {
match self {
UnderlyingDevice::Encrypted(handle) => Some(handle),
UnderlyingDevice::Unencrypted(_) => None,
}
}
}
#[derive(Debug)]
pub struct
|
{
dev: Device,
bda: BDA,
used: RangeAllocator,
user_info: Option<String>,
hardware_info: Option<String>,
underlying_device: UnderlyingDevice,
}
impl StratBlockDev {
/// Make a new BlockDev from the parameters.
/// Allocate space for the Stratis metadata on the device.
/// - dev: the device, identified by number
/// - devnode: for encrypted devices, the logical and physical
/// paths; for unencrypted devices, the physical path
/// - bda: the device's BDA
/// - other_segments: segments allocated outside Stratis metadata region
/// - user_info: user settable identifying information
/// - hardware_info: identifying information in the hardware
/// - key_description: optional argument enabling encryption using
/// the specified key in the kernel keyring
/// Returns an error if it is impossible to allocate all segments on the
/// device.
/// NOTE: It is possible that the actual device size is greater than
/// the recorded device size. In that case, the additional space available
/// on the device is simply invisible to the blockdev. Consequently, it
/// is invisible to the engine, and is not part of the total size value
/// reported on the D-Bus.
///
/// Precondition: segments in other_segments do not overlap with Stratis
/// metadata region.
pub fn new(
dev: Device,
bda: BDA,
other_segments: &[(Sectors, Sectors)],
user_info: Option<String>,
hardware_info: Option<String>,
underlying_device: UnderlyingDevice,
) -> StratisResult<StratBlockDev> {
let mut segments = vec![(Sectors(0), bda.extended_size().sectors())];
segments.extend(other_segments);
let allocator = RangeAllocator::new(bda.dev_size(), &segments)?;
Ok(StratBlockDev {
dev,
bda,
used: allocator,
user_info,
hardware_info,
underlying_device,
})
}
/// Returns the blockdev's Device
pub fn device(&self) -> &Device {
&self.dev
}
/// Returns the physical path of the block device structure.
pub fn physical_path(&self) -> &Path {
self.devnode()
}
/// Returns the path to the unencrypted metadata stored on the block device structure.
/// On encrypted devices, this will point to a devicemapper device set up by libcryptsetup.
/// On unencrypted devices, this will be the same as the physical device.
pub fn metadata_path(&self) -> &Path {
self.underlying_device.metadata_path()
}
/// Remove information that identifies this device as belonging to Stratis
///
/// If self.is_encrypted() is true, destroy all keyslots and wipe the LUKS2 header.
/// This will render all Stratis and LUKS2 metadata unreadable and unrecoverable
/// from the given device.
///
/// If self.is_encrypted() is false, wipe the Stratis metadata on the device.
/// This will make the Stratis data and metadata invisible to all standard blkid
/// and stratisd operations.
///
/// Precondition: if self.is_encrypted() == true, the data on
/// self.devnode.physical_path() has been encrypted with
/// aes-xts-plain64 encryption.
pub fn disown(&mut self) -> StratisResult<()> {
if let Some(ref mut handle) = self.underlying_device.crypt_handle_mut() {
handle.wipe()?;
} else {
disown_device(
&mut OpenOptions::new()
.write(true)
.open(self.underlying_device.physical_path())?,
)?;
}
Ok(())
}
pub fn save_state(&mut self, time: &DateTime<Utc>, metadata: &[u8]) -> StratisResult<()> {
let mut f = OpenOptions::new()
.write(true)
.open(self.underlying_device.metadata_path())?;
self.bda.save_state(time, metadata, &mut f)
}
/// The pool's UUID.
pub fn pool_uuid(&self) -> PoolUuid {
self.bda.pool_uuid()
}
/// The device's UUID.
pub fn uuid(&self) -> DevUuid {
self.bda.dev_uuid()
}
/// Find some sector ranges that could be allocated. If more
/// sectors are needed than are available, return partial results.
/// If all available sectors are desired, don't use this function.
/// Define a request_all() function here and have it invoke the
/// RangeAllocator::request_all() function.
pub fn request_space(&mut self, size: Sectors) -> PerDevSegments {
self.used.request(size)
}
// ALL SIZE METHODS (except size(), which is in BlockDev impl.)
/// The number of Sectors on this device used by Stratis for metadata
pub fn metadata_size(&self) -> BDAExtendedSize {
self.bda.extended_size()
}
/// The number of Sectors on this device not allocated for any purpose.
/// self.total_allocated_size() - self.metadata_size() >= self.available()
pub fn available(&self) -> Sectors {
self.used.available()
}
/// The total size of the Stratis block device.
pub fn total_size(&self) -> BlockdevSize {
self.bda.dev_size()
}
/// The total size of the allocated portions of the Stratis block device.
pub fn total_allocated_size(&self) -> BlockdevSize {
self.used.size()
}
/// The maximum size of variable length metadata that can be accommodated.
/// self.max_metadata_size() < self.metadata_size()
pub fn max_metadata_size(&self) -> MDADataSize {
self.bda.max_data_size()
}
/// Set the user info on this blockdev.
/// The user_info may be None, which unsets user info.
/// Returns true if the user info was changed, otherwise false.
pub fn set_user_info(&mut self, user_info: Option<&str>) -> bool {
set_blockdev_user_info!(self; user_info)
}
/// Get the physical path for a block device.
pub fn devnode(&self) -> &Path {
self.underlying_device.physical_path()
}
/// Get the encryption_info stored on the given encrypted blockdev.
///
/// The `Cow` return type is required due to the optional `CryptHandle` type.
/// If the device is not encrypted, it must return an owned `EncryptionInfo`
/// structure.
pub fn encryption_info(&self) -> Option<&EncryptionInfo> {
self.underlying_device
.crypt_handle()
.map(|ch| ch.encryption_info())
}
/// Bind encrypted device using the given clevis configuration.
pub fn bind_clevis(&mut self, pin: &str, clevis_info: &Value) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.clevis_bind(pin, clevis_info)
}
/// Unbind encrypted device using the given clevis configuration.
pub fn unbind_clevis(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.clevis_unbind()
}
/// Bind a block device to a passphrase represented by a key description
/// in the kernel keyring.
pub fn bind_keyring(&mut self, key_desc: &KeyDescription) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.bind_keyring(key_desc)
}
/// Unbind a block device from a passphrase represented by a key description
/// in the kernel keyring.
pub fn unbind_keyring(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.unbind_keyring()
}
/// Change the passphrase for a block device to a passphrase represented by a
/// key description in the kernel keyring.
pub fn rebind_keyring(&mut self, key_desc: &KeyDescription) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.rebind_keyring(key_desc)
}
/// Regenerate the Clevis bindings for a block device.
pub fn rebind_clevis(&mut self) -> StratisResult<()> {
let crypt_handle = self.underlying_device.crypt_handle_mut().ok_or_else(|| {
StratisError::Msg("This device does not appear to be encrypted".to_string())
})?;
crypt_handle.rebind_clevis()
}
}
impl<'a> Into<Value> for &'a StratBlockDev {
fn into(self) -> Value {
let mut json = json!({
"path": self.underlying_device.physical_path(),
"uuid": self.bda.dev_uuid().to_string(),
});
let map = json.as_object_mut().expect("just created above");
if let Some(encryption_info) = self
.underlying_device
.crypt_handle()
.map(|ch| ch.encryption_info())
{
if let Value::Object(enc_map) = <&EncryptionInfo as Into<Value>>::into(encryption_info)
{
map.extend(enc_map);
} else {
unreachable!("EncryptionInfo conversion returns a JSON object");
};
}
json
}
}
impl BlockDev for StratBlockDev {
fn devnode(&self) -> &Path {
self.devnode()
}
fn metadata_path(&self) -> &Path {
self.metadata_path()
}
fn user_info(&self) -> Option<&str> {
self.user_info.as_deref()
}
fn hardware_info(&self) -> Option<&str> {
self.hardware_info.as_deref()
}
fn initialization_time(&self) -> DateTime<Utc> {
// This cast will result in an incorrect, negative value starting in
// the year 292,277,026,596. :-)
Utc.timestamp(self.bda.initialization_time() as i64, 0)
}
fn size(&self) -> Sectors {
self.total_size().sectors()
}
fn is_encrypted(&self) -> bool {
self.encryption_info().is_some()
}
}
impl Recordable<BaseBlockDevSave> for StratBlockDev {
fn record(&self) -> BaseBlockDevSave {
BaseBlockDevSave {
uuid: self.uuid(),
user_info: self.user_info.clone(),
hardware_info: self.hardware_info.clone(),
}
}
}
|
StratBlockDev
|
identifier_name
|
meta.rs
|
#[doc(hidden)]
#[allow(missing_debug_implementations)]
#[derive(Default, Clone)]
pub struct AppMeta<'b> {
pub name: String,
pub bin_name: Option<String>,
pub author: Option<&'b str>,
pub version: Option<&'b str>,
pub long_version: Option<&'b str>,
pub about: Option<&'b str>,
pub long_about: Option<&'b str>,
pub more_help: Option<&'b str>,
pub pre_help: Option<&'b str>,
pub aliases: Option<Vec<(&'b str, bool)>>, // (name, visible)
pub usage_str: Option<&'b str>,
pub usage: Option<String>,
pub help_str: Option<&'b str>,
pub disp_ord: usize,
pub term_w: Option<usize>,
pub max_w: Option<usize>,
pub template: Option<&'b str>,
}
impl<'b> AppMeta<'b> {
pub fn new() -> Self { Default::default() }
pub fn with_name(s: String) -> Self
|
}
|
{
AppMeta {
name: s,
disp_ord: 999,
..Default::default()
}
}
|
identifier_body
|
meta.rs
|
#[doc(hidden)]
#[allow(missing_debug_implementations)]
#[derive(Default, Clone)]
pub struct AppMeta<'b> {
pub name: String,
pub bin_name: Option<String>,
pub author: Option<&'b str>,
pub version: Option<&'b str>,
pub long_version: Option<&'b str>,
pub about: Option<&'b str>,
pub long_about: Option<&'b str>,
pub more_help: Option<&'b str>,
pub pre_help: Option<&'b str>,
pub aliases: Option<Vec<(&'b str, bool)>>, // (name, visible)
pub usage_str: Option<&'b str>,
pub usage: Option<String>,
|
pub help_str: Option<&'b str>,
pub disp_ord: usize,
pub term_w: Option<usize>,
pub max_w: Option<usize>,
pub template: Option<&'b str>,
}
impl<'b> AppMeta<'b> {
pub fn new() -> Self { Default::default() }
pub fn with_name(s: String) -> Self {
AppMeta {
name: s,
disp_ord: 999,
..Default::default()
}
}
}
|
random_line_split
|
|
meta.rs
|
#[doc(hidden)]
#[allow(missing_debug_implementations)]
#[derive(Default, Clone)]
pub struct AppMeta<'b> {
pub name: String,
pub bin_name: Option<String>,
pub author: Option<&'b str>,
pub version: Option<&'b str>,
pub long_version: Option<&'b str>,
pub about: Option<&'b str>,
pub long_about: Option<&'b str>,
pub more_help: Option<&'b str>,
pub pre_help: Option<&'b str>,
pub aliases: Option<Vec<(&'b str, bool)>>, // (name, visible)
pub usage_str: Option<&'b str>,
pub usage: Option<String>,
pub help_str: Option<&'b str>,
pub disp_ord: usize,
pub term_w: Option<usize>,
pub max_w: Option<usize>,
pub template: Option<&'b str>,
}
impl<'b> AppMeta<'b> {
pub fn
|
() -> Self { Default::default() }
pub fn with_name(s: String) -> Self {
AppMeta {
name: s,
disp_ord: 999,
..Default::default()
}
}
}
|
new
|
identifier_name
|
issue-17074.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
static X2: u64 =!0 as u16 as u64;
static Y2: u64 =!0 as u32 as u64;
const X: u64 =!0 as u16 as u64;
const Y: u64 =!0 as u32 as u64;
fn main()
|
{
assert_eq!(match 1 {
X => unreachable!(),
Y => unreachable!(),
_ => 1
}, 1);
}
|
identifier_body
|
|
issue-17074.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
// except according to those terms.
// run-pass
#![allow(dead_code)]
static X2: u64 =!0 as u16 as u64;
static Y2: u64 =!0 as u32 as u64;
const X: u64 =!0 as u16 as u64;
const Y: u64 =!0 as u32 as u64;
fn main() {
assert_eq!(match 1 {
X => unreachable!(),
Y => unreachable!(),
_ => 1
}, 1);
}
|
// option. This file may not be copied, modified, or distributed
|
random_line_split
|
issue-17074.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
static X2: u64 =!0 as u16 as u64;
static Y2: u64 =!0 as u32 as u64;
const X: u64 =!0 as u16 as u64;
const Y: u64 =!0 as u32 as u64;
fn
|
() {
assert_eq!(match 1 {
X => unreachable!(),
Y => unreachable!(),
_ => 1
}, 1);
}
|
main
|
identifier_name
|
text_attributes.rs
|
// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
use gtk::ffi;
pub struct TextAttributes {
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
|
lse {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
pub fn unref(&self) {
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
pub fn _ref(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes);
impl_TraitObject!(TextAttributes, C_GtkTextAttributes);
|
None
} e
|
conditional_block
|
text_attributes.rs
|
// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
use gtk::ffi;
pub struct TextAttributes {
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
pub fn unref(&self) {
|
pub fn _ref(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes);
impl_TraitObject!(TextAttributes, C_GtkTextAttributes);
|
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
|
identifier_body
|
text_attributes.rs
|
// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
use gtk::ffi;
pub struct TextAttributes {
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
|
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
pub fn unref(&self) {
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
pub fn _ref(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes);
impl_TraitObject!(TextAttributes, C_GtkTextAttributes);
|
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
|
random_line_split
|
text_attributes.rs
|
// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
use gtk::ffi;
pub struct Te
|
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
pub fn unref(&self) {
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
pub fn _ref(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes);
impl_TraitObject!(TextAttributes, C_GtkTextAttributes);
|
xtAttributes {
|
identifier_name
|
cors.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A partial implementation of CORS
//! For now this library is XHR-specific.
//! For stuff involving `<img>`, `<iframe>`, `<form>`, etc please check what
//! the request mode should be and compare with the fetch spec
//! This library will eventually become the core of the Fetch crate
//! with CORSRequest being expanded into FetchRequest (etc)
use std::ascii::AsciiExt;
use time;
use time::{now, Timespec};
use hyper::header::{AccessControlRequestMethod, AccessControlAllowMethods};
use hyper::header::{AccessControlMaxAge, AccessControlAllowOrigin};
use hyper::header::{AccessControlRequestHeaders, AccessControlAllowHeaders};
use hyper::header::{Headers, HeaderView};
use hyper::client::Request;
use hyper::mime::{Mime, TopLevel, SubLevel};
use hyper::header::{ContentType, Host};
use hyper::method::Method;
use hyper::status::StatusClass::Success;
use url::{SchemeData, Url};
#[derive(Clone)]
pub struct CORSRequest {
pub origin: Url,
pub destination: Url,
pub mode: RequestMode,
pub method: Method,
pub headers: Headers,
/// CORS preflight flag (http://fetch.spec.whatwg.org/#concept-http-fetch)
/// Indicates that a CORS preflight request and/or cache check is to be performed
pub preflight_flag: bool
}
/// http://fetch.spec.whatwg.org/#concept-request-mode
/// This only covers some of the request modes. The
/// `same-origin` and `no CORS` modes are unnecessary for XHR.
#[derive(PartialEq, Copy, Clone)]
pub enum RequestMode {
CORS, // CORS
ForcedPreflight // CORS-with-forced-preflight
}
impl CORSRequest {
/// Creates a CORS request if necessary. Will return an error when fetching is forbidden
pub fn
|
(referer: Url, destination: Url, mode: RequestMode,
method: Method, headers: Headers) -> Result<Option<CORSRequest>, ()> {
if referer.scheme == destination.scheme &&
referer.host() == destination.host() &&
referer.port() == destination.port() {
return Ok(None); // Not cross-origin, proceed with a normal fetch
}
match destination.scheme.as_slice() {
// TODO: If the request's same origin data url flag is set (which isn't the case for XHR)
// we can fetch a data URL normally. about:blank can also be fetched by XHR
"http" | "https" => {
let mut req = CORSRequest::new(referer, destination, mode, method, headers);
req.preflight_flag =!is_simple_method(&req.method) || mode == RequestMode::ForcedPreflight;
if req.headers.iter().all(|h| is_simple_header(&h)) {
req.preflight_flag = true;
}
Ok(Some(req))
},
_ => Err(()),
}
}
fn new(mut referer: Url, destination: Url, mode: RequestMode, method: Method,
headers: Headers) -> CORSRequest {
match referer.scheme_data {
SchemeData::Relative(ref mut data) => data.path = vec!(),
_ => {}
};
referer.fragment = None;
referer.query = None;
CORSRequest {
origin: referer,
destination: destination,
mode: mode,
method: method,
headers: headers,
preflight_flag: false
}
}
/// http://fetch.spec.whatwg.org/#concept-http-fetch
/// This method assumes that the CORS flag is set
/// This does not perform the full HTTP fetch, rather it handles part of the CORS filtering
/// if self.mode is ForcedPreflight, then the CORS-with-forced-preflight
/// fetch flag is set as well
pub fn http_fetch(&self) -> CORSResponse {
let response = CORSResponse::new();
// Step 2: Handle service workers (unimplemented)
// Step 3
// Substep 1: Service workers (unimplemented )
// Substep 2
let cache = &mut CORSCache(vec!()); // XXXManishearth Should come from user agent
if self.preflight_flag &&
!cache.match_method(self, &self.method) &&
!self.headers.iter().all(|h| is_simple_header(&h) && cache.match_header(self, h.name())) {
if!is_simple_method(&self.method) || self.mode == RequestMode::ForcedPreflight {
return self.preflight_fetch();
// Everything after this is part of XHR::fetch()
// Expect the organization of code to improve once we have a fetch crate
}
}
response
}
/// http://fetch.spec.whatwg.org/#cors-preflight-fetch
fn preflight_fetch(&self) -> CORSResponse {
let error = CORSResponse::new_error();
let mut cors_response = CORSResponse::new();
let mut preflight = self.clone(); // Step 1
preflight.method = Method::Options; // Step 2
preflight.headers = Headers::new(); // Step 3
// Step 4
preflight.headers.set(AccessControlRequestMethod(self.method.clone()));
// Step 5 - 7
let mut header_names = vec!();
for header in self.headers.iter() {
header_names.push(header.name().to_ascii_lowercase());
}
header_names.sort();
preflight.headers.set(AccessControlRequestHeaders(header_names));
// Step 8 unnecessary, we don't use the request body
// Step 9, 10 unnecessary, we're writing our own fetch code
// Step 11
let preflight_request = Request::new(preflight.method, preflight.destination);
let mut req = match preflight_request {
Ok(req) => req,
Err(_) => return error
};
let host = req.headers().get::<Host>().unwrap().clone();
*req.headers_mut() = preflight.headers.clone();
req.headers_mut().set(host);
let stream = match req.start() {
Ok(s) => s,
Err(_) => return error
};
let response = match stream.send() {
Ok(r) => r,
Err(_) => return error
};
// Step 12
match response.status.class() {
Success => {}
_ => return error
}
cors_response.headers = response.headers.clone();
// Substeps 1-3 (parsing rules: http://fetch.spec.whatwg.org/#http-new-header-syntax)
let methods_substep4 = [self.method.clone()];
let mut methods = match response.headers.get() {
Some(&AccessControlAllowMethods(ref v)) => v.as_slice(),
_ => return error
};
let headers = match response.headers.get() {
Some(&AccessControlAllowHeaders(ref h)) => h,
_ => return error
};
// Substep 4
if methods.len() == 0 || preflight.mode == RequestMode::ForcedPreflight {
methods = &methods_substep4;
}
// Substep 5
if!is_simple_method(&self.method) &&
!methods.iter().any(|m| m == &self.method) {
return error;
}
// Substep 6
for h in self.headers.iter() {
if is_simple_header(&h) {
continue;
}
if!headers.iter().any(|ref h2| h.name().eq_ignore_ascii_case(h2)) {
return error;
}
}
// Substep 7, 8
let max_age = match response.headers.get() {
Some(&AccessControlMaxAge(num)) => num,
None => 0
};
// Substep 9: Impose restrictions on max-age, if any (unimplemented)
// Substeps 10-12: Add a cache (partially implemented, XXXManishearth)
// This cache should come from the user agent, creating a new one here to check
// for compile time errors
let cache = &mut CORSCache(vec!());
for m in methods.iter() {
let cache_match = cache.match_method_and_update(self, m, max_age);
if!cache_match {
cache.insert(CORSCacheEntry::new(self.origin.clone(), self.destination.clone(),
max_age, false, HeaderOrMethod::MethodData(m.clone())));
}
}
for h in response.headers.iter() {
let cache_match = cache.match_header_and_update(self, h.name(), max_age);
if!cache_match {
cache.insert(CORSCacheEntry::new(self.origin.clone(), self.destination.clone(),
max_age, false, HeaderOrMethod::HeaderData(h.to_string())));
}
}
cors_response
}
}
pub struct CORSResponse {
pub network_error: bool,
pub headers: Headers
}
impl CORSResponse {
fn new() -> CORSResponse {
CORSResponse {
network_error: false,
headers: Headers::new()
}
}
fn new_error() -> CORSResponse {
CORSResponse {
network_error: true,
headers: Headers::new()
}
}
}
// CORS Cache stuff
/// A CORS cache object. Anchor it somewhere to the user agent.
#[derive(Clone)]
pub struct CORSCache(Vec<CORSCacheEntry>);
/// Union type for CORS cache entries
/// Each entry might pertain to a header or method
#[derive(Clone)]
pub enum HeaderOrMethod {
HeaderData(String),
MethodData(Method)
}
impl HeaderOrMethod {
fn match_header(&self, header_name: &str) -> bool {
match *self {
HeaderOrMethod::HeaderData(ref s) => s.eq_ignore_ascii_case(header_name),
_ => false
}
}
fn match_method(&self, method: &Method) -> bool {
match *self {
HeaderOrMethod::MethodData(ref m) => m == method,
_ => false
}
}
}
// An entry in the CORS cache
#[derive(Clone)]
pub struct CORSCacheEntry {
pub origin: Url,
pub url: Url,
pub max_age: u32,
pub credentials: bool,
pub header_or_method: HeaderOrMethod,
created: Timespec
}
impl CORSCacheEntry {
fn new (origin:Url, url: Url, max_age: u32, credentials: bool, header_or_method: HeaderOrMethod) -> CORSCacheEntry {
CORSCacheEntry {
origin: origin,
url: url,
max_age: max_age,
credentials: credentials,
header_or_method: header_or_method,
created: time::now().to_timespec()
}
}
}
impl CORSCache {
/// http://fetch.spec.whatwg.org/#concept-cache-clear
#[allow(dead_code)]
fn clear (&mut self, request: &CORSRequest) {
let CORSCache(buf) = self.clone();
let new_buf: Vec<CORSCacheEntry> = buf.into_iter().filter(|e| e.origin == request.origin && request.destination == e.url).collect();
*self = CORSCache(new_buf);
}
// Remove old entries
fn cleanup(&mut self) {
let CORSCache(buf) = self.clone();
let now = time::now().to_timespec();
let new_buf: Vec<CORSCacheEntry> = buf.into_iter().filter(|e| now.sec > e.created.sec + e.max_age as i64).collect();
*self = CORSCache(new_buf);
}
/// http://fetch.spec.whatwg.org/#concept-cache-match-header
fn find_entry_by_header<'a>(&'a mut self, request: &CORSRequest, header_name: &str) -> Option<&'a mut CORSCacheEntry> {
self.cleanup();
let CORSCache(ref mut buf) = *self;
// Credentials are not yet implemented here
let entry = buf.iter_mut().find(|e| e.origin.scheme == request.origin.scheme &&
e.origin.host() == request.origin.host() &&
e.origin.port() == request.origin.port() &&
e.url == request.destination &&
e.header_or_method.match_header(header_name));
entry
}
fn match_header(&mut self, request: &CORSRequest, header_name: &str) -> bool {
self.find_entry_by_header(request, header_name).is_some()
}
fn match_header_and_update(&mut self, request: &CORSRequest, header_name: &str, new_max_age: u32) -> bool {
self.find_entry_by_header(request, header_name).map(|e| e.max_age = new_max_age).is_some()
}
fn find_entry_by_method<'a>(&'a mut self, request: &CORSRequest, method: &Method) -> Option<&'a mut CORSCacheEntry> {
// we can take the method from CORSRequest itself
self.cleanup();
let CORSCache(ref mut buf) = *self;
// Credentials are not yet implemented here
let entry = buf.iter_mut().find(|e| e.origin.scheme == request.origin.scheme &&
e.origin.host() == request.origin.host() &&
e.origin.port() == request.origin.port() &&
e.url == request.destination &&
e.header_or_method.match_method(method));
entry
}
/// http://fetch.spec.whatwg.org/#concept-cache-match-method
fn match_method(&mut self, request: &CORSRequest, method: &Method) -> bool {
self.find_entry_by_method(request, method).is_some()
}
fn match_method_and_update(&mut self, request: &CORSRequest, method: &Method, new_max_age: u32) -> bool {
self.find_entry_by_method(request, method).map(|e| e.max_age = new_max_age).is_some()
}
fn insert(&mut self, entry: CORSCacheEntry) {
self.cleanup();
let CORSCache(ref mut buf) = *self;
buf.push(entry);
}
}
fn is_simple_header(h: &HeaderView) -> bool {
//FIXME: use h.is::<HeaderType>() when AcceptLanguage and
//ContentLanguage headers exist
match h.name().to_ascii_lowercase().as_slice() {
"accept" | "accept-language" | "content-language" => true,
"content-type" => match h.value() {
Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
_ => false
},
_ => false
}
}
fn is_simple_method(m: &Method) -> bool {
match *m {
Method::Get | Method::Head | Method::Post => true,
_ => false
}
}
/// Perform a CORS check on a header list and CORS request
/// http://fetch.spec.whatwg.org/#cors-check
pub fn allow_cross_origin_request(req: &CORSRequest, headers: &Headers) -> bool {
//FIXME(seanmonstar): use req.headers.get::<AccessControlAllowOrigin>()
match headers.get() {
Some(&AccessControlAllowOrigin::AllowStar) => true, // Not always true, depends on credentials mode
Some(&AccessControlAllowOrigin::AllowOrigin(ref url)) =>
url.scheme == req.origin.scheme &&
url.host() == req.origin.host() &&
url.port() == req.origin.port(),
None => false
}
}
|
maybe_new
|
identifier_name
|
cors.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A partial implementation of CORS
//! For now this library is XHR-specific.
//! For stuff involving `<img>`, `<iframe>`, `<form>`, etc please check what
//! the request mode should be and compare with the fetch spec
//! This library will eventually become the core of the Fetch crate
//! with CORSRequest being expanded into FetchRequest (etc)
use std::ascii::AsciiExt;
use time;
use time::{now, Timespec};
use hyper::header::{AccessControlRequestMethod, AccessControlAllowMethods};
use hyper::header::{AccessControlMaxAge, AccessControlAllowOrigin};
use hyper::header::{AccessControlRequestHeaders, AccessControlAllowHeaders};
use hyper::header::{Headers, HeaderView};
use hyper::client::Request;
use hyper::mime::{Mime, TopLevel, SubLevel};
use hyper::header::{ContentType, Host};
use hyper::method::Method;
use hyper::status::StatusClass::Success;
use url::{SchemeData, Url};
#[derive(Clone)]
pub struct CORSRequest {
pub origin: Url,
pub destination: Url,
pub mode: RequestMode,
pub method: Method,
pub headers: Headers,
/// CORS preflight flag (http://fetch.spec.whatwg.org/#concept-http-fetch)
/// Indicates that a CORS preflight request and/or cache check is to be performed
pub preflight_flag: bool
}
/// http://fetch.spec.whatwg.org/#concept-request-mode
/// This only covers some of the request modes. The
/// `same-origin` and `no CORS` modes are unnecessary for XHR.
#[derive(PartialEq, Copy, Clone)]
pub enum RequestMode {
CORS, // CORS
ForcedPreflight // CORS-with-forced-preflight
}
impl CORSRequest {
/// Creates a CORS request if necessary. Will return an error when fetching is forbidden
pub fn maybe_new(referer: Url, destination: Url, mode: RequestMode,
method: Method, headers: Headers) -> Result<Option<CORSRequest>, ()> {
if referer.scheme == destination.scheme &&
referer.host() == destination.host() &&
referer.port() == destination.port() {
return Ok(None); // Not cross-origin, proceed with a normal fetch
}
match destination.scheme.as_slice() {
// TODO: If the request's same origin data url flag is set (which isn't the case for XHR)
// we can fetch a data URL normally. about:blank can also be fetched by XHR
"http" | "https" => {
let mut req = CORSRequest::new(referer, destination, mode, method, headers);
req.preflight_flag =!is_simple_method(&req.method) || mode == RequestMode::ForcedPreflight;
if req.headers.iter().all(|h| is_simple_header(&h)) {
req.preflight_flag = true;
}
Ok(Some(req))
},
_ => Err(()),
}
}
fn new(mut referer: Url, destination: Url, mode: RequestMode, method: Method,
headers: Headers) -> CORSRequest {
match referer.scheme_data {
SchemeData::Relative(ref mut data) => data.path = vec!(),
_ => {}
};
referer.fragment = None;
referer.query = None;
CORSRequest {
origin: referer,
destination: destination,
mode: mode,
method: method,
headers: headers,
preflight_flag: false
}
}
/// http://fetch.spec.whatwg.org/#concept-http-fetch
/// This method assumes that the CORS flag is set
/// This does not perform the full HTTP fetch, rather it handles part of the CORS filtering
/// if self.mode is ForcedPreflight, then the CORS-with-forced-preflight
/// fetch flag is set as well
pub fn http_fetch(&self) -> CORSResponse {
let response = CORSResponse::new();
// Step 2: Handle service workers (unimplemented)
// Step 3
// Substep 1: Service workers (unimplemented )
// Substep 2
let cache = &mut CORSCache(vec!()); // XXXManishearth Should come from user agent
if self.preflight_flag &&
!cache.match_method(self, &self.method) &&
!self.headers.iter().all(|h| is_simple_header(&h) && cache.match_header(self, h.name())) {
if!is_simple_method(&self.method) || self.mode == RequestMode::ForcedPreflight {
return self.preflight_fetch();
// Everything after this is part of XHR::fetch()
// Expect the organization of code to improve once we have a fetch crate
}
}
response
}
/// http://fetch.spec.whatwg.org/#cors-preflight-fetch
fn preflight_fetch(&self) -> CORSResponse {
let error = CORSResponse::new_error();
let mut cors_response = CORSResponse::new();
let mut preflight = self.clone(); // Step 1
preflight.method = Method::Options; // Step 2
preflight.headers = Headers::new(); // Step 3
// Step 4
preflight.headers.set(AccessControlRequestMethod(self.method.clone()));
|
// Step 5 - 7
let mut header_names = vec!();
for header in self.headers.iter() {
header_names.push(header.name().to_ascii_lowercase());
}
header_names.sort();
preflight.headers.set(AccessControlRequestHeaders(header_names));
// Step 8 unnecessary, we don't use the request body
// Step 9, 10 unnecessary, we're writing our own fetch code
// Step 11
let preflight_request = Request::new(preflight.method, preflight.destination);
let mut req = match preflight_request {
Ok(req) => req,
Err(_) => return error
};
let host = req.headers().get::<Host>().unwrap().clone();
*req.headers_mut() = preflight.headers.clone();
req.headers_mut().set(host);
let stream = match req.start() {
Ok(s) => s,
Err(_) => return error
};
let response = match stream.send() {
Ok(r) => r,
Err(_) => return error
};
// Step 12
match response.status.class() {
Success => {}
_ => return error
}
cors_response.headers = response.headers.clone();
// Substeps 1-3 (parsing rules: http://fetch.spec.whatwg.org/#http-new-header-syntax)
let methods_substep4 = [self.method.clone()];
let mut methods = match response.headers.get() {
Some(&AccessControlAllowMethods(ref v)) => v.as_slice(),
_ => return error
};
let headers = match response.headers.get() {
Some(&AccessControlAllowHeaders(ref h)) => h,
_ => return error
};
// Substep 4
if methods.len() == 0 || preflight.mode == RequestMode::ForcedPreflight {
methods = &methods_substep4;
}
// Substep 5
if!is_simple_method(&self.method) &&
!methods.iter().any(|m| m == &self.method) {
return error;
}
// Substep 6
for h in self.headers.iter() {
if is_simple_header(&h) {
continue;
}
if!headers.iter().any(|ref h2| h.name().eq_ignore_ascii_case(h2)) {
return error;
}
}
// Substep 7, 8
let max_age = match response.headers.get() {
Some(&AccessControlMaxAge(num)) => num,
None => 0
};
// Substep 9: Impose restrictions on max-age, if any (unimplemented)
// Substeps 10-12: Add a cache (partially implemented, XXXManishearth)
// This cache should come from the user agent, creating a new one here to check
// for compile time errors
let cache = &mut CORSCache(vec!());
for m in methods.iter() {
let cache_match = cache.match_method_and_update(self, m, max_age);
if!cache_match {
cache.insert(CORSCacheEntry::new(self.origin.clone(), self.destination.clone(),
max_age, false, HeaderOrMethod::MethodData(m.clone())));
}
}
for h in response.headers.iter() {
let cache_match = cache.match_header_and_update(self, h.name(), max_age);
if!cache_match {
cache.insert(CORSCacheEntry::new(self.origin.clone(), self.destination.clone(),
max_age, false, HeaderOrMethod::HeaderData(h.to_string())));
}
}
cors_response
}
}
pub struct CORSResponse {
pub network_error: bool,
pub headers: Headers
}
impl CORSResponse {
fn new() -> CORSResponse {
CORSResponse {
network_error: false,
headers: Headers::new()
}
}
fn new_error() -> CORSResponse {
CORSResponse {
network_error: true,
headers: Headers::new()
}
}
}
// CORS Cache stuff
/// A CORS cache object. Anchor it somewhere to the user agent.
#[derive(Clone)]
pub struct CORSCache(Vec<CORSCacheEntry>);
/// Union type for CORS cache entries
/// Each entry might pertain to a header or method
#[derive(Clone)]
pub enum HeaderOrMethod {
HeaderData(String),
MethodData(Method)
}
impl HeaderOrMethod {
fn match_header(&self, header_name: &str) -> bool {
match *self {
HeaderOrMethod::HeaderData(ref s) => s.eq_ignore_ascii_case(header_name),
_ => false
}
}
fn match_method(&self, method: &Method) -> bool {
match *self {
HeaderOrMethod::MethodData(ref m) => m == method,
_ => false
}
}
}
// An entry in the CORS cache
#[derive(Clone)]
pub struct CORSCacheEntry {
pub origin: Url,
pub url: Url,
pub max_age: u32,
pub credentials: bool,
pub header_or_method: HeaderOrMethod,
created: Timespec
}
impl CORSCacheEntry {
fn new (origin:Url, url: Url, max_age: u32, credentials: bool, header_or_method: HeaderOrMethod) -> CORSCacheEntry {
CORSCacheEntry {
origin: origin,
url: url,
max_age: max_age,
credentials: credentials,
header_or_method: header_or_method,
created: time::now().to_timespec()
}
}
}
impl CORSCache {
/// http://fetch.spec.whatwg.org/#concept-cache-clear
#[allow(dead_code)]
fn clear (&mut self, request: &CORSRequest) {
let CORSCache(buf) = self.clone();
let new_buf: Vec<CORSCacheEntry> = buf.into_iter().filter(|e| e.origin == request.origin && request.destination == e.url).collect();
*self = CORSCache(new_buf);
}
// Remove old entries
fn cleanup(&mut self) {
let CORSCache(buf) = self.clone();
let now = time::now().to_timespec();
let new_buf: Vec<CORSCacheEntry> = buf.into_iter().filter(|e| now.sec > e.created.sec + e.max_age as i64).collect();
*self = CORSCache(new_buf);
}
/// http://fetch.spec.whatwg.org/#concept-cache-match-header
fn find_entry_by_header<'a>(&'a mut self, request: &CORSRequest, header_name: &str) -> Option<&'a mut CORSCacheEntry> {
self.cleanup();
let CORSCache(ref mut buf) = *self;
// Credentials are not yet implemented here
let entry = buf.iter_mut().find(|e| e.origin.scheme == request.origin.scheme &&
e.origin.host() == request.origin.host() &&
e.origin.port() == request.origin.port() &&
e.url == request.destination &&
e.header_or_method.match_header(header_name));
entry
}
fn match_header(&mut self, request: &CORSRequest, header_name: &str) -> bool {
self.find_entry_by_header(request, header_name).is_some()
}
fn match_header_and_update(&mut self, request: &CORSRequest, header_name: &str, new_max_age: u32) -> bool {
self.find_entry_by_header(request, header_name).map(|e| e.max_age = new_max_age).is_some()
}
fn find_entry_by_method<'a>(&'a mut self, request: &CORSRequest, method: &Method) -> Option<&'a mut CORSCacheEntry> {
// we can take the method from CORSRequest itself
self.cleanup();
let CORSCache(ref mut buf) = *self;
// Credentials are not yet implemented here
let entry = buf.iter_mut().find(|e| e.origin.scheme == request.origin.scheme &&
e.origin.host() == request.origin.host() &&
e.origin.port() == request.origin.port() &&
e.url == request.destination &&
e.header_or_method.match_method(method));
entry
}
/// http://fetch.spec.whatwg.org/#concept-cache-match-method
fn match_method(&mut self, request: &CORSRequest, method: &Method) -> bool {
self.find_entry_by_method(request, method).is_some()
}
fn match_method_and_update(&mut self, request: &CORSRequest, method: &Method, new_max_age: u32) -> bool {
self.find_entry_by_method(request, method).map(|e| e.max_age = new_max_age).is_some()
}
fn insert(&mut self, entry: CORSCacheEntry) {
self.cleanup();
let CORSCache(ref mut buf) = *self;
buf.push(entry);
}
}
fn is_simple_header(h: &HeaderView) -> bool {
//FIXME: use h.is::<HeaderType>() when AcceptLanguage and
//ContentLanguage headers exist
match h.name().to_ascii_lowercase().as_slice() {
"accept" | "accept-language" | "content-language" => true,
"content-type" => match h.value() {
Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
_ => false
},
_ => false
}
}
fn is_simple_method(m: &Method) -> bool {
match *m {
Method::Get | Method::Head | Method::Post => true,
_ => false
}
}
/// Perform a CORS check on a header list and CORS request
/// http://fetch.spec.whatwg.org/#cors-check
pub fn allow_cross_origin_request(req: &CORSRequest, headers: &Headers) -> bool {
//FIXME(seanmonstar): use req.headers.get::<AccessControlAllowOrigin>()
match headers.get() {
Some(&AccessControlAllowOrigin::AllowStar) => true, // Not always true, depends on credentials mode
Some(&AccessControlAllowOrigin::AllowOrigin(ref url)) =>
url.scheme == req.origin.scheme &&
url.host() == req.origin.host() &&
url.port() == req.origin.port(),
None => false
}
}
|
random_line_split
|
|
cors.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A partial implementation of CORS
//! For now this library is XHR-specific.
//! For stuff involving `<img>`, `<iframe>`, `<form>`, etc please check what
//! the request mode should be and compare with the fetch spec
//! This library will eventually become the core of the Fetch crate
//! with CORSRequest being expanded into FetchRequest (etc)
use std::ascii::AsciiExt;
use time;
use time::{now, Timespec};
use hyper::header::{AccessControlRequestMethod, AccessControlAllowMethods};
use hyper::header::{AccessControlMaxAge, AccessControlAllowOrigin};
use hyper::header::{AccessControlRequestHeaders, AccessControlAllowHeaders};
use hyper::header::{Headers, HeaderView};
use hyper::client::Request;
use hyper::mime::{Mime, TopLevel, SubLevel};
use hyper::header::{ContentType, Host};
use hyper::method::Method;
use hyper::status::StatusClass::Success;
use url::{SchemeData, Url};
#[derive(Clone)]
pub struct CORSRequest {
pub origin: Url,
pub destination: Url,
pub mode: RequestMode,
pub method: Method,
pub headers: Headers,
/// CORS preflight flag (http://fetch.spec.whatwg.org/#concept-http-fetch)
/// Indicates that a CORS preflight request and/or cache check is to be performed
pub preflight_flag: bool
}
/// http://fetch.spec.whatwg.org/#concept-request-mode
/// This only covers some of the request modes. The
/// `same-origin` and `no CORS` modes are unnecessary for XHR.
#[derive(PartialEq, Copy, Clone)]
pub enum RequestMode {
CORS, // CORS
ForcedPreflight // CORS-with-forced-preflight
}
impl CORSRequest {
/// Creates a CORS request if necessary. Will return an error when fetching is forbidden
pub fn maybe_new(referer: Url, destination: Url, mode: RequestMode,
method: Method, headers: Headers) -> Result<Option<CORSRequest>, ()>
|
fn new(mut referer: Url, destination: Url, mode: RequestMode, method: Method,
headers: Headers) -> CORSRequest {
match referer.scheme_data {
SchemeData::Relative(ref mut data) => data.path = vec!(),
_ => {}
};
referer.fragment = None;
referer.query = None;
CORSRequest {
origin: referer,
destination: destination,
mode: mode,
method: method,
headers: headers,
preflight_flag: false
}
}
/// http://fetch.spec.whatwg.org/#concept-http-fetch
/// This method assumes that the CORS flag is set
/// This does not perform the full HTTP fetch, rather it handles part of the CORS filtering
/// if self.mode is ForcedPreflight, then the CORS-with-forced-preflight
/// fetch flag is set as well
pub fn http_fetch(&self) -> CORSResponse {
let response = CORSResponse::new();
// Step 2: Handle service workers (unimplemented)
// Step 3
// Substep 1: Service workers (unimplemented )
// Substep 2
let cache = &mut CORSCache(vec!()); // XXXManishearth Should come from user agent
if self.preflight_flag &&
!cache.match_method(self, &self.method) &&
!self.headers.iter().all(|h| is_simple_header(&h) && cache.match_header(self, h.name())) {
if!is_simple_method(&self.method) || self.mode == RequestMode::ForcedPreflight {
return self.preflight_fetch();
// Everything after this is part of XHR::fetch()
// Expect the organization of code to improve once we have a fetch crate
}
}
response
}
/// http://fetch.spec.whatwg.org/#cors-preflight-fetch
fn preflight_fetch(&self) -> CORSResponse {
let error = CORSResponse::new_error();
let mut cors_response = CORSResponse::new();
let mut preflight = self.clone(); // Step 1
preflight.method = Method::Options; // Step 2
preflight.headers = Headers::new(); // Step 3
// Step 4
preflight.headers.set(AccessControlRequestMethod(self.method.clone()));
// Step 5 - 7
let mut header_names = vec!();
for header in self.headers.iter() {
header_names.push(header.name().to_ascii_lowercase());
}
header_names.sort();
preflight.headers.set(AccessControlRequestHeaders(header_names));
// Step 8 unnecessary, we don't use the request body
// Step 9, 10 unnecessary, we're writing our own fetch code
// Step 11
let preflight_request = Request::new(preflight.method, preflight.destination);
let mut req = match preflight_request {
Ok(req) => req,
Err(_) => return error
};
let host = req.headers().get::<Host>().unwrap().clone();
*req.headers_mut() = preflight.headers.clone();
req.headers_mut().set(host);
let stream = match req.start() {
Ok(s) => s,
Err(_) => return error
};
let response = match stream.send() {
Ok(r) => r,
Err(_) => return error
};
// Step 12
match response.status.class() {
Success => {}
_ => return error
}
cors_response.headers = response.headers.clone();
// Substeps 1-3 (parsing rules: http://fetch.spec.whatwg.org/#http-new-header-syntax)
let methods_substep4 = [self.method.clone()];
let mut methods = match response.headers.get() {
Some(&AccessControlAllowMethods(ref v)) => v.as_slice(),
_ => return error
};
let headers = match response.headers.get() {
Some(&AccessControlAllowHeaders(ref h)) => h,
_ => return error
};
// Substep 4
if methods.len() == 0 || preflight.mode == RequestMode::ForcedPreflight {
methods = &methods_substep4;
}
// Substep 5
if!is_simple_method(&self.method) &&
!methods.iter().any(|m| m == &self.method) {
return error;
}
// Substep 6
for h in self.headers.iter() {
if is_simple_header(&h) {
continue;
}
if!headers.iter().any(|ref h2| h.name().eq_ignore_ascii_case(h2)) {
return error;
}
}
// Substep 7, 8
let max_age = match response.headers.get() {
Some(&AccessControlMaxAge(num)) => num,
None => 0
};
// Substep 9: Impose restrictions on max-age, if any (unimplemented)
// Substeps 10-12: Add a cache (partially implemented, XXXManishearth)
// This cache should come from the user agent, creating a new one here to check
// for compile time errors
let cache = &mut CORSCache(vec!());
for m in methods.iter() {
let cache_match = cache.match_method_and_update(self, m, max_age);
if!cache_match {
cache.insert(CORSCacheEntry::new(self.origin.clone(), self.destination.clone(),
max_age, false, HeaderOrMethod::MethodData(m.clone())));
}
}
for h in response.headers.iter() {
let cache_match = cache.match_header_and_update(self, h.name(), max_age);
if!cache_match {
cache.insert(CORSCacheEntry::new(self.origin.clone(), self.destination.clone(),
max_age, false, HeaderOrMethod::HeaderData(h.to_string())));
}
}
cors_response
}
}
/// Outcome of running the CORS algorithm for a request.
pub struct CORSResponse {
    /// True when the CORS check failed and the caller must treat the
    /// fetch as a network error.
    pub network_error: bool,
    /// Headers gathered from the (preflight) response; empty on error.
    pub headers: Headers
}
impl CORSResponse {
fn new() -> CORSResponse {
CORSResponse {
network_error: false,
headers: Headers::new()
}
}
fn new_error() -> CORSResponse {
CORSResponse {
network_error: true,
headers: Headers::new()
}
}
}
// CORS Cache stuff
/// A CORS cache object. Anchor it somewhere to the user agent.
#[derive(Clone)]
pub struct CORSCache(Vec<CORSCacheEntry>);
/// Union type for CORS cache entries
/// Each entry might pertain to a header or method
#[derive(Clone)]
pub enum HeaderOrMethod {
    /// A header name recorded from an `Access-Control-Allow-Headers` entry.
    HeaderData(String),
    /// A method recorded from an `Access-Control-Allow-Methods` entry.
    MethodData(Method)
}
impl HeaderOrMethod {
    /// True when this entry is header data whose name equals `header_name`
    /// (ASCII case-insensitive); always false for method entries.
    fn match_header(&self, header_name: &str) -> bool {
        if let HeaderOrMethod::HeaderData(ref name) = *self {
            name.eq_ignore_ascii_case(header_name)
        } else {
            false
        }
    }

    /// True when this entry is method data equal to `method`;
    /// always false for header entries.
    fn match_method(&self, method: &Method) -> bool {
        if let HeaderOrMethod::MethodData(ref m) = *self {
            m == method
        } else {
            false
        }
    }
}
// An entry in the CORS cache
#[derive(Clone)]
pub struct CORSCacheEntry {
    /// Origin of the request this entry was stored for.
    pub origin: Url,
    /// Request URL the entry applies to.
    pub url: Url,
    /// Lifetime in seconds (from `Access-Control-Max-Age`).
    pub max_age: u32,
    /// Whether the entry was stored under a credentials-bearing request
    /// (credentials are not yet implemented in the lookups below).
    pub credentials: bool,
    /// The cached header name or method.
    pub header_or_method: HeaderOrMethod,
    // Creation time; compared against `max_age` to expire the entry.
    created: Timespec
}
impl CORSCacheEntry {
    /// Builds a cache entry stamped with the current time, so that
    /// `max_age` (seconds) can later be measured against `created`.
    fn new (origin:Url, url: Url, max_age: u32, credentials: bool, header_or_method: HeaderOrMethod) -> CORSCacheEntry {
        let stamped = time::now().to_timespec();
        CORSCacheEntry {
            created: stamped,
            header_or_method: header_or_method,
            credentials: credentials,
            max_age: max_age,
            url: url,
            origin: origin
        }
    }
}
impl CORSCache {
    /// http://fetch.spec.whatwg.org/#concept-cache-clear
    ///
    /// Drops every cache entry created for `request`'s origin and URL.
    #[allow(dead_code)]
    fn clear (&mut self, request: &CORSRequest) {
        let CORSCache(buf) = self.clone();
        // Retain only entries that do NOT belong to this request. The
        // previous predicate was inverted: it kept exactly the entries
        // the spec says to remove.
        let new_buf: Vec<CORSCacheEntry> =
            buf.into_iter()
               .filter(|e| !(e.origin == request.origin && request.destination == e.url))
               .collect();
        *self = CORSCache(new_buf);
    }
    // Remove entries whose max-age has elapsed.
    fn cleanup(&mut self) {
        let CORSCache(buf) = self.clone();
        let now = time::now().to_timespec();
        // Retain only still-fresh entries (creation time plus max_age is in
        // the future). The previous predicate was inverted: it evicted the
        // fresh entries and kept the expired ones.
        let new_buf: Vec<CORSCacheEntry> =
            buf.into_iter()
               .filter(|e| now.sec < e.created.sec + e.max_age as i64)
               .collect();
        *self = CORSCache(new_buf);
    }
    /// http://fetch.spec.whatwg.org/#concept-cache-match-header
    ///
    /// Finds a live entry matching `request`'s origin/URL and `header_name`.
    fn find_entry_by_header<'a>(&'a mut self, request: &CORSRequest, header_name: &str) -> Option<&'a mut CORSCacheEntry> {
        self.cleanup();
        let CORSCache(ref mut buf) = *self;
        // Credentials are not yet implemented here
        let entry = buf.iter_mut().find(|e| e.origin.scheme == request.origin.scheme &&
                            e.origin.host() == request.origin.host() &&
                            e.origin.port() == request.origin.port() &&
                            e.url == request.destination &&
                            e.header_or_method.match_header(header_name));
        entry
    }
    /// Whether a cached header entry exists for this request.
    fn match_header(&mut self, request: &CORSRequest, header_name: &str) -> bool {
        self.find_entry_by_header(request, header_name).is_some()
    }
    /// Like `match_header`, but refreshes the matching entry's max-age.
    fn match_header_and_update(&mut self, request: &CORSRequest, header_name: &str, new_max_age: u32) -> bool {
        self.find_entry_by_header(request, header_name).map(|e| e.max_age = new_max_age).is_some()
    }
    /// Finds a live entry matching `request`'s origin/URL and `method`.
    fn find_entry_by_method<'a>(&'a mut self, request: &CORSRequest, method: &Method) -> Option<&'a mut CORSCacheEntry> {
        // we can take the method from CORSRequest itself
        self.cleanup();
        let CORSCache(ref mut buf) = *self;
        // Credentials are not yet implemented here
        let entry = buf.iter_mut().find(|e| e.origin.scheme == request.origin.scheme &&
                            e.origin.host() == request.origin.host() &&
                            e.origin.port() == request.origin.port() &&
                            e.url == request.destination &&
                            e.header_or_method.match_method(method));
        entry
    }
    /// http://fetch.spec.whatwg.org/#concept-cache-match-method
    fn match_method(&mut self, request: &CORSRequest, method: &Method) -> bool {
        self.find_entry_by_method(request, method).is_some()
    }
    /// Like `match_method`, but refreshes the matching entry's max-age.
    fn match_method_and_update(&mut self, request: &CORSRequest, method: &Method, new_max_age: u32) -> bool {
        self.find_entry_by_method(request, method).map(|e| e.max_age = new_max_age).is_some()
    }
    /// Adds `entry`, first evicting expired entries.
    fn insert(&mut self, entry: CORSCacheEntry) {
        self.cleanup();
        let CORSCache(ref mut buf) = *self;
        buf.push(entry);
    }
}
/// Whether `h` is a CORS "simple header": Accept, Accept-Language,
/// Content-Language, or Content-Type restricted to text/plain,
/// application/x-www-form-urlencoded, or multipart/form-data.
/// (http://fetch.spec.whatwg.org/#simple-header)
fn is_simple_header(h: &HeaderView) -> bool {
    //FIXME: use h.is::<HeaderType>() when AcceptLanguage and
    //ContentLanguage headers exist
    match h.name().to_ascii_lowercase().as_slice() {
        "accept" | "accept-language" | "content-language" => true,
        // Content-Type is simple only for the three whitelisted MIME types.
        "content-type" => match h.value() {
            Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
            Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
            Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
            _ => false
        },
        _ => false
    }
}
/// Whether `m` is a CORS "simple method": GET, HEAD, or POST.
fn is_simple_method(m: &Method) -> bool {
    match *m {
        Method::Get => true,
        Method::Head => true,
        Method::Post => true,
        _ => false
    }
}
/// Perform a CORS check on a header list and CORS request
/// http://fetch.spec.whatwg.org/#cors-check
pub fn allow_cross_origin_request(req: &CORSRequest, headers: &Headers) -> bool {
    //FIXME(seanmonstar): use req.headers.get::<AccessControlAllowOrigin>()
    match headers.get() {
        Some(&AccessControlAllowOrigin::AllowStar) => true, // Not always true, depends on credentials mode
        // A concrete allowed origin must match the request origin on
        // scheme, host and port.
        Some(&AccessControlAllowOrigin::AllowOrigin(ref url)) =>
            url.scheme == req.origin.scheme &&
            url.host() == req.origin.host() &&
            url.port() == req.origin.port(),
        // No Access-Control-Allow-Origin header: deny.
        None => false
    }
}
|
{
if referer.scheme == destination.scheme &&
referer.host() == destination.host() &&
referer.port() == destination.port() {
return Ok(None); // Not cross-origin, proceed with a normal fetch
}
match destination.scheme.as_slice() {
// TODO: If the request's same origin data url flag is set (which isn't the case for XHR)
// we can fetch a data URL normally. about:blank can also be fetched by XHR
"http" | "https" => {
let mut req = CORSRequest::new(referer, destination, mode, method, headers);
req.preflight_flag = !is_simple_method(&req.method) || mode == RequestMode::ForcedPreflight;
if req.headers.iter().all(|h| is_simple_header(&h)) {
req.preflight_flag = true;
}
Ok(Some(req))
},
_ => Err(()),
}
}
|
identifier_body
|
dependency.rs
|
use semver::VersionReq;
use core::{SourceId, Summary, PackageId};
use util::CargoResult;
/// Informations about a dependency requested by a Cargo manifest.
#[derive(PartialEq,Clone,Debug)]
pub struct Dependency {
    // Name of the depended-on package.
    name: String,
    // Where the dependency is fetched from (registry, git, path, ...).
    source_id: SourceId,
    // Parsed semver requirement used for version matching.
    req: VersionReq,
    // The literal requirement string from the manifest, if one was given.
    specified_req: Option<String>,
    // Normal, development, or build dependency.
    kind: Kind,
    // When true, `matches_id` compares by name only (used for overrides).
    only_match_name: bool,
    // Whether this dependency is optional (feature-gated).
    optional: bool,
    // Whether the package's default feature set is requested.
    default_features: bool,
    // Features explicitly requested for the package.
    features: Vec<String>,
    // This dependency should be used only for this platform.
    // `None` means *all platforms*.
    only_for_platform: Option<String>,
}
#[derive(PartialEq, Clone, Debug, Copy)]
pub enum Kind {
    /// A regular `[dependencies]` entry.
    Normal,
    /// A `[dev-dependencies]` entry (tests, examples, benchmarks).
    Development,
    /// A `[build-dependencies]` entry (build scripts).
    Build,
}
impl Dependency {
/// Attempt to create a `Dependency` from an entry in the manifest.
pub fn parse(name: &str,
version: Option<&str>,
source_id: &SourceId) -> CargoResult<Dependency> {
let version_req = match version {
Some(v) => try!(VersionReq::parse(v)),
None => VersionReq::any()
};
Ok(Dependency {
only_match_name: false,
req: version_req,
specified_req: version.map(|s| s.to_string()),
.. Dependency::new_override(name, source_id)
})
}
pub fn new_override(name: &str, source_id: &SourceId) -> Dependency {
Dependency {
name: name.to_string(),
source_id: source_id.clone(),
req: VersionReq::any(),
kind: Kind::Normal,
only_match_name: true,
optional: false,
features: Vec::new(),
default_features: true,
specified_req: None,
only_for_platform: None,
}
}
pub fn version_req(&self) -> &VersionReq { &self.req }
pub fn name(&self) -> &str { &self.name }
pub fn source_id(&self) -> &SourceId { &self.source_id }
pub fn kind(&self) -> Kind { self.kind }
pub fn specified_req(&self) -> Option<&str> {
self.specified_req.as_ref().map(|s| &s[..])
}
/// If none, this dependencies must be built for all platforms.
/// If some, it must only be built for the specified platform.
pub fn only_for_platform(&self) -> Option<&str> {
self.only_for_platform.as_ref().map(|s| &s[..])
}
pub fn set_kind(mut self, kind: Kind) -> Dependency {
self.kind = kind;
self
}
/// Sets the list of features requested for the package.
pub fn set_features(mut self, features: Vec<String>) -> Dependency {
self.features = features;
self
}
/// Sets whether the dependency requests default features of the package.
pub fn set_default_features(mut self, default_features: bool) -> Dependency {
self.default_features = default_features;
self
}
/// Sets whether the dependency is optional.
pub fn set_optional(mut self, optional: bool) -> Dependency {
self.optional = optional;
self
}
/// Set the source id for this dependency
pub fn set_source_id(mut self, id: SourceId) -> Dependency {
self.source_id = id;
self
}
/// Set the version requirement for this dependency
pub fn set_version_req(mut self, req: VersionReq) -> Dependency {
self.req = req;
self
}
pub fn set_only_for_platform(mut self, platform: Option<String>)
-> Dependency {
self.only_for_platform = platform;
self
}
/// Lock this dependency to depending on the specified package id
pub fn lock_to(self, id: &PackageId) -> Dependency
|
/// Returns false if the dependency is only used to build the local package.
pub fn is_transitive(&self) -> bool {
match self.kind {
Kind::Normal | Kind::Build => true,
Kind::Development => false,
}
}
pub fn is_build(&self) -> bool {
match self.kind { Kind::Build => true, _ => false }
}
pub fn is_optional(&self) -> bool { self.optional }
/// Returns true if the default features of the dependency are requested.
pub fn uses_default_features(&self) -> bool { self.default_features }
/// Returns the list of features that are requested by the dependency.
pub fn features(&self) -> &[String] { &self.features }
/// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches(&self, sum: &Summary) -> bool {
self.matches_id(sum.package_id())
}
/// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool {
self.name == id.name() &&
(self.only_match_name || (self.req.matches(id.version()) &&
&self.source_id == id.source_id()))
}
}
#[derive(PartialEq,Clone,RustcEncodable)]
/// Flattened, serializable view of a `Dependency` (name + requirement string).
pub struct SerializedDependency {
    // Package name.
    name: String,
    // Version requirement rendered as a string.
    req: String
}
impl SerializedDependency {
    /// Flattens `dep` into its serializable name/requirement form.
    pub fn from_dependency(dep: &Dependency) -> SerializedDependency {
        let name = dep.name().to_string();
        let req = dep.version_req().to_string();
        SerializedDependency { name: name, req: req }
    }
}
|
{
assert_eq!(self.source_id, *id.source_id());
assert!(self.req.matches(id.version()));
self.set_version_req(VersionReq::exact(id.version()))
.set_source_id(id.source_id().clone())
}
|
identifier_body
|
dependency.rs
|
use semver::VersionReq;
use core::{SourceId, Summary, PackageId};
use util::CargoResult;
/// Informations about a dependency requested by a Cargo manifest.
#[derive(PartialEq,Clone,Debug)]
pub struct Dependency {
name: String,
source_id: SourceId,
req: VersionReq,
specified_req: Option<String>,
kind: Kind,
only_match_name: bool,
optional: bool,
default_features: bool,
features: Vec<String>,
// This dependency should be used only for this platform.
// `None` means *all platforms*.
only_for_platform: Option<String>,
}
#[derive(PartialEq, Clone, Debug, Copy)]
pub enum Kind {
Normal,
Development,
Build,
}
impl Dependency {
/// Attempt to create a `Dependency` from an entry in the manifest.
pub fn parse(name: &str,
version: Option<&str>,
source_id: &SourceId) -> CargoResult<Dependency> {
let version_req = match version {
Some(v) => try!(VersionReq::parse(v)),
None => VersionReq::any()
};
Ok(Dependency {
only_match_name: false,
req: version_req,
specified_req: version.map(|s| s.to_string()),
.. Dependency::new_override(name, source_id)
})
}
pub fn new_override(name: &str, source_id: &SourceId) -> Dependency {
Dependency {
name: name.to_string(),
source_id: source_id.clone(),
req: VersionReq::any(),
kind: Kind::Normal,
only_match_name: true,
optional: false,
features: Vec::new(),
default_features: true,
specified_req: None,
only_for_platform: None,
}
}
pub fn version_req(&self) -> &VersionReq { &self.req }
pub fn name(&self) -> &str { &self.name }
pub fn source_id(&self) -> &SourceId { &self.source_id }
pub fn kind(&self) -> Kind { self.kind }
pub fn specified_req(&self) -> Option<&str> {
self.specified_req.as_ref().map(|s| &s[..])
}
/// If none, this dependencies must be built for all platforms.
/// If some, it must only be built for the specified platform.
pub fn only_for_platform(&self) -> Option<&str> {
self.only_for_platform.as_ref().map(|s| &s[..])
}
pub fn set_kind(mut self, kind: Kind) -> Dependency {
self.kind = kind;
self
}
/// Sets the list of features requested for the package.
pub fn set_features(mut self, features: Vec<String>) -> Dependency {
self.features = features;
self
}
/// Sets whether the dependency requests default features of the package.
pub fn set_default_features(mut self, default_features: bool) -> Dependency {
self.default_features = default_features;
self
}
/// Sets whether the dependency is optional.
pub fn set_optional(mut self, optional: bool) -> Dependency {
self.optional = optional;
self
}
/// Set the source id for this dependency
pub fn set_source_id(mut self, id: SourceId) -> Dependency {
self.source_id = id;
self
}
/// Set the version requirement for this dependency
pub fn set_version_req(mut self, req: VersionReq) -> Dependency {
self.req = req;
self
}
pub fn set_only_for_platform(mut self, platform: Option<String>)
-> Dependency {
self.only_for_platform = platform;
self
}
/// Lock this dependency to depending on the specified package id
pub fn lock_to(self, id: &PackageId) -> Dependency {
assert_eq!(self.source_id, *id.source_id());
assert!(self.req.matches(id.version()));
self.set_version_req(VersionReq::exact(id.version()))
.set_source_id(id.source_id().clone())
}
/// Returns false if the dependency is only used to build the local package.
pub fn is_transitive(&self) -> bool {
match self.kind {
Kind::Normal | Kind::Build => true,
Kind::Development => false,
}
}
pub fn is_build(&self) -> bool {
match self.kind { Kind::Build => true, _ => false }
}
pub fn is_optional(&self) -> bool { self.optional }
/// Returns true if the default features of the dependency are requested.
pub fn uses_default_features(&self) -> bool { self.default_features }
/// Returns the list of features that are requested by the dependency.
pub fn features(&self) -> &[String] { &self.features }
/// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches(&self, sum: &Summary) -> bool {
self.matches_id(sum.package_id())
}
/// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool {
self.name == id.name() &&
(self.only_match_name || (self.req.matches(id.version()) &&
&self.source_id == id.source_id()))
}
}
#[derive(PartialEq,Clone,RustcEncodable)]
pub struct
|
{
name: String,
req: String
}
impl SerializedDependency {
pub fn from_dependency(dep: &Dependency) -> SerializedDependency {
SerializedDependency {
name: dep.name().to_string(),
req: dep.version_req().to_string()
}
}
}
|
SerializedDependency
|
identifier_name
|
dependency.rs
|
use semver::VersionReq;
use core::{SourceId, Summary, PackageId};
use util::CargoResult;
/// Informations about a dependency requested by a Cargo manifest.
#[derive(PartialEq,Clone,Debug)]
pub struct Dependency {
name: String,
source_id: SourceId,
req: VersionReq,
specified_req: Option<String>,
kind: Kind,
only_match_name: bool,
optional: bool,
default_features: bool,
features: Vec<String>,
// This dependency should be used only for this platform.
// `None` means *all platforms*.
only_for_platform: Option<String>,
}
#[derive(PartialEq, Clone, Debug, Copy)]
pub enum Kind {
Normal,
Development,
Build,
}
impl Dependency {
/// Attempt to create a `Dependency` from an entry in the manifest.
pub fn parse(name: &str,
version: Option<&str>,
source_id: &SourceId) -> CargoResult<Dependency> {
let version_req = match version {
Some(v) => try!(VersionReq::parse(v)),
None => VersionReq::any()
};
Ok(Dependency {
only_match_name: false,
req: version_req,
specified_req: version.map(|s| s.to_string()),
.. Dependency::new_override(name, source_id)
})
}
pub fn new_override(name: &str, source_id: &SourceId) -> Dependency {
Dependency {
name: name.to_string(),
source_id: source_id.clone(),
req: VersionReq::any(),
kind: Kind::Normal,
only_match_name: true,
optional: false,
features: Vec::new(),
default_features: true,
specified_req: None,
only_for_platform: None,
}
}
pub fn version_req(&self) -> &VersionReq { &self.req }
pub fn name(&self) -> &str { &self.name }
pub fn source_id(&self) -> &SourceId { &self.source_id }
pub fn kind(&self) -> Kind { self.kind }
pub fn specified_req(&self) -> Option<&str> {
self.specified_req.as_ref().map(|s| &s[..])
}
/// If none, this dependencies must be built for all platforms.
/// If some, it must only be built for the specified platform.
pub fn only_for_platform(&self) -> Option<&str> {
self.only_for_platform.as_ref().map(|s| &s[..])
}
pub fn set_kind(mut self, kind: Kind) -> Dependency {
self.kind = kind;
self
}
/// Sets the list of features requested for the package.
pub fn set_features(mut self, features: Vec<String>) -> Dependency {
self.features = features;
self
}
/// Sets whether the dependency requests default features of the package.
pub fn set_default_features(mut self, default_features: bool) -> Dependency {
self.default_features = default_features;
self
}
/// Sets whether the dependency is optional.
pub fn set_optional(mut self, optional: bool) -> Dependency {
self.optional = optional;
|
/// Set the source id for this dependency
pub fn set_source_id(mut self, id: SourceId) -> Dependency {
self.source_id = id;
self
}
/// Set the version requirement for this dependency
pub fn set_version_req(mut self, req: VersionReq) -> Dependency {
self.req = req;
self
}
pub fn set_only_for_platform(mut self, platform: Option<String>)
-> Dependency {
self.only_for_platform = platform;
self
}
/// Lock this dependency to depending on the specified package id
pub fn lock_to(self, id: &PackageId) -> Dependency {
assert_eq!(self.source_id, *id.source_id());
assert!(self.req.matches(id.version()));
self.set_version_req(VersionReq::exact(id.version()))
.set_source_id(id.source_id().clone())
}
/// Returns false if the dependency is only used to build the local package.
pub fn is_transitive(&self) -> bool {
match self.kind {
Kind::Normal | Kind::Build => true,
Kind::Development => false,
}
}
pub fn is_build(&self) -> bool {
match self.kind { Kind::Build => true, _ => false }
}
pub fn is_optional(&self) -> bool { self.optional }
/// Returns true if the default features of the dependency are requested.
pub fn uses_default_features(&self) -> bool { self.default_features }
/// Returns the list of features that are requested by the dependency.
pub fn features(&self) -> &[String] { &self.features }
/// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches(&self, sum: &Summary) -> bool {
self.matches_id(sum.package_id())
}
/// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool {
self.name == id.name() &&
(self.only_match_name || (self.req.matches(id.version()) &&
&self.source_id == id.source_id()))
}
}
#[derive(PartialEq,Clone,RustcEncodable)]
pub struct SerializedDependency {
name: String,
req: String
}
impl SerializedDependency {
pub fn from_dependency(dep: &Dependency) -> SerializedDependency {
SerializedDependency {
name: dep.name().to_string(),
req: dep.version_req().to_string()
}
}
}
|
self
}
|
random_line_split
|
lib.rs
|
/****************************************************************************
Copyright (c) 2015 Roland Ruckerbauer All Rights Reserved.
This file is part of hidapi_rust.
hidapi_rust is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
hidapi_rust is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with hidapi_rust. If not, see <http://www.gnu.org/licenses/>.
****************************************************************************/
extern crate libc;
mod ffi;
use std::ffi::{CStr};
use libc::{wchar_t, c_char, size_t};
pub use libc::{c_ushort, c_int};
/// Handle to the hidapi C library; at most one may exist at a time.
pub struct HidApi;
// Global guard ensuring only one `HidApi` is alive (see `HidApi::new`).
// NOTE(review): plain `static mut` — not thread-safe; confirm single-threaded use.
static mut hid_api_lock: bool = false;
impl HidApi {
    /// Initializes the hidapi C library and claims the global lock.
    /// Fails if another `HidApi` instance is already alive.
    pub fn new() -> Result<Self, &'static str> {
        if unsafe {!hid_api_lock} {
            unsafe {
                ffi::hid_init();
                hid_api_lock = true;
            }
            Ok(HidApi)
        }else {
            Err("Error already one HidApi in use.")
        }
    }
    /// Enumerates all attached HID devices (vendor/product id 0 = wildcard).
    /// The returned iterator owns the C list and frees it on drop.
    pub fn enumerate_info(&self) -> HidDeviceInfoEnumeration {
        let list = unsafe {ffi::hid_enumerate(0, 0)};
        HidDeviceInfoEnumeration {
            _hid_device_info: list,
            _next: list,
        }
    }
    /// Opens the first device matching `vendor_id`/`product_id`
    /// (null serial number = any). The device borrows `self` so it
    /// cannot outlive the library handle.
    pub fn open(&self, vendor_id: c_ushort, product_id: c_ushort)
            -> Result<HidDevice, &'static str> {
        let device = unsafe {ffi::hid_open(vendor_id, product_id, std::ptr::null())};
        if device.is_null() {
            Err("Can not open hid device.")
        }else {
            Ok(HidDevice {_hid_device: device, api: self})
        }
    }
}
impl Drop for HidApi {
    /// Shuts down the hidapi C library and releases the global lock so a
    /// new `HidApi` can be created afterwards.
    fn drop(&mut self) {
        unsafe {
            ffi::hid_exit();
            hid_api_lock = false;
        }
    }
}
/// Converts a C wide string to an owned Rust `String` via `wcstombs`.
/// First call measures the multibyte length, second call converts.
/// NOTE(review): the buffer is exposed uninitialized via `set_len` before
/// `wcstombs` fills it, and a `wcstombs` failure return of `(size_t)-1`
/// is not checked — confirm inputs are always convertible.
unsafe fn wcs_to_string<'a>(src: *const wchar_t) -> String {
    let length = ffi::wcstombs(std::ptr::null_mut(), src, 0);
    let mut chars = Vec::<c_char>::with_capacity(length as usize + 1);
    chars.set_len(length as usize + 1);
    let ptr = chars.as_mut_ptr();
    ffi::wcstombs(ptr, src, length);
    // NUL-terminate so CStr::from_ptr stops at the converted text.
    chars[length as usize] = 0;
    std::str::from_utf8(CStr::from_ptr(ptr).to_bytes()).unwrap().to_owned()
}
/// Copies one C `HidDeviceInfo` node into an owned Rust struct,
/// converting C and wide strings to `String`s.
/// NOTE(review): panics (`unwrap`) if the path is not valid UTF-8.
unsafe fn conv_hid_device_info(src: *mut ffi::HidDeviceInfo) -> HidDeviceInfo {
    HidDeviceInfo {
        path: std::str::from_utf8(CStr::from_ptr((*src).path).to_bytes()).unwrap().to_owned(),
        vendor_id: (*src).vendor_id,
        product_id: (*src).product_id,
        //serial_number: wcs_to_string((*src).serial_number),
        release_number: (*src).release_number,
        manufactor_string: wcs_to_string((*src).manufactor_string),
        product_string: wcs_to_string((*src).product_string),
        usage_page: (*src).usage_page,
        usage: (*src).usage,
        interface_number: (*src).interface_number,
    }
}
/// Iterator over the C-owned device-info linked list returned by
/// `hid_enumerate`; frees the whole list when dropped.
pub struct HidDeviceInfoEnumeration {
    // Head of the C list, kept so Drop can free it.
    _hid_device_info: *mut ffi::HidDeviceInfo,
    // Cursor: next node to yield.
    _next: *mut ffi::HidDeviceInfo,
}
impl Drop for HidDeviceInfoEnumeration {
    /// Releases the C-allocated enumeration list via hidapi.
    fn drop(&mut self) {
        unsafe {
            ffi::hid_free_enumeration(self._hid_device_info);
        }
    }
}
impl Iterator for HidDeviceInfoEnumeration {
type Item = HidDeviceInfo;
fn next(&mut self) -> Option<HidDeviceInfo> {
if self._next.is_null() {
None
}else {
let ret = self._next;
self._next = unsafe {(*self._next).next};
Some(unsafe {conv_hid_device_info(ret)})
}
|
#[derive(Debug)]
/// Owned snapshot of one enumerated HID device's metadata.
pub struct HidDeviceInfo {
    // Platform-specific device path usable to open the device.
    path: String,
    vendor_id: c_ushort,
    product_id: c_ushort,
    //serial_number: String,
    release_number: c_ushort,
    manufactor_string: String,
    product_string: String,
    usage_page: c_ushort,
    usage: c_ushort,
    interface_number: c_int,
}
/// An open HID device; borrows the `HidApi` so it cannot outlive the
/// library handle, and closes the device on drop.
pub struct HidDevice<'a> {
    // Raw hidapi device handle.
    _hid_device: *mut ffi::HidDevice,
    #[allow(dead_code)]
    api: &'a HidApi, // Just to keep everything safe.
}
impl<'a> Drop for HidDevice<'a> {
    /// Closes the underlying hidapi device handle.
    fn drop(&mut self) {
        unsafe {ffi::hid_close(self._hid_device)};
    }
}
impl <'a> HidDevice<'a> {
    /// Writes an output report; returns the number of bytes written,
    /// or a negative value on error (hidapi convention).
    pub fn write(&self, data: &[u8]) -> c_int {
        unsafe {ffi::hid_write(self._hid_device, data.as_ptr(), data.len() as size_t)}
    }
    /// Sends a feature report; same return convention as `write`.
    pub fn send_feature_report(&self, data: &[u8]) -> c_int {
        unsafe {
            ffi::hid_send_feature_report(self._hid_device, data.as_ptr(), data.len() as size_t)
        }
    }
}
|
}
}
|
random_line_split
|
lib.rs
|
/****************************************************************************
Copyright (c) 2015 Roland Ruckerbauer All Rights Reserved.
This file is part of hidapi_rust.
hidapi_rust is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
hidapi_rust is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with hidapi_rust. If not, see <http://www.gnu.org/licenses/>.
****************************************************************************/
extern crate libc;
mod ffi;
use std::ffi::{CStr};
use libc::{wchar_t, c_char, size_t};
pub use libc::{c_ushort, c_int};
pub struct HidApi;
static mut hid_api_lock: bool = false;
impl HidApi {
pub fn new() -> Result<Self, &'static str> {
if unsafe {!hid_api_lock} {
unsafe {
ffi::hid_init();
hid_api_lock = true;
}
Ok(HidApi)
}else {
Err("Error already one HidApi in use.")
}
}
pub fn enumerate_info(&self) -> HidDeviceInfoEnumeration {
let list = unsafe {ffi::hid_enumerate(0, 0)};
HidDeviceInfoEnumeration {
_hid_device_info: list,
_next: list,
}
}
pub fn open(&self, vendor_id: c_ushort, product_id: c_ushort)
-> Result<HidDevice, &'static str> {
let device = unsafe {ffi::hid_open(vendor_id, product_id, std::ptr::null())};
if device.is_null() {
Err("Can not open hid device.")
}else
|
}
}
impl Drop for HidApi {
fn drop(&mut self) {
unsafe {
ffi::hid_exit();
hid_api_lock = false;
}
}
}
unsafe fn wcs_to_string<'a>(src: *const wchar_t) -> String {
let length = ffi::wcstombs(std::ptr::null_mut(), src, 0);
let mut chars = Vec::<c_char>::with_capacity(length as usize + 1);
chars.set_len(length as usize + 1);
let ptr = chars.as_mut_ptr();
ffi::wcstombs(ptr, src, length);
chars[length as usize] = 0;
std::str::from_utf8(CStr::from_ptr(ptr).to_bytes()).unwrap().to_owned()
}
unsafe fn conv_hid_device_info(src: *mut ffi::HidDeviceInfo) -> HidDeviceInfo {
HidDeviceInfo {
path: std::str::from_utf8(CStr::from_ptr((*src).path).to_bytes()).unwrap().to_owned(),
vendor_id: (*src).vendor_id,
product_id: (*src).product_id,
//serial_number: wcs_to_string((*src).serial_number),
release_number: (*src).release_number,
manufactor_string: wcs_to_string((*src).manufactor_string),
product_string: wcs_to_string((*src).product_string),
usage_page: (*src).usage_page,
usage: (*src).usage,
interface_number: (*src).interface_number,
}
}
pub struct HidDeviceInfoEnumeration {
_hid_device_info: *mut ffi::HidDeviceInfo,
_next: *mut ffi::HidDeviceInfo,
}
impl Drop for HidDeviceInfoEnumeration {
fn drop(&mut self) {
unsafe {
ffi::hid_free_enumeration(self._hid_device_info);
}
}
}
impl Iterator for HidDeviceInfoEnumeration {
type Item = HidDeviceInfo;
fn next(&mut self) -> Option<HidDeviceInfo> {
if self._next.is_null() {
None
}else {
let ret = self._next;
self._next = unsafe {(*self._next).next};
Some(unsafe {conv_hid_device_info(ret)})
}
}
}
#[derive(Debug)]
pub struct HidDeviceInfo {
path: String,
vendor_id: c_ushort,
product_id: c_ushort,
//serial_number: String,
release_number: c_ushort,
manufactor_string: String,
product_string: String,
usage_page: c_ushort,
usage: c_ushort,
interface_number: c_int,
}
pub struct HidDevice<'a> {
_hid_device: *mut ffi::HidDevice,
#[allow(dead_code)]
api: &'a HidApi, // Just to keep everything safe.
}
impl<'a> Drop for HidDevice<'a> {
fn drop(&mut self) {
unsafe {ffi::hid_close(self._hid_device)};
}
}
impl <'a> HidDevice<'a> {
pub fn write(&self, data: &[u8]) -> c_int {
unsafe {ffi::hid_write(self._hid_device, data.as_ptr(), data.len() as size_t)}
}
pub fn send_feature_report(&self, data: &[u8]) -> c_int {
unsafe {
ffi::hid_send_feature_report(self._hid_device, data.as_ptr(), data.len() as size_t)
}
}
}
|
{
Ok(HidDevice {_hid_device: device, api: self})
}
|
conditional_block
|
lib.rs
|
/****************************************************************************
Copyright (c) 2015 Roland Ruckerbauer All Rights Reserved.
This file is part of hidapi_rust.
hidapi_rust is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
hidapi_rust is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with hidapi_rust. If not, see <http://www.gnu.org/licenses/>.
****************************************************************************/
extern crate libc;
mod ffi;
use std::ffi::{CStr};
use libc::{wchar_t, c_char, size_t};
pub use libc::{c_ushort, c_int};
pub struct HidApi;
static mut hid_api_lock: bool = false;
impl HidApi {
pub fn new() -> Result<Self, &'static str> {
if unsafe {!hid_api_lock} {
unsafe {
ffi::hid_init();
hid_api_lock = true;
}
Ok(HidApi)
}else {
Err("Error already one HidApi in use.")
}
}
pub fn enumerate_info(&self) -> HidDeviceInfoEnumeration {
let list = unsafe {ffi::hid_enumerate(0, 0)};
HidDeviceInfoEnumeration {
_hid_device_info: list,
_next: list,
}
}
pub fn open(&self, vendor_id: c_ushort, product_id: c_ushort)
-> Result<HidDevice, &'static str> {
let device = unsafe {ffi::hid_open(vendor_id, product_id, std::ptr::null())};
if device.is_null() {
Err("Can not open hid device.")
}else {
Ok(HidDevice {_hid_device: device, api: self})
}
}
}
impl Drop for HidApi {
fn drop(&mut self) {
unsafe {
ffi::hid_exit();
hid_api_lock = false;
}
}
}
unsafe fn wcs_to_string<'a>(src: *const wchar_t) -> String
|
unsafe fn conv_hid_device_info(src: *mut ffi::HidDeviceInfo) -> HidDeviceInfo {
HidDeviceInfo {
path: std::str::from_utf8(CStr::from_ptr((*src).path).to_bytes()).unwrap().to_owned(),
vendor_id: (*src).vendor_id,
product_id: (*src).product_id,
//serial_number: wcs_to_string((*src).serial_number),
release_number: (*src).release_number,
manufactor_string: wcs_to_string((*src).manufactor_string),
product_string: wcs_to_string((*src).product_string),
usage_page: (*src).usage_page,
usage: (*src).usage,
interface_number: (*src).interface_number,
}
}
pub struct HidDeviceInfoEnumeration {
_hid_device_info: *mut ffi::HidDeviceInfo,
_next: *mut ffi::HidDeviceInfo,
}
impl Drop for HidDeviceInfoEnumeration {
fn drop(&mut self) {
unsafe {
ffi::hid_free_enumeration(self._hid_device_info);
}
}
}
impl Iterator for HidDeviceInfoEnumeration {
type Item = HidDeviceInfo;
fn next(&mut self) -> Option<HidDeviceInfo> {
if self._next.is_null() {
None
}else {
let ret = self._next;
self._next = unsafe {(*self._next).next};
Some(unsafe {conv_hid_device_info(ret)})
}
}
}
#[derive(Debug)]
pub struct HidDeviceInfo {
path: String,
vendor_id: c_ushort,
product_id: c_ushort,
//serial_number: String,
release_number: c_ushort,
manufactor_string: String,
product_string: String,
usage_page: c_ushort,
usage: c_ushort,
interface_number: c_int,
}
pub struct HidDevice<'a> {
_hid_device: *mut ffi::HidDevice,
#[allow(dead_code)]
api: &'a HidApi, // Just to keep everything safe.
}
impl<'a> Drop for HidDevice<'a> {
fn drop(&mut self) {
unsafe {ffi::hid_close(self._hid_device)};
}
}
impl <'a> HidDevice<'a> {
pub fn write(&self, data: &[u8]) -> c_int {
unsafe {ffi::hid_write(self._hid_device, data.as_ptr(), data.len() as size_t)}
}
pub fn send_feature_report(&self, data: &[u8]) -> c_int {
unsafe {
ffi::hid_send_feature_report(self._hid_device, data.as_ptr(), data.len() as size_t)
}
}
}
|
{
let length = ffi::wcstombs(std::ptr::null_mut(), src, 0);
let mut chars = Vec::<c_char>::with_capacity(length as usize + 1);
chars.set_len(length as usize + 1);
let ptr = chars.as_mut_ptr();
ffi::wcstombs(ptr, src, length);
chars[length as usize] = 0;
std::str::from_utf8(CStr::from_ptr(ptr).to_bytes()).unwrap().to_owned()
}
|
identifier_body
|
lib.rs
|
/****************************************************************************
Copyright (c) 2015 Roland Ruckerbauer All Rights Reserved.
This file is part of hidapi_rust.
hidapi_rust is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
hidapi_rust is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with hidapi_rust. If not, see <http://www.gnu.org/licenses/>.
****************************************************************************/
extern crate libc;
mod ffi;
use std::ffi::{CStr};
use libc::{wchar_t, c_char, size_t};
pub use libc::{c_ushort, c_int};
/// Handle owning global hidapi initialisation; only one may exist at a time.
pub struct HidApi;
// Guards against a second `HidApi` while one is alive.
// NOTE(review): plain `static mut` read-then-write is not thread-safe if
// two threads call `HidApi::new()` concurrently; an atomic would be needed.
static mut hid_api_lock: bool = false;
impl HidApi {
    /// Initialises hidapi and returns the singleton handle, or an error
    /// if one is already live.
    pub fn new() -> Result<Self, &'static str> {
        if unsafe {!hid_api_lock} {
            unsafe {
                ffi::hid_init();
                hid_api_lock = true;
            }
            Ok(HidApi)
        }else {
            Err("Error already one HidApi in use.")
        }
    }
    /// Enumerates HID devices. Passes (0, 0) as vendor/product id —
    /// presumably hidapi's "match everything" wildcard; confirm.
    pub fn enumerate_info(&self) -> HidDeviceInfoEnumeration {
        let list = unsafe {ffi::hid_enumerate(0, 0)};
        HidDeviceInfoEnumeration {
            // Same pointer twice: head for freeing, cursor for iterating.
            _hid_device_info: list,
            _next: list,
        }
    }
    /// Opens the first device matching vendor/product id (null serial
    /// means no serial-number filter).
    pub fn open(&self, vendor_id: c_ushort, product_id: c_ushort)
            -> Result<HidDevice, &'static str> {
        let device = unsafe {ffi::hid_open(vendor_id, product_id, std::ptr::null())};
        if device.is_null() {
            Err("Can not open hid device.")
        }else {
            Ok(HidDevice {_hid_device: device, api: self})
        }
    }
}
impl Drop for HidApi {
fn
|
(&mut self) {
unsafe {
ffi::hid_exit();
hid_api_lock = false;
}
}
}
/// Converts a NUL-terminated wide string into an owned UTF-8 `String`
/// via the C `wcstombs` conversion.
///
/// # Safety
/// `src` must be a valid, NUL-terminated `wchar_t` string.
unsafe fn wcs_to_string<'a>(src: *const wchar_t) -> String {
    // Sizing call: with a null destination, wcstombs returns the number
    // of bytes the conversion would need, or (size_t)-1 on failure.
    let length = ffi::wcstombs(std::ptr::null_mut(), src, 0);
    if length == !0 {
        // Unconvertible input: previously this fell through and tried to
        // allocate usize::MAX + 1 bytes (capacity overflow). Return an
        // empty string instead.
        return String::new();
    }
    let mut chars = Vec::<c_char>::with_capacity(length as usize + 1);
    chars.set_len(length as usize + 1);
    let ptr = chars.as_mut_ptr();
    // Second call performs the actual conversion (writes `length` bytes,
    // no terminator); we append the NUL ourselves.
    ffi::wcstombs(ptr, src, length);
    chars[length as usize] = 0;
    // NOTE(review): panics if the converted bytes are not valid UTF-8.
    std::str::from_utf8(CStr::from_ptr(ptr).to_bytes()).unwrap().to_owned()
}
/// Copies one node of hidapi's enumeration list into an owned
/// `HidDeviceInfo`.
///
/// # Safety
/// `src` must point to a valid `ffi::HidDeviceInfo` whose `path` is a
/// NUL-terminated C string and whose wide-string fields are valid input
/// for `wcs_to_string`.
unsafe fn conv_hid_device_info(src: *mut ffi::HidDeviceInfo) -> HidDeviceInfo {
    HidDeviceInfo {
        // NOTE(review): `unwrap` panics if the device path is not valid
        // UTF-8 — confirm this is acceptable on all target platforms.
        path: std::str::from_utf8(CStr::from_ptr((*src).path).to_bytes()).unwrap().to_owned(),
        vendor_id: (*src).vendor_id,
        product_id: (*src).product_id,
        //serial_number: wcs_to_string((*src).serial_number),
        release_number: (*src).release_number,
        manufactor_string: wcs_to_string((*src).manufactor_string),
        product_string: wcs_to_string((*src).product_string),
        usage_page: (*src).usage_page,
        usage: (*src).usage,
        interface_number: (*src).interface_number,
    }
}
/// Owning iterator over the linked list returned by `ffi::hid_enumerate`.
pub struct HidDeviceInfoEnumeration {
    // Head of the list; retained so Drop can free the whole enumeration.
    _hid_device_info: *mut ffi::HidDeviceInfo,
    // Cursor: next node to yield; null once exhausted.
    _next: *mut ffi::HidDeviceInfo,
}
impl Drop for HidDeviceInfoEnumeration {
    fn drop(&mut self) {
        unsafe {
            // Frees every node at once — yielded items are owned copies.
            ffi::hid_free_enumeration(self._hid_device_info);
        }
    }
}
impl Iterator for HidDeviceInfoEnumeration {
    type Item = HidDeviceInfo;

    /// Yields an owned snapshot of the next device record and advances
    /// the cursor along hidapi's linked list; `None` at the end.
    fn next(&mut self) -> Option<HidDeviceInfo> {
        if self._next.is_null() {
            return None;
        }
        let current = self._next;
        // Step the cursor forward, then convert the node just passed
        // (conversion allocates fresh owned Strings).
        self._next = unsafe { (*current).next };
        Some(unsafe { conv_hid_device_info(current) })
    }
}
/// Owned, printable snapshot of one enumerated device's metadata,
/// copied out of hidapi's C structures by `conv_hid_device_info`.
#[derive(Debug)]
pub struct HidDeviceInfo {
    // Device path converted from hidapi's C string.
    path: String,
    vendor_id: c_ushort,
    product_id: c_ushort,
    //serial_number: String,
    release_number: c_ushort,
    // NOTE(review): "manufactor" misspells "manufacturer"; left as-is to
    // avoid touching every use site.
    manufactor_string: String,
    product_string: String,
    usage_page: c_ushort,
    usage: c_ushort,
    interface_number: c_int,
}
/// Open handle to a single HID device; closed automatically on drop.
pub struct HidDevice<'a> {
    // Raw handle returned by `ffi::hid_open`.
    _hid_device: *mut ffi::HidDevice,
    #[allow(dead_code)]
    // Borrow ties the device's lifetime to the live HidApi instance.
    api: &'a HidApi, // Just to keep everything safe.
}
impl<'a> Drop for HidDevice<'a> {
    fn drop(&mut self) {
        // Close the underlying hidapi handle when the wrapper dies.
        unsafe {ffi::hid_close(self._hid_device)};
    }
}
impl <'a> HidDevice<'a> {
    /// Sends an output report; returns the raw hidapi result.
    /// NOTE(review): presumably bytes written, or -1 on error — confirm
    /// against the hidapi C documentation.
    pub fn write(&self, data: &[u8]) -> c_int {
        unsafe {ffi::hid_write(self._hid_device, data.as_ptr(), data.len() as size_t)}
    }
    /// Sends a feature report; returns the raw hidapi result.
    pub fn send_feature_report(&self, data: &[u8]) -> c_int {
        unsafe {
            ffi::hid_send_feature_report(self._hid_device, data.as_ptr(), data.len() as size_t)
        }
    }
}
|
drop
|
identifier_name
|
util.rs
|
use std::borrow::Cow;
/// Convert string to camel case.
///
/// Note: needs to be public because several macros use it.
#[doc(hidden)]
pub fn to_camel_case(s: &'_ str) -> Cow<'_, str>
|
.unwrap()
.to_uppercase()
.collect::<String>();
let second = &part[1..];
dest += Cow::Owned(first);
dest += second;
} else if i == 0 {
dest = Cow::Borrowed(part);
}
}
dest
}
#[test]
fn test_to_camel_case() {
    // Covers the plain, leading-underscore, trailing-underscore,
    // single-char-segment, and empty-string cases of `to_camel_case`.
    assert_eq!(&to_camel_case("test")[..], "test");
    assert_eq!(&to_camel_case("_test")[..], "test");
    assert_eq!(&to_camel_case("first_second")[..], "firstSecond");
    assert_eq!(&to_camel_case("first_")[..], "first");
    assert_eq!(&to_camel_case("a_b_c")[..], "aBC");
    assert_eq!(&to_camel_case("a_bc")[..], "aBc");
    assert_eq!(&to_camel_case("a_b")[..], "aB");
    assert_eq!(&to_camel_case("a")[..], "a");
    assert_eq!(&to_camel_case("")[..], "");
}
|
{
let mut dest = Cow::Borrowed(s);
// handle '_' to be more friendly with the
// _var convention for unused variables
let s_iter = if let Some(stripped) = s.strip_prefix('_') {
stripped
} else {
s
}
.split('_')
.enumerate();
for (i, part) in s_iter {
if i > 0 && part.len() == 1 {
dest += Cow::Owned(part.to_uppercase());
} else if i > 0 && part.len() > 1 {
let first = part
.chars()
.next()
|
identifier_body
|
util.rs
|
use std::borrow::Cow;
/// Convert string to camel case.
///
/// Note: needs to be public because several macros use it.
#[doc(hidden)]
pub fn
|
(s: &'_ str) -> Cow<'_, str> {
let mut dest = Cow::Borrowed(s);
// handle '_' to be more friendly with the
// _var convention for unused variables
let s_iter = if let Some(stripped) = s.strip_prefix('_') {
stripped
} else {
s
}
.split('_')
.enumerate();
for (i, part) in s_iter {
if i > 0 && part.len() == 1 {
dest += Cow::Owned(part.to_uppercase());
} else if i > 0 && part.len() > 1 {
let first = part
.chars()
.next()
.unwrap()
.to_uppercase()
.collect::<String>();
let second = &part[1..];
dest += Cow::Owned(first);
dest += second;
} else if i == 0 {
dest = Cow::Borrowed(part);
}
}
dest
}
#[test]
fn test_to_camel_case() {
    // Covers the plain, leading-underscore, trailing-underscore,
    // single-char-segment, and empty-string cases of `to_camel_case`.
    assert_eq!(&to_camel_case("test")[..], "test");
    assert_eq!(&to_camel_case("_test")[..], "test");
    assert_eq!(&to_camel_case("first_second")[..], "firstSecond");
    assert_eq!(&to_camel_case("first_")[..], "first");
    assert_eq!(&to_camel_case("a_b_c")[..], "aBC");
    assert_eq!(&to_camel_case("a_bc")[..], "aBc");
    assert_eq!(&to_camel_case("a_b")[..], "aB");
    assert_eq!(&to_camel_case("a")[..], "a");
    assert_eq!(&to_camel_case("")[..], "");
}
|
to_camel_case
|
identifier_name
|
util.rs
|
use std::borrow::Cow;
|
///
/// Note: needs to be public because several macros use it.
#[doc(hidden)]
/// Convert a snake_case string to camelCase.
///
/// A single leading underscore is stripped (friendly to the `_unused`
/// naming convention); empty segments from doubled or trailing
/// underscores are ignored. Borrows the input unchanged when no
/// conversion is needed.
///
/// Note: needs to be public because several macros use it.
#[doc(hidden)]
pub fn to_camel_case(s: &'_ str) -> Cow<'_, str> {
    let mut dest = Cow::Borrowed(s);
    // handle '_' to be more friendly with the
    // _var convention for unused variables
    let s_iter = if let Some(stripped) = s.strip_prefix('_') {
        stripped
    } else {
        s
    }
    .split('_')
    .enumerate();
    for (i, part) in s_iter {
        if i == 0 {
            // First segment keeps its casing and can stay borrowed.
            dest = Cow::Borrowed(part);
            continue;
        }
        // Split after the first *character*, not the first byte: the old
        // `&part[1..]` slicing panicked when a segment started with a
        // multi-byte UTF-8 character.
        let mut rest = part.chars();
        if let Some(first) = rest.next() {
            dest += Cow::Owned(first.to_uppercase().collect::<String>());
            dest += rest.as_str();
        }
    }
    dest
}
#[test]
fn test_to_camel_case() {
    // Covers the plain, leading-underscore, trailing-underscore,
    // single-char-segment, and empty-string cases of `to_camel_case`.
    assert_eq!(&to_camel_case("test")[..], "test");
    assert_eq!(&to_camel_case("_test")[..], "test");
    assert_eq!(&to_camel_case("first_second")[..], "firstSecond");
    assert_eq!(&to_camel_case("first_")[..], "first");
    assert_eq!(&to_camel_case("a_b_c")[..], "aBC");
    assert_eq!(&to_camel_case("a_bc")[..], "aBc");
    assert_eq!(&to_camel_case("a_b")[..], "aB");
    assert_eq!(&to_camel_case("a")[..], "a");
    assert_eq!(&to_camel_case("")[..], "");
}
|
/// Convert string to camel case.
|
random_line_split
|
util.rs
|
use std::borrow::Cow;
/// Convert string to camel case.
///
/// Note: needs to be public because several macros use it.
#[doc(hidden)]
pub fn to_camel_case(s: &'_ str) -> Cow<'_, str> {
let mut dest = Cow::Borrowed(s);
// handle '_' to be more friendly with the
// _var convention for unused variables
let s_iter = if let Some(stripped) = s.strip_prefix('_') {
stripped
} else {
s
}
.split('_')
.enumerate();
for (i, part) in s_iter {
if i > 0 && part.len() == 1 {
dest += Cow::Owned(part.to_uppercase());
} else if i > 0 && part.len() > 1 {
let first = part
.chars()
.next()
.unwrap()
.to_uppercase()
.collect::<String>();
let second = &part[1..];
dest += Cow::Owned(first);
dest += second;
} else if i == 0
|
}
dest
}
#[test]
fn test_to_camel_case() {
    // Covers the plain, leading-underscore, trailing-underscore,
    // single-char-segment, and empty-string cases of `to_camel_case`.
    assert_eq!(&to_camel_case("test")[..], "test");
    assert_eq!(&to_camel_case("_test")[..], "test");
    assert_eq!(&to_camel_case("first_second")[..], "firstSecond");
    assert_eq!(&to_camel_case("first_")[..], "first");
    assert_eq!(&to_camel_case("a_b_c")[..], "aBC");
    assert_eq!(&to_camel_case("a_bc")[..], "aBc");
    assert_eq!(&to_camel_case("a_b")[..], "aB");
    assert_eq!(&to_camel_case("a")[..], "a");
    assert_eq!(&to_camel_case("")[..], "");
}
|
{
dest = Cow::Borrowed(part);
}
|
conditional_block
|
bloom.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Simple counting bloom filters.
use string_cache::{Atom, Namespace};
// Width in bits of a counter-array index.
const KEY_SIZE: uint = 12;
// Number of 8-bit counters: 2^KEY_SIZE.
const ARRAY_SIZE: uint = 1 << KEY_SIZE;
// Mask extracting the low KEY_SIZE bits of a hash as a slot index.
const KEY_MASK: u32 = (1 << KEY_SIZE) - 1;
// Shift at which the second KEY_SIZE-bit sub-hash starts (see hash2).
const KEY_SHIFT: uint = 16;
/// A counting Bloom filter with 8-bit counters. For now we assume
/// that having two hash functions is enough, but we may revisit that
/// decision later.
///
/// The filter uses an array with 2**KeySize entries.
///
/// Assuming a well-distributed hash function, a Bloom filter with
/// array size M containing N elements and
/// using k hash function has expected false positive rate exactly
///
/// $ (1 - (1 - 1/M)^{kN})^k $
///
/// because each array slot has a
///
/// $ (1 - 1/M)^{kN} $
///
/// chance of being 0, and the expected false positive rate is the
/// probability that all of the k hash functions will hit a nonzero
/// slot.
///
/// For reasonable assumptions (M large, kN large, which should both
/// hold if we're worried about false positives) about M and kN this
/// becomes approximately
///
/// $$ (1 - \exp(-kN/M))^k $$
///
/// For our special case of k == 2, that's $(1 - \exp(-2N/M))^2$,
/// or in other words
///
/// $$ N/M = -0.5 * \ln(1 - \sqrt(r)) $$
///
/// where r is the false positive rate. This can be used to compute
/// the desired KeySize for a given load N and false positive rate r.
///
/// If N/M is assumed small, then the false positive rate can
/// further be approximated as 4*N^2/M^2. So increasing KeySize by
/// 1, which doubles M, reduces the false positive rate by about a
/// factor of 4, and a false positive rate of 1% corresponds to
/// about M/N == 20.
///
/// What this means in practice is that for a few hundred keys using a
/// KeySize of 12 gives false positive rates on the order of 0.25-4%.
///
/// Similarly, using a KeySize of 10 would lead to a 4% false
/// positive rate for N == 100 and to quite bad false positive
/// rates for larger N.
pub struct BloomFilter {
    // One saturating 8-bit counter per slot; `[u8,..N]` is the pre-1.0
    // fixed-size array syntax (modern Rust spells it `[u8; N]`).
    counters: [u8,..ARRAY_SIZE],
}
impl Clone for BloomFilter {
    #[inline]
    fn clone(&self) -> BloomFilter {
        // Hand-written because derive cannot handle the large array field
        // in this Rust dialect; just copies the counter array.
        BloomFilter {
            counters: self.counters,
        }
    }
}
impl BloomFilter {
/// Creates a new bloom filter.
#[inline]
pub fn new() -> BloomFilter {
BloomFilter {
counters: [0,..ARRAY_SIZE],
}
}
#[inline]
fn first_slot(&self, hash: u32) -> &u8 {
&self.counters[hash1(hash) as uint]
}
#[inline]
fn first_mut_slot(&mut self, hash: u32) -> &mut u8 {
&mut self.counters[hash1(hash) as uint]
}
#[inline]
fn second_slot(&self, hash: u32) -> &u8 {
&self.counters[hash2(hash) as uint]
}
#[inline]
fn
|
(&mut self, hash: u32) -> &mut u8 {
&mut self.counters[hash2(hash) as uint]
}
#[inline]
pub fn clear(&mut self) {
self.counters = [0,..ARRAY_SIZE]
}
#[inline]
fn insert_hash(&mut self, hash: u32) {
{
let slot1 = self.first_mut_slot(hash);
if!full(slot1) {
*slot1 += 1
}
}
{
let slot2 = self.second_mut_slot(hash);
if!full(slot2) {
*slot2 += 1
}
}
}
/// Inserts an item into the bloom filter.
#[inline]
pub fn insert<T:BloomHash>(&mut self, elem: &T) {
self.insert_hash(elem.bloom_hash())
}
#[inline]
fn remove_hash(&mut self, hash: u32) {
{
let slot1 = self.first_mut_slot(hash);
if!full(slot1) {
*slot1 -= 1
}
}
{
let slot2 = self.second_mut_slot(hash);
if!full(slot2) {
*slot2 -= 1
}
}
}
/// Removes an item from the bloom filter.
#[inline]
pub fn remove<T:BloomHash>(&mut self, elem: &T) {
self.remove_hash(elem.bloom_hash())
}
#[inline]
fn might_contain_hash(&self, hash: u32) -> bool {
*self.first_slot(hash)!= 0 && *self.second_slot(hash)!= 0
}
/// Check whether the filter might contain an item. This can
/// sometimes return true even if the item is not in the filter,
/// but will never return false for items that are actually in the
/// filter.
#[inline]
pub fn might_contain<T:BloomHash>(&self, elem: &T) -> bool {
self.might_contain_hash(elem.bloom_hash())
}
}
/// Item hash consumed by `BloomFilter`: one 32-bit value per item, from
/// which both slot indices are derived.
pub trait BloomHash {
    fn bloom_hash(&self) -> u32;
}
impl BloomHash for int {
    // `>> 32` overflows the shift width on 32-bit targets, hence the allow.
    #[allow(exceeding_bitshifts)]
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // Fold the high half onto the low half.
        ((*self >> 32) ^ *self) as u32
    }
}
impl BloomHash for uint {
    #[allow(exceeding_bitshifts)]
    #[inline]
    fn bloom_hash(&self) -> u32 {
        ((*self >> 32) ^ *self) as u32
    }
}
impl BloomHash for Atom {
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // Hashes the atom's packed 64-bit representation.
        ((self.data >> 32) ^ self.data) as u32
    }
}
impl BloomHash for Namespace {
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // A namespace hashes as its underlying atom.
        let Namespace(ref atom) = *self;
        atom.bloom_hash()
    }
}
#[inline]
fn full(slot: &u8) -> bool {
    // A counter saturates (and then sticks) at 0xff.
    *slot == 0xff
}
#[inline]
fn hash1(hash: u32) -> u32 {
    // First slot index: low KEY_SIZE bits.
    hash & KEY_MASK
}
#[inline]
fn hash2(hash: u32) -> u32 {
    // Second slot index: KEY_SIZE bits starting at bit KEY_SHIFT.
    (hash >> KEY_SHIFT) & KEY_MASK
}
#[test]
fn create_and_insert_some_stuff() {
    use std::iter::range;
    let mut bf = BloomFilter::new();
    // Every inserted key must be reported as (possibly) present.
    for i in range(0u, 1000) {
        bf.insert(&i);
    }
    for i in range(0u, 1000) {
        assert!(bf.might_contain(&i));
    }
    // Keys never inserted should rarely collide (~1% budget here).
    let false_positives =
        range(1001u, 2000).filter(|i| bf.might_contain(i)).count();
    // NOTE(review): missing `;` after this assert — only parses on the old
    // compiler this file targets.
    assert!(false_positives < 10) // 1%.
    for i in range(0u, 100) {
        bf.remove(&i);
    }
    // Removing some keys must not disturb the ones still present.
    for i in range(100u, 1000) {
        assert!(bf.might_contain(&i));
    }
    let false_positives = range(0u, 100).filter(|i| bf.might_contain(i)).count();
    assert!(false_positives < 2); // 2%.
    // clear() must empty the filter completely.
    bf.clear();
    for i in range(0u, 2000) {
        assert!(!bf.might_contain(&i));
    }
}
// Micro-benchmarks for insert/remove/query and for hashing alone.
#[cfg(test)]
mod bench {
    extern crate test;
    use std::hash::hash;
    use std::iter;
    use super::BloomFilter;
    // End-to-end: build, populate, partially drain, then probe.
    #[bench]
    fn create_insert_1000_remove_100_lookup_100(b: &mut test::Bencher) {
        b.iter(|| {
            let mut bf = BloomFilter::new();
            for i in iter::range(0u, 1000) {
                bf.insert(&i);
            }
            for i in iter::range(0u, 100) {
                bf.remove(&i);
            }
            for i in iter::range(100u, 200) {
                test::black_box(bf.might_contain(&i));
            }
        });
    }
    // Query cost on a populated filter; `i` advances so each probe differs.
    #[bench]
    fn might_contain(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        for i in iter::range(0u, 1000) {
            bf.insert(&i);
        }
        let mut i = 0u;
        b.bench_n(1000, |b| {
            b.iter(|| {
                test::black_box(bf.might_contain(&i));
                i += 1;
            });
        });
    }
    // Pure insertion cost into an initially empty filter.
    #[bench]
    fn insert(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        b.bench_n(1000, |b| {
            let mut i = 0u;
            b.iter(|| {
                test::black_box(bf.insert(&i));
                i += 1;
            });
        });
    }
    // Pure removal cost.
    #[bench]
    fn remove(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        // NOTE(review): bare `range` here (unlike `iter::range` above)
        // relies on the old prelude — inconsistent with the other benches.
        for i in range(0u, 1000) {
            bf.insert(&i);
        }
        b.bench_n(1000, |b| {
            let mut i = 0u;
            b.iter(|| {
                bf.remove(&i);
                i += 1;
            });
        });
        // Keep `bf` observably live so removals are not optimised away.
        test::black_box(bf.might_contain(&0u));
    }
    // Baseline: cost of the hash function alone.
    #[bench]
    fn hash_a_uint(b: &mut test::Bencher) {
        let mut i = 0u;
        b.iter(|| {
            test::black_box(hash(&i));
            i += 1;
        })
    }
}
|
second_mut_slot
|
identifier_name
|
bloom.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Simple counting bloom filters.
use string_cache::{Atom, Namespace};
// Width in bits of a counter-array index.
const KEY_SIZE: uint = 12;
// Number of 8-bit counters: 2^KEY_SIZE.
const ARRAY_SIZE: uint = 1 << KEY_SIZE;
// Mask extracting the low KEY_SIZE bits of a hash as a slot index.
const KEY_MASK: u32 = (1 << KEY_SIZE) - 1;
// Shift at which the second KEY_SIZE-bit sub-hash starts (see hash2).
const KEY_SHIFT: uint = 16;
/// A counting Bloom filter with 8-bit counters. For now we assume
/// that having two hash functions is enough, but we may revisit that
/// decision later.
///
/// The filter uses an array with 2**KeySize entries.
///
/// Assuming a well-distributed hash function, a Bloom filter with
/// array size M containing N elements and
/// using k hash function has expected false positive rate exactly
///
/// $ (1 - (1 - 1/M)^{kN})^k $
///
/// because each array slot has a
///
/// $ (1 - 1/M)^{kN} $
///
/// chance of being 0, and the expected false positive rate is the
/// probability that all of the k hash functions will hit a nonzero
/// slot.
///
/// For reasonable assumptions (M large, kN large, which should both
/// hold if we're worried about false positives) about M and kN this
/// becomes approximately
///
/// $$ (1 - \exp(-kN/M))^k $$
///
/// For our special case of k == 2, that's $(1 - \exp(-2N/M))^2$,
/// or in other words
///
/// $$ N/M = -0.5 * \ln(1 - \sqrt(r)) $$
///
/// where r is the false positive rate. This can be used to compute
/// the desired KeySize for a given load N and false positive rate r.
///
/// If N/M is assumed small, then the false positive rate can
/// further be approximated as 4*N^2/M^2. So increasing KeySize by
/// 1, which doubles M, reduces the false positive rate by about a
/// factor of 4, and a false positive rate of 1% corresponds to
/// about M/N == 20.
///
/// What this means in practice is that for a few hundred keys using a
/// KeySize of 12 gives false positive rates on the order of 0.25-4%.
///
/// Similarly, using a KeySize of 10 would lead to a 4% false
/// positive rate for N == 100 and to quite bad false positive
/// rates for larger N.
pub struct BloomFilter {
    // One saturating 8-bit counter per slot; `[u8,..N]` is the pre-1.0
    // fixed-size array syntax (modern Rust spells it `[u8; N]`).
    counters: [u8,..ARRAY_SIZE],
}
impl Clone for BloomFilter {
    #[inline]
    fn clone(&self) -> BloomFilter {
        // Hand-written because derive cannot handle the large array field
        // in this Rust dialect; just copies the counter array.
        BloomFilter {
            counters: self.counters,
        }
    }
}
impl BloomFilter {
    /// Creates a new bloom filter.
    #[inline]
    pub fn new() -> BloomFilter {
        BloomFilter {
            counters: [0,..ARRAY_SIZE],
        }
    }
    // Counter slot addressed by the first sub-hash (low KEY_SIZE bits).
    #[inline]
    fn first_slot(&self, hash: u32) -> &u8 {
        &self.counters[hash1(hash) as uint]
    }
    #[inline]
    fn first_mut_slot(&mut self, hash: u32) -> &mut u8 {
        &mut self.counters[hash1(hash) as uint]
    }
    // Counter slot addressed by the second sub-hash (bits above KEY_SHIFT).
    #[inline]
    fn second_slot(&self, hash: u32) -> &u8 {
        &self.counters[hash2(hash) as uint]
    }
    #[inline]
    fn second_mut_slot(&mut self, hash: u32) -> &mut u8 {
        &mut self.counters[hash2(hash) as uint]
    }
    /// Resets every counter to zero, emptying the filter.
    #[inline]
    pub fn clear(&mut self) {
        self.counters = [0,..ARRAY_SIZE]
    }
    // Increments both counters for `hash`, saturating at 0xff. A counter
    // that reaches 0xff is never changed again (see remove_hash), trading
    // accuracy for safety under heavy collisions.
    #[inline]
    fn insert_hash(&mut self, hash: u32) {
        {
            let slot1 = self.first_mut_slot(hash);
            if!full(slot1) {
                *slot1 += 1
            }
        }
        {
            let slot2 = self.second_mut_slot(hash);
            if!full(slot2) {
                *slot2 += 1
            }
        }
    }
    /// Inserts an item into the bloom filter.
    #[inline]
    pub fn insert<T:BloomHash>(&mut self, elem: &T) {
        self.insert_hash(elem.bloom_hash())
    }
    // Decrements both counters unless saturated (a full counter's true
    // count is unknown, so it must stay pinned at 0xff forever).
    // NOTE(review): removing a hash that was never inserted underflows a
    // zero counter — callers must only remove previously inserted items.
    #[inline]
    fn remove_hash(&mut self, hash: u32) {
        {
            let slot1 = self.first_mut_slot(hash);
            if!full(slot1) {
                *slot1 -= 1
            }
        }
        {
            let slot2 = self.second_mut_slot(hash);
            if!full(slot2) {
                *slot2 -= 1
            }
        }
    }
    /// Removes an item from the bloom filter.
    #[inline]
    pub fn remove<T:BloomHash>(&mut self, elem: &T) {
        self.remove_hash(elem.bloom_hash())
    }
    // An item is (possibly) present only if *both* counters are nonzero.
    #[inline]
    fn might_contain_hash(&self, hash: u32) -> bool {
        *self.first_slot(hash)!= 0 && *self.second_slot(hash)!= 0
    }
    /// Check whether the filter might contain an item. This can
    /// sometimes return true even if the item is not in the filter,
    /// but will never return false for items that are actually in the
    /// filter.
    #[inline]
    pub fn might_contain<T:BloomHash>(&self, elem: &T) -> bool {
        self.might_contain_hash(elem.bloom_hash())
    }
}
/// Item hash consumed by `BloomFilter`: one 32-bit value per item, from
/// which both slot indices are derived.
pub trait BloomHash {
    fn bloom_hash(&self) -> u32;
}
impl BloomHash for int {
    // `>> 32` overflows the shift width on 32-bit targets, hence the allow.
    #[allow(exceeding_bitshifts)]
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // Fold the high half onto the low half.
        ((*self >> 32) ^ *self) as u32
    }
}
impl BloomHash for uint {
    #[allow(exceeding_bitshifts)]
    #[inline]
    fn bloom_hash(&self) -> u32 {
        ((*self >> 32) ^ *self) as u32
    }
}
impl BloomHash for Atom {
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // Hashes the atom's packed 64-bit representation.
        ((self.data >> 32) ^ self.data) as u32
    }
}
impl BloomHash for Namespace {
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // A namespace hashes as its underlying atom.
        let Namespace(ref atom) = *self;
        atom.bloom_hash()
    }
}
/// First slot index: the low KEY_SIZE bits of the item hash.
#[inline]
fn hash1(hash: u32) -> u32 {
    hash & KEY_MASK
}
/// Second slot index: a KEY_SIZE-bit field starting at bit KEY_SHIFT.
#[inline]
fn hash2(hash: u32) -> u32 {
    let upper_bits = hash >> KEY_SHIFT;
    upper_bits & KEY_MASK
}
/// True once a counter has saturated at its maximum value.
#[inline]
fn full(slot: &u8) -> bool {
    *slot == 0xff
}
#[test]
fn create_and_insert_some_stuff() {
    use std::iter::range;
    let mut bf = BloomFilter::new();
    // Every inserted key must be reported as (possibly) present.
    for i in range(0u, 1000) {
        bf.insert(&i);
    }
    for i in range(0u, 1000) {
        assert!(bf.might_contain(&i));
    }
    // Keys never inserted should rarely collide (~1% budget here).
    let false_positives =
        range(1001u, 2000).filter(|i| bf.might_contain(i)).count();
    // NOTE(review): missing `;` after this assert — only parses on the old
    // compiler this file targets.
    assert!(false_positives < 10) // 1%.
    for i in range(0u, 100) {
        bf.remove(&i);
    }
    // Removing some keys must not disturb the ones still present.
    for i in range(100u, 1000) {
        assert!(bf.might_contain(&i));
    }
    let false_positives = range(0u, 100).filter(|i| bf.might_contain(i)).count();
    assert!(false_positives < 2); // 2%.
    // clear() must empty the filter completely.
    bf.clear();
    for i in range(0u, 2000) {
        assert!(!bf.might_contain(&i));
    }
}
#[cfg(test)]
mod bench {
extern crate test;
use std::hash::hash;
use std::iter;
use super::BloomFilter;
    // End-to-end: build, populate, partially drain, then probe.
    #[bench]
    fn create_insert_1000_remove_100_lookup_100(b: &mut test::Bencher) {
        b.iter(|| {
            let mut bf = BloomFilter::new();
            for i in iter::range(0u, 1000) {
                bf.insert(&i);
            }
            for i in iter::range(0u, 100) {
                bf.remove(&i);
            }
            for i in iter::range(100u, 200) {
                test::black_box(bf.might_contain(&i));
            }
        });
    }
    // Query cost on a populated filter; `i` advances so each probe differs.
    #[bench]
    fn might_contain(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        for i in iter::range(0u, 1000) {
            bf.insert(&i);
        }
        let mut i = 0u;
        b.bench_n(1000, |b| {
            b.iter(|| {
                test::black_box(bf.might_contain(&i));
                i += 1;
            });
        });
    }
    // Pure insertion cost into an initially empty filter.
    #[bench]
    fn insert(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        b.bench_n(1000, |b| {
            let mut i = 0u;
            b.iter(|| {
                test::black_box(bf.insert(&i));
                i += 1;
            });
        });
    }
    // Pure removal cost.
    #[bench]
    fn remove(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        // NOTE(review): bare `range` here (unlike `iter::range` above)
        // relies on the old prelude — inconsistent with the other benches.
        for i in range(0u, 1000) {
            bf.insert(&i);
        }
        b.bench_n(1000, |b| {
            let mut i = 0u;
            b.iter(|| {
                bf.remove(&i);
                i += 1;
            });
        });
        // Keep `bf` observably live so removals are not optimised away.
        test::black_box(bf.might_contain(&0u));
    }
|
#[bench]
fn hash_a_uint(b: &mut test::Bencher) {
let mut i = 0u;
b.iter(|| {
test::black_box(hash(&i));
i += 1;
})
}
}
|
random_line_split
|
|
bloom.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Simple counting bloom filters.
use string_cache::{Atom, Namespace};
// Width in bits of a counter-array index.
const KEY_SIZE: uint = 12;
// Number of 8-bit counters: 2^KEY_SIZE.
const ARRAY_SIZE: uint = 1 << KEY_SIZE;
// Mask extracting the low KEY_SIZE bits of a hash as a slot index.
const KEY_MASK: u32 = (1 << KEY_SIZE) - 1;
// Shift at which the second KEY_SIZE-bit sub-hash starts (see hash2).
const KEY_SHIFT: uint = 16;
/// A counting Bloom filter with 8-bit counters. For now we assume
/// that having two hash functions is enough, but we may revisit that
/// decision later.
///
/// The filter uses an array with 2**KeySize entries.
///
/// Assuming a well-distributed hash function, a Bloom filter with
/// array size M containing N elements and
/// using k hash function has expected false positive rate exactly
///
/// $ (1 - (1 - 1/M)^{kN})^k $
///
/// because each array slot has a
///
/// $ (1 - 1/M)^{kN} $
///
/// chance of being 0, and the expected false positive rate is the
/// probability that all of the k hash functions will hit a nonzero
/// slot.
///
/// For reasonable assumptions (M large, kN large, which should both
/// hold if we're worried about false positives) about M and kN this
/// becomes approximately
///
/// $$ (1 - \exp(-kN/M))^k $$
///
/// For our special case of k == 2, that's $(1 - \exp(-2N/M))^2$,
/// or in other words
///
/// $$ N/M = -0.5 * \ln(1 - \sqrt(r)) $$
///
/// where r is the false positive rate. This can be used to compute
/// the desired KeySize for a given load N and false positive rate r.
///
/// If N/M is assumed small, then the false positive rate can
/// further be approximated as 4*N^2/M^2. So increasing KeySize by
/// 1, which doubles M, reduces the false positive rate by about a
/// factor of 4, and a false positive rate of 1% corresponds to
/// about M/N == 20.
///
/// What this means in practice is that for a few hundred keys using a
/// KeySize of 12 gives false positive rates on the order of 0.25-4%.
///
/// Similarly, using a KeySize of 10 would lead to a 4% false
/// positive rate for N == 100 and to quite bad false positive
/// rates for larger N.
pub struct BloomFilter {
    // One saturating 8-bit counter per slot; `[u8,..N]` is the pre-1.0
    // fixed-size array syntax (modern Rust spells it `[u8; N]`).
    counters: [u8,..ARRAY_SIZE],
}
impl Clone for BloomFilter {
    #[inline]
    fn clone(&self) -> BloomFilter {
        // Hand-written because derive cannot handle the large array field
        // in this Rust dialect; just copies the counter array.
        BloomFilter {
            counters: self.counters,
        }
    }
}
impl BloomFilter {
/// Creates a new bloom filter.
#[inline]
pub fn new() -> BloomFilter {
BloomFilter {
counters: [0,..ARRAY_SIZE],
}
}
#[inline]
fn first_slot(&self, hash: u32) -> &u8 {
&self.counters[hash1(hash) as uint]
}
#[inline]
fn first_mut_slot(&mut self, hash: u32) -> &mut u8 {
&mut self.counters[hash1(hash) as uint]
}
#[inline]
fn second_slot(&self, hash: u32) -> &u8 {
&self.counters[hash2(hash) as uint]
}
#[inline]
fn second_mut_slot(&mut self, hash: u32) -> &mut u8 {
&mut self.counters[hash2(hash) as uint]
}
#[inline]
pub fn clear(&mut self) {
self.counters = [0,..ARRAY_SIZE]
}
#[inline]
fn insert_hash(&mut self, hash: u32) {
{
let slot1 = self.first_mut_slot(hash);
if!full(slot1) {
*slot1 += 1
}
}
{
let slot2 = self.second_mut_slot(hash);
if!full(slot2) {
*slot2 += 1
}
}
}
/// Inserts an item into the bloom filter.
#[inline]
pub fn insert<T:BloomHash>(&mut self, elem: &T) {
self.insert_hash(elem.bloom_hash())
}
#[inline]
fn remove_hash(&mut self, hash: u32) {
{
let slot1 = self.first_mut_slot(hash);
if!full(slot1) {
*slot1 -= 1
}
}
{
let slot2 = self.second_mut_slot(hash);
if!full(slot2) {
*slot2 -= 1
}
}
}
/// Removes an item from the bloom filter.
#[inline]
pub fn remove<T:BloomHash>(&mut self, elem: &T) {
self.remove_hash(elem.bloom_hash())
}
#[inline]
fn might_contain_hash(&self, hash: u32) -> bool {
*self.first_slot(hash)!= 0 && *self.second_slot(hash)!= 0
}
/// Check whether the filter might contain an item. This can
/// sometimes return true even if the item is not in the filter,
/// but will never return false for items that are actually in the
/// filter.
#[inline]
pub fn might_contain<T:BloomHash>(&self, elem: &T) -> bool
|
}
/// Item hash consumed by `BloomFilter`: one 32-bit value per item, from
/// which both slot indices are derived.
pub trait BloomHash {
    fn bloom_hash(&self) -> u32;
}
impl BloomHash for int {
    // `>> 32` overflows the shift width on 32-bit targets, hence the allow.
    #[allow(exceeding_bitshifts)]
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // Fold the high half onto the low half.
        ((*self >> 32) ^ *self) as u32
    }
}
impl BloomHash for uint {
    #[allow(exceeding_bitshifts)]
    #[inline]
    fn bloom_hash(&self) -> u32 {
        ((*self >> 32) ^ *self) as u32
    }
}
impl BloomHash for Atom {
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // Hashes the atom's packed 64-bit representation.
        ((self.data >> 32) ^ self.data) as u32
    }
}
impl BloomHash for Namespace {
    #[inline]
    fn bloom_hash(&self) -> u32 {
        // A namespace hashes as its underlying atom.
        let Namespace(ref atom) = *self;
        atom.bloom_hash()
    }
}
#[inline]
fn full(slot: &u8) -> bool {
    // A counter saturates (and then sticks) at 0xff.
    *slot == 0xff
}
#[inline]
fn hash1(hash: u32) -> u32 {
    // First slot index: low KEY_SIZE bits.
    hash & KEY_MASK
}
#[inline]
fn hash2(hash: u32) -> u32 {
    // Second slot index: KEY_SIZE bits starting at bit KEY_SHIFT.
    (hash >> KEY_SHIFT) & KEY_MASK
}
#[test]
fn create_and_insert_some_stuff() {
    use std::iter::range;
    let mut bf = BloomFilter::new();
    // Every inserted key must be reported as (possibly) present.
    for i in range(0u, 1000) {
        bf.insert(&i);
    }
    for i in range(0u, 1000) {
        assert!(bf.might_contain(&i));
    }
    // Keys never inserted should rarely collide (~1% budget here).
    let false_positives =
        range(1001u, 2000).filter(|i| bf.might_contain(i)).count();
    // NOTE(review): missing `;` after this assert — only parses on the old
    // compiler this file targets.
    assert!(false_positives < 10) // 1%.
    for i in range(0u, 100) {
        bf.remove(&i);
    }
    // Removing some keys must not disturb the ones still present.
    for i in range(100u, 1000) {
        assert!(bf.might_contain(&i));
    }
    let false_positives = range(0u, 100).filter(|i| bf.might_contain(i)).count();
    assert!(false_positives < 2); // 2%.
    // clear() must empty the filter completely.
    bf.clear();
    for i in range(0u, 2000) {
        assert!(!bf.might_contain(&i));
    }
}
// Micro-benchmarks for insert/remove/query and for hashing alone.
#[cfg(test)]
mod bench {
    extern crate test;
    use std::hash::hash;
    use std::iter;
    use super::BloomFilter;
    // End-to-end: build, populate, partially drain, then probe.
    #[bench]
    fn create_insert_1000_remove_100_lookup_100(b: &mut test::Bencher) {
        b.iter(|| {
            let mut bf = BloomFilter::new();
            for i in iter::range(0u, 1000) {
                bf.insert(&i);
            }
            for i in iter::range(0u, 100) {
                bf.remove(&i);
            }
            for i in iter::range(100u, 200) {
                test::black_box(bf.might_contain(&i));
            }
        });
    }
    // Query cost on a populated filter; `i` advances so each probe differs.
    #[bench]
    fn might_contain(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        for i in iter::range(0u, 1000) {
            bf.insert(&i);
        }
        let mut i = 0u;
        b.bench_n(1000, |b| {
            b.iter(|| {
                test::black_box(bf.might_contain(&i));
                i += 1;
            });
        });
    }
    // Pure insertion cost into an initially empty filter.
    #[bench]
    fn insert(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        b.bench_n(1000, |b| {
            let mut i = 0u;
            b.iter(|| {
                test::black_box(bf.insert(&i));
                i += 1;
            });
        });
    }
    // Pure removal cost.
    #[bench]
    fn remove(b: &mut test::Bencher) {
        let mut bf = BloomFilter::new();
        // NOTE(review): bare `range` here (unlike `iter::range` above)
        // relies on the old prelude — inconsistent with the other benches.
        for i in range(0u, 1000) {
            bf.insert(&i);
        }
        b.bench_n(1000, |b| {
            let mut i = 0u;
            b.iter(|| {
                bf.remove(&i);
                i += 1;
            });
        });
        // Keep `bf` observably live so removals are not optimised away.
        test::black_box(bf.might_contain(&0u));
    }
    // Baseline: cost of the hash function alone.
    #[bench]
    fn hash_a_uint(b: &mut test::Bencher) {
        let mut i = 0u;
        b.iter(|| {
            test::black_box(hash(&i));
            i += 1;
        })
    }
}
|
{
self.might_contain_hash(elem.bloom_hash())
}
|
identifier_body
|
bloom.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Simple counting bloom filters.
use string_cache::{Atom, Namespace};
// Width in bits of a counter-array index.
const KEY_SIZE: uint = 12;
// Number of 8-bit counters: 2^KEY_SIZE.
const ARRAY_SIZE: uint = 1 << KEY_SIZE;
// Mask extracting the low KEY_SIZE bits of a hash as a slot index.
const KEY_MASK: u32 = (1 << KEY_SIZE) - 1;
// Shift at which the second KEY_SIZE-bit sub-hash starts (see hash2).
const KEY_SHIFT: uint = 16;
/// A counting Bloom filter with 8-bit counters. For now we assume
/// that having two hash functions is enough, but we may revisit that
/// decision later.
///
/// The filter uses an array with 2**KeySize entries.
///
/// Assuming a well-distributed hash function, a Bloom filter with
/// array size M containing N elements and
/// using k hash function has expected false positive rate exactly
///
/// $ (1 - (1 - 1/M)^{kN})^k $
///
/// because each array slot has a
///
/// $ (1 - 1/M)^{kN} $
///
/// chance of being 0, and the expected false positive rate is the
/// probability that all of the k hash functions will hit a nonzero
/// slot.
///
/// For reasonable assumptions (M large, kN large, which should both
/// hold if we're worried about false positives) about M and kN this
/// becomes approximately
///
/// $$ (1 - \exp(-kN/M))^k $$
///
/// For our special case of k == 2, that's $(1 - \exp(-2N/M))^2$,
/// or in other words
///
/// $$ N/M = -0.5 * \ln(1 - \sqrt(r)) $$
///
/// where r is the false positive rate. This can be used to compute
/// the desired KeySize for a given load N and false positive rate r.
///
/// If N/M is assumed small, then the false positive rate can
/// further be approximated as 4*N^2/M^2. So increasing KeySize by
/// 1, which doubles M, reduces the false positive rate by about a
/// factor of 4, and a false positive rate of 1% corresponds to
/// about M/N == 20.
///
/// What this means in practice is that for a few hundred keys using a
/// KeySize of 12 gives false positive rates on the order of 0.25-4%.
///
/// Similarly, using a KeySize of 10 would lead to a 4% false
/// positive rate for N == 100 and to quite bad false positive
/// rates for larger N.
pub struct BloomFilter {
    // One saturating 8-bit counter per slot; `[u8,..N]` is the pre-1.0
    // fixed-size array syntax (modern Rust spells it `[u8; N]`).
    counters: [u8,..ARRAY_SIZE],
}
impl Clone for BloomFilter {
    #[inline]
    fn clone(&self) -> BloomFilter {
        // Hand-written because derive cannot handle the large array field
        // in this Rust dialect; just copies the counter array.
        BloomFilter {
            counters: self.counters,
        }
    }
}
impl BloomFilter {
/// Creates a new bloom filter.
#[inline]
pub fn new() -> BloomFilter {
BloomFilter {
counters: [0,..ARRAY_SIZE],
}
}
#[inline]
fn first_slot(&self, hash: u32) -> &u8 {
&self.counters[hash1(hash) as uint]
}
#[inline]
fn first_mut_slot(&mut self, hash: u32) -> &mut u8 {
&mut self.counters[hash1(hash) as uint]
}
#[inline]
fn second_slot(&self, hash: u32) -> &u8 {
&self.counters[hash2(hash) as uint]
}
#[inline]
fn second_mut_slot(&mut self, hash: u32) -> &mut u8 {
&mut self.counters[hash2(hash) as uint]
}
#[inline]
pub fn clear(&mut self) {
self.counters = [0,..ARRAY_SIZE]
}
#[inline]
fn insert_hash(&mut self, hash: u32) {
{
let slot1 = self.first_mut_slot(hash);
if!full(slot1) {
*slot1 += 1
}
}
{
let slot2 = self.second_mut_slot(hash);
if!full(slot2) {
*slot2 += 1
}
}
}
/// Inserts an item into the bloom filter.
#[inline]
pub fn insert<T:BloomHash>(&mut self, elem: &T) {
self.insert_hash(elem.bloom_hash())
}
#[inline]
fn remove_hash(&mut self, hash: u32) {
{
let slot1 = self.first_mut_slot(hash);
if!full(slot1) {
*slot1 -= 1
}
}
{
let slot2 = self.second_mut_slot(hash);
if!full(slot2)
|
}
}
/// Removes an item from the bloom filter.
#[inline]
pub fn remove<T:BloomHash>(&mut self, elem: &T) {
self.remove_hash(elem.bloom_hash())
}
#[inline]
fn might_contain_hash(&self, hash: u32) -> bool {
*self.first_slot(hash)!= 0 && *self.second_slot(hash)!= 0
}
/// Check whether the filter might contain an item. This can
/// sometimes return true even if the item is not in the filter,
/// but will never return false for items that are actually in the
/// filter.
#[inline]
pub fn might_contain<T:BloomHash>(&self, elem: &T) -> bool {
self.might_contain_hash(elem.bloom_hash())
}
}
pub trait BloomHash {
fn bloom_hash(&self) -> u32;
}
impl BloomHash for int {
#[allow(exceeding_bitshifts)]
#[inline]
fn bloom_hash(&self) -> u32 {
((*self >> 32) ^ *self) as u32
}
}
impl BloomHash for uint {
#[allow(exceeding_bitshifts)]
#[inline]
fn bloom_hash(&self) -> u32 {
((*self >> 32) ^ *self) as u32
}
}
impl BloomHash for Atom {
#[inline]
fn bloom_hash(&self) -> u32 {
((self.data >> 32) ^ self.data) as u32
}
}
impl BloomHash for Namespace {
#[inline]
fn bloom_hash(&self) -> u32 {
let Namespace(ref atom) = *self;
atom.bloom_hash()
}
}
#[inline]
fn full(slot: &u8) -> bool {
*slot == 0xff
}
#[inline]
fn hash1(hash: u32) -> u32 {
hash & KEY_MASK
}
#[inline]
fn hash2(hash: u32) -> u32 {
(hash >> KEY_SHIFT) & KEY_MASK
}
#[test]
fn create_and_insert_some_stuff() {
use std::iter::range;
let mut bf = BloomFilter::new();
for i in range(0u, 1000) {
bf.insert(&i);
}
for i in range(0u, 1000) {
assert!(bf.might_contain(&i));
}
let false_positives =
range(1001u, 2000).filter(|i| bf.might_contain(i)).count();
assert!(false_positives < 10) // 1%.
for i in range(0u, 100) {
bf.remove(&i);
}
for i in range(100u, 1000) {
assert!(bf.might_contain(&i));
}
let false_positives = range(0u, 100).filter(|i| bf.might_contain(i)).count();
assert!(false_positives < 2); // 2%.
bf.clear();
for i in range(0u, 2000) {
assert!(!bf.might_contain(&i));
}
}
#[cfg(test)]
mod bench {
extern crate test;
use std::hash::hash;
use std::iter;
use super::BloomFilter;
#[bench]
fn create_insert_1000_remove_100_lookup_100(b: &mut test::Bencher) {
b.iter(|| {
let mut bf = BloomFilter::new();
for i in iter::range(0u, 1000) {
bf.insert(&i);
}
for i in iter::range(0u, 100) {
bf.remove(&i);
}
for i in iter::range(100u, 200) {
test::black_box(bf.might_contain(&i));
}
});
}
#[bench]
fn might_contain(b: &mut test::Bencher) {
let mut bf = BloomFilter::new();
for i in iter::range(0u, 1000) {
bf.insert(&i);
}
let mut i = 0u;
b.bench_n(1000, |b| {
b.iter(|| {
test::black_box(bf.might_contain(&i));
i += 1;
});
});
}
#[bench]
fn insert(b: &mut test::Bencher) {
let mut bf = BloomFilter::new();
b.bench_n(1000, |b| {
let mut i = 0u;
b.iter(|| {
test::black_box(bf.insert(&i));
i += 1;
});
});
}
#[bench]
fn remove(b: &mut test::Bencher) {
let mut bf = BloomFilter::new();
for i in range(0u, 1000) {
bf.insert(&i);
}
b.bench_n(1000, |b| {
let mut i = 0u;
b.iter(|| {
bf.remove(&i);
i += 1;
});
});
test::black_box(bf.might_contain(&0u));
}
#[bench]
fn hash_a_uint(b: &mut test::Bencher) {
let mut i = 0u;
b.iter(|| {
test::black_box(hash(&i));
i += 1;
})
}
}
|
{
*slot2 -= 1
}
|
conditional_block
|
vibration_motor.rs
|
#![feature(used)]
#![no_std]
extern crate cortex_m;
extern crate panic_abort;
extern crate stm32f30x_hal as hal;
use hal::prelude::*;
use hal::stm32f30x;
use hal::delay::Delay;
fn
|
() {
let cp = cortex_m::Peripherals::take().unwrap();
let dp = stm32f30x::Peripherals::take().unwrap();
let mut flash = dp.FLASH.constrain();
let mut rcc = dp.RCC.constrain();
let clocks = rcc.cfgr.freeze(&mut flash.acr);
let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);
let mut motor = gpioa
.pa0
.into_push_pull_output(&mut gpioa.moder, &mut gpioa.otyper);
let mut delay = Delay::new(cp.SYST, clocks);
loop {
motor.set_high();
delay.delay_ms(1_000_u16);
motor.set_low();
delay.delay_ms(1_000_u16);
}
}
|
main
|
identifier_name
|
vibration_motor.rs
|
#![feature(used)]
#![no_std]
extern crate cortex_m;
extern crate panic_abort;
extern crate stm32f30x_hal as hal;
use hal::prelude::*;
use hal::stm32f30x;
use hal::delay::Delay;
fn main()
|
motor.set_low();
delay.delay_ms(1_000_u16);
}
}
|
{
let cp = cortex_m::Peripherals::take().unwrap();
let dp = stm32f30x::Peripherals::take().unwrap();
let mut flash = dp.FLASH.constrain();
let mut rcc = dp.RCC.constrain();
let clocks = rcc.cfgr.freeze(&mut flash.acr);
let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);
let mut motor = gpioa
.pa0
.into_push_pull_output(&mut gpioa.moder, &mut gpioa.otyper);
let mut delay = Delay::new(cp.SYST, clocks);
loop {
motor.set_high();
delay.delay_ms(1_000_u16);
|
identifier_body
|
vibration_motor.rs
|
#![feature(used)]
#![no_std]
|
use hal::prelude::*;
use hal::stm32f30x;
use hal::delay::Delay;
fn main() {
let cp = cortex_m::Peripherals::take().unwrap();
let dp = stm32f30x::Peripherals::take().unwrap();
let mut flash = dp.FLASH.constrain();
let mut rcc = dp.RCC.constrain();
let clocks = rcc.cfgr.freeze(&mut flash.acr);
let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);
let mut motor = gpioa
.pa0
.into_push_pull_output(&mut gpioa.moder, &mut gpioa.otyper);
let mut delay = Delay::new(cp.SYST, clocks);
loop {
motor.set_high();
delay.delay_ms(1_000_u16);
motor.set_low();
delay.delay_ms(1_000_u16);
}
}
|
extern crate cortex_m;
extern crate panic_abort;
extern crate stm32f30x_hal as hal;
|
random_line_split
|
mod.rs
|
/*
Precached - A Linux process monitor and pre-caching daemon
Copyright (C) 2017-2020 the precached developers
|
Precached is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Precached is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Precached. If not, see <http://www.gnu.org/licenses/>.
*/
pub mod config_file;
pub mod files;
pub mod mountinfo;
pub mod namespace;
pub use self::config_file::*;
pub use self::files::*;
pub use self::mountinfo::*;
pub use self::namespace::*;
|
This file is part of precached.
|
random_line_split
|
tree.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::Error;
use edenapi_types::TreeEntry as EdenApiTreeEntry;
use minibytes::Bytes;
use types::{Key, Parents};
use crate::{datastore::Metadata, indexedlogdatastore::Entry};
#[derive(Clone, Debug)]
pub struct StoreTree {
key: Option<Key>,
#[allow(dead_code)]
parents: Option<Parents>,
raw_content: Option<Bytes>,
#[allow(dead_code)]
entry_metadata: Option<Metadata>,
}
impl TryFrom<Entry> for StoreTree {
type Error = Error;
fn try_from(mut v: Entry) -> Result<Self, Self::Error> {
let raw_content = v.content()?;
let key = v.key().clone();
let entry_metadata = v.metadata().clone();
Ok(StoreTree {
key: Some(key),
parents: None,
entry_metadata: Some(entry_metadata),
raw_content: Some(raw_content),
})
}
}
impl TryFrom<EdenApiTreeEntry> for StoreTree {
type Error = Error;
fn try_from(v: EdenApiTreeEntry) -> Result<Self, Self::Error> {
// TODO(meyer): Optimize this to remove unnecessary clones.
let raw_content = v.data_checked()?.into();
Ok(StoreTree {
key: Some(v.key().clone()),
parents: v.parents.clone(),
entry_metadata: None,
raw_content: Some(raw_content),
})
}
}
impl StoreTree {
pub fn
|
(&self) -> Option<&Key> {
self.key.as_ref()
}
/// The tree content blob in the serialized tree-manifest format.
pub fn content(&self) -> Option<&Bytes> {
self.raw_content.as_ref()
}
}
|
key
|
identifier_name
|
tree.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::Error;
use edenapi_types::TreeEntry as EdenApiTreeEntry;
use minibytes::Bytes;
use types::{Key, Parents};
use crate::{datastore::Metadata, indexedlogdatastore::Entry};
#[derive(Clone, Debug)]
pub struct StoreTree {
key: Option<Key>,
#[allow(dead_code)]
parents: Option<Parents>,
raw_content: Option<Bytes>,
#[allow(dead_code)]
entry_metadata: Option<Metadata>,
}
impl TryFrom<Entry> for StoreTree {
type Error = Error;
fn try_from(mut v: Entry) -> Result<Self, Self::Error>
|
}
impl TryFrom<EdenApiTreeEntry> for StoreTree {
type Error = Error;
fn try_from(v: EdenApiTreeEntry) -> Result<Self, Self::Error> {
// TODO(meyer): Optimize this to remove unnecessary clones.
let raw_content = v.data_checked()?.into();
Ok(StoreTree {
key: Some(v.key().clone()),
parents: v.parents.clone(),
entry_metadata: None,
raw_content: Some(raw_content),
})
}
}
impl StoreTree {
pub fn key(&self) -> Option<&Key> {
self.key.as_ref()
}
/// The tree content blob in the serialized tree-manifest format.
pub fn content(&self) -> Option<&Bytes> {
self.raw_content.as_ref()
}
}
|
{
let raw_content = v.content()?;
let key = v.key().clone();
let entry_metadata = v.metadata().clone();
Ok(StoreTree {
key: Some(key),
parents: None,
entry_metadata: Some(entry_metadata),
raw_content: Some(raw_content),
})
}
|
identifier_body
|
tree.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::Error;
use edenapi_types::TreeEntry as EdenApiTreeEntry;
use minibytes::Bytes;
|
#[derive(Clone, Debug)]
pub struct StoreTree {
key: Option<Key>,
#[allow(dead_code)]
parents: Option<Parents>,
raw_content: Option<Bytes>,
#[allow(dead_code)]
entry_metadata: Option<Metadata>,
}
impl TryFrom<Entry> for StoreTree {
type Error = Error;
fn try_from(mut v: Entry) -> Result<Self, Self::Error> {
let raw_content = v.content()?;
let key = v.key().clone();
let entry_metadata = v.metadata().clone();
Ok(StoreTree {
key: Some(key),
parents: None,
entry_metadata: Some(entry_metadata),
raw_content: Some(raw_content),
})
}
}
impl TryFrom<EdenApiTreeEntry> for StoreTree {
type Error = Error;
fn try_from(v: EdenApiTreeEntry) -> Result<Self, Self::Error> {
// TODO(meyer): Optimize this to remove unnecessary clones.
let raw_content = v.data_checked()?.into();
Ok(StoreTree {
key: Some(v.key().clone()),
parents: v.parents.clone(),
entry_metadata: None,
raw_content: Some(raw_content),
})
}
}
impl StoreTree {
pub fn key(&self) -> Option<&Key> {
self.key.as_ref()
}
/// The tree content blob in the serialized tree-manifest format.
pub fn content(&self) -> Option<&Bytes> {
self.raw_content.as_ref()
}
}
|
use types::{Key, Parents};
use crate::{datastore::Metadata, indexedlogdatastore::Entry};
|
random_line_split
|
dialer.rs
|
use std::io::Cursor;
use std::str;
use byteorder::{ByteOrder, NativeEndian, NetworkEndian, ReadBytesExt};
use common::dialer::Dialer;
use common::utils::current_timestamp;
#[derive(Debug)]
pub enum Configuration {
Hainan,
}
#[derive(Debug)]
pub struct SingleNetDialer {
share_key: String,
secret_key: String,
key_table: String,
}
impl SingleNetDialer {
pub fn new(share_key: &str, secret_key: &str, key_table: &str) -> Self {
SingleNetDialer {
share_key: share_key.to_string(),
secret_key: secret_key.to_string(),
key_table: key_table.to_string(),
}
}
pub fn encrypt_account(&self, username: &str, timestamp: Option<u32>) -> String {
let username = username.to_uppercase();
let time_now = timestamp.unwrap_or_else(current_timestamp);
let first_hash: u16;
{
let mut time_now_bytes = [0u8; 4];
NetworkEndian::write_u32(&mut time_now_bytes, time_now);
let mut hash_data: Vec<u8> = Vec::new();
hash_data.extend_from_slice(&time_now_bytes);
hash_data.extend(self.share_key.as_bytes());
hash_data.extend(username.split('@').nth(0).unwrap().as_bytes());
first_hash = Self::calc_hash(&hash_data)
}
let second_hash: u16;
{
let mut first_hash_bytes = [0u8; 2];
NetworkEndian::write_u16(&mut first_hash_bytes, first_hash);
let mut hash_data: Vec<u8> = Vec::new();
hash_data.extend_from_slice(&first_hash_bytes);
hash_data.extend(self.secret_key.as_bytes());
second_hash = Self::calc_hash(&hash_data);
}
let mut scheduled_table: Vec<u8> = Vec::with_capacity(8);
{
let time_now_high = (time_now >> 16) as u16;
let time_now_low = (time_now & 0xFFFF) as u16;
let mut time_now_high_bytes = [0u8; 2];
let mut time_now_low_bytes = [0u8; 2];
let mut first_hash_bytes = [0u8; 2];
let mut second_hash_bytes = [0u8; 2];
NetworkEndian::write_u16(&mut time_now_high_bytes, time_now_high);
NetworkEndian::write_u16(&mut time_now_low_bytes, time_now_low);
NativeEndian::write_u16(&mut first_hash_bytes, first_hash);
NativeEndian::write_u16(&mut second_hash_bytes, second_hash);
scheduled_table.extend_from_slice(&time_now_high_bytes);
scheduled_table.extend_from_slice(&first_hash_bytes);
scheduled_table.extend_from_slice(&time_now_low_bytes);
scheduled_table.extend_from_slice(&second_hash_bytes);
}
let mut vectors: [u8; 12] = [0; 12];
for i in 0..4 {
let j = 2 * i + 1;
let k = 3 * i + 1;
vectors[k - 1] = scheduled_table[j - 1] >> 0x3 & 0x1F;
vectors[k] =
((scheduled_table[j - 1] & 0x7) << 0x2) | (scheduled_table[j] >> 0x6 & 0x3);
vectors[k + 1] = scheduled_table[j] & 0x3F;
}
let key_table_bytes = self.key_table_bytes();
let pin: Vec<u8> = vectors
.iter()
.map(|c| key_table_bytes[*c as usize])
.collect();
let pin_str = str::from_utf8(&pin).unwrap();
format!("~LL_{}_{}", pin_str, username)
}
fn calc_hash(data: &[u8]) -> u16 {
let length = data.len();
let mut summary: u32 = 0;
let mut data = data;
if length % 2!= 0 {
summary = u32::from(data[length - 1]);
data = &data[0..length - 1];
}
let data_shorts = {
let mut shorts = vec![0u16; 0];
let mut rdr = Cursor::new(data);
while let Ok(s) = rdr.read_u16::<NativeEndian>() {
shorts.push(s);
}
shorts
};
summary = data_shorts
.iter()
.fold(summary, |sum, x| sum + u32::from(*x));
if summary & 0xFFFF_0000!= 0 {
summary = ((summary >> 0x10) + summary) & 0xFFFF;
}
!summary as u16
}
fn key_table_bytes(&self) -> &[u8] {
self.key_table.as_bytes()
}
}
impl Configuration {
pub fn share_key(&self) -> &'static str {
match *self {
Configuration::Hainan => "hngx01",
}
}
pub fn secret_key(&self) -> &'static str {
match *self {
Configuration::Hainan => "000c29270712",
}
}
pub fn key_table(&self) -> &'static str {
match *self {
Configuration::Hainan => {
"abcdefghijklmnopqrstuvwxyz1234567890ZYXWVUTSRQPONMLKJIHGFEDCBA:_"
}
}
}
}
|
SingleNetDialer::new(config.share_key(), config.secret_key(), config.key_table())
}
}
#[test]
fn test_hash_key() {
let str1 = "123456".to_string();
let str2 = "1234567".to_string();
let hash1 = SingleNetDialer::calc_hash(str1.as_bytes());
let hash2 = SingleNetDialer::calc_hash(str2.as_bytes());
assert_eq!(hash1, 25446u16);
assert_eq!(hash2, 25391u16);
}
|
impl Dialer for SingleNetDialer {
type C = Configuration;
fn load_from_config(config: Self::C) -> Self {
|
random_line_split
|
dialer.rs
|
use std::io::Cursor;
use std::str;
use byteorder::{ByteOrder, NativeEndian, NetworkEndian, ReadBytesExt};
use common::dialer::Dialer;
use common::utils::current_timestamp;
#[derive(Debug)]
pub enum Configuration {
Hainan,
}
#[derive(Debug)]
pub struct SingleNetDialer {
share_key: String,
secret_key: String,
key_table: String,
}
impl SingleNetDialer {
pub fn new(share_key: &str, secret_key: &str, key_table: &str) -> Self {
SingleNetDialer {
share_key: share_key.to_string(),
secret_key: secret_key.to_string(),
key_table: key_table.to_string(),
}
}
pub fn encrypt_account(&self, username: &str, timestamp: Option<u32>) -> String
|
hash_data.extend_from_slice(&first_hash_bytes);
hash_data.extend(self.secret_key.as_bytes());
second_hash = Self::calc_hash(&hash_data);
}
let mut scheduled_table: Vec<u8> = Vec::with_capacity(8);
{
let time_now_high = (time_now >> 16) as u16;
let time_now_low = (time_now & 0xFFFF) as u16;
let mut time_now_high_bytes = [0u8; 2];
let mut time_now_low_bytes = [0u8; 2];
let mut first_hash_bytes = [0u8; 2];
let mut second_hash_bytes = [0u8; 2];
NetworkEndian::write_u16(&mut time_now_high_bytes, time_now_high);
NetworkEndian::write_u16(&mut time_now_low_bytes, time_now_low);
NativeEndian::write_u16(&mut first_hash_bytes, first_hash);
NativeEndian::write_u16(&mut second_hash_bytes, second_hash);
scheduled_table.extend_from_slice(&time_now_high_bytes);
scheduled_table.extend_from_slice(&first_hash_bytes);
scheduled_table.extend_from_slice(&time_now_low_bytes);
scheduled_table.extend_from_slice(&second_hash_bytes);
}
let mut vectors: [u8; 12] = [0; 12];
for i in 0..4 {
let j = 2 * i + 1;
let k = 3 * i + 1;
vectors[k - 1] = scheduled_table[j - 1] >> 0x3 & 0x1F;
vectors[k] =
((scheduled_table[j - 1] & 0x7) << 0x2) | (scheduled_table[j] >> 0x6 & 0x3);
vectors[k + 1] = scheduled_table[j] & 0x3F;
}
let key_table_bytes = self.key_table_bytes();
let pin: Vec<u8> = vectors
.iter()
.map(|c| key_table_bytes[*c as usize])
.collect();
let pin_str = str::from_utf8(&pin).unwrap();
format!("~LL_{}_{}", pin_str, username)
}
fn calc_hash(data: &[u8]) -> u16 {
let length = data.len();
let mut summary: u32 = 0;
let mut data = data;
if length % 2!= 0 {
summary = u32::from(data[length - 1]);
data = &data[0..length - 1];
}
let data_shorts = {
let mut shorts = vec![0u16; 0];
let mut rdr = Cursor::new(data);
while let Ok(s) = rdr.read_u16::<NativeEndian>() {
shorts.push(s);
}
shorts
};
summary = data_shorts
.iter()
.fold(summary, |sum, x| sum + u32::from(*x));
if summary & 0xFFFF_0000!= 0 {
summary = ((summary >> 0x10) + summary) & 0xFFFF;
}
!summary as u16
}
fn key_table_bytes(&self) -> &[u8] {
self.key_table.as_bytes()
}
}
impl Configuration {
pub fn share_key(&self) -> &'static str {
match *self {
Configuration::Hainan => "hngx01",
}
}
pub fn secret_key(&self) -> &'static str {
match *self {
Configuration::Hainan => "000c29270712",
}
}
pub fn key_table(&self) -> &'static str {
match *self {
Configuration::Hainan => {
"abcdefghijklmnopqrstuvwxyz1234567890ZYXWVUTSRQPONMLKJIHGFEDCBA:_"
}
}
}
}
impl Dialer for SingleNetDialer {
type C = Configuration;
fn load_from_config(config: Self::C) -> Self {
SingleNetDialer::new(config.share_key(), config.secret_key(), config.key_table())
}
}
#[test]
fn test_hash_key() {
let str1 = "123456".to_string();
let str2 = "1234567".to_string();
let hash1 = SingleNetDialer::calc_hash(str1.as_bytes());
let hash2 = SingleNetDialer::calc_hash(str2.as_bytes());
assert_eq!(hash1, 25446u16);
assert_eq!(hash2, 25391u16);
}
|
{
let username = username.to_uppercase();
let time_now = timestamp.unwrap_or_else(current_timestamp);
let first_hash: u16;
{
let mut time_now_bytes = [0u8; 4];
NetworkEndian::write_u32(&mut time_now_bytes, time_now);
let mut hash_data: Vec<u8> = Vec::new();
hash_data.extend_from_slice(&time_now_bytes);
hash_data.extend(self.share_key.as_bytes());
hash_data.extend(username.split('@').nth(0).unwrap().as_bytes());
first_hash = Self::calc_hash(&hash_data)
}
let second_hash: u16;
{
let mut first_hash_bytes = [0u8; 2];
NetworkEndian::write_u16(&mut first_hash_bytes, first_hash);
let mut hash_data: Vec<u8> = Vec::new();
|
identifier_body
|
dialer.rs
|
use std::io::Cursor;
use std::str;
use byteorder::{ByteOrder, NativeEndian, NetworkEndian, ReadBytesExt};
use common::dialer::Dialer;
use common::utils::current_timestamp;
#[derive(Debug)]
pub enum Configuration {
Hainan,
}
#[derive(Debug)]
pub struct SingleNetDialer {
share_key: String,
secret_key: String,
key_table: String,
}
impl SingleNetDialer {
pub fn new(share_key: &str, secret_key: &str, key_table: &str) -> Self {
SingleNetDialer {
share_key: share_key.to_string(),
secret_key: secret_key.to_string(),
key_table: key_table.to_string(),
}
}
pub fn
|
(&self, username: &str, timestamp: Option<u32>) -> String {
let username = username.to_uppercase();
let time_now = timestamp.unwrap_or_else(current_timestamp);
let first_hash: u16;
{
let mut time_now_bytes = [0u8; 4];
NetworkEndian::write_u32(&mut time_now_bytes, time_now);
let mut hash_data: Vec<u8> = Vec::new();
hash_data.extend_from_slice(&time_now_bytes);
hash_data.extend(self.share_key.as_bytes());
hash_data.extend(username.split('@').nth(0).unwrap().as_bytes());
first_hash = Self::calc_hash(&hash_data)
}
let second_hash: u16;
{
let mut first_hash_bytes = [0u8; 2];
NetworkEndian::write_u16(&mut first_hash_bytes, first_hash);
let mut hash_data: Vec<u8> = Vec::new();
hash_data.extend_from_slice(&first_hash_bytes);
hash_data.extend(self.secret_key.as_bytes());
second_hash = Self::calc_hash(&hash_data);
}
let mut scheduled_table: Vec<u8> = Vec::with_capacity(8);
{
let time_now_high = (time_now >> 16) as u16;
let time_now_low = (time_now & 0xFFFF) as u16;
let mut time_now_high_bytes = [0u8; 2];
let mut time_now_low_bytes = [0u8; 2];
let mut first_hash_bytes = [0u8; 2];
let mut second_hash_bytes = [0u8; 2];
NetworkEndian::write_u16(&mut time_now_high_bytes, time_now_high);
NetworkEndian::write_u16(&mut time_now_low_bytes, time_now_low);
NativeEndian::write_u16(&mut first_hash_bytes, first_hash);
NativeEndian::write_u16(&mut second_hash_bytes, second_hash);
scheduled_table.extend_from_slice(&time_now_high_bytes);
scheduled_table.extend_from_slice(&first_hash_bytes);
scheduled_table.extend_from_slice(&time_now_low_bytes);
scheduled_table.extend_from_slice(&second_hash_bytes);
}
let mut vectors: [u8; 12] = [0; 12];
for i in 0..4 {
let j = 2 * i + 1;
let k = 3 * i + 1;
vectors[k - 1] = scheduled_table[j - 1] >> 0x3 & 0x1F;
vectors[k] =
((scheduled_table[j - 1] & 0x7) << 0x2) | (scheduled_table[j] >> 0x6 & 0x3);
vectors[k + 1] = scheduled_table[j] & 0x3F;
}
let key_table_bytes = self.key_table_bytes();
let pin: Vec<u8> = vectors
.iter()
.map(|c| key_table_bytes[*c as usize])
.collect();
let pin_str = str::from_utf8(&pin).unwrap();
format!("~LL_{}_{}", pin_str, username)
}
fn calc_hash(data: &[u8]) -> u16 {
let length = data.len();
let mut summary: u32 = 0;
let mut data = data;
if length % 2!= 0 {
summary = u32::from(data[length - 1]);
data = &data[0..length - 1];
}
let data_shorts = {
let mut shorts = vec![0u16; 0];
let mut rdr = Cursor::new(data);
while let Ok(s) = rdr.read_u16::<NativeEndian>() {
shorts.push(s);
}
shorts
};
summary = data_shorts
.iter()
.fold(summary, |sum, x| sum + u32::from(*x));
if summary & 0xFFFF_0000!= 0 {
summary = ((summary >> 0x10) + summary) & 0xFFFF;
}
!summary as u16
}
fn key_table_bytes(&self) -> &[u8] {
self.key_table.as_bytes()
}
}
impl Configuration {
pub fn share_key(&self) -> &'static str {
match *self {
Configuration::Hainan => "hngx01",
}
}
pub fn secret_key(&self) -> &'static str {
match *self {
Configuration::Hainan => "000c29270712",
}
}
pub fn key_table(&self) -> &'static str {
match *self {
Configuration::Hainan => {
"abcdefghijklmnopqrstuvwxyz1234567890ZYXWVUTSRQPONMLKJIHGFEDCBA:_"
}
}
}
}
impl Dialer for SingleNetDialer {
type C = Configuration;
fn load_from_config(config: Self::C) -> Self {
SingleNetDialer::new(config.share_key(), config.secret_key(), config.key_table())
}
}
#[test]
fn test_hash_key() {
let str1 = "123456".to_string();
let str2 = "1234567".to_string();
let hash1 = SingleNetDialer::calc_hash(str1.as_bytes());
let hash2 = SingleNetDialer::calc_hash(str2.as_bytes());
assert_eq!(hash1, 25446u16);
assert_eq!(hash2, 25391u16);
}
|
encrypt_account
|
identifier_name
|
dump-adt-brace-struct.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Unit test for the "user substitutions" that are annotated on each
// node.
// compile-flags:-Zverbose
#![allow(warnings)]
#![feature(nll)]
#![feature(rustc_attrs)]
struct SomeStruct<T> { t: T }
|
#[rustc_dump_user_substs]
fn main() {
SomeStruct { t: 22 }; // Nothing given, no annotation.
SomeStruct::<_> { t: 22 }; // Nothing interesting given, no annotation.
SomeStruct::<u32> { t: 22 }; //~ ERROR [u32]
}
|
random_line_split
|
|
dump-adt-brace-struct.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Unit test for the "user substitutions" that are annotated on each
// node.
// compile-flags:-Zverbose
#![allow(warnings)]
#![feature(nll)]
#![feature(rustc_attrs)]
struct SomeStruct<T> { t: T }
#[rustc_dump_user_substs]
fn
|
() {
SomeStruct { t: 22 }; // Nothing given, no annotation.
SomeStruct::<_> { t: 22 }; // Nothing interesting given, no annotation.
SomeStruct::<u32> { t: 22 }; //~ ERROR [u32]
}
|
main
|
identifier_name
|
lib.rs
|
extern crate walkdir;
extern crate phf_codegen;
extern crate flate2;
use std::{env, fmt, io};
use std::borrow::{Borrow, Cow};
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::{BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use flate2::write::GzEncoder;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Compression {
None,
Gzip
}
impl fmt::Display for Compression {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
Compression::None => fmt.write_str("None"),
Compression::Gzip => fmt.write_str("Gzip"),
}
}
}
pub struct IncludeDir {
files: HashMap<String, (Compression, PathBuf)>,
name: String,
manifest_dir: PathBuf
}
pub fn start(static_name: &str) -> IncludeDir {
IncludeDir {
files: HashMap::new(),
name: static_name.to_owned(),
manifest_dir: Path::new(&env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR is not set")).to_owned()
}
}
#[cfg(windows)]
fn as_key(path: &str) -> Cow<str> {
Cow::Owned(path.replace("\\", "/"))
}
#[cfg(not(windows))]
fn
|
(path: &str) -> Cow<str> {
Cow::Borrowed(path)
}
impl IncludeDir {
/// Add a single file to the binary.
/// With Gzip compression, the file will be encoded to OUT_DIR first.
/// For chaining, it's not sensible to return a Result. If any to-be-included
/// files can't be found, or encoded, this function will panic!.
pub fn file<P: AsRef<Path>>(mut self, path: P, comp: Compression) -> IncludeDir {
self.add_file(path, comp).unwrap();
self
}
/// ## Panics
///
/// This function panics when CARGO_MANIFEST_DIR or OUT_DIR are not defined.
pub fn add_file<P: AsRef<Path>>(&mut self, path: P, comp: Compression) -> io::Result<()> {
let key = path.as_ref().to_string_lossy();
println!("cargo:rerun-if-changed={}", path.as_ref().display());
match comp {
Compression::None => {
self.files.insert(as_key(key.borrow()).into_owned(),
(comp, path.as_ref().to_owned()));
}
Compression::Gzip => {
// gzip encode file to OUT_DIR
let in_path = self.manifest_dir.join(&path);
let mut in_file = BufReader::new(File::open(&in_path)?);
let out_path = Path::new(&env::var("OUT_DIR").unwrap()).join(&path);
fs::create_dir_all(&out_path.parent().unwrap())?;
let out_file = BufWriter::new(File::create(&out_path)?);
let mut encoder = GzEncoder::new(out_file, flate2::Compression::best());
io::copy(&mut in_file, &mut encoder)?;
self.files.insert(as_key(key.borrow()).into_owned(),
(comp, out_path));
}
}
Ok(())
}
/// Add a whole directory recursively to the binary.
/// This function calls `file`, and therefore will panic! on missing files.
pub fn dir<P: AsRef<Path>>(mut self, path: P, comp: Compression) -> IncludeDir {
self.add_dir(path, comp).unwrap();
self
}
/// ## Panics
///
/// This function panics when CARGO_MANIFEST_DIR or OUT_DIR are not defined.
pub fn add_dir<P: AsRef<Path>>(&mut self, path: P, comp: Compression) -> io::Result<()> {
for entry in WalkDir::new(path).follow_links(true).into_iter() {
match entry {
Ok(ref e) if!e.file_type().is_dir() => {
self.add_file(e.path(), comp)?;
}
_ => (),
}
}
Ok(())
}
pub fn build(self, out_name: &str) -> io::Result<()> {
let out_path = Path::new(&env::var("OUT_DIR").unwrap()).join(out_name);
let mut out_file = BufWriter::new(File::create(&out_path)?);
writeln!(&mut out_file, "#[allow(clippy::unreadable_literal)]")?;
writeln!(&mut out_file,
"pub static {}: ::includedir::Files = ::includedir::Files {{\n\
files: ",
self.name)?;
let mut map: phf_codegen::Map<&str> = phf_codegen::Map::new();
let entries: Vec<_> = self.files.iter()
.map(|(name, (compression, path))| {
let include_path = format!("{}", self.manifest_dir.join(path).display());
(as_key(&name).to_string(), (compression, as_key(&include_path).to_string()))
})
.collect();
for (name, (compression, include_path)) in &entries {
map.entry(name,
&format!("(::includedir::Compression::{}, \
include_bytes!(\"{}\") as &'static [u8])",
compression, include_path));
}
writeln!(&mut out_file, "{}", map.build())?;
writeln!(&mut out_file, ", passthrough: ::std::sync::atomic::AtomicBool::new(false)")?;
writeln!(&mut out_file, "}};")?;
Ok(())
}
}
|
as_key
|
identifier_name
|
lib.rs
|
extern crate walkdir;
extern crate phf_codegen;
extern crate flate2;
use std::{env, fmt, io};
use std::borrow::{Borrow, Cow};
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::{BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use flate2::write::GzEncoder;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Compression {
None,
Gzip
}
impl fmt::Display for Compression {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error>
|
}
pub struct IncludeDir {
files: HashMap<String, (Compression, PathBuf)>,
name: String,
manifest_dir: PathBuf
}
pub fn start(static_name: &str) -> IncludeDir {
IncludeDir {
files: HashMap::new(),
name: static_name.to_owned(),
manifest_dir: Path::new(&env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR is not set")).to_owned()
}
}
#[cfg(windows)]
fn as_key(path: &str) -> Cow<str> {
Cow::Owned(path.replace("\\", "/"))
}
#[cfg(not(windows))]
fn as_key(path: &str) -> Cow<str> {
Cow::Borrowed(path)
}
impl IncludeDir {
    /// Add a single file to the binary.
    /// With Gzip compression, the file will be encoded to OUT_DIR first.
    /// For chaining, it's not sensible to return a Result. If any to-be-included
    /// files can't be found, or encoded, this function will panic!.
    pub fn file<P: AsRef<Path>>(mut self, path: P, comp: Compression) -> IncludeDir {
        self.add_file(path, comp).unwrap();
        self
    }
    /// Fallible form of [`IncludeDir::file`]: registers `path` under its
    /// separator-normalized string key, gzip-encoding a copy into OUT_DIR
    /// first when `comp` is `Compression::Gzip`.
    ///
    /// ## Panics
    ///
    /// This function panics when CARGO_MANIFEST_DIR or OUT_DIR are not defined.
    pub fn add_file<P: AsRef<Path>>(&mut self, path: P, comp: Compression) -> io::Result<()> {
        let key = path.as_ref().to_string_lossy();
        // Cargo directive: re-run this build script when the source file changes.
        println!("cargo:rerun-if-changed={}", path.as_ref().display());
        match comp {
            Compression::None => {
                // Store the path as given; `build()` resolves it against
                // `manifest_dir` when emitting the include_bytes! call.
                self.files.insert(as_key(key.borrow()).into_owned(),
                                  (comp, path.as_ref().to_owned()));
            }
            Compression::Gzip => {
                // gzip encode file to OUT_DIR
                let in_path = self.manifest_dir.join(&path);
                let mut in_file = BufReader::new(File::open(&in_path)?);
                // Mirror the relative path under OUT_DIR so encoded copies
                // cannot collide with each other.
                let out_path = Path::new(&env::var("OUT_DIR").unwrap()).join(&path);
                fs::create_dir_all(&out_path.parent().unwrap())?;
                let out_file = BufWriter::new(File::create(&out_path)?);
                let mut encoder = GzEncoder::new(out_file, flate2::Compression::best());
                io::copy(&mut in_file, &mut encoder)?;
                // Note: the stored path is the *encoded* copy in OUT_DIR,
                // not the original source file.
                self.files.insert(as_key(key.borrow()).into_owned(),
                                  (comp, out_path));
            }
        }
        Ok(())
    }
    /// Add a whole directory recursively to the binary.
    /// This function calls `file`, and therefore will panic! on missing files.
    pub fn dir<P: AsRef<Path>>(mut self, path: P, comp: Compression) -> IncludeDir {
        self.add_dir(path, comp).unwrap();
        self
    }
    /// Fallible form of [`IncludeDir::dir`]: walks `path` (following
    /// symlinks) and registers every non-directory entry via `add_file`.
    ///
    /// ## Panics
    ///
    /// This function panics when CARGO_MANIFEST_DIR or OUT_DIR are not defined.
    pub fn add_dir<P: AsRef<Path>>(&mut self, path: P, comp: Compression) -> io::Result<()> {
        for entry in WalkDir::new(path).follow_links(true).into_iter() {
            match entry {
                // Unreadable entries are deliberately skipped (`_ => ()`);
                // only readable, non-directory entries reach `add_file`.
                Ok(ref e) if!e.file_type().is_dir() => {
                    self.add_file(e.path(), comp)?;
                }
                _ => (),
            }
        }
        Ok(())
    }
    /// Writes `$OUT_DIR/<out_name>`: a `pub static <self.name>` of type
    /// `::includedir::Files` backed by a `phf` map from key to
    /// `(Compression, include_bytes!(..))` pairs.
    ///
    /// ## Panics
    ///
    /// Panics when OUT_DIR is not defined.
    pub fn build(self, out_name: &str) -> io::Result<()> {
        let out_path = Path::new(&env::var("OUT_DIR").unwrap()).join(out_name);
        let mut out_file = BufWriter::new(File::create(&out_path)?);
        writeln!(&mut out_file, "#[allow(clippy::unreadable_literal)]")?;
        writeln!(&mut out_file,
                 "pub static {}: ::includedir::Files = ::includedir::Files {{\n\
                  files: ",
                 self.name)?;
        let mut map: phf_codegen::Map<&str> = phf_codegen::Map::new();
        // Materialize owned key/path strings first so the phf map entries can
        // borrow them for the rest of this function.
        let entries: Vec<_> = self.files.iter()
            .map(|(name, (compression, path))| {
                let include_path = format!("{}", self.manifest_dir.join(path).display());
                (as_key(&name).to_string(), (compression, as_key(&include_path).to_string()))
            })
            .collect();
        for (name, (compression, include_path)) in &entries {
            // `compression` renders via the Display impl above ("None"/"Gzip"),
            // matching the ::includedir::Compression variant tokens.
            map.entry(name,
                      &format!("(::includedir::Compression::{}, \
                                include_bytes!(\"{}\") as &'static [u8])",
                               compression, include_path));
        }
        writeln!(&mut out_file, "{}", map.build())?;
        writeln!(&mut out_file, ", passthrough: ::std::sync::atomic::AtomicBool::new(false)")?;
        writeln!(&mut out_file, "}};")?;
        Ok(())
    }
}
|
{
match *self {
Compression::None => fmt.write_str("None"),
Compression::Gzip => fmt.write_str("Gzip"),
}
}
|
identifier_body
|
lib.rs
|
extern crate walkdir;
extern crate phf_codegen;
extern crate flate2;
use std::{env, fmt, io};
use std::borrow::{Borrow, Cow};
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::{BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use flate2::write::GzEncoder;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Compression {
None,
Gzip
}
impl fmt::Display for Compression {
    /// Renders the bare variant name ("None" / "Gzip"); `build()` relies on
    /// this to emit `::includedir::Compression::<name>` tokens.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let name = match *self {
            Compression::None => "None",
            Compression::Gzip => "Gzip",
        };
        fmt.write_str(name)
    }
}
pub struct IncludeDir {
files: HashMap<String, (Compression, PathBuf)>,
name: String,
manifest_dir: PathBuf
}
pub fn start(static_name: &str) -> IncludeDir {
IncludeDir {
        files: HashMap::new(),
        name: static_name.to_owned(),
        manifest_dir: Path::new(&env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR is not set")).to_owned()
    }
}
#[cfg(windows)]
fn as_key(path: &str) -> Cow<str> {
Cow::Owned(path.replace("\\", "/"))
}
#[cfg(not(windows))]
fn as_key(path: &str) -> Cow<str> {
Cow::Borrowed(path)
}
impl IncludeDir {
/// Add a single file to the binary.
/// With Gzip compression, the file will be encoded to OUT_DIR first.
/// For chaining, it's not sensible to return a Result. If any to-be-included
/// files can't be found, or encoded, this function will panic!.
pub fn file<P: AsRef<Path>>(mut self, path: P, comp: Compression) -> IncludeDir {
self.add_file(path, comp).unwrap();
self
}
/// ## Panics
///
/// This function panics when CARGO_MANIFEST_DIR or OUT_DIR are not defined.
pub fn add_file<P: AsRef<Path>>(&mut self, path: P, comp: Compression) -> io::Result<()> {
let key = path.as_ref().to_string_lossy();
println!("cargo:rerun-if-changed={}", path.as_ref().display());
match comp {
Compression::None => {
self.files.insert(as_key(key.borrow()).into_owned(),
(comp, path.as_ref().to_owned()));
}
Compression::Gzip => {
// gzip encode file to OUT_DIR
let in_path = self.manifest_dir.join(&path);
let mut in_file = BufReader::new(File::open(&in_path)?);
let out_path = Path::new(&env::var("OUT_DIR").unwrap()).join(&path);
fs::create_dir_all(&out_path.parent().unwrap())?;
let out_file = BufWriter::new(File::create(&out_path)?);
let mut encoder = GzEncoder::new(out_file, flate2::Compression::best());
io::copy(&mut in_file, &mut encoder)?;
self.files.insert(as_key(key.borrow()).into_owned(),
(comp, out_path));
}
}
Ok(())
}
/// Add a whole directory recursively to the binary.
/// This function calls `file`, and therefore will panic! on missing files.
pub fn dir<P: AsRef<Path>>(mut self, path: P, comp: Compression) -> IncludeDir {
self.add_dir(path, comp).unwrap();
self
}
/// ## Panics
///
/// This function panics when CARGO_MANIFEST_DIR or OUT_DIR are not defined.
pub fn add_dir<P: AsRef<Path>>(&mut self, path: P, comp: Compression) -> io::Result<()> {
for entry in WalkDir::new(path).follow_links(true).into_iter() {
match entry {
Ok(ref e) if!e.file_type().is_dir() => {
self.add_file(e.path(), comp)?;
}
_ => (),
}
}
Ok(())
}
pub fn build(self, out_name: &str) -> io::Result<()> {
let out_path = Path::new(&env::var("OUT_DIR").unwrap()).join(out_name);
let mut out_file = BufWriter::new(File::create(&out_path)?);
writeln!(&mut out_file, "#[allow(clippy::unreadable_literal)]")?;
writeln!(&mut out_file,
"pub static {}: ::includedir::Files = ::includedir::Files {{\n\
files: ",
self.name)?;
let mut map: phf_codegen::Map<&str> = phf_codegen::Map::new();
let entries: Vec<_> = self.files.iter()
.map(|(name, (compression, path))| {
let include_path = format!("{}", self.manifest_dir.join(path).display());
(as_key(&name).to_string(), (compression, as_key(&include_path).to_string()))
})
.collect();
for (name, (compression, include_path)) in &entries {
map.entry(name,
&format!("(::includedir::Compression::{}, \
include_bytes!(\"{}\") as &'static [u8])",
compression, include_path));
}
writeln!(&mut out_file, "{}", map.build())?;
writeln!(&mut out_file, ", passthrough: ::std::sync::atomic::AtomicBool::new(false)")?;
writeln!(&mut out_file, "}};")?;
Ok(())
}
}
|
files: HashMap::new(),
name: static_name.to_owned(),
manifest_dir: Path::new(&env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR is not set")).to_owned()
}
|
random_line_split
|
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn main() {
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"test_thrift crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../../../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "serde";
if!options.is_empty()
        {
            conf.options(options);
        }
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"../test_thrift.thrift"
])
.expect("Failed while running thrift compilation");
}
|
{
conf.options(options);
}
|
conditional_block
|
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn main() {
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"test_thrift crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
        let path_from_manifest_to_base: &Path = "../../../../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "serde";
if!options.is_empty() {
conf.options(options);
}
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"../test_thrift.thrift"
])
.expect("Failed while running thrift compilation");
}
|
let path_from_manifest_to_base: &Path = "../../../../../..".as_ref();
|
random_line_split
|
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn
main
() {
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"test_thrift crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../../../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "serde";
if!options.is_empty() {
conf.options(options);
}
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"../test_thrift.thrift"
])
.expect("Failed while running thrift compilation");
}
|
main
|
identifier_name
|
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn main()
{
    // Rerun if this gets rewritten.
    println!("cargo:rerun-if-changed=thrift_build.rs");
    let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
    let out_dir: &Path = out_dir.as_ref();
    fs::write(
        out_dir.join("cratemap"),
        "test_thrift crate",
    ).expect("Failed to write cratemap");
    let conf = {
        let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
        let path_from_manifest_to_base: &Path = "../../../../../..".as_ref();
        let cargo_manifest_dir =
            env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
        let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
        let base_path = cargo_manifest_dir
            .join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "serde";
if!options.is_empty() {
conf.options(options);
}
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"../test_thrift.thrift"
])
.expect("Failed while running thrift compilation");
}
|
{
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"test_thrift crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../../../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
|
identifier_body
|
cci_class_4.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
// except according to those terms.
pub mod kitties {
    // NOTE(review): `uint`/`int` are pre-1.0 Rust types (this looks like a
    // historical rustc test fixture); modern code would use `usize`/`isize`.
    pub struct cat {
        meows : uint, // private meow counter; every 5th meow raises hunger
        pub how_hungry : int,
        pub name : String,
    }
    impl cat {
        /// Meow once (delegates to `meow`).
        pub fn speak(&mut self) { self.meow(); }
        /// Eat if hungry; returns whether any food was consumed.
        pub fn eat(&mut self) -> bool {
            if self.how_hungry > 0 {
                println!("OM NOM NOM");
                self.how_hungry -= 2;
                return true;
            } else {
                println!("Not hungry!");
                return false;
            }
        }
    }
    impl cat {
        /// Meow and bump the counter; every 5th meow makes the cat hungrier.
        pub fn meow(&mut self) {
            println!("Meow");
            self.meows += 1_usize;
            if self.meows % 5_usize == 0_usize {
                self.how_hungry += 1;
            }
        }
    }
    /// Free-function constructor named after the type (pre-1.0 idiom).
    pub fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
        cat {
            meows: in_x,
            how_hungry: in_y,
            name: in_name
        }
    }
}
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
random_line_split
|
cci_class_4.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub mod kitties {
pub struct cat {
meows : uint,
pub how_hungry : int,
pub name : String,
}
impl cat {
pub fn speak(&mut self) { self.meow(); }
pub fn eat(&mut self) -> bool {
if self.how_hungry > 0 {
println!("OM NOM NOM");
self.how_hungry -= 2;
return true;
} else {
println!("Not hungry!");
return false;
}
}
}
impl cat {
pub fn meow(&mut self) {
println!("Meow");
self.meows += 1_usize;
if self.meows % 5_usize == 0_usize
            {
                self.how_hungry += 1;
            }
}
}
pub fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
cat {
meows: in_x,
how_hungry: in_y,
name: in_name
}
}
}
|
{
self.how_hungry += 1;
}
|
conditional_block
|
cci_class_4.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub mod kitties {
pub struct cat {
meows : uint,
pub how_hungry : int,
pub name : String,
}
impl cat {
pub fn
speak
(&mut self) { self.meow(); }
pub fn eat(&mut self) -> bool {
if self.how_hungry > 0 {
println!("OM NOM NOM");
self.how_hungry -= 2;
return true;
} else {
println!("Not hungry!");
return false;
}
}
}
impl cat {
pub fn meow(&mut self) {
println!("Meow");
self.meows += 1_usize;
if self.meows % 5_usize == 0_usize {
self.how_hungry += 1;
}
}
}
pub fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
cat {
meows: in_x,
how_hungry: in_y,
name: in_name
}
}
}
|
speak
|
identifier_name
|
mod.rs
|
/// VGA text buffer module.
pub mod color;
#[cfg(test)]
mod test;
pub use self::color::Color;
pub use self::color::VgaColorCode;
pub use core::ptr::Unique;
pub use spin::Mutex;
/// Height of the VGA text buffer.
pub const BUFFER_HEIGHT: usize = 25;
/// Width of the VGA text buffer.
pub const BUFFER_WIDTH: usize = 80;
/// The location of the VGA text buffer.
// NOTE(review): 0xB8000 is the conventional VGA text-mode address; this is a
// raw cast, so all safety rests on the kernel mapping that region.
pub const VGA_BUFFER_LOCATION: *mut Buffer = (0xB8000 as *mut Buffer);
/// Represents a VGA screen character.
///
/// Each VGA screen character is a 2-byte value which is broken down into two 1-byte halves:
/// - The top byte is for the ASCII character.
/// - The bottom byte is for the VGA color code.
// repr(C) pins field order so the struct matches the 2-byte VGA cell layout
// described in the module docs above.
#[repr(C)]
#[derive(Clone, Copy, PartialEq)]
pub struct ScreenChar {
    ascii_character: u8,      // the character glyph
    color_code: VgaColorCode, // the foreground/background color attribute
}
/// Represents a VGA buffer.
///
/// Even though the VGA display is a two-dimensional `BUFFER_WIDTH` x `BUFFER_HEIGHT` char display,
/// the VGA text buffer is just a one-dimensional array of size `BUFFER_WIDTH` x `BUFFER_HEIGHT`.
pub struct Buffer {
    // Row-major view: chars[row][column], BUFFER_HEIGHT rows of
    // BUFFER_WIDTH cells each.
    chars: [[ScreenChar; BUFFER_WIDTH]; BUFFER_HEIGHT],
}
/// VGA text buffer writer. Use this struct for rudimentary writes to the VGA text buffer.
///
/// This struct keeps track of the current row and current column of the cursor, as well as the
/// current color code. It also owns the pointer to the VGA text buffer at `0xB8000`.
///
/// This struct is not threadsafe.
pub struct Writer {
    row_position: usize,      // current cursor row, 0..BUFFER_HEIGHT
    column_position: usize,   // current cursor column, 0..BUFFER_WIDTH
    color_code: VgaColorCode, // color applied to subsequently written chars
    buffer: Unique<Buffer>,   // owning pointer to the memory-mapped buffer
}
impl Writer {
/// Writes a byte with the current color code to the current row and column of the VGA text
/// buffer.
///
/// Once a byte is written, `self.column_position` is incremented. If `self.column_position` is
/// greater than the width of our buffer, we set it to zero and increment `self.row_position.`
/// If `self.row_position` is greater than the height of our buffer, we move all rows up by one
/// (destroying the data in the first row) and set `self.row_position` to the last row and set
/// `self.column_position` to 0.
///
/// Handles standard alphanumeric characters as well as symbols.
/// Handles \n and \r.
///
/// # Arguments
///
/// * `byte` - The character to be written to the buffer at the current color, row, and column.
///
/// # Invariants
///
/// * 0 <= `self.row_position` < `BUFFER_HEIGHT`
/// * 0 <= `self.column_position` < `BUFFER_WIDTH`
/// * `pre.self.column_position` == `post.self.column_position`
/// || `post.self.column_position` == 0
/// * 0 <= `post.self.row_position` - `pre.self.row_position` <= 1
pub fn write_byte(&mut self, byte: u8) {
match byte {
// For newlines, just increment the `row_position.`
// If it goes off screen, then `scroll()` instead.
b'\n' => {
// This looks weird, but parse this as "if the next line is greater than the last
// line".
if self.row_position + 1 > BUFFER_HEIGHT - 1 {
self.scroll();
} else {
self.row_position += 1;
}
},
// Carriage returns.
b'\r' => self.column_position = 0,
// Everything else. Just write the char and increment `column_position.`
// Increment `row_position` if necessary. Scroll if necessary after that.
_ => {
if self.column_position > BUFFER_WIDTH - 1 {
self.row_position += 1;
self.column_position = 0;
}
if self.row_position > BUFFER_HEIGHT - 1 {
self.scroll();
}
// Write the char.
self.buffer().chars[self.row_position][self.column_position] = ScreenChar {
ascii_character: byte,
color_code: self.color_code,
};
self.column_position += 1;
}
}
}
/// Sets the position of the cursor. If the arguments are outside of the VGA buffer bounds
/// (`BUFFER_HEIGHT` and `BUFFER_WIDTH` are the respective max values, while 0 is the min value
/// for both), then we just default to the closest valid value.
///
/// # Arguments
///
/// * `row_position` - The row position to be set to.
/// * `column_position` - The column position to be set to.
///
/// # Invariants
///
/// * 0 <= `self.row_position` < `BUFFER_HEIGHT`
/// * 0 <= `self.column_position` < `BUFFER_WIDTH`
pub fn set_position(&mut self, row_position: usize, column_position: usize) {
// TODO(mgazzola) Write some min/max macros.
if row_position >= BUFFER_HEIGHT {
self.row_position = BUFFER_HEIGHT - 1;
} else {
self.row_position = row_position;
}
if column_position >= BUFFER_WIDTH {
self.column_position = BUFFER_WIDTH - 1;
} else {
self.column_position = column_position;
}
}
/// Clears the entire screen. Just calls `clear_row` for all rows. Then, sets the cursor
/// position to (0, 0).
///
/// # Invariants
///
/// * `post.self.row_position` == `post.self.column_position` == 0
pub fn clear_screen(&mut self) {
for row in 0..BUFFER_HEIGHT {
self.clear_row(row);
}
self.set_position(0, 0);
}
/// `write_byte()`'s each byte in a specified string.
///
/// # Arguments
///
/// *'s' - The string to write to the screen.
pub fn write_str(&mut self, s: &str) {
for byte in s.bytes() {
self.write_byte(byte);
}
}
pub fn set_color(&mut self, fg: Color, bg: Color) {
self.color_code = VgaColorCode::new(fg, bg);
}
/// Returns the mutable buffer pointer.
fn buffer(&mut self) -> &mut Buffer
    {
        unsafe {
            self.buffer.get_mut()
        }
    }
/// Scrolls the entire screen by setting each row to be the row below it. This destroys the
/// first 80 characters of data that existed before this function was called. This function
/// also clears the final row of the text buffer (the last 80 characters of data). Afterwards,
/// we set the `column_position` to 0 and the `row_position` to the final row.
///
/// # Invariants
///
/// * `post.self.column_position` = 0
/// * `post.self.row_position` = `BUFFER_HEIGHT` - 1
fn scroll(&mut self) {
for row in 0..(BUFFER_HEIGHT - 1) {
self.buffer().chars[row] = self.buffer().chars[row + 1];
}
self.clear_row(BUFFER_HEIGHT - 1);
self.column_position = 0;
self.row_position = BUFFER_HEIGHT - 1;
}
/// Sets all data in row `row` to just be whitespace. Uses the current color code.
///
/// # Arguments
///
/// * `row` - The row to clear.
fn clear_row(&mut self, row: usize) {
let blank_char = ScreenChar {
ascii_character: b' ',
color_code: self.color_code,
};
self.buffer().chars[row] = [blank_char; BUFFER_WIDTH];
}
}
/// Implementation of the `Write` trait for the `Writer` struct.
impl ::core::fmt::Write for Writer {
/// Required.
///
/// Implements the `Write` `write_str` method.
fn write_str(&mut self, s: &str) -> ::core::fmt::Result {
self.write_str(s);
Ok(())
}
}
/// Defines the *one-and-only* writer that should exist.
/// This method is both public and thread-safe. Use this to access the writer.
/// Defaults the writer to Light Green foreground text and Black background.
pub static WRITER: Mutex<Writer> = Mutex::new(Writer {
    row_position: 0,
    column_position: 0,
    color_code: VgaColorCode::new(Color::LightGreen, Color::Black),
    // `VGA_BUFFER_LOCATION` (0xB8000) is non-null, so the Unique is valid.
    // NOTE(review): this uses the historical `Unique::new` signature; the
    // current API returns `Option` — confirm against the pinned toolchain.
    buffer: unsafe {
        Unique::new(VGA_BUFFER_LOCATION)
    },
});
/// Macro for `print`.
macro_rules! print {
    ($($arg:tt)*) => ({
        use core::fmt::Write;
        // Lock the global writer for the duration of this one write so
        // concurrent prints cannot interleave mid-format.
        let mut writer = $crate::drivers::vga::WRITER.lock();
        writer.write_fmt(format_args!($($arg)*)).unwrap();
    })
}
/// Macro for `println`.
macro_rules! println {
    // "\n\r" (newline then carriage return) matches `Writer::write_byte`,
    // which treats `\n` as row-advance and `\r` as column-reset separately.
    ($fmt:expr) => (print!(concat!($fmt, "\n\r")));
    ($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n\r"), $($arg)*));
}
|
{
unsafe {
self.buffer.get_mut()
}
}
|
identifier_body
|
mod.rs
|
/// VGA text buffer module.
pub mod color;
#[cfg(test)]
mod test;
pub use self::color::Color;
pub use self::color::VgaColorCode;
pub use core::ptr::Unique;
pub use spin::Mutex;
/// Height of the VGA text buffer.
pub const BUFFER_HEIGHT: usize = 25;
/// Width of the VGA text buffer.
pub const BUFFER_WIDTH: usize = 80;
/// The location of the VGA text buffer.
pub const VGA_BUFFER_LOCATION: *mut Buffer = (0xB8000 as *mut Buffer);
/// Represents a VGA screen character.
///
/// Each VGA screen character is a 2-byte value which is broken down into two 1-byte halves:
/// - The top byte is for the ASCII character.
/// - The bottom byte is for the VGA color code.
#[repr(C)]
#[derive(Clone, Copy, PartialEq)]
pub struct ScreenChar {
ascii_character: u8,
color_code: VgaColorCode,
}
/// Represents a VGA buffer.
///
/// Even though the VGA display is a two-dimensional `BUFFER_WIDTH` x `BUFFER_HEIGHT` char display,
/// the VGA text buffer is just a one-dimensional array of size `BUFFER_WIDTH` x `BUFFER_HEIGHT`.
pub struct Buffer {
chars: [[ScreenChar; BUFFER_WIDTH]; BUFFER_HEIGHT],
}
/// VGA text buffer writer. Use this struct for rudimentary writes to the VGA text buffer.
///
/// This struct keeps track of the current row and current column of the cursor, as well as the
/// current color code. It also owns the pointer to the VGA text buffer at `0xB8000`.
///
/// This struct is not threadsafe.
pub struct Writer {
row_position: usize,
column_position: usize,
color_code: VgaColorCode,
buffer: Unique<Buffer>,
}
impl Writer {
/// Writes a byte with the current color code to the current row and column of the VGA text
/// buffer.
///
/// Once a byte is written, `self.column_position` is incremented. If `self.column_position` is
/// greater than the width of our buffer, we set it to zero and increment `self.row_position.`
/// If `self.row_position` is greater than the height of our buffer, we move all rows up by one
/// (destroying the data in the first row) and set `self.row_position` to the last row and set
/// `self.column_position` to 0.
///
/// Handles standard alphanumeric characters as well as symbols.
/// Handles \n and \r.
///
/// # Arguments
///
/// * `byte` - The character to be written to the buffer at the current color, row, and column.
///
/// # Invariants
///
/// * 0 <= `self.row_position` < `BUFFER_HEIGHT`
/// * 0 <= `self.column_position` < `BUFFER_WIDTH`
/// * `pre.self.column_position` == `post.self.column_position`
/// || `post.self.column_position` == 0
/// * 0 <= `post.self.row_position` - `pre.self.row_position` <= 1
pub fn write_byte(&mut self, byte: u8) {
match byte {
// For newlines, just increment the `row_position.`
// If it goes off screen, then `scroll()` instead.
b'\n' => {
// This looks weird, but parse this as "if the next line is greater than the last
// line".
if self.row_position + 1 > BUFFER_HEIGHT - 1 {
self.scroll();
} else {
self.row_position += 1;
}
},
// Carriage returns.
b'\r' => self.column_position = 0,
// Everything else. Just write the char and increment `column_position.`
// Increment `row_position` if necessary. Scroll if necessary after that.
_ => {
if self.column_position > BUFFER_WIDTH - 1 {
self.row_position += 1;
self.column_position = 0;
}
if self.row_position > BUFFER_HEIGHT - 1 {
self.scroll();
}
// Write the char.
self.buffer().chars[self.row_position][self.column_position] = ScreenChar {
ascii_character: byte,
color_code: self.color_code,
};
self.column_position += 1;
}
}
}
    /// Sets the position of the cursor. If the arguments are outside of the VGA buffer bounds
    /// (`BUFFER_HEIGHT` and `BUFFER_WIDTH` are the respective max values, while 0 is the min value
    /// for both), then we just default to the closest valid value.
    ///
/// # Arguments
///
/// * `row_position` - The row position to be set to.
/// * `column_position` - The column position to be set to.
///
/// # Invariants
///
/// * 0 <= `self.row_position` < `BUFFER_HEIGHT`
/// * 0 <= `self.column_position` < `BUFFER_WIDTH`
pub fn set_position(&mut self, row_position: usize, column_position: usize) {
// TODO(mgazzola) Write some min/max macros.
if row_position >= BUFFER_HEIGHT {
self.row_position = BUFFER_HEIGHT - 1;
} else {
self.row_position = row_position;
}
if column_position >= BUFFER_WIDTH {
self.column_position = BUFFER_WIDTH - 1;
} else {
self.column_position = column_position;
}
}
/// Clears the entire screen. Just calls `clear_row` for all rows. Then, sets the cursor
/// position to (0, 0).
///
/// # Invariants
///
/// * `post.self.row_position` == `post.self.column_position` == 0
pub fn clear_screen(&mut self) {
for row in 0..BUFFER_HEIGHT {
self.clear_row(row);
}
self.set_position(0, 0);
}
/// `write_byte()`'s each byte in a specified string.
///
/// # Arguments
///
/// *'s' - The string to write to the screen.
pub fn write_str(&mut self, s: &str) {
for byte in s.bytes() {
self.write_byte(byte);
}
}
pub fn set_color(&mut self, fg: Color, bg: Color) {
self.color_code = VgaColorCode::new(fg, bg);
}
/// Returns the mutable buffer pointer.
fn buffer(&mut self) -> &mut Buffer {
unsafe {
self.buffer.get_mut()
}
}
/// Scrolls the entire screen by setting each row to be the row below it. This destroys the
/// first 80 characters of data that existed before this function was called. This function
/// also clears the final row of the text buffer (the last 80 characters of data). Afterwards,
/// we set the `column_position` to 0 and the `row_position` to the final row.
///
/// # Invariants
///
/// * `post.self.column_position` = 0
/// * `post.self.row_position` = `BUFFER_HEIGHT` - 1
fn scroll(&mut self) {
for row in 0..(BUFFER_HEIGHT - 1) {
self.buffer().chars[row] = self.buffer().chars[row + 1];
}
self.clear_row(BUFFER_HEIGHT - 1);
self.column_position = 0;
self.row_position = BUFFER_HEIGHT - 1;
}
/// Sets all data in row `row` to just be whitespace. Uses the current color code.
///
/// # Arguments
///
/// * `row` - The row to clear.
fn clear_row(&mut self, row: usize) {
let blank_char = ScreenChar {
ascii_character: b' ',
color_code: self.color_code,
};
self.buffer().chars[row] = [blank_char; BUFFER_WIDTH];
}
}
/// Implementation of the `Write` trait for the `Writer` struct.
impl ::core::fmt::Write for Writer {
/// Required.
///
/// Implements the `Write` `write_str` method.
fn write_str(&mut self, s: &str) -> ::core::fmt::Result {
self.write_str(s);
Ok(())
}
}
/// Defines the *one-and-only* writer that should exist.
/// This method is both public and thread-safe. Use this to access the writer.
/// Defaults the writer to Light Green foreground text and Black background.
pub static WRITER: Mutex<Writer> = Mutex::new(Writer {
row_position: 0,
column_position: 0,
color_code: VgaColorCode::new(Color::LightGreen, Color::Black),
buffer: unsafe {
Unique::new(VGA_BUFFER_LOCATION)
},
});
/// Macro for `print`.
macro_rules! print {
($($arg:tt)*) => ({
use core::fmt::Write;
let mut writer = $crate::drivers::vga::WRITER.lock();
writer.write_fmt(format_args!($($arg)*)).unwrap();
})
}
/// Macro for `println`.
macro_rules! println {
($fmt:expr) => (print!(concat!($fmt, "\n\r")));
($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n\r"), $($arg)*));
}
|
/// Sets the position of the cursor. If the arguments are outside of the VGA buffer bounds
/// (`BUFFER_HEIGHT` and `BUFFER_WIDTH` are the respective max values, while 0 is the min value
/// for both), then we just default to the closest valid value.
///
|
random_line_split
|
mod.rs
|
/// VGA text buffer module.
pub mod color;
#[cfg(test)]
mod test;
pub use self::color::Color;
pub use self::color::VgaColorCode;
pub use core::ptr::Unique;
pub use spin::Mutex;
/// Height of the VGA text buffer.
pub const BUFFER_HEIGHT: usize = 25;
/// Width of the VGA text buffer.
pub const BUFFER_WIDTH: usize = 80;
/// The location of the VGA text buffer.
pub const VGA_BUFFER_LOCATION: *mut Buffer = (0xB8000 as *mut Buffer);
/// Represents a VGA screen character.
///
/// Each VGA screen character is a 2-byte value which is broken down into two 1-byte halves:
/// - The top byte is for the ASCII character.
/// - The bottom byte is for the VGA color code.
#[repr(C)]
#[derive(Clone, Copy, PartialEq)]
pub struct ScreenChar {
ascii_character: u8,
color_code: VgaColorCode,
}
/// Represents a VGA buffer.
///
/// Even though the VGA display is a two-dimensional `BUFFER_WIDTH` x `BUFFER_HEIGHT` char display,
/// the VGA text buffer is just a one-dimensional array of size `BUFFER_WIDTH` x `BUFFER_HEIGHT`.
pub struct
Buffer
{
chars: [[ScreenChar; BUFFER_WIDTH]; BUFFER_HEIGHT],
}
/// VGA text buffer writer. Use this struct for rudimentary writes to the VGA text buffer.
///
/// This struct keeps track of the current row and current column of the cursor, as well as the
/// current color code. It also owns the pointer to the VGA text buffer at `0xB8000`.
///
/// This struct is not threadsafe.
pub struct Writer {
row_position: usize,
column_position: usize,
color_code: VgaColorCode,
buffer: Unique<Buffer>,
}
impl Writer {
    /// Writes a byte with the current color code to the current row and column of the VGA text
    /// buffer.
    ///
    /// Once a byte is written, `self.column_position` is incremented. If `self.column_position` is
    /// greater than the width of our buffer, we set it to zero and increment `self.row_position.`
    /// If `self.row_position` is greater than the height of our buffer, we move all rows up by one
    /// (destroying the data in the first row) and set `self.row_position` to the last row and set
    /// `self.column_position` to 0.
    ///
    /// Handles standard alphanumeric characters as well as symbols.
    /// Handles \n and \r. Note that `\n` only advances the row and does NOT reset the
    /// column; callers are expected to emit `\r` as well (the `println!` macro in this
    /// module appends `"\n\r"`).
    ///
    /// # Arguments
    ///
    /// * `byte` - The character to be written to the buffer at the current color, row, and column.
    ///
    /// # Invariants
    ///
    /// * 0 <= `self.row_position` < `BUFFER_HEIGHT`
    /// * 0 <= `self.column_position` < `BUFFER_WIDTH`
    /// * `pre.self.column_position` == `post.self.column_position`
    ///   || `post.self.column_position` == 0
    /// * 0 <= `post.self.row_position` - `pre.self.row_position` <= 1
    pub fn write_byte(&mut self, byte: u8) {
        match byte {
            // For newlines, just increment the `row_position.`
            // If it goes off screen, then `scroll()` instead.
            b'\n' => {
                // This looks weird, but parse this as "if the next line is greater than the last
                // line".
                if self.row_position + 1 > BUFFER_HEIGHT - 1 {
                    self.scroll();
                } else {
                    self.row_position += 1;
                }
            },
            // Carriage returns: move the cursor back to the start of the current line.
            b'\r' => self.column_position = 0,
            // Everything else. Just write the char and increment `column_position.`
            // Increment `row_position` if necessary. Scroll if necessary after that.
            _ => {
                // Wrap *before* writing if the previous write filled the row.
                if self.column_position > BUFFER_WIDTH - 1 {
                    self.row_position += 1;
                    self.column_position = 0;
                }
                // `scroll()` clamps `row_position` back to the last row, so the
                // indexing below is always in bounds.
                if self.row_position > BUFFER_HEIGHT - 1 {
                    self.scroll();
                }
                // Write the char.
                self.buffer().chars[self.row_position][self.column_position] = ScreenChar {
                    ascii_character: byte,
                    color_code: self.color_code,
                };
                self.column_position += 1;
            }
        }
    }
    /// Sets the position of the cursor. If the arguments are outside of the VGA buffer bounds
    /// (`BUFFER_HEIGHT` and `BUFFER_WIDTH` are the respective max values, while 0 is the min value
    /// for both), then we just default to the closest valid value.
    ///
    /// # Arguments
    ///
    /// * `row_position` - The row position to be set to.
    /// * `column_position` - The column position to be set to.
    ///
    /// # Invariants
    ///
    /// * 0 <= `self.row_position` < `BUFFER_HEIGHT`
    /// * 0 <= `self.column_position` < `BUFFER_WIDTH`
    pub fn set_position(&mut self, row_position: usize, column_position: usize) {
        // TODO(mgazzola) Write some min/max macros.
        // Clamp each coordinate to the last valid cell rather than rejecting it.
        if row_position >= BUFFER_HEIGHT {
            self.row_position = BUFFER_HEIGHT - 1;
        } else {
            self.row_position = row_position;
        }
        if column_position >= BUFFER_WIDTH {
            self.column_position = BUFFER_WIDTH - 1;
        } else {
            self.column_position = column_position;
        }
    }
    /// Clears the entire screen. Just calls `clear_row` for all rows. Then, sets the cursor
    /// position to (0, 0).
    ///
    /// # Invariants
    ///
    /// * `post.self.row_position` == `post.self.column_position` == 0
    pub fn clear_screen(&mut self) {
        for row in 0..BUFFER_HEIGHT {
            self.clear_row(row);
        }
        self.set_position(0, 0);
    }
    /// `write_byte()`'s each byte in a specified string.
    ///
    /// Non-ASCII `str` content is written byte-by-byte (each UTF-8 byte becomes one cell).
    ///
    /// # Arguments
    ///
    /// * `s` - The string to write to the screen.
    pub fn write_str(&mut self, s: &str) {
        for byte in s.bytes() {
            self.write_byte(byte);
        }
    }
    /// Sets the color applied to subsequently written characters.
    /// Already-written cells are unaffected.
    pub fn set_color(&mut self, fg: Color, bg: Color) {
        self.color_code = VgaColorCode::new(fg, bg);
    }
    /// Returns the mutable buffer pointer.
    fn buffer(&mut self) -> &mut Buffer {
        // Unsafe: `Unique` dereference. Sound as long as `self.buffer` points at the
        // live VGA buffer, which `WRITER`'s initializer establishes.
        unsafe {
            self.buffer.get_mut()
        }
    }
    /// Scrolls the entire screen by setting each row to be the row below it. This destroys the
    /// first 80 characters of data that existed before this function was called. This function
    /// also clears the final row of the text buffer (the last 80 characters of data). Afterwards,
    /// we set the `column_position` to 0 and the `row_position` to the final row.
    ///
    /// # Invariants
    ///
    /// * `post.self.column_position` = 0
    /// * `post.self.row_position` = `BUFFER_HEIGHT` - 1
    fn scroll(&mut self) {
        for row in 0..(BUFFER_HEIGHT - 1) {
            // Rows are `[ScreenChar; BUFFER_WIDTH]`, which is `Copy`, so this copies
            // the row below by value.
            self.buffer().chars[row] = self.buffer().chars[row + 1];
        }
        self.clear_row(BUFFER_HEIGHT - 1);
        self.column_position = 0;
        self.row_position = BUFFER_HEIGHT - 1;
    }
    /// Sets all data in row `row` to just be whitespace. Uses the current color code.
    ///
    /// # Arguments
    ///
    /// * `row` - The row to clear.
    fn clear_row(&mut self, row: usize) {
        let blank_char = ScreenChar {
            ascii_character: b' ',
            color_code: self.color_code,
        };
        self.buffer().chars[row] = [blank_char; BUFFER_WIDTH];
    }
}
/// Implementation of the `Write` trait for the `Writer` struct.
impl ::core::fmt::Write for Writer {
/// Required.
///
/// Implements the `Write` `write_str` method.
fn write_str(&mut self, s: &str) -> ::core::fmt::Result {
self.write_str(s);
Ok(())
}
}
/// Defines the *one-and-only* writer that should exist.
/// This method is both public and thread-safe. Use this to access the writer.
/// Defaults the writer to Light Green foreground text and Black background.
///
/// NOTE(review): `Mutex::new` in a `static` initializer implies a const-constructible
/// mutex (e.g. a spinlock type), not `std::sync::Mutex` — confirm which `Mutex` is imported.
pub static WRITER: Mutex<Writer> = Mutex::new(Writer {
    row_position: 0,
    column_position: 0,
    color_code: VgaColorCode::new(Color::LightGreen, Color::Black),
    // Unsafe: `Unique::new` requires a non-null pointer; `VGA_BUFFER_LOCATION`
    // (0xB8000) is never null. Validity of the MMIO region is assumed.
    buffer: unsafe {
        Unique::new(VGA_BUFFER_LOCATION)
    },
});
/// Macro for `print`.
///
/// Locks the global `WRITER` and writes the formatted arguments to the VGA text
/// buffer. The `unwrap` cannot fire from the writer itself, since its `write_str`
/// always returns `Ok(())`.
macro_rules! print {
    ($($arg:tt)*) => ({
        use core::fmt::Write;
        let mut writer = $crate::drivers::vga::WRITER.lock();
        writer.write_fmt(format_args!($($arg)*)).unwrap();
    })
}
/// Macro for `println`.
///
/// Appends `"\n\r"` (newline then carriage return) because `Writer::write_byte`
/// treats `\n` as row-advance only and `\r` as column reset.
macro_rules! println {
    ($fmt:expr) => (print!(concat!($fmt, "\n\r")));
    ($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n\r"), $($arg)*));
}
|
Buffer
|
identifier_name
|
attr.rs
|
not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Parsed representations of [DOM attributes][attr].
//!
//! [attr]: https://dom.spec.whatwg.org/#interface-attr
use {Atom, Prefix, Namespace, LocalName};
use app_units::Au;
use cssparser::{self, Color, RGBA};
use euclid::num::Zero;
use num_traits::ToPrimitive;
use properties::PropertyDeclarationBlock;
use selectors::attr::AttrSelectorOperation;
use servo_arc::Arc;
use servo_url::ServoUrl;
use shared_lock::Locked;
use std::ascii::AsciiExt;
use std::str::FromStr;
use str::{HTML_SPACE_CHARACTERS, read_exponent, read_fraction};
use str::{read_numbers, split_commas, split_html_space_chars};
use str::str_join;
use values::specified::Length;
// Duplicated from script::dom::values.
// Note: despite the name this is i32::MAX (2^31 - 1), not u32::MAX; it is the
// clamping bound used for reflected `unsigned long` IDL attributes below.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
/// Result of parsing an HTML dimension attribute value: automatic, a
/// percentage, or an absolute length (see `parse_length`).
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum LengthOrPercentageOrAuto {
    // Unparseable or absent input → automatic sizing.
    Auto,
    // Fraction of the containing dimension; `parse_length` stores 100% as 1.0.
    Percentage(f32),
    // Absolute length in app units.
    Length(Au),
}
/// A parsed DOM attribute value. Every variant except `Atom` and `Declaration`
/// pairs the original source string (exposed via `Deref<Target = str>`) with
/// its parsed form.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum AttrValue {
    // The raw, unparsed string value.
    String(String),
    // Source string plus its deduplicated space- or comma-separated tokens.
    TokenList(String, Vec<Atom>),
    // Source string plus its clamped non-negative integer value.
    UInt(String, u32),
    // Source string plus its signed integer value.
    Int(String, i32),
    // Source string plus its floating-point value.
    Double(String, f64),
    // An interned string; no separate source string is kept.
    Atom(Atom),
    // Source string plus its parsed length, if it parsed.
    Length(String, Option<Length>),
    // Source string plus its parsed legacy color, if it parsed.
    Color(String, Option<RGBA>),
    // Source string plus its parsed dimension value.
    Dimension(String, LengthOrPercentageOrAuto),
    /// Stores a URL, computed from the input string and a document's base URL.
    ///
    /// The URL is resolved at setting-time, so this kind of attribute value is
    /// not actually suitable for most URL-reflecting IDL attributes.
    ResolvedUrl(String, Option<ServoUrl>),
    /// Note that this variant is only used transitively as a fast path to set
    /// the property declaration block relevant to the style of an element when
    /// set from the inline declaration of that element (that is,
    /// `element.style`).
    ///
    /// This can, as of this writing, only correspond to the value of the
    /// `style` element, and is set from its relevant CSSInlineStyleDeclaration,
    /// and then converted to a string in Element::attribute_mutated.
    ///
    /// Note that we don't necessarily need to do that (we could just clone the
    /// declaration block), but that avoids keeping a refcounted
    /// declarationblock for longer than needed.
    Declaration(String,
                #[ignore_malloc_size_of = "Arc"]
                Arc<Locked<PropertyDeclarationBlock>>)
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
///
/// Skips leading HTML whitespace, consumes an optional sign, then reads digits.
/// Errors on empty input, no digits, or i64 overflow.
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i64, ()> {
    let mut chars = input
        .skip_while(|c| HTML_SPACE_CHARACTERS.contains(c))
        .peekable();
    // An input that is empty (after whitespace) is an error.
    let first = match chars.peek() {
        Some(&c) => c,
        None => return Err(()),
    };
    let sign: i64 = if first == '-' {
        chars.next();
        -1
    } else if first == '+' {
        chars.next();
        1
    } else {
        1
    };
    let (digits, _) = read_numbers(chars);
    // `read_numbers` yields None when there were no digits; overflow on the
    // sign multiplication is also an error.
    digits.and_then(|d| d.checked_mul(sign)).ok_or(())
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
///
/// Errors when the parsed value does not fit in an `i32`.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i32, ()> {
    let wide = do_parse_integer(input)?;
    wide.to_i32().ok_or(())
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
///
/// Errors when the parsed value is negative or does not fit in a `u32`.
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Result<u32, ()> {
    let wide = do_parse_integer(input)?;
    wide.to_u32().ok_or(())
}
/// Parse a floating-point number according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-floating-point-number-values>
pub fn parse_double(string: &str) -> Result<f64, ()> {
    let trimmed = string.trim_matches(HTML_SPACE_CHARACTERS);
    let mut input = trimmed.chars().peekable();
    // `value`/`divisor` start as ±1 to carry the sign into the integer part and
    // the fraction respectively; `chars_skipped` counts the sign character so the
    // re-scans of `trimmed` below can skip past it.
    let (value, divisor, chars_skipped) = match input.peek() {
        None => return Err(()),
        Some(&'-') => {
            input.next();
            (-1f64, -1f64, 1)
        }
        Some(&'+') => {
            input.next();
            (1f64, 1f64, 1)
        }
        _ => (1f64, 1f64, 0)
    };
    // Integer part; a leading '.' means the integer part is zero digits long.
    let (value, value_digits) = if let Some(&'.') = input.peek() {
        (0f64, 0)
    } else {
        let (read_val, read_digits) = read_numbers(input);
        (value * read_val.and_then(|result| result.to_f64()).unwrap_or(1f64), read_digits)
    };
    // Re-scan from just after the integer part to read the fraction...
    let input = trimmed.chars().skip(value_digits + chars_skipped).peekable();
    let (mut value, fraction_digits) = read_fraction(input, divisor, value);
    // ...and from just after the fraction to read the exponent.
    let input = trimmed.chars().skip(value_digits + chars_skipped + fraction_digits).peekable();
    if let Some(exp) = read_exponent(input) {
        value *= 10f64.powi(exp)
    };
    Ok(value)
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: String) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_comma_separated_tokenlist(tokens: String) -> AttrValue {
let atoms = split_commas(&tokens).map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): effecient conversion of Vec<Atom> to String
let tokens = String::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: String, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_i32(string: String, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
AttrValue::Int(string, result)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-double
pub fn from_double(string: String, default: f64) -> AttrValue {
let result = parse_double(&string).unwrap_or(default);
if result.is_normal() {
AttrValue::Double(string, result)
} else {
AttrValue::Double(string, default)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers
pub fn from_limited_i32(string: String, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
if result < 0 {
AttrValue::Int(string, default)
} else {
AttrValue::Int(string, result)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: String, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: String) -> AttrValue {
let value = Atom::from(string);
AttrValue::Atom(value)
}
pub fn from_resolved_url(base: &ServoUrl, url: String) -> AttrValue {
let joined = base.join(&url).ok();
AttrValue::ResolvedUrl(url, joined)
}
pub fn from_legacy_color(string: String) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: String) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
pub fn from_nonzero_dimension(string: String) -> AttrValue {
let parsed = parse_nonzero_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length>
|
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Assumes the `AttrValue` is a `ResolvedUrl` and returns its value.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `ResolvedUrl`
pub fn as_resolved_url(&self) -> Option<&ServoUrl> {
match *self {
AttrValue::ResolvedUrl(_, ref url) => url.as_ref(),
_ => panic!("Url not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
/// Return the AttrValue as a dimension computed from its integer
/// representation, assuming that integer representation specifies pixels.
///
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint_px_dimension(&self) -> LengthOrPercentageOrAuto {
if let AttrValue::UInt(_, value) = *self {
LengthOrPercentageOrAuto::Length(Au::from_px(value as i32))
} else {
panic!("Uint not found");
}
}
pub fn eval_selector(&self, selector: &AttrSelectorOperation<&String>) -> bool {
// FIXME(SimonSapin) this can be more efficient by matching on `(self, selector)` variants
// and doing Atom comparisons instead of string comparisons where possible,
// with SelectorImpl::AttrValue changed to Atom.
selector.eval_str(self)
}
}
/// `Deref` to the attribute's string form: every variant keeps its source (or
/// serialized) string, so an `AttrValue` can be used wherever a `&str` is expected.
impl ::std::ops::Deref for AttrValue {
    type Target = str;
    fn deref(&self) -> &str {
        match *self {
            // All of these variants store the source string as their first field.
            AttrValue::String(ref value) |
            AttrValue::TokenList(ref value, _) |
            AttrValue::UInt(ref value, _) |
            AttrValue::Double(ref value, _) |
            AttrValue::Length(ref value, _) |
            AttrValue::Color(ref value, _) |
            AttrValue::Int(ref value, _) |
            AttrValue::ResolvedUrl(ref value, _) |
            AttrValue::Declaration(ref value, _) |
            AttrValue::Dimension(ref value, _) => &value,
            // An `Atom` derefs to its interned string contents.
            AttrValue::Atom(ref value) => &value,
        }
    }
}
/// Compares an `AttrValue` against an `Atom`. Atom-to-atom comparison uses
/// `Atom`'s own equality (presumably an interned-id check — confirm in the
/// `Atom` implementation); every other variant falls back to comparing the
/// string form obtained through `Deref`.
impl PartialEq<Atom> for AttrValue {
    fn eq(&self, other: &Atom) -> bool {
        match *self {
            AttrValue::Atom(ref value) => value == other,
            _ => other == &**self,
        }
    }
}
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-zero-dimension-values>
///
/// Like `parse_length`, but a parsed value of exactly zero (length or
/// percentage) is treated as `Auto`.
pub fn parse_nonzero_length(value: &str) -> LengthOrPercentageOrAuto {
    let parsed = parse_length(value);
    let is_zero = match parsed {
        LengthOrPercentageOrAuto::Length(l) => l == Au::zero(),
        LengthOrPercentageOrAuto::Percentage(p) => p == 0.,
        LengthOrPercentageOrAuto::Auto => false,
    };
    if is_zero { LengthOrPercentageOrAuto::Auto } else { parsed }
}
/// Parses a [legacy color][color]. If unparseable, `Err` is returned.
///
/// The step numbers below refer to the algorithm in the spec.
///
/// [color]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-colour-value
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
    // Steps 1 and 2: the empty string is an error.
    if input.is_empty() {
        return Err(())
    }
    // Step 3: strip leading and trailing ASCII whitespace.
    input = input.trim_matches(HTML_SPACE_CHARACTERS);
    // A whitespace-only input trims down to "", which would make the
    // unconditional `input.as_bytes()[0]` in step 9 panic; bail out instead.
    if input.is_empty() {
        return Err(())
    }
    // Step 4: "transparent" is explicitly an error for legacy colors.
    if input.eq_ignore_ascii_case("transparent") {
        return Err(())
    }
    // Step 5: named CSS color keywords.
    if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) {
        return Ok(rgba);
    }
    // Step 6: the #rgb shorthand; each digit is doubled (x * 17 == 0x11 * x).
    if input.len() == 4 {
        if let (b'#', Ok(r), Ok(g), Ok(b)) =
            (input.as_bytes()[0],
             hex(input.as_bytes()[1] as char),
             hex(input.as_bytes()[2] as char),
             hex(input.as_bytes()[3] as char)) {
            return Ok(RGBA::new(r * 17, g * 17, b * 17, 255))
        }
    }
    // Step 7: replace any char outside the BMP with "00".
    let mut new_input = String::new();
    for ch in input.chars() {
        if ch as u32 > 0xffff {
            new_input.push_str("00")
        } else {
            new_input.push(ch)
        }
    }
    let mut input = &*new_input;
    // Step 8: truncate to at most 128 characters.
    for (char_count, (index, _)) in input.char_indices().enumerate() {
        if char_count == 128 {
            input = &input[..index];
            break
        }
    }
    // Step 9: drop a leading '#'.
    if input.as_bytes()[0] == b'#' {
        input = &input[1..]
    }
    // Step 10: replace every non-hex-digit with '0'.
    let mut new_input = Vec::new();
    for ch in input.chars() {
        if hex(ch).is_ok() {
            new_input.push(ch as u8)
        } else {
            new_input.push(b'0')
        }
    }
    let mut input = new_input;
    // Step 11: zero-pad until the length is a non-zero multiple of 3.
    while input.is_empty() || (input.len() % 3) != 0 {
        input.push(b'0')
    }
    // Step 12: split into three equal-length components.
    let mut length = input.len() / 3;
    let (mut red, mut green, mut blue) = (&input[..length],
                                          &input[length..length * 2],
                                          &input[length * 2..]);
    // Step 13: keep only the 8 rightmost digits of each component.
    if length > 8 {
        red = &red[length - 8..];
        green = &green[length - 8..];
        blue = &blue[length - 8..];
        length = 8
    }
    // Step 14: strip leading zeros shared by all three components, down to
    // at most two digits each.
    while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
        red = &red[1..];
        green = &green[1..];
        blue = &blue[1..];
        length -= 1
    }
    // Steps 15-20: interpret the first two digits of each component as hex.
    // `hex_string` cannot fail here since step 10 left only hex digits.
    return Ok(RGBA::new(hex_string(red).unwrap(),
                        hex_string(green).unwrap(),
                        hex_string(blue).unwrap(),
                        255));

    // Value of a single ASCII hex digit.
    fn hex(ch: char) -> Result<u8, ()> {
        match ch {
            '0'...'9' => Ok((ch as u8) - b'0'),
            'a'...'f' => Ok((ch as u8) - b'a' + 10),
            'A'...'F' => Ok((ch as u8) - b'A' + 10),
            _ => Err(()),
        }
    }
    // Value of the first one or two hex digits of `string`.
    fn hex_string(string: &[u8]) -> Result<u8, ()> {
        match string.len() {
            0 => Err(()),
            1 => hex(string[0] as char),
            _ => {
                let upper = hex(string[0] as char)?;
                let lower = hex(string[1] as char)?;
                Ok((upper << 4) | lower)
            }
        }
    }
}
/// Parses a [dimension value][dim]. If unparseable, `Auto` is returned.
///
/// [dim]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-dimension-values
// TODO: this function can be rewritten to return Result<LengthOrPercentage, _>
pub fn parse_length(mut value: &str) -> LengthOrPercentageOrAuto {
    // Steps 1 & 2 are not relevant
    // Step 3: strip leading whitespace only.
    value = value.trim_left_matches(HTML_SPACE_CHARACTERS);
    // Step 4
    if value.is_empty() {
        return LengthOrPercentageOrAuto::Auto
    }
    // Step 5: an explicit '+' sign is allowed and ignored.
    if value.starts_with('+') {
        value = &value[1..]
    }
    // Steps 6 & 7: the value must start with a digit.
    match value.chars().nth(0) {
        Some('0'...'9') => {},
        _ => return LengthOrPercentageOrAuto::Auto,
    }
    // Steps 8 to 13
    // We trim the string length to the minimum of:
    // 1. the end of the string
    // 2. the first occurence of a '%' (U+0025 PERCENT SIGN)
    // 3. the second occurrence of a '.' (U+002E FULL STOP)
    // 4. the occurrence of a character that is neither a digit nor '%' nor '.'
    // Note: Step 10 is directly subsumed by FromStr::from_str
    let mut end_index = value.len();
    let (mut found_full_stop, mut found_percent) = (false, false);
    for (i, ch) in value.chars().enumerate() {
        match ch {
            '0'...'9' => continue,
            '%' => {
                found_percent = true;
                end_index = i;
                break
            }
            // The first '.' is part of the number; a second one terminates it.
            '.' if !found_full_stop => {
                found_full_stop = true;
                continue
            }
            _ => {
                end_index = i;
                break
            }
        }
    }
    value = &value[..end_index];
    // Percentages are stored as a fraction: "50%" becomes 0.5.
    if found_percent {
        let result: Result<f32, _> = FromStr::from_str(value);
        match result {
            Ok(number) => return LengthOrPercentageOrAuto::Percentage((number as f32) / 100.0),
            Err(_) => return LengthOrPercentageOrAuto::Auto,
        }
    }
    // Plain numbers are interpreted as CSS pixels.
    match FromStr::from_str(value) {
        Ok(number) => LengthOrPercentageOrAuto::Length(Au::from_f64_px(number)),
        Err(_) => LengthOrPercentageOrAuto::Auto,
    }
}
/// A struct that uniquely identifies an element's attribute.
|
{
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
|
identifier_body
|
attr.rs
|
was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Parsed representations of [DOM attributes][attr].
//!
//! [attr]: https://dom.spec.whatwg.org/#interface-attr
use {Atom, Prefix, Namespace, LocalName};
use app_units::Au;
use cssparser::{self, Color, RGBA};
use euclid::num::Zero;
use num_traits::ToPrimitive;
use properties::PropertyDeclarationBlock;
use selectors::attr::AttrSelectorOperation;
use servo_arc::Arc;
use servo_url::ServoUrl;
use shared_lock::Locked;
use std::ascii::AsciiExt;
use std::str::FromStr;
use str::{HTML_SPACE_CHARACTERS, read_exponent, read_fraction};
use str::{read_numbers, split_commas, split_html_space_chars};
use str::str_join;
use values::specified::Length;
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum LengthOrPercentageOrAuto {
Auto,
Percentage(f32),
Length(Au),
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum AttrValue {
String(String),
TokenList(String, Vec<Atom>),
UInt(String, u32),
Int(String, i32),
Double(String, f64),
Atom(Atom),
Length(String, Option<Length>),
Color(String, Option<RGBA>),
Dimension(String, LengthOrPercentageOrAuto),
/// Stores a URL, computed from the input string and a document's base URL.
///
/// The URL is resolved at setting-time, so this kind of attribute value is
/// not actually suitable for most URL-reflecting IDL attributes.
ResolvedUrl(String, Option<ServoUrl>),
/// Note that this variant is only used transitively as a fast path to set
/// the property declaration block relevant to the style of an element when
/// set from the inline declaration of that element (that is,
/// `element.style`).
///
/// This can, as of this writing, only correspond to the value of the
/// `style` element, and is set from its relevant CSSInlineStyleDeclaration,
/// and then converted to a string in Element::attribute_mutated.
///
/// Note that we don't necessarily need to do that (we could just clone the
/// declaration block), but that avoids keeping a refcounted
/// declarationblock for longer than needed.
Declaration(String,
#[ignore_malloc_size_of = "Arc"]
Arc<Locked<PropertyDeclarationBlock>>)
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i64, ()> {
let mut input = input.skip_while(|c| {
HTML_SPACE_CHARACTERS.iter().any(|s| s == c)
}).peekable();
let sign = match input.peek() {
None => return Err(()),
Some(&'-') => {
input.next();
-1
},
Some(&'+') => {
input.next();
1
},
Some(_) => 1,
};
let (value, _) = read_numbers(input);
value.and_then(|value| value.checked_mul(sign)).ok_or(())
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
///
/// Errors when the parsed value does not fit in an `i32`.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i32, ()> {
    let wide = do_parse_integer(input)?;
    wide.to_i32().ok_or(())
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
///
/// Errors when the parsed value is negative or does not fit in a `u32`.
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Result<u32, ()> {
    let wide = do_parse_integer(input)?;
    wide.to_u32().ok_or(())
}
/// Parse a floating-point number according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-floating-point-number-values>
pub fn parse_double(string: &str) -> Result<f64, ()> {
let trimmed = string.trim_matches(HTML_SPACE_CHARACTERS);
let mut input = trimmed.chars().peekable();
let (value, divisor, chars_skipped) = match input.peek() {
None => return Err(()),
Some(&'-') => {
input.next();
(-1f64, -1f64, 1)
}
Some(&'+') => {
input.next();
(1f64, 1f64, 1)
}
_ => (1f64, 1f64, 0)
};
let (value, value_digits) = if let Some(&'.') = input.peek() {
(0f64, 0)
} else {
let (read_val, read_digits) = read_numbers(input);
(value * read_val.and_then(|result| result.to_f64()).unwrap_or(1f64), read_digits)
};
let input = trimmed.chars().skip(value_digits + chars_skipped).peekable();
let (mut value, fraction_digits) = read_fraction(input, divisor, value);
let input = trimmed.chars().skip(value_digits + chars_skipped + fraction_digits).peekable();
if let Some(exp) = read_exponent(input) {
value *= 10f64.powi(exp)
};
Ok(value)
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: String) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_comma_separated_tokenlist(tokens: String) -> AttrValue {
let atoms = split_commas(&tokens).map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): effecient conversion of Vec<Atom> to String
let tokens = String::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: String, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_i32(string: String, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
AttrValue::Int(string, result)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-double
pub fn from_double(string: String, default: f64) -> AttrValue {
let result = parse_double(&string).unwrap_or(default);
if result.is_normal() {
AttrValue::Double(string, result)
} else {
AttrValue::Double(string, default)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers
pub fn from_limited_i32(string: String, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
if result < 0 {
AttrValue::Int(string, default)
} else {
AttrValue::Int(string, result)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: String, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: String) -> AttrValue {
let value = Atom::from(string);
AttrValue::Atom(value)
}
pub fn from_resolved_url(base: &ServoUrl, url: String) -> AttrValue {
let joined = base.join(&url).ok();
AttrValue::ResolvedUrl(url, joined)
}
pub fn from_legacy_color(string: String) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: String) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
pub fn from_nonzero_dimension(string: String) -> AttrValue {
let parsed = parse_nonzero_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length> {
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Assumes the `AttrValue` is a `ResolvedUrl` and returns its value.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `ResolvedUrl`
pub fn as_resolved_url(&self) -> Option<&ServoUrl> {
match *self {
AttrValue::ResolvedUrl(_, ref url) => url.as_ref(),
_ => panic!("Url not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
/// Return the AttrValue as a dimension computed from its integer
/// representation, assuming that integer representation specifies pixels.
///
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint_px_dimension(&self) -> LengthOrPercentageOrAuto {
if let AttrValue::UInt(_, value) = *self {
LengthOrPercentageOrAuto::Length(Au::from_px(value as i32))
} else {
panic!("Uint not found");
}
}
pub fn eval_selector(&self, selector: &AttrSelectorOperation<&String>) -> bool {
// FIXME(SimonSapin) this can be more efficient by matching on `(self, selector)` variants
// and doing Atom comparisons instead of string comparisons where possible,
// with SelectorImpl::AttrValue changed to Atom.
selector.eval_str(self)
}
}
impl ::std::ops::Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Double(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Int(ref value, _) |
AttrValue::ResolvedUrl(ref value, _) |
AttrValue::Declaration(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
impl PartialEq<Atom> for AttrValue {
fn eq(&self, other: &Atom) -> bool {
match *self {
AttrValue::Atom(ref value) => value == other,
_ => other == &**self,
}
}
}
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-zero-dimension-values>
///
/// Like `parse_length`, but a parsed value of exactly zero (length or
/// percentage) is treated as `Auto`.
pub fn parse_nonzero_length(value: &str) -> LengthOrPercentageOrAuto {
    let parsed = parse_length(value);
    let is_zero = match parsed {
        LengthOrPercentageOrAuto::Length(l) => l == Au::zero(),
        LengthOrPercentageOrAuto::Percentage(p) => p == 0.,
        LengthOrPercentageOrAuto::Auto => false,
    };
    if is_zero { LengthOrPercentageOrAuto::Auto } else { parsed }
}
/// Parses a [legacy color][color]. If unparseable, `Err` is returned.
///
/// [color]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-colour-value
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
// Steps 1 and 2.
if input.is_empty() {
return Err(())
}
// Step 3.
input = input.trim_matches(HTML_SPACE_CHARACTERS);
// Step 4.
if input.eq_ignore_ascii_case("transparent") {
return Err(())
}
// Step 5.
if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) {
return Ok(rgba);
}
// Step 6.
if input.len() == 4 {
if let (b'#', Ok(r), Ok(g), Ok(b)) =
(input.as_bytes()[0],
hex(input.as_bytes()[1] as char),
hex(input.as_bytes()[2] as char),
hex(input.as_bytes()[3] as char)) {
return Ok(RGBA::new(r * 17, g * 17, b * 17, 255))
}
}
// Step 7.
let mut new_input = String::new();
for ch in input.chars() {
if ch as u32 > 0xffff {
new_input.push_str("00")
} else {
new_input.push(ch)
}
}
let mut input = &*new_input;
// Step 8.
for (char_count, (index, _)) in input.char_indices().enumerate() {
if char_count == 128 {
input = &input[..index];
break
}
}
// Step 9.
if input.as_bytes()[0] == b'#' {
input = &input[1..]
|
// Step 10.
let mut new_input = Vec::new();
for ch in input.chars() {
if hex(ch).is_ok() {
new_input.push(ch as u8)
} else {
new_input.push(b'0')
}
}
let mut input = new_input;
// Step 11.
while input.is_empty() || (input.len() % 3)!= 0 {
input.push(b'0')
}
// Step 12.
let mut length = input.len() / 3;
let (mut red, mut green, mut blue) = (&input[..length],
&input[length..length * 2],
&input[length * 2..]);
// Step 13.
if length > 8 {
red = &red[length - 8..];
green = &green[length - 8..];
blue = &blue[length - 8..];
length = 8
}
// Step 14.
while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
red = &red[1..];
green = &green[1..];
blue = &blue[1..];
length -= 1
}
// Steps 15-20.
return Ok(RGBA::new(hex_string(red).unwrap(),
hex_string(green).unwrap(),
hex_string(blue).unwrap(),
255));
fn hex(ch: char) -> Result<u8, ()> {
match ch {
'0'...'9' => Ok((ch as u8) - b'0'),
'a'...'f' => Ok((ch as u8) - b'a' + 10),
'A'...'F' => Ok((ch as u8) - b'A' + 10),
_ => Err(()),
}
}
fn hex_string(string: &[u8]) -> Result<u8, ()> {
match string.len() {
0 => Err(()),
1 => hex(string[0] as char),
_ => {
let upper = hex(string[0] as char)?;
let lower = hex(string[1] as char)?;
Ok((upper << 4) | lower)
}
}
}
}
/// Parses a [dimension value][dim]. If unparseable, `Auto` is returned.
///
/// [dim]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-dimension-values
// TODO: this function can be rewritten to return Result<LengthOrPercentage, _>
pub fn parse_length(mut value: &str) -> LengthOrPercentageOrAuto {
// Steps 1 & 2 are not relevant
// Step 3
value = value.trim_left_matches(HTML_SPACE_CHARACTERS);
// Step 4
if value.is_empty() {
return LengthOrPercentageOrAuto::Auto
}
// Step 5
if value.starts_with('+') {
value = &value[1..]
}
// Steps 6 & 7
match value.chars().nth(0) {
Some('0'...'9') => {},
_ => return LengthOrPercentageOrAuto::Auto,
}
// Steps 8 to 13
// We trim the string length to the minimum of:
// 1. the end of the string
// 2. the first occurence of a '%' (U+0025 PERCENT SIGN)
// 3. the second occurrence of a '.' (U+002E FULL STOP)
// 4. the occurrence of a character that is neither a digit nor '%' nor '.'
// Note: Step 10 is directly subsumed by FromStr::from_str
let mut end_index = value.len();
let (mut found_full_stop, mut found_percent) = (false, false);
for (i, ch) in value.chars().enumerate() {
match ch {
'0'...'9' => continue,
'%' => {
found_percent = true;
end_index = i;
break
}
'.' if!found_full_stop => {
found_full_stop = true;
continue
}
_ => {
end_index = i;
break
}
}
}
value = &value[..end_index];
if found_percent {
let result: Result<f32, _> = FromStr::from_str(value);
match result {
Ok(number) => return LengthOrPercentageOrAuto::Percentage((number as f32) / 100.0),
Err(_) => return LengthOrPercentageOrAuto::Auto,
}
}
match FromStr::from_str(value) {
Ok(number) => LengthOrPercentageOrAuto::Length(Au::from_f64_px(number)),
Err(_) => LengthOrPercentageOrAuto::Auto,
}
}
/// A struct that uniquely identifies an element's attribute.
#[derive
|
}
|
random_line_split
|
attr.rs
|
not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Parsed representations of [DOM attributes][attr].
//!
//! [attr]: https://dom.spec.whatwg.org/#interface-attr
use {Atom, Prefix, Namespace, LocalName};
use app_units::Au;
use cssparser::{self, Color, RGBA};
use euclid::num::Zero;
use num_traits::ToPrimitive;
use properties::PropertyDeclarationBlock;
use selectors::attr::AttrSelectorOperation;
use servo_arc::Arc;
use servo_url::ServoUrl;
use shared_lock::Locked;
use std::ascii::AsciiExt;
use std::str::FromStr;
use str::{HTML_SPACE_CHARACTERS, read_exponent, read_fraction};
use str::{read_numbers, split_commas, split_html_space_chars};
use str::str_join;
use values::specified::Length;
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
/// A parsed dimension attribute value: an absolute length, a percentage,
/// or `Auto` (also used as the fallback for unparseable input).
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum LengthOrPercentageOrAuto {
    /// No usable value; lay out as `auto`.
    Auto,
    /// A percentage, stored as the parsed number divided by 100.
    Percentage(f32),
    /// An absolute length in app units.
    Length(Au),
}
/// A parsed DOM attribute value. Every variant keeps the original serialized
/// string (see the `Deref<Target = str>` impl), usually alongside a
/// type-specific parsed representation.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum AttrValue {
    /// A plain string value.
    String(String),
    /// The serialized list plus its parsed, deduplicated tokens.
    TokenList(String, Vec<Atom>),
    /// The original string plus its value parsed as an unsigned integer.
    UInt(String, u32),
    /// The original string plus its value parsed as a signed integer.
    Int(String, i32),
    /// The original string plus its value parsed as a double.
    Double(String, f64),
    /// An atomized (interned) string value.
    Atom(Atom),
    /// The original string plus its parsed length, if parsing succeeded.
    Length(String, Option<Length>),
    /// The original string plus its parsed legacy color, if parsing succeeded.
    Color(String, Option<RGBA>),
    /// The original string plus its parsed dimension value.
    Dimension(String, LengthOrPercentageOrAuto),
    /// Stores a URL, computed from the input string and a document's base URL.
    ///
    /// The URL is resolved at setting-time, so this kind of attribute value is
    /// not actually suitable for most URL-reflecting IDL attributes.
    ResolvedUrl(String, Option<ServoUrl>),
    /// Note that this variant is only used transitively as a fast path to set
    /// the property declaration block relevant to the style of an element when
    /// set from the inline declaration of that element (that is,
    /// `element.style`).
    ///
    /// This can, as of this writing, only correspond to the value of the
    /// `style` element, and is set from its relevant CSSInlineStyleDeclaration,
    /// and then converted to a string in Element::attribute_mutated.
    ///
    /// Note that we don't necessarily need to do that (we could just clone the
    /// declaration block), but that avoids keeping a refcounted
    /// declarationblock for longer than needed.
    Declaration(String,
                #[ignore_malloc_size_of = "Arc"]
                Arc<Locked<PropertyDeclarationBlock>>)
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i64, ()> {
    // Skip leading HTML whitespace, then peek at the first meaningful char.
    let mut chars = input
        .skip_while(|c| HTML_SPACE_CHARACTERS.iter().any(|s| s == c))
        .peekable();
    // Determine the sign, consuming an explicit sign character if present.
    let sign: i64 = match chars.peek() {
        None => return Err(()),
        Some(&'+') => {
            chars.next();
            1
        },
        Some(&'-') => {
            chars.next();
            -1
        },
        _ => 1,
    };
    // Read the digit run; overflow during accumulation or sign application
    // is a parse error.
    let (magnitude, _) = read_numbers(chars);
    match magnitude.and_then(|m| m.checked_mul(sign)) {
        Some(n) => Ok(n),
        None => Err(()),
    }
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i32, ()> {
    // Parse as i64, then reject values outside the i32 range.
    let wide = do_parse_integer(input)?;
    wide.to_i32().ok_or(())
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Result<u32, ()> {
    // Parse as i64, then reject negatives and values outside the u32 range.
    let wide = do_parse_integer(input)?;
    wide.to_u32().ok_or(())
}
/// Parse a floating-point number according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-floating-point-number-values>
pub fn parse_double(string: &str) -> Result<f64, ()> {
    let trimmed = string.trim_matches(HTML_SPACE_CHARACTERS);
    let mut input = trimmed.chars().peekable();
    // Consume an optional sign. `value`/`divisor` carry the sign into the
    // integer and fractional parts; `chars_skipped` counts the sign character
    // so the later passes over `trimmed` can resume at the right offset.
    let (value, divisor, chars_skipped) = match input.peek() {
        None => return Err(()),
        Some(&'-') => {
            input.next();
            (-1f64, -1f64, 1)
        }
        Some(&'+') => {
            input.next();
            (1f64, 1f64, 1)
        }
        _ => (1f64, 1f64, 0)
    };
    // Integer part; a number that starts with '.' has integer part 0.
    let (value, value_digits) = if let Some(&'.') = input.peek() {
        (0f64, 0)
    } else {
        let (read_val, read_digits) = read_numbers(input);
        (value * read_val.and_then(|result| result.to_f64()).unwrap_or(1f64), read_digits)
    };
    // Fractional part: re-scan `trimmed` from just past what was consumed.
    let input = trimmed.chars().skip(value_digits + chars_skipped).peekable();
    let (mut value, fraction_digits) = read_fraction(input, divisor, value);
    // Exponent part: re-scan again from just past the fraction.
    let input = trimmed.chars().skip(value_digits + chars_skipped + fraction_digits).peekable();
    if let Some(exp) = read_exponent(input) {
        value *= 10f64.powi(exp)
    };
    Ok(value)
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: String) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_comma_separated_tokenlist(tokens: String) -> AttrValue {
let atoms = split_commas(&tokens).map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): effecient conversion of Vec<Atom> to String
let tokens = String::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: String, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_i32(string: String, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
AttrValue::Int(string, result)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-double
pub fn from_double(string: String, default: f64) -> AttrValue {
let result = parse_double(&string).unwrap_or(default);
if result.is_normal() {
AttrValue::Double(string, result)
} else {
AttrValue::Double(string, default)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers
pub fn from_limited_i32(string: String, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
if result < 0 {
AttrValue::Int(string, default)
} else {
AttrValue::Int(string, result)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: String, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: String) -> AttrValue {
let value = Atom::from(string);
AttrValue::Atom(value)
}
pub fn from_resolved_url(base: &ServoUrl, url: String) -> AttrValue {
let joined = base.join(&url).ok();
AttrValue::ResolvedUrl(url, joined)
}
pub fn from_legacy_color(string: String) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: String) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
pub fn from_nonzero_dimension(string: String) -> AttrValue {
let parsed = parse_nonzero_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length> {
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn
|
(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Assumes the `AttrValue` is a `ResolvedUrl` and returns its value.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `ResolvedUrl`
pub fn as_resolved_url(&self) -> Option<&ServoUrl> {
match *self {
AttrValue::ResolvedUrl(_, ref url) => url.as_ref(),
_ => panic!("Url not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
/// Return the AttrValue as a dimension computed from its integer
/// representation, assuming that integer representation specifies pixels.
///
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint_px_dimension(&self) -> LengthOrPercentageOrAuto {
if let AttrValue::UInt(_, value) = *self {
LengthOrPercentageOrAuto::Length(Au::from_px(value as i32))
} else {
panic!("Uint not found");
}
}
pub fn eval_selector(&self, selector: &AttrSelectorOperation<&String>) -> bool {
// FIXME(SimonSapin) this can be more efficient by matching on `(self, selector)` variants
// and doing Atom comparisons instead of string comparisons where possible,
// with SelectorImpl::AttrValue changed to Atom.
selector.eval_str(self)
}
}
impl ::std::ops::Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Double(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Int(ref value, _) |
AttrValue::ResolvedUrl(ref value, _) |
AttrValue::Declaration(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
impl PartialEq<Atom> for AttrValue {
fn eq(&self, other: &Atom) -> bool {
match *self {
AttrValue::Atom(ref value) => value == other,
_ => other == &**self,
}
}
}
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-zero-dimension-values>
pub fn parse_nonzero_length(value: &str) -> LengthOrPercentageOrAuto {
match parse_length(value) {
LengthOrPercentageOrAuto::Length(x) if x == Au::zero() => LengthOrPercentageOrAuto::Auto,
LengthOrPercentageOrAuto::Percentage(x) if x == 0. => LengthOrPercentageOrAuto::Auto,
x => x,
}
}
/// Parses a [legacy color][color]. If unparseable, `Err` is returned.
///
/// [color]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-colour-value
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
    // Steps 1 and 2.
    if input.is_empty() {
        return Err(())
    }
    // Step 3.
    input = input.trim_matches(HTML_SPACE_CHARACTERS);
    // A whitespace-only string trims down to "", which would otherwise panic
    // at the `input.as_bytes()[0]` indexing in step 9; report it as an error.
    if input.is_empty() {
        return Err(())
    }
    // Step 4.
    if input.eq_ignore_ascii_case("transparent") {
        return Err(())
    }
    // Step 5: named CSS colors.
    if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) {
        return Ok(rgba);
    }
    // Step 6: "#rgb" shorthand; each hex digit is doubled (x * 17 == 0xXX).
    if input.len() == 4 {
        if let (b'#', Ok(r), Ok(g), Ok(b)) =
                (input.as_bytes()[0],
                 hex(input.as_bytes()[1] as char),
                 hex(input.as_bytes()[2] as char),
                 hex(input.as_bytes()[3] as char)) {
            return Ok(RGBA::new(r * 17, g * 17, b * 17, 255))
        }
    }
    // Step 7: replace characters outside the BMP with "00".
    let mut new_input = String::new();
    for ch in input.chars() {
        if ch as u32 > 0xffff {
            new_input.push_str("00")
        } else {
            new_input.push(ch)
        }
    }
    let mut input = &*new_input;
    // Step 8: truncate to at most 128 characters.
    for (char_count, (index, _)) in input.char_indices().enumerate() {
        if char_count == 128 {
            input = &input[..index];
            break
        }
    }
    // Step 9: drop a leading '#'.
    if input.as_bytes()[0] == b'#' {
        input = &input[1..]
    }
    // Step 10: replace every non-hex character with '0'.
    let mut new_input = Vec::new();
    for ch in input.chars() {
        if hex(ch).is_ok() {
            new_input.push(ch as u8)
        } else {
            new_input.push(b'0')
        }
    }
    let mut input = new_input;
    // Step 11: zero-pad to a non-empty length divisible by three.
    while input.is_empty() || (input.len() % 3) != 0 {
        input.push(b'0')
    }
    // Step 12: split into three equal-length components.
    let mut length = input.len() / 3;
    let (mut red, mut green, mut blue) = (&input[..length],
                                          &input[length..length * 2],
                                          &input[length * 2..]);
    // Step 13: keep only the rightmost 8 digits of each component.
    if length > 8 {
        red = &red[length - 8..];
        green = &green[length - 8..];
        blue = &blue[length - 8..];
        length = 8
    }
    // Step 14: strip leading zeros common to all three components, down to
    // two digits each.
    while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
        red = &red[1..];
        green = &green[1..];
        blue = &blue[1..];
        length -= 1
    }
    // Steps 15-20: take the first (up to) two hex digits of each component.
    // `unwrap()` cannot fail: step 10 guaranteed hex digits and step 11
    // guaranteed the components are non-empty.
    return Ok(RGBA::new(hex_string(red).unwrap(),
                        hex_string(green).unwrap(),
                        hex_string(blue).unwrap(),
                        255));

    // Parses one ASCII hex digit.
    fn hex(ch: char) -> Result<u8, ()> {
        match ch {
            '0'...'9' => Ok((ch as u8) - b'0'),
            'a'...'f' => Ok((ch as u8) - b'a' + 10),
            'A'...'F' => Ok((ch as u8) - b'A' + 10),
            _ => Err(()),
        }
    }
    // Parses the first one or two hex digits of a component into a byte.
    fn hex_string(string: &[u8]) -> Result<u8, ()> {
        match string.len() {
            0 => Err(()),
            1 => hex(string[0] as char),
            _ => {
                let upper = hex(string[0] as char)?;
                let lower = hex(string[1] as char)?;
                Ok((upper << 4) | lower)
            }
        }
    }
}
/// Parses a [dimension value][dim]. If unparseable, `Auto` is returned.
///
/// [dim]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-dimension-values
// TODO: this function can be rewritten to return Result<LengthOrPercentage, _>
pub fn parse_length(mut value: &str) -> LengthOrPercentageOrAuto {
    // Steps 1 & 2 are not relevant
    // Step 3
    value = value.trim_left_matches(HTML_SPACE_CHARACTERS);
    // Step 4
    if value.is_empty() {
        return LengthOrPercentageOrAuto::Auto
    }
    // Step 5: an explicit leading '+' is allowed and ignored.
    if value.starts_with('+') {
        value = &value[1..]
    }
    // Steps 6 & 7: the first character must be an ASCII digit.
    // (`chars().next()` is the idiomatic form of `chars().nth(0)`.)
    match value.chars().next() {
        Some('0'...'9') => {},
        _ => return LengthOrPercentageOrAuto::Auto,
    }
    // Steps 8 to 13
    // We trim the string length to the minimum of:
    // 1. the end of the string
    // 2. the first occurrence of a '%' (U+0025 PERCENT SIGN)
    // 3. the second occurrence of a '.' (U+002E FULL STOP)
    // 4. the occurrence of a character that is neither a digit nor '%' nor '.'
    // Note: Step 10 is directly subsumed by FromStr::from_str
    let mut end_index = value.len();
    let (mut found_full_stop, mut found_percent) = (false, false);
    for (i, ch) in value.chars().enumerate() {
        match ch {
            '0'...'9' => continue,
            '%' => {
                found_percent = true;
                end_index = i;
                break
            }
            '.' if !found_full_stop => {
                found_full_stop = true;
                continue
            }
            _ => {
                // `i` is a char count, but every preceding char is ASCII
                // (digits and '.'), so it is also a valid byte index here.
                end_index = i;
                break
            }
        }
    }
    value = &value[..end_index];
    // Percentages are stored as a fraction (the parsed number divided by 100).
    if found_percent {
        let result: Result<f32, _> = FromStr::from_str(value);
        match result {
            Ok(number) => return LengthOrPercentageOrAuto::Percentage(number / 100.0),
            Err(_) => return LengthOrPercentageOrAuto::Auto,
        }
    }
    match FromStr::from_str(value) {
        Ok(number) => LengthOrPercentageOrAuto::Length(Au::from_f64_px(number)),
        Err(_) => LengthOrPercentageOrAuto::Auto,
    }
}
/// A struct that uniquely identifies an element's attribute.
|
as_dimension
|
identifier_name
|
attr.rs
|
not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Parsed representations of [DOM attributes][attr].
//!
//! [attr]: https://dom.spec.whatwg.org/#interface-attr
use {Atom, Prefix, Namespace, LocalName};
use app_units::Au;
use cssparser::{self, Color, RGBA};
use euclid::num::Zero;
use num_traits::ToPrimitive;
use properties::PropertyDeclarationBlock;
use selectors::attr::AttrSelectorOperation;
use servo_arc::Arc;
use servo_url::ServoUrl;
use shared_lock::Locked;
use std::ascii::AsciiExt;
use std::str::FromStr;
use str::{HTML_SPACE_CHARACTERS, read_exponent, read_fraction};
use str::{read_numbers, split_commas, split_html_space_chars};
use str::str_join;
use values::specified::Length;
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum LengthOrPercentageOrAuto {
Auto,
Percentage(f32),
Length(Au),
}
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub enum AttrValue {
String(String),
TokenList(String, Vec<Atom>),
UInt(String, u32),
Int(String, i32),
Double(String, f64),
Atom(Atom),
Length(String, Option<Length>),
Color(String, Option<RGBA>),
Dimension(String, LengthOrPercentageOrAuto),
/// Stores a URL, computed from the input string and a document's base URL.
///
/// The URL is resolved at setting-time, so this kind of attribute value is
/// not actually suitable for most URL-reflecting IDL attributes.
ResolvedUrl(String, Option<ServoUrl>),
/// Note that this variant is only used transitively as a fast path to set
/// the property declaration block relevant to the style of an element when
/// set from the inline declaration of that element (that is,
/// `element.style`).
///
/// This can, as of this writing, only correspond to the value of the
/// `style` element, and is set from its relevant CSSInlineStyleDeclaration,
/// and then converted to a string in Element::attribute_mutated.
///
/// Note that we don't necessarily need to do that (we could just clone the
/// declaration block), but that avoids keeping a refcounted
/// declarationblock for longer than needed.
Declaration(String,
#[ignore_malloc_size_of = "Arc"]
Arc<Locked<PropertyDeclarationBlock>>)
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i64, ()> {
let mut input = input.skip_while(|c| {
HTML_SPACE_CHARACTERS.iter().any(|s| s == c)
}).peekable();
let sign = match input.peek() {
None => return Err(()),
Some(&'-') => {
input.next();
-1
},
Some(&'+') => {
input.next();
1
},
Some(_) => 1,
};
let (value, _) = read_numbers(input);
value.and_then(|value| value.checked_mul(sign)).ok_or(())
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Result<i32, ()> {
do_parse_integer(input).and_then(|result| {
result.to_i32().ok_or(())
})
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Result<u32, ()> {
do_parse_integer(input).and_then(|result| {
result.to_u32().ok_or(())
})
}
/// Parse a floating-point number according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-floating-point-number-values>
pub fn parse_double(string: &str) -> Result<f64, ()> {
let trimmed = string.trim_matches(HTML_SPACE_CHARACTERS);
let mut input = trimmed.chars().peekable();
let (value, divisor, chars_skipped) = match input.peek() {
None => return Err(()),
Some(&'-') => {
input.next();
(-1f64, -1f64, 1)
}
Some(&'+') => {
input.next();
(1f64, 1f64, 1)
}
_ => (1f64, 1f64, 0)
};
let (value, value_digits) = if let Some(&'.') = input.peek() {
(0f64, 0)
} else
|
;
let input = trimmed.chars().skip(value_digits + chars_skipped).peekable();
let (mut value, fraction_digits) = read_fraction(input, divisor, value);
let input = trimmed.chars().skip(value_digits + chars_skipped + fraction_digits).peekable();
if let Some(exp) = read_exponent(input) {
value *= 10f64.powi(exp)
};
Ok(value)
}
impl AttrValue {
    /// Builds a `TokenList` from a space-separated serialization, splitting
    /// on HTML space characters and dropping duplicate tokens.
    pub fn from_serialized_tokenlist(tokens: String) -> AttrValue {
        let atoms =
            split_html_space_chars(&tokens)
                .map(Atom::from)
                .fold(vec![], |mut acc, atom| {
                    if !acc.contains(&atom) { acc.push(atom) }
                    acc
                });
        AttrValue::TokenList(tokens, atoms)
    }
    /// Builds a `TokenList` from a comma-separated serialization, dropping
    /// duplicate tokens.
    pub fn from_comma_separated_tokenlist(tokens: String) -> AttrValue {
        let atoms = split_commas(&tokens)
            .map(Atom::from)
            .fold(vec![], |mut acc, atom| {
                if !acc.contains(&atom) { acc.push(atom) }
                acc
            });
        AttrValue::TokenList(tokens, atoms)
    }
    /// Builds a `TokenList` directly from already-atomized tokens; the stored
    /// string form is their space-joined serialization.
    pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
        // TODO(ajeffrey): effecient conversion of Vec<Atom> to String
        let tokens = String::from(str_join(&atoms, "\x20"));
        AttrValue::TokenList(tokens, atoms)
    }
    /// Parses an unsigned long, substituting `default` when parsing fails or
    /// the result exceeds `UNSIGNED_LONG_MAX`.
    // https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
    pub fn from_u32(string: String, default: u32) -> AttrValue {
        let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
        let result = if result > UNSIGNED_LONG_MAX {
            default
        } else {
            result
        };
        AttrValue::UInt(string, result)
    }
    /// Parses a signed long, substituting `default` when parsing fails.
    pub fn from_i32(string: String, default: i32) -> AttrValue {
        let result = parse_integer(string.chars()).unwrap_or(default);
        AttrValue::Int(string, result)
    }
    /// Parses a double, substituting `default` when parsing fails or the
    /// parsed value is not a "normal" float.
    /// NOTE(review): `is_normal()` is also false for zero and subnormals, so
    /// e.g. "0" falls back to `default` — confirm this is intended.
    // https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-double
    pub fn from_double(string: String, default: f64) -> AttrValue {
        let result = parse_double(&string).unwrap_or(default);
        if result.is_normal() {
            AttrValue::Double(string, result)
        } else {
            AttrValue::Double(string, default)
        }
    }
    /// Parses a signed long limited to non-negative values; negative results
    /// fall back to `default`.
    // https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers
    pub fn from_limited_i32(string: String, default: i32) -> AttrValue {
        let result = parse_integer(string.chars()).unwrap_or(default);
        if result < 0 {
            AttrValue::Int(string, default)
        } else {
            AttrValue::Int(string, result)
        }
    }
    /// Parses an unsigned long limited to `1..=UNSIGNED_LONG_MAX`; zero and
    /// out-of-range results fall back to `default`.
    // https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
    pub fn from_limited_u32(string: String, default: u32) -> AttrValue {
        let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
        let result = if result == 0 || result > UNSIGNED_LONG_MAX {
            default
        } else {
            result
        };
        AttrValue::UInt(string, result)
    }
    /// Atomizes (interns) the string.
    pub fn from_atomic(string: String) -> AttrValue {
        let value = Atom::from(string);
        AttrValue::Atom(value)
    }
    /// Resolves `url` against `base` at setting-time; `None` is stored when
    /// joining fails.
    pub fn from_resolved_url(base: &ServoUrl, url: String) -> AttrValue {
        let joined = base.join(&url).ok();
        AttrValue::ResolvedUrl(url, joined)
    }
    /// Parses the string as a legacy color; `None` is stored on failure.
    pub fn from_legacy_color(string: String) -> AttrValue {
        let parsed = parse_legacy_color(&string).ok();
        AttrValue::Color(string, parsed)
    }
    /// Parses the string as a dimension value (unparseable input yields
    /// `Auto`).
    pub fn from_dimension(string: String) -> AttrValue {
        let parsed = parse_length(&string);
        AttrValue::Dimension(string, parsed)
    }
    /// Parses the string as a dimension value, mapping a zero result to
    /// `Auto`.
    pub fn from_nonzero_dimension(string: String) -> AttrValue {
        let parsed = parse_nonzero_length(&string);
        AttrValue::Dimension(string, parsed)
    }
    /// Assumes the `AttrValue` is a `TokenList` and returns its tokens
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not a `TokenList`
    pub fn as_tokens(&self) -> &[Atom] {
        match *self {
            AttrValue::TokenList(_, ref tokens) => tokens,
            _ => panic!("Tokens not found"),
        }
    }
    /// Assumes the `AttrValue` is an `Atom` and returns its value
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not an `Atom`
    pub fn as_atom(&self) -> &Atom {
        match *self {
            AttrValue::Atom(ref value) => value,
            _ => panic!("Atom not found"),
        }
    }
    /// Assumes the `AttrValue` is a `Color` and returns its value
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not a `Color`
    pub fn as_color(&self) -> Option<&RGBA> {
        match *self {
            AttrValue::Color(_, ref color) => color.as_ref(),
            _ => panic!("Color not found"),
        }
    }
    /// Assumes the `AttrValue` is a `Length` and returns its value
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not a `Length`
    pub fn as_length(&self) -> Option<&Length> {
        match *self {
            AttrValue::Length(_, ref length) => length.as_ref(),
            _ => panic!("Length not found"),
        }
    }
    /// Assumes the `AttrValue` is a `Dimension` and returns its value
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not a `Dimension`
    pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
        match *self {
            AttrValue::Dimension(_, ref l) => l,
            _ => panic!("Dimension not found"),
        }
    }
    /// Assumes the `AttrValue` is a `ResolvedUrl` and returns its value.
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not a `ResolvedUrl`
    pub fn as_resolved_url(&self) -> Option<&ServoUrl> {
        match *self {
            AttrValue::ResolvedUrl(_, ref url) => url.as_ref(),
            _ => panic!("Url not found"),
        }
    }
    /// Return the AttrValue as its integer representation, if any.
    /// This corresponds to attribute values returned as `AttrValue::UInt(_)`
    /// by `VirtualMethods::parse_plain_attribute()`.
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not a `UInt`
    pub fn as_uint(&self) -> u32 {
        if let AttrValue::UInt(_, value) = *self {
            value
        } else {
            panic!("Uint not found");
        }
    }
    /// Return the AttrValue as a dimension computed from its integer
    /// representation, assuming that integer representation specifies pixels.
    ///
    /// This corresponds to attribute values returned as `AttrValue::UInt(_)`
    /// by `VirtualMethods::parse_plain_attribute()`.
    ///
    /// ## Panics
    ///
    /// Panics if the `AttrValue` is not a `UInt`
    pub fn as_uint_px_dimension(&self) -> LengthOrPercentageOrAuto {
        if let AttrValue::UInt(_, value) = *self {
            LengthOrPercentageOrAuto::Length(Au::from_px(value as i32))
        } else {
            panic!("Uint not found");
        }
    }
    /// Evaluates an attribute-selector operation against this value's string
    /// form.
    pub fn eval_selector(&self, selector: &AttrSelectorOperation<&String>) -> bool {
        // FIXME(SimonSapin) this can be more efficient by matching on `(self, selector)` variants
        // and doing Atom comparisons instead of string comparisons where possible,
        // with SelectorImpl::AttrValue changed to Atom.
        selector.eval_str(self)
    }
}
impl ::std::ops::Deref for AttrValue {
    type Target = str;
    /// Every variant keeps its original serialized text, so an `AttrValue`
    /// can always be viewed as the `&str` it was set from (for `Atom`, the
    /// atom's string contents).
    fn deref(&self) -> &str {
        match *self {
            AttrValue::String(ref value) |
                AttrValue::TokenList(ref value, _) |
                AttrValue::UInt(ref value, _) |
                AttrValue::Double(ref value, _) |
                AttrValue::Length(ref value, _) |
                AttrValue::Color(ref value, _) |
                AttrValue::Int(ref value, _) |
                AttrValue::ResolvedUrl(ref value, _) |
                AttrValue::Declaration(ref value, _) |
                AttrValue::Dimension(ref value, _) => &value,
            AttrValue::Atom(ref value) => &value,
        }
    }
}
impl PartialEq<Atom> for AttrValue {
    fn eq(&self, other: &Atom) -> bool {
        // Compare atom-to-atom when possible; otherwise fall back to
        // comparing the atom against this value's string form.
        if let AttrValue::Atom(ref value) = *self {
            value == other
        } else {
            other == &**self
        }
    }
}
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-zero-dimension-values>
pub fn parse_nonzero_length(value: &str) -> LengthOrPercentageOrAuto {
match parse_length(value) {
LengthOrPercentageOrAuto::Length(x) if x == Au::zero() => LengthOrPercentageOrAuto::Auto,
LengthOrPercentageOrAuto::Percentage(x) if x == 0. => LengthOrPercentageOrAuto::Auto,
x => x,
}
}
/// Parses a [legacy color][color]. If unparseable, `Err` is returned.
///
/// Implements the WHATWG algorithm step by step; the "Step N" comments below
/// refer to the numbered steps of the specification.
///
/// [color]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-colour-value
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
    // Steps 1 and 2: the empty string is an error.
    if input.is_empty() {
        return Err(())
    }
    // Step 3: strip leading and trailing ASCII whitespace.
    input = input.trim_matches(HTML_SPACE_CHARACTERS)
    ;
    // Step 4: the keyword "transparent" (ASCII case-insensitive) is an error.
    if input.eq_ignore_ascii_case("transparent") {
        return Err(())
    }
    // Step 5: named CSS color keywords resolve directly.
    if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) {
        return Ok(rgba);
    }
    // Step 6: shorthand "#rgb" — each hex digit is doubled (d * 17 == 0xdd).
    if input.len() == 4 {
        if let (b'#', Ok(r), Ok(g), Ok(b)) =
            (input.as_bytes()[0],
             hex(input.as_bytes()[1] as char),
             hex(input.as_bytes()[2] as char),
             hex(input.as_bytes()[3] as char)) {
            return Ok(RGBA::new(r * 17, g * 17, b * 17, 255))
        }
    }
    // Step 7: replace any character above U+FFFF with "00".
    let mut new_input = String::new();
    for ch in input.chars() {
        if ch as u32 > 0xffff {
            new_input.push_str("00")
        } else {
            new_input.push(ch)
        }
    }
    let mut input = &*new_input;
    // Step 8: truncate to at most 128 characters (byte index of the 129th
    // char keeps the slice on a char boundary).
    for (char_count, (index, _)) in input.char_indices().enumerate() {
        if char_count == 128 {
            input = &input[..index];
            break
        }
    }
    // Step 9: drop a leading '#', if present.
    if input.as_bytes()[0] == b'#' {
        input = &input[1..]
    }
    // Step 10: replace every non-hex-digit character with '0'.
    let mut new_input = Vec::new();
    for ch in input.chars() {
        if hex(ch).is_ok() {
            new_input.push(ch as u8)
        } else {
            new_input.push(b'0')
        }
    }
    let mut input = new_input;
    // Step 11: zero-pad until the length is a non-zero multiple of 3.
    while input.is_empty() || (input.len() % 3)!= 0 {
        input.push(b'0')
    }
    // Step 12: split into three equal-length components (red, green, blue).
    let mut length = input.len() / 3;
    let (mut red, mut green, mut blue) = (&input[..length],
                                          &input[length..length * 2],
                                          &input[length * 2..]);
    // Step 13: keep only the last 8 digits of each component.
    if length > 8 {
        red = &red[length - 8..];
        green = &green[length - 8..];
        blue = &blue[length - 8..];
        length = 8
    }
    // Step 14: strip leading zeros shared by all three components, but keep
    // at least 2 digits per component.
    while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
        red = &red[1..];
        green = &green[1..];
        blue = &blue[1..];
        length -= 1
    }
    // Steps 15-20: the first two remaining digits of each component form the
    // final 8-bit channel value; alpha is fully opaque.
    return Ok(RGBA::new(hex_string(red).unwrap(),
                        hex_string(green).unwrap(),
                        hex_string(blue).unwrap(),
                        255));

    // Converts a single ASCII hex digit to its numeric value (0-15).
    fn hex(ch: char) -> Result<u8, ()> {
        match ch {
            '0'...'9' => Ok((ch as u8) - b'0'),
            'a'...'f' => Ok((ch as u8) - b'a' + 10),
            'A'...'F' => Ok((ch as u8) - b'A' + 10),
            _ => Err(()),
        }
    }

    // Parses the first one or two bytes of `string` as a hex byte; a single
    // digit is its own value ("a" == 10, not 0xa0).
    fn hex_string(string: &[u8]) -> Result<u8, ()> {
        match string.len() {
            0 => Err(()),
            1 => hex(string[0] as char),
            _ => {
                let upper = hex(string[0] as char)?;
                let lower = hex(string[1] as char)?;
                Ok((upper << 4) | lower)
            }
        }
    }
}
/// Parses a [dimension value][dim]. If unparseable, `Auto` is returned.
///
/// Returns `Percentage` (as a fraction in `[0, 1]`) when the numeric part is
/// followed by '%', otherwise a pixel `Length`.
///
/// [dim]: https://html.spec.whatwg.org/multipage/#rules-for-parsing-dimension-values
// TODO: this function can be rewritten to return Result<LengthOrPercentage, _>
pub fn parse_length(mut value: &str) -> LengthOrPercentageOrAuto {
    // Steps 1 & 2 are not relevant
    // Step 3: skip leading ASCII whitespace.
    value = value.trim_left_matches(HTML_SPACE_CHARACTERS);
    // Step 4: the empty string parses to Auto.
    if value.is_empty() {
        return LengthOrPercentageOrAuto::Auto
    }
    // Step 5: an optional leading '+' is ignored.
    if value.starts_with('+') {
        value = &value[1..]
    }
    // Steps 6 & 7: the first character must be an ASCII digit.
    match value.chars().next() {
        Some('0'...'9') => {},
        _ => return LengthOrPercentageOrAuto::Auto,
    }
    // Steps 8 to 13
    // We trim the string length to the minimum of:
    // 1. the end of the string
    // 2. the first occurrence of a '%' (U+0025 PERCENT SIGN)
    // 3. the second occurrence of a '.' (U+002E FULL STOP)
    // 4. the occurrence of a character that is neither a digit nor '%' nor '.'
    // Note: Step 10 is directly subsumed by FromStr::from_str
    // (All characters before `end_index` are ASCII, so the char index used
    // below is also a valid byte index for slicing.)
    let mut end_index = value.len();
    let (mut found_full_stop, mut found_percent) = (false, false);
    for (i, ch) in value.chars().enumerate() {
        match ch {
            '0'...'9' => continue,
            '%' => {
                found_percent = true;
                end_index = i;
                break
            }
            // A single '.' is part of the number; a second one ends it via
            // the catch-all arm below.
            '.' if !found_full_stop => {
                found_full_stop = true;
                continue
            }
            _ => {
                end_index = i;
                break
            }
        }
    }
    value = &value[..end_index];
    if found_percent {
        // `result` is already f32, so no further cast is needed.
        let result: Result<f32, _> = FromStr::from_str(value);
        match result {
            Ok(number) => return LengthOrPercentageOrAuto::Percentage(number / 100.0),
            Err(_) => return LengthOrPercentageOrAuto::Auto,
        }
    }
    match FromStr::from_str(value) {
        Ok(number) => LengthOrPercentageOrAuto::Length(Au::from_f64_px(number)),
        Err(_) => LengthOrPercentageOrAuto::Auto,
    }
}
/// A struct that uniquely identifies an element's attribute.
|
{
let (read_val, read_digits) = read_numbers(input);
(value * read_val.and_then(|result| result.to_f64()).unwrap_or(1f64), read_digits)
}
|
conditional_block
|
mod.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Experimental extensions to `std` for Unix platforms.
//!
//! For now, this module is limited to extracting file descriptors,
//! but its functionality will grow over time.
//!
//! # Example
//!
//! ```no_run
//! use std::fs::File;
//! use std::os::unix::prelude::*;
//!
//! fn main() {
//! let f = File::create("foo.txt").unwrap();
//! let fd = f.as_raw_fd();
//!
//! // use fd with native unix bindings
//! }
//! ```
#![stable(feature = "rust1", since = "1.0.0")]
pub mod io;
pub mod ffi;
pub mod fs;
pub mod process;
pub mod raw;
/// A prelude for conveniently writing platform-specific code.
|
pub mod prelude {
    // Raw file-descriptor type and conversion traits.
    #[doc(no_inline)]
    pub use super::io::{RawFd, AsRawFd, FromRawFd};
    // Byte-oriented extensions for OsStr/OsString.
    #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")]
    pub use super::ffi::{OsStrExt, OsStringExt};
    // Unix-specific filesystem metadata and open-options extensions.
    #[doc(no_inline)]
    pub use super::fs::{PermissionsExt, OpenOptionsExt, MetadataExt, FileTypeExt};
    #[doc(no_inline)]
    pub use super::fs::{DirEntryExt};
    // Process spawning and exit-status extensions.
    #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")]
    pub use super::process::{CommandExt, ExitStatusExt};
}
|
///
/// Includes all extension traits, and some important type definitions.
#[stable(feature = "rust1", since = "1.0.0")]
|
random_line_split
|
winefs.rs
|
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
use shared::basetsd::SIZE_T;
use shared::minwindef::{BOOL, DWORD, LPVOID, PBYTE, PDWORD, ULONG};
use um::minwinbase::SECURITY_ATTRIBUTES;
use um::wincrypt::ALG_ID;
use um::winnt::{LPCWSTR, LPWSTR, SID};
pub const WINEFS_SETUSERKEY_SET_CAPABILITIES: DWORD = 0x00000001;
STRUCT!{struct EFS_CERTIFICATE_BLOB {
dwCertEncodingType: DWORD,
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_CERTIFICATE_BLOB = *mut EFS_CERTIFICATE_BLOB;
STRUCT!{struct EFS_HASH_BLOB {
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_HASH_BLOB = *mut EFS_HASH_BLOB;
STRUCT!{struct EFS_RPC_BLOB {
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_RPC_BLOB = *mut EFS_RPC_BLOB;
STRUCT!{struct EFS_PIN_BLOB {
cbPadding: DWORD,
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_PIN_BLOB = *mut EFS_PIN_BLOB;
STRUCT!{struct EFS_KEY_INFO {
dwVersion: DWORD,
Entropy: ULONG,
Algorithm: ALG_ID,
KeyLength: ULONG,
}}
pub type PEFS_KEY_INFO = *mut EFS_KEY_INFO;
STRUCT!{struct EFS_COMPATIBILITY_INFO {
EfsVersion: DWORD,
}}
pub type PEFS_COMPATIBILITY_INFO = *mut EFS_COMPATIBILITY_INFO;
pub const EFS_COMPATIBILITY_VERSION_NCRYPT_PROTECTOR: DWORD = 5;
pub const EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR: DWORD = 6;
#[inline]
pub fn EFS_IS_DESCRIPTOR_VERSION(v: DWORD) -> bool {
v == EFS_COMPATIBILITY_VERSION_NCRYPT_PROTECTOR
|| v == EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR
}
pub const EFS_SUBVER_UNKNOWN: DWORD = 0;
pub const EFS_EFS_SUBVER_EFS_CERT: DWORD = 1;
pub const EFS_PFILE_SUBVER_RMS: DWORD = 2;
pub const EFS_PFILE_SUBVER_APPX: DWORD = 3;
STRUCT!{struct EFS_VERSION_INFO {
EfsVersion: DWORD,
SubVersion: DWORD,
}}
pub type PEFS_VERSION_INFO = *mut EFS_VERSION_INFO;
#[inline]
pub fn EFS_IS_APPX_VERSION(v: DWORD, subV: DWORD) -> bool
|
STRUCT!{struct EFS_DECRYPTION_STATUS_INFO {
dwDecryptionError: DWORD,
dwHashOffset: DWORD,
cbHash: DWORD,
}}
pub type PEFS_DECRYPTION_STATUS_INFO = *mut EFS_DECRYPTION_STATUS_INFO;
STRUCT!{struct EFS_ENCRYPTION_STATUS_INFO {
bHasCurrentKey: BOOL,
dwEncryptionError: DWORD,
}}
pub type PEFS_ENCRYPTION_STATUS_INFO = *mut EFS_ENCRYPTION_STATUS_INFO;
STRUCT!{struct ENCRYPTION_CERTIFICATE {
cbTotalLength: DWORD,
pUserSid: *mut SID,
pCertBlob: PEFS_CERTIFICATE_BLOB,
}}
pub type PENCRYPTION_CERTIFICATE = *mut ENCRYPTION_CERTIFICATE;
pub const MAX_SID_SIZE: SIZE_T = 256;
STRUCT!{struct ENCRYPTION_CERTIFICATE_HASH {
cbTotalLength: DWORD,
pUserSid: *mut SID,
pHash: PEFS_HASH_BLOB,
lpDisplayInformation: LPWSTR,
}}
pub type PENCRYPTION_CERTIFICATE_HASH = *mut ENCRYPTION_CERTIFICATE_HASH;
STRUCT!{struct ENCRYPTION_CERTIFICATE_HASH_LIST {
nCert_Hash: DWORD,
pUsers: *mut PENCRYPTION_CERTIFICATE_HASH,
}}
pub type PENCRYPTION_CERTIFICATE_HASH_LIST = *mut ENCRYPTION_CERTIFICATE_HASH_LIST;
STRUCT!{struct ENCRYPTION_CERTIFICATE_LIST {
nUsers: DWORD,
pUsers: *mut PENCRYPTION_CERTIFICATE,
}}
pub type PENCRYPTION_CERTIFICATE_LIST = *mut ENCRYPTION_CERTIFICATE_LIST;
pub const EFS_METADATA_ADD_USER: DWORD = 0x00000001;
pub const EFS_METADATA_REMOVE_USER: DWORD = 0x00000002;
pub const EFS_METADATA_REPLACE_USER: DWORD = 0x00000004;
pub const EFS_METADATA_GENERAL_OP: DWORD = 0x00000008;
STRUCT!{struct ENCRYPTED_FILE_METADATA_SIGNATURE {
dwEfsAccessType: DWORD,
pCertificatesAdded: PENCRYPTION_CERTIFICATE_HASH_LIST,
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
pEfsStreamSignature: PEFS_RPC_BLOB,
}}
pub type PENCRYPTED_FILE_METADATA_SIGNATURE = *mut ENCRYPTED_FILE_METADATA_SIGNATURE;
STRUCT!{struct ENCRYPTION_PROTECTOR {
cbTotalLength: DWORD,
pUserSid: *mut SID,
lpProtectorDescriptor: LPWSTR,
}}
pub type PENCRYPTION_PROTECTOR = *mut ENCRYPTION_PROTECTOR;
STRUCT!{struct ENCRYPTION_PROTECTOR_LIST {
nProtectors: DWORD,
pProtectors: *mut PENCRYPTION_PROTECTOR,
}}
pub type PENCRYPTION_PROTECTOR_LIST = *mut ENCRYPTION_PROTECTOR_LIST;
extern "system" {
pub fn QueryUsersOnEncryptedFile(
lpFileName: LPCWSTR,
pUsers: *mut PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn QueryRecoveryAgentsOnEncryptedFile(
lpFileName: LPCWSTR,
pRecoveryAgents: *mut PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn RemoveUsersFromEncryptedFile(
lpFileName: LPCWSTR,
pHashes: PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn AddUsersToEncryptedFile(
lpFileName: LPCWSTR,
pEncryptionCertificate: PENCRYPTION_CERTIFICATE_LIST,
) -> DWORD;
pub fn SetUserFileEncryptionKey(
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
) -> DWORD;
pub fn SetUserFileEncryptionKeyEx(
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
dwCapabilities: DWORD,
dwFlags: DWORD,
pvReserved: LPVOID,
) -> DWORD;
pub fn FreeEncryptionCertificateHashList(
pUsers: PENCRYPTION_CERTIFICATE_HASH_LIST,
);
pub fn EncryptionDisable(
DirPath: LPCWSTR,
Disable: BOOL,
) -> BOOL;
pub fn DuplicateEncryptionInfoFile(
SrcFileName: LPCWSTR,
DstFileName: LPCWSTR,
dwCreationDistribution: DWORD,
dwAttributes: DWORD,
lpSecurityAttributes: *const SECURITY_ATTRIBUTES,
) -> DWORD;
pub fn GetEncryptedFileMetadata(
lpFileName: LPCWSTR,
pcbMetadata: PDWORD,
ppbMetadata: *mut PBYTE,
) -> DWORD;
pub fn SetEncryptedFileMetadata(
lpFileName: LPCWSTR,
pbOldMetadata: PBYTE,
pbNewMetadata: PBYTE,
pOwnerHash: PENCRYPTION_CERTIFICATE_HASH,
dwOperation: DWORD,
pCertificatesAdded: PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn FreeEncryptedFileMetadata(
pbMetadata: PBYTE,
);
}
|
{
v == EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR && subV == EFS_PFILE_SUBVER_APPX
}
|
identifier_body
|
winefs.rs
|
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
use shared::basetsd::SIZE_T;
use shared::minwindef::{BOOL, DWORD, LPVOID, PBYTE, PDWORD, ULONG};
use um::minwinbase::SECURITY_ATTRIBUTES;
use um::wincrypt::ALG_ID;
use um::winnt::{LPCWSTR, LPWSTR, SID};
pub const WINEFS_SETUSERKEY_SET_CAPABILITIES: DWORD = 0x00000001;
STRUCT!{struct EFS_CERTIFICATE_BLOB {
dwCertEncodingType: DWORD,
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_CERTIFICATE_BLOB = *mut EFS_CERTIFICATE_BLOB;
STRUCT!{struct EFS_HASH_BLOB {
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_HASH_BLOB = *mut EFS_HASH_BLOB;
STRUCT!{struct EFS_RPC_BLOB {
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_RPC_BLOB = *mut EFS_RPC_BLOB;
STRUCT!{struct EFS_PIN_BLOB {
cbPadding: DWORD,
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_PIN_BLOB = *mut EFS_PIN_BLOB;
STRUCT!{struct EFS_KEY_INFO {
dwVersion: DWORD,
Entropy: ULONG,
Algorithm: ALG_ID,
KeyLength: ULONG,
}}
pub type PEFS_KEY_INFO = *mut EFS_KEY_INFO;
STRUCT!{struct EFS_COMPATIBILITY_INFO {
EfsVersion: DWORD,
}}
pub type PEFS_COMPATIBILITY_INFO = *mut EFS_COMPATIBILITY_INFO;
pub const EFS_COMPATIBILITY_VERSION_NCRYPT_PROTECTOR: DWORD = 5;
pub const EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR: DWORD = 6;
#[inline]
pub fn
|
(v: DWORD) -> bool {
v == EFS_COMPATIBILITY_VERSION_NCRYPT_PROTECTOR
|| v == EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR
}
pub const EFS_SUBVER_UNKNOWN: DWORD = 0;
pub const EFS_EFS_SUBVER_EFS_CERT: DWORD = 1;
pub const EFS_PFILE_SUBVER_RMS: DWORD = 2;
pub const EFS_PFILE_SUBVER_APPX: DWORD = 3;
STRUCT!{struct EFS_VERSION_INFO {
EfsVersion: DWORD,
SubVersion: DWORD,
}}
pub type PEFS_VERSION_INFO = *mut EFS_VERSION_INFO;
#[inline]
/// Returns `true` when the version/sub-version pair identifies an APPX
/// PFILE: the PFILE protector version with the APPX sub-version.
pub fn EFS_IS_APPX_VERSION(v: DWORD, subV: DWORD) -> bool {
    let is_pfile = v == EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR;
    let is_appx_sub = subV == EFS_PFILE_SUBVER_APPX;
    is_pfile && is_appx_sub
}
STRUCT!{struct EFS_DECRYPTION_STATUS_INFO {
dwDecryptionError: DWORD,
dwHashOffset: DWORD,
cbHash: DWORD,
}}
pub type PEFS_DECRYPTION_STATUS_INFO = *mut EFS_DECRYPTION_STATUS_INFO;
STRUCT!{struct EFS_ENCRYPTION_STATUS_INFO {
bHasCurrentKey: BOOL,
dwEncryptionError: DWORD,
}}
pub type PEFS_ENCRYPTION_STATUS_INFO = *mut EFS_ENCRYPTION_STATUS_INFO;
STRUCT!{struct ENCRYPTION_CERTIFICATE {
cbTotalLength: DWORD,
pUserSid: *mut SID,
pCertBlob: PEFS_CERTIFICATE_BLOB,
}}
pub type PENCRYPTION_CERTIFICATE = *mut ENCRYPTION_CERTIFICATE;
pub const MAX_SID_SIZE: SIZE_T = 256;
STRUCT!{struct ENCRYPTION_CERTIFICATE_HASH {
cbTotalLength: DWORD,
pUserSid: *mut SID,
pHash: PEFS_HASH_BLOB,
lpDisplayInformation: LPWSTR,
}}
pub type PENCRYPTION_CERTIFICATE_HASH = *mut ENCRYPTION_CERTIFICATE_HASH;
STRUCT!{struct ENCRYPTION_CERTIFICATE_HASH_LIST {
nCert_Hash: DWORD,
pUsers: *mut PENCRYPTION_CERTIFICATE_HASH,
}}
pub type PENCRYPTION_CERTIFICATE_HASH_LIST = *mut ENCRYPTION_CERTIFICATE_HASH_LIST;
STRUCT!{struct ENCRYPTION_CERTIFICATE_LIST {
nUsers: DWORD,
pUsers: *mut PENCRYPTION_CERTIFICATE,
}}
pub type PENCRYPTION_CERTIFICATE_LIST = *mut ENCRYPTION_CERTIFICATE_LIST;
pub const EFS_METADATA_ADD_USER: DWORD = 0x00000001;
pub const EFS_METADATA_REMOVE_USER: DWORD = 0x00000002;
pub const EFS_METADATA_REPLACE_USER: DWORD = 0x00000004;
pub const EFS_METADATA_GENERAL_OP: DWORD = 0x00000008;
STRUCT!{struct ENCRYPTED_FILE_METADATA_SIGNATURE {
dwEfsAccessType: DWORD,
pCertificatesAdded: PENCRYPTION_CERTIFICATE_HASH_LIST,
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
pEfsStreamSignature: PEFS_RPC_BLOB,
}}
pub type PENCRYPTED_FILE_METADATA_SIGNATURE = *mut ENCRYPTED_FILE_METADATA_SIGNATURE;
STRUCT!{struct ENCRYPTION_PROTECTOR {
cbTotalLength: DWORD,
pUserSid: *mut SID,
lpProtectorDescriptor: LPWSTR,
}}
pub type PENCRYPTION_PROTECTOR = *mut ENCRYPTION_PROTECTOR;
STRUCT!{struct ENCRYPTION_PROTECTOR_LIST {
nProtectors: DWORD,
pProtectors: *mut PENCRYPTION_PROTECTOR,
}}
pub type PENCRYPTION_PROTECTOR_LIST = *mut ENCRYPTION_PROTECTOR_LIST;
extern "system" {
pub fn QueryUsersOnEncryptedFile(
lpFileName: LPCWSTR,
pUsers: *mut PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn QueryRecoveryAgentsOnEncryptedFile(
lpFileName: LPCWSTR,
pRecoveryAgents: *mut PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn RemoveUsersFromEncryptedFile(
lpFileName: LPCWSTR,
pHashes: PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn AddUsersToEncryptedFile(
lpFileName: LPCWSTR,
pEncryptionCertificate: PENCRYPTION_CERTIFICATE_LIST,
) -> DWORD;
pub fn SetUserFileEncryptionKey(
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
) -> DWORD;
pub fn SetUserFileEncryptionKeyEx(
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
dwCapabilities: DWORD,
dwFlags: DWORD,
pvReserved: LPVOID,
) -> DWORD;
pub fn FreeEncryptionCertificateHashList(
pUsers: PENCRYPTION_CERTIFICATE_HASH_LIST,
);
pub fn EncryptionDisable(
DirPath: LPCWSTR,
Disable: BOOL,
) -> BOOL;
pub fn DuplicateEncryptionInfoFile(
SrcFileName: LPCWSTR,
DstFileName: LPCWSTR,
dwCreationDistribution: DWORD,
dwAttributes: DWORD,
lpSecurityAttributes: *const SECURITY_ATTRIBUTES,
) -> DWORD;
pub fn GetEncryptedFileMetadata(
lpFileName: LPCWSTR,
pcbMetadata: PDWORD,
ppbMetadata: *mut PBYTE,
) -> DWORD;
pub fn SetEncryptedFileMetadata(
lpFileName: LPCWSTR,
pbOldMetadata: PBYTE,
pbNewMetadata: PBYTE,
pOwnerHash: PENCRYPTION_CERTIFICATE_HASH,
dwOperation: DWORD,
pCertificatesAdded: PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn FreeEncryptedFileMetadata(
pbMetadata: PBYTE,
);
}
|
EFS_IS_DESCRIPTOR_VERSION
|
identifier_name
|
winefs.rs
|
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
use shared::basetsd::SIZE_T;
use shared::minwindef::{BOOL, DWORD, LPVOID, PBYTE, PDWORD, ULONG};
use um::minwinbase::SECURITY_ATTRIBUTES;
use um::wincrypt::ALG_ID;
use um::winnt::{LPCWSTR, LPWSTR, SID};
pub const WINEFS_SETUSERKEY_SET_CAPABILITIES: DWORD = 0x00000001;
STRUCT!{struct EFS_CERTIFICATE_BLOB {
dwCertEncodingType: DWORD,
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_CERTIFICATE_BLOB = *mut EFS_CERTIFICATE_BLOB;
STRUCT!{struct EFS_HASH_BLOB {
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_HASH_BLOB = *mut EFS_HASH_BLOB;
STRUCT!{struct EFS_RPC_BLOB {
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_RPC_BLOB = *mut EFS_RPC_BLOB;
STRUCT!{struct EFS_PIN_BLOB {
cbPadding: DWORD,
cbData: DWORD,
pbData: PBYTE,
}}
pub type PEFS_PIN_BLOB = *mut EFS_PIN_BLOB;
STRUCT!{struct EFS_KEY_INFO {
dwVersion: DWORD,
Entropy: ULONG,
Algorithm: ALG_ID,
KeyLength: ULONG,
}}
pub type PEFS_KEY_INFO = *mut EFS_KEY_INFO;
STRUCT!{struct EFS_COMPATIBILITY_INFO {
EfsVersion: DWORD,
}}
pub type PEFS_COMPATIBILITY_INFO = *mut EFS_COMPATIBILITY_INFO;
pub const EFS_COMPATIBILITY_VERSION_NCRYPT_PROTECTOR: DWORD = 5;
pub const EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR: DWORD = 6;
#[inline]
pub fn EFS_IS_DESCRIPTOR_VERSION(v: DWORD) -> bool {
v == EFS_COMPATIBILITY_VERSION_NCRYPT_PROTECTOR
|| v == EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR
}
pub const EFS_SUBVER_UNKNOWN: DWORD = 0;
pub const EFS_EFS_SUBVER_EFS_CERT: DWORD = 1;
pub const EFS_PFILE_SUBVER_RMS: DWORD = 2;
pub const EFS_PFILE_SUBVER_APPX: DWORD = 3;
STRUCT!{struct EFS_VERSION_INFO {
EfsVersion: DWORD,
SubVersion: DWORD,
}}
pub type PEFS_VERSION_INFO = *mut EFS_VERSION_INFO;
#[inline]
pub fn EFS_IS_APPX_VERSION(v: DWORD, subV: DWORD) -> bool {
v == EFS_COMPATIBILITY_VERSION_PFILE_PROTECTOR && subV == EFS_PFILE_SUBVER_APPX
}
STRUCT!{struct EFS_DECRYPTION_STATUS_INFO {
dwDecryptionError: DWORD,
dwHashOffset: DWORD,
cbHash: DWORD,
}}
pub type PEFS_DECRYPTION_STATUS_INFO = *mut EFS_DECRYPTION_STATUS_INFO;
STRUCT!{struct EFS_ENCRYPTION_STATUS_INFO {
bHasCurrentKey: BOOL,
dwEncryptionError: DWORD,
}}
pub type PEFS_ENCRYPTION_STATUS_INFO = *mut EFS_ENCRYPTION_STATUS_INFO;
STRUCT!{struct ENCRYPTION_CERTIFICATE {
cbTotalLength: DWORD,
pUserSid: *mut SID,
pCertBlob: PEFS_CERTIFICATE_BLOB,
}}
pub type PENCRYPTION_CERTIFICATE = *mut ENCRYPTION_CERTIFICATE;
pub const MAX_SID_SIZE: SIZE_T = 256;
STRUCT!{struct ENCRYPTION_CERTIFICATE_HASH {
cbTotalLength: DWORD,
|
pHash: PEFS_HASH_BLOB,
lpDisplayInformation: LPWSTR,
}}
pub type PENCRYPTION_CERTIFICATE_HASH = *mut ENCRYPTION_CERTIFICATE_HASH;
STRUCT!{struct ENCRYPTION_CERTIFICATE_HASH_LIST {
nCert_Hash: DWORD,
pUsers: *mut PENCRYPTION_CERTIFICATE_HASH,
}}
pub type PENCRYPTION_CERTIFICATE_HASH_LIST = *mut ENCRYPTION_CERTIFICATE_HASH_LIST;
STRUCT!{struct ENCRYPTION_CERTIFICATE_LIST {
nUsers: DWORD,
pUsers: *mut PENCRYPTION_CERTIFICATE,
}}
pub type PENCRYPTION_CERTIFICATE_LIST = *mut ENCRYPTION_CERTIFICATE_LIST;
pub const EFS_METADATA_ADD_USER: DWORD = 0x00000001;
pub const EFS_METADATA_REMOVE_USER: DWORD = 0x00000002;
pub const EFS_METADATA_REPLACE_USER: DWORD = 0x00000004;
pub const EFS_METADATA_GENERAL_OP: DWORD = 0x00000008;
STRUCT!{struct ENCRYPTED_FILE_METADATA_SIGNATURE {
dwEfsAccessType: DWORD,
pCertificatesAdded: PENCRYPTION_CERTIFICATE_HASH_LIST,
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
pEfsStreamSignature: PEFS_RPC_BLOB,
}}
pub type PENCRYPTED_FILE_METADATA_SIGNATURE = *mut ENCRYPTED_FILE_METADATA_SIGNATURE;
STRUCT!{struct ENCRYPTION_PROTECTOR {
cbTotalLength: DWORD,
pUserSid: *mut SID,
lpProtectorDescriptor: LPWSTR,
}}
pub type PENCRYPTION_PROTECTOR = *mut ENCRYPTION_PROTECTOR;
STRUCT!{struct ENCRYPTION_PROTECTOR_LIST {
nProtectors: DWORD,
pProtectors: *mut PENCRYPTION_PROTECTOR,
}}
pub type PENCRYPTION_PROTECTOR_LIST = *mut ENCRYPTION_PROTECTOR_LIST;
extern "system" {
pub fn QueryUsersOnEncryptedFile(
lpFileName: LPCWSTR,
pUsers: *mut PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn QueryRecoveryAgentsOnEncryptedFile(
lpFileName: LPCWSTR,
pRecoveryAgents: *mut PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn RemoveUsersFromEncryptedFile(
lpFileName: LPCWSTR,
pHashes: PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn AddUsersToEncryptedFile(
lpFileName: LPCWSTR,
pEncryptionCertificate: PENCRYPTION_CERTIFICATE_LIST,
) -> DWORD;
pub fn SetUserFileEncryptionKey(
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
) -> DWORD;
pub fn SetUserFileEncryptionKeyEx(
pEncryptionCertificate: PENCRYPTION_CERTIFICATE,
dwCapabilities: DWORD,
dwFlags: DWORD,
pvReserved: LPVOID,
) -> DWORD;
pub fn FreeEncryptionCertificateHashList(
pUsers: PENCRYPTION_CERTIFICATE_HASH_LIST,
);
pub fn EncryptionDisable(
DirPath: LPCWSTR,
Disable: BOOL,
) -> BOOL;
pub fn DuplicateEncryptionInfoFile(
SrcFileName: LPCWSTR,
DstFileName: LPCWSTR,
dwCreationDistribution: DWORD,
dwAttributes: DWORD,
lpSecurityAttributes: *const SECURITY_ATTRIBUTES,
) -> DWORD;
pub fn GetEncryptedFileMetadata(
lpFileName: LPCWSTR,
pcbMetadata: PDWORD,
ppbMetadata: *mut PBYTE,
) -> DWORD;
pub fn SetEncryptedFileMetadata(
lpFileName: LPCWSTR,
pbOldMetadata: PBYTE,
pbNewMetadata: PBYTE,
pOwnerHash: PENCRYPTION_CERTIFICATE_HASH,
dwOperation: DWORD,
pCertificatesAdded: PENCRYPTION_CERTIFICATE_HASH_LIST,
) -> DWORD;
pub fn FreeEncryptedFileMetadata(
pbMetadata: PBYTE,
);
}
|
pUserSid: *mut SID,
|
random_line_split
|
timings.rs
|
//! Tests for -Ztimings.
use cargo_test_support::project;
use cargo_test_support::registry::Package;
#[cargo_test]
fn timings_works()
|
p.cargo("build --all-targets -Ztimings")
.masquerade_as_nightly_cargo()
.with_stderr_unordered(
"\
[UPDATING] [..]
[DOWNLOADING] crates...
[DOWNLOADED] dep v0.1.0 [..]
[COMPILING] dep v0.1.0
[COMPILING] foo v0.1.0 [..]
[COMPLETED] dep v0.1.0 in [..]s
[COMPLETED] foo v0.1.0 in [..]s
[COMPLETED] foo v0.1.0 bin \"foo\" in [..]s
[COMPLETED] foo v0.1.0 example \"ex1\" in [..]s
[COMPLETED] foo v0.1.0 lib (test) in [..]s
[COMPLETED] foo v0.1.0 bin \"foo\" (test) in [..]s
[COMPLETED] foo v0.1.0 test \"t1\" (test) in [..]s
[FINISHED] [..]
Timing report saved to [..]/foo/cargo-timing-[..].html
",
)
.run();
p.cargo("clean").run();
p.cargo("test -Ztimings")
.masquerade_as_nightly_cargo()
.run();
p.cargo("clean").run();
p.cargo("check -Ztimings")
.masquerade_as_nightly_cargo()
.run();
p.cargo("clean").run();
p.cargo("doc -Ztimings").masquerade_as_nightly_cargo().run();
}
|
{
Package::new("dep", "0.1.0").publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
dep = "0.1"
"#,
)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/t1.rs", "")
.file("examples/ex1.rs", "fn main() {}")
.build();
|
identifier_body
|
timings.rs
|
//! Tests for -Ztimings.
use cargo_test_support::project;
use cargo_test_support::registry::Package;
#[cargo_test]
fn
|
() {
Package::new("dep", "0.1.0").publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
dep = "0.1"
"#,
)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/t1.rs", "")
.file("examples/ex1.rs", "fn main() {}")
.build();
p.cargo("build --all-targets -Ztimings")
.masquerade_as_nightly_cargo()
.with_stderr_unordered(
"\
[UPDATING] [..]
[DOWNLOADING] crates...
[DOWNLOADED] dep v0.1.0 [..]
[COMPILING] dep v0.1.0
[COMPILING] foo v0.1.0 [..]
[COMPLETED] dep v0.1.0 in [..]s
[COMPLETED] foo v0.1.0 in [..]s
[COMPLETED] foo v0.1.0 bin \"foo\" in [..]s
[COMPLETED] foo v0.1.0 example \"ex1\" in [..]s
[COMPLETED] foo v0.1.0 lib (test) in [..]s
[COMPLETED] foo v0.1.0 bin \"foo\" (test) in [..]s
[COMPLETED] foo v0.1.0 test \"t1\" (test) in [..]s
[FINISHED] [..]
Timing report saved to [..]/foo/cargo-timing-[..].html
",
)
.run();
p.cargo("clean").run();
p.cargo("test -Ztimings")
.masquerade_as_nightly_cargo()
.run();
p.cargo("clean").run();
p.cargo("check -Ztimings")
.masquerade_as_nightly_cargo()
.run();
p.cargo("clean").run();
p.cargo("doc -Ztimings").masquerade_as_nightly_cargo().run();
}
|
timings_works
|
identifier_name
|
timings.rs
|
//! Tests for -Ztimings.
use cargo_test_support::project;
use cargo_test_support::registry::Package;
#[cargo_test]
fn timings_works() {
Package::new("dep", "0.1.0").publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
dep = "0.1"
"#,
)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/t1.rs", "")
.file("examples/ex1.rs", "fn main() {}")
.build();
p.cargo("build --all-targets -Ztimings")
.masquerade_as_nightly_cargo()
.with_stderr_unordered(
"\
[UPDATING] [..]
[DOWNLOADING] crates...
[DOWNLOADED] dep v0.1.0 [..]
[COMPILING] dep v0.1.0
[COMPILING] foo v0.1.0 [..]
[COMPLETED] dep v0.1.0 in [..]s
[COMPLETED] foo v0.1.0 in [..]s
[COMPLETED] foo v0.1.0 bin \"foo\" in [..]s
[COMPLETED] foo v0.1.0 example \"ex1\" in [..]s
[COMPLETED] foo v0.1.0 lib (test) in [..]s
[COMPLETED] foo v0.1.0 bin \"foo\" (test) in [..]s
[COMPLETED] foo v0.1.0 test \"t1\" (test) in [..]s
[FINISHED] [..]
Timing report saved to [..]/foo/cargo-timing-[..].html
|
)
.run();
p.cargo("clean").run();
p.cargo("test -Ztimings")
.masquerade_as_nightly_cargo()
.run();
p.cargo("clean").run();
p.cargo("check -Ztimings")
.masquerade_as_nightly_cargo()
.run();
p.cargo("clean").run();
p.cargo("doc -Ztimings").masquerade_as_nightly_cargo().run();
}
|
",
|
random_line_split
|
literals.rs
|
use common;
use ast;
use io;
pub fn parse_integer(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token> {
assert!(source.peek(0).is_some());
assert!(match source.peek(0).unwrap() {
'0'...'9' => true,
_ => false,
});
let location = source.location();
let mut value = String::new();
while let Some(chr) = source.peek(0) {
if '0' <= chr && chr <= '9' {
value.push(chr);
source.next();
} else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::Integer, value, location);
let error = match token.value.len() > 1 && token.value.starts_with("0") {
true => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid integer '{}'.", token.value),
})
}
false => None,
};
common::Status {
result: token,
error: error,
}
}
pub fn parse_string(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token>
|
location: Some(source.location()),
message: format!("Invalid escape sequence '\\{}'.", next_chr),
});
break;
}
}
chr = next_chr;
}
}
value.push(chr);
source.next();
} else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::String, value, location);
error = error.or(match source.peek(0) {
None => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid string '{}'.", token.value),
})
}
Some(_) => None,
});
source.next();
common::Status {
result: token,
error: error,
}
}
|
{
assert!(source.peek(0).is_some());
assert_eq!(source.peek(0).unwrap(), '"');
let mut error = None;
let location = source.location();
let mut value = String::new();
source.next();
while let Some(mut chr) = source.peek(0) {
if chr != '"' {
if chr == '\\' {
if let Some(next_chr) = source.peek(1) {
match next_chr {
'n' | 'r' | 't' | '"' | '\\' => {
value.push(chr);
source.next();
}
_ => {
error = Some(common::Error {
|
identifier_body
|
literals.rs
|
use common;
use ast;
use io;
pub fn parse_integer(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token> {
assert!(source.peek(0).is_some());
assert!(match source.peek(0).unwrap() {
'0'...'9' => true,
_ => false,
});
let location = source.location();
let mut value = String::new();
while let Some(chr) = source.peek(0) {
if '0' <= chr && chr <= '9' {
value.push(chr);
source.next();
} else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::Integer, value, location);
let error = match token.value.len() > 1 && token.value.starts_with("0") {
true => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid integer '{}'.", token.value),
})
}
false => None,
};
common::Status {
result: token,
error: error,
}
}
pub fn parse_string(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token> {
assert!(source.peek(0).is_some());
assert_eq!(source.peek(0).unwrap(), '"');
let mut error = None;
let location = source.location();
let mut value = String::new();
source.next();
while let Some(mut chr) = source.peek(0) {
if chr!= '"'
|
source.next();
}
else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::String, value, location);
error = error.or(match source.peek(0) {
None => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid string '{}'.", token.value),
})
}
Some(_) => None,
});
source.next();
common::Status {
result: token,
error: error,
}
}
|
{
if chr == '\\' {
if let Some(next_chr) = source.peek(1) {
match next_chr {
'n' | 'r' | 't' | '"' | '\\' => {
value.push(chr);
source.next();
}
_ => {
error = Some(common::Error {
location: Some(source.location()),
message: format!("Invalid escape sequence '\\{}'.", next_chr),
});
break;
}
}
chr = next_chr;
}
}
value.push(chr);
|
conditional_block
|
literals.rs
|
use common;
use ast;
use io;
pub fn parse_integer(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token> {
assert!(source.peek(0).is_some());
assert!(match source.peek(0).unwrap() {
'0'...'9' => true,
_ => false,
});
let location = source.location();
let mut value = String::new();
while let Some(chr) = source.peek(0) {
if '0' <= chr && chr <= '9' {
value.push(chr);
source.next();
} else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::Integer, value, location);
let error = match token.value.len() > 1 && token.value.starts_with("0") {
true => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid integer '{}'.", token.value),
})
}
false => None,
};
common::Status {
result: token,
error: error,
}
}
pub fn
|
(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token> {
assert!(source.peek(0).is_some());
assert_eq!(source.peek(0).unwrap(), '"');
let mut error = None;
let location = source.location();
let mut value = String::new();
source.next();
while let Some(mut chr) = source.peek(0) {
if chr!= '"' {
if chr == '\\' {
if let Some(next_chr) = source.peek(1) {
match next_chr {
'n' | 'r' | 't' | '"' | '\\' => {
value.push(chr);
source.next();
}
_ => {
error = Some(common::Error {
location: Some(source.location()),
message: format!("Invalid escape sequence '\\{}'.", next_chr),
});
break;
}
}
chr = next_chr;
}
}
value.push(chr);
source.next();
} else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::String, value, location);
error = error.or(match source.peek(0) {
None => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid string '{}'.", token.value),
})
}
Some(_) => None,
});
source.next();
common::Status {
result: token,
error: error,
}
}
|
parse_string
|
identifier_name
|
literals.rs
|
use common;
use ast;
use io;
pub fn parse_integer(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token> {
assert!(source.peek(0).is_some());
assert!(match source.peek(0).unwrap() {
'0'...'9' => true,
_ => false,
});
let location = source.location();
let mut value = String::new();
while let Some(chr) = source.peek(0) {
if '0' <= chr && chr <= '9' {
value.push(chr);
source.next();
} else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::Integer, value, location);
let error = match token.value.len() > 1 && token.value.starts_with("0") {
true => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid integer '{}'.", token.value),
})
}
false => None,
};
common::Status {
|
pub fn parse_string(source: &mut io::SourceFlexIterator) -> common::Status<ast::Token> {
assert!(source.peek(0).is_some());
assert_eq!(source.peek(0).unwrap(), '"');
let mut error = None;
let location = source.location();
let mut value = String::new();
source.next();
while let Some(mut chr) = source.peek(0) {
if chr!= '"' {
if chr == '\\' {
if let Some(next_chr) = source.peek(1) {
match next_chr {
'n' | 'r' | 't' | '"' | '\\' => {
value.push(chr);
source.next();
}
_ => {
error = Some(common::Error {
location: Some(source.location()),
message: format!("Invalid escape sequence '\\{}'.", next_chr),
});
break;
}
}
chr = next_chr;
}
}
value.push(chr);
source.next();
} else {
break;
}
}
let token = ast::Token::new(ast::TokenKind::String, value, location);
error = error.or(match source.peek(0) {
None => {
Some(common::Error {
location: Some(token.location),
message: format!("Invalid string '{}'.", token.value),
})
}
Some(_) => None,
});
source.next();
common::Status {
result: token,
error: error,
}
}
|
result: token,
error: error,
}
}
|
random_line_split
|
model.rs
|
use geometry::Vector3D;
use std::io::{BufReader,BufRead};
use std::fs::File;
use std::path::Path;
use tgacanvas::TgaCanvas;
use canvas::Canvas;
pub struct Model {
pub vertices: Vec<Vector3D<f32>>,
pub faces : Vec<[[i32; 3]; 3]>,
pub uv : Vec<[f32; 2]>,
pub diffusemap : TgaCanvas,
}
impl Model {
pub fn new(file_path: &str) -> Model {
let path = Path::new(file_path);
let file = BufReader::new(File::open(&path).unwrap());
let mut vertices = Vec::new();
let mut faces = Vec::new();
let mut uv = Vec::new();
for line in file.lines() {
let line = line.unwrap();
if line.starts_with("v ") {
let words: Vec<&str> = line.split_whitespace().collect();
vertices.push(Vector3D::new(words[1].parse().unwrap(),
words[2].parse().unwrap(),
words[3].parse().unwrap()));
debug!("readed vertex: {}", vertices.last().unwrap());
} else if line.starts_with("f ") {
let mut face: [[i32; 3]; 3] = [[-1; 3]; 3];
let words: Vec<&str> = line.split_whitespace().collect();
for i in 0..3 {
let mut j = 0;
for num in words[i+1].split("/") {
face[i][j] = num.parse::<i32>().unwrap() - 1;
j += 1;
}
debug!("face[{}] = [{}, {}, {}]", i, face[i][0], face[i][1], face[i][2]);
}
faces.push(face);
} else if line.starts_with("vt ") {
let words: Vec<&str> = line.split_whitespace().collect();
uv.push([words[1].parse().unwrap(), words[2].parse().unwrap()]);
debug!("uv: [{}, {}]", uv.last().unwrap()[0], uv.last().unwrap()[1]);
}
}
let texture_path = file_path.rsplitn(2, '.').last().unwrap().to_string() + "_diffuse.tga";
info!("loading texture from path: {}", texture_path);
Model {
vertices: vertices,
faces: faces,
uv: uv,
diffusemap: TgaCanvas::read(texture_path.split("*").next().unwrap()),
}
}
pub fn
|
(&self, iface: usize, nvert: usize) -> Vector3D<i32> {
let idx = self.faces[iface][nvert][1] as usize;
return Vector3D::new(self.uv[idx][0] * self.diffusemap.xsize() as f32, self.uv[idx][1] * self.diffusemap.ysize() as f32, 0.0).to::<i32>();
}
}
|
uv
|
identifier_name
|
model.rs
|
use geometry::Vector3D;
use std::io::{BufReader,BufRead};
use std::fs::File;
use std::path::Path;
use tgacanvas::TgaCanvas;
use canvas::Canvas;
pub struct Model {
pub vertices: Vec<Vector3D<f32>>,
pub faces : Vec<[[i32; 3]; 3]>,
pub uv : Vec<[f32; 2]>,
pub diffusemap : TgaCanvas,
}
impl Model {
pub fn new(file_path: &str) -> Model
|
face[i][j] = num.parse::<i32>().unwrap() - 1;
j += 1;
}
debug!("face[{}] = [{}, {}, {}]", i, face[i][0], face[i][1], face[i][2]);
}
faces.push(face);
} else if line.starts_with("vt ") {
let words: Vec<&str> = line.split_whitespace().collect();
uv.push([words[1].parse().unwrap(), words[2].parse().unwrap()]);
debug!("uv: [{}, {}]", uv.last().unwrap()[0], uv.last().unwrap()[1]);
}
}
let texture_path = file_path.rsplitn(2, '.').last().unwrap().to_string() + "_diffuse.tga";
info!("loading texture from path: {}", texture_path);
Model {
vertices: vertices,
faces: faces,
uv: uv,
diffusemap: TgaCanvas::read(texture_path.split("*").next().unwrap()),
}
}
pub fn uv(&self, iface: usize, nvert: usize) -> Vector3D<i32> {
let idx = self.faces[iface][nvert][1] as usize;
return Vector3D::new(self.uv[idx][0] * self.diffusemap.xsize() as f32, self.uv[idx][1] * self.diffusemap.ysize() as f32, 0.0).to::<i32>();
}
}
|
{
let path = Path::new(file_path);
let file = BufReader::new(File::open(&path).unwrap());
let mut vertices = Vec::new();
let mut faces = Vec::new();
let mut uv = Vec::new();
for line in file.lines() {
let line = line.unwrap();
if line.starts_with("v ") {
let words: Vec<&str> = line.split_whitespace().collect();
vertices.push(Vector3D::new(words[1].parse().unwrap(),
words[2].parse().unwrap(),
words[3].parse().unwrap()));
debug!("readed vertex: {}", vertices.last().unwrap());
} else if line.starts_with("f ") {
let mut face: [[i32; 3]; 3] = [[-1; 3]; 3];
let words: Vec<&str> = line.split_whitespace().collect();
for i in 0..3 {
let mut j = 0;
for num in words[i+1].split("/") {
|
identifier_body
|
model.rs
|
use geometry::Vector3D;
use std::io::{BufReader,BufRead};
use std::fs::File;
use std::path::Path;
use tgacanvas::TgaCanvas;
use canvas::Canvas;
pub struct Model {
pub vertices: Vec<Vector3D<f32>>,
pub faces : Vec<[[i32; 3]; 3]>,
pub uv : Vec<[f32; 2]>,
pub diffusemap : TgaCanvas,
}
impl Model {
pub fn new(file_path: &str) -> Model {
let path = Path::new(file_path);
let file = BufReader::new(File::open(&path).unwrap());
let mut vertices = Vec::new();
let mut faces = Vec::new();
let mut uv = Vec::new();
for line in file.lines() {
let line = line.unwrap();
if line.starts_with("v ") {
let words: Vec<&str> = line.split_whitespace().collect();
vertices.push(Vector3D::new(words[1].parse().unwrap(),
words[2].parse().unwrap(),
words[3].parse().unwrap()));
debug!("readed vertex: {}", vertices.last().unwrap());
} else if line.starts_with("f ") {
let mut face: [[i32; 3]; 3] = [[-1; 3]; 3];
let words: Vec<&str> = line.split_whitespace().collect();
for i in 0..3 {
let mut j = 0;
for num in words[i+1].split("/") {
face[i][j] = num.parse::<i32>().unwrap() - 1;
j += 1;
}
debug!("face[{}] = [{}, {}, {}]", i, face[i][0], face[i][1], face[i][2]);
}
faces.push(face);
} else if line.starts_with("vt ")
|
}
let texture_path = file_path.rsplitn(2, '.').last().unwrap().to_string() + "_diffuse.tga";
info!("loading texture from path: {}", texture_path);
Model {
vertices: vertices,
faces: faces,
uv: uv,
diffusemap: TgaCanvas::read(texture_path.split("*").next().unwrap()),
}
}
pub fn uv(&self, iface: usize, nvert: usize) -> Vector3D<i32> {
let idx = self.faces[iface][nvert][1] as usize;
return Vector3D::new(self.uv[idx][0] * self.diffusemap.xsize() as f32, self.uv[idx][1] * self.diffusemap.ysize() as f32, 0.0).to::<i32>();
}
}
|
{
let words: Vec<&str> = line.split_whitespace().collect();
uv.push([words[1].parse().unwrap(), words[2].parse().unwrap()]);
debug!("uv: [{}, {}]", uv.last().unwrap()[0], uv.last().unwrap()[1]);
}
|
conditional_block
|
model.rs
|
use geometry::Vector3D;
use std::io::{BufReader,BufRead};
use std::fs::File;
use std::path::Path;
use tgacanvas::TgaCanvas;
use canvas::Canvas;
pub struct Model {
pub vertices: Vec<Vector3D<f32>>,
pub faces : Vec<[[i32; 3]; 3]>,
pub uv : Vec<[f32; 2]>,
pub diffusemap : TgaCanvas,
}
impl Model {
pub fn new(file_path: &str) -> Model {
let path = Path::new(file_path);
let file = BufReader::new(File::open(&path).unwrap());
let mut vertices = Vec::new();
let mut faces = Vec::new();
let mut uv = Vec::new();
for line in file.lines() {
let line = line.unwrap();
if line.starts_with("v ") {
let words: Vec<&str> = line.split_whitespace().collect();
vertices.push(Vector3D::new(words[1].parse().unwrap(),
words[2].parse().unwrap(),
words[3].parse().unwrap()));
debug!("readed vertex: {}", vertices.last().unwrap());
} else if line.starts_with("f ") {
let mut face: [[i32; 3]; 3] = [[-1; 3]; 3];
let words: Vec<&str> = line.split_whitespace().collect();
for i in 0..3 {
let mut j = 0;
for num in words[i+1].split("/") {
face[i][j] = num.parse::<i32>().unwrap() - 1;
j += 1;
|
}
debug!("face[{}] = [{}, {}, {}]", i, face[i][0], face[i][1], face[i][2]);
}
faces.push(face);
} else if line.starts_with("vt ") {
let words: Vec<&str> = line.split_whitespace().collect();
uv.push([words[1].parse().unwrap(), words[2].parse().unwrap()]);
debug!("uv: [{}, {}]", uv.last().unwrap()[0], uv.last().unwrap()[1]);
}
}
let texture_path = file_path.rsplitn(2, '.').last().unwrap().to_string() + "_diffuse.tga";
info!("loading texture from path: {}", texture_path);
Model {
vertices: vertices,
faces: faces,
uv: uv,
diffusemap: TgaCanvas::read(texture_path.split("*").next().unwrap()),
}
}
pub fn uv(&self, iface: usize, nvert: usize) -> Vector3D<i32> {
let idx = self.faces[iface][nvert][1] as usize;
return Vector3D::new(self.uv[idx][0] * self.diffusemap.xsize() as f32, self.uv[idx][1] * self.diffusemap.ysize() as f32, 0.0).to::<i32>();
}
}
|
random_line_split
|
|
issue-22560.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
use std::ops::{Add, Sub};
type Test = Add +
//~^ ERROR the type parameter `RHS` must be explicitly specified in an object type because its default value `Self` references the type `Self`
//~^^ ERROR the value of the associated type `Output` (from the trait `core::ops::Add`) must be specified [E0191]
Sub;
//~^ ERROR only the builtin traits can be used as closure or object bounds
fn
|
() { }
|
main
|
identifier_name
|
issue-22560.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
use std::ops::{Add, Sub};
type Test = Add +
|
fn main() { }
|
//~^ ERROR the type parameter `RHS` must be explicitly specified in an object type because its default value `Self` references the type `Self`
//~^^ ERROR the value of the associated type `Output` (from the trait `core::ops::Add`) must be specified [E0191]
Sub;
//~^ ERROR only the builtin traits can be used as closure or object bounds
|
random_line_split
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.