file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
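Each row below gives a file name, a prefix/suffix/middle split of that file, and a fim_type label (the four classes that appear in the rows are identifier_body, identifier_name, conditional_block, and random_line_split). As a minimal sketch of how a row is meant to be read, assuming the usual fill-in-the-middle convention that concatenating prefix, middle, and suffix reproduces the original file (the struct and its methods here are illustrative, not part of the dataset):

```rust
/// Illustrative model of one row of the table below, assuming the usual
/// fill-in-the-middle convention: prefix + middle + suffix == original file.
struct FimRow {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String, // e.g. "identifier_body" or "conditional_block"
}

impl FimRow {
    /// Rebuild the complete source file from the three pieces.
    fn reassemble(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}

fn main() {
    // Hypothetical values mirroring the first row (issue-22560.rs): the
    // prefix ends at `fn main()`, and the masked middle is the empty body.
    let row = FimRow {
        file_name: "issue-22560.rs".to_string(),
        prefix: "fn main()\n".to_string(),
        suffix: String::new(),
        middle: "{ }".to_string(),
        fim_type: "identifier_body".to_string(),
    };
    assert_eq!(row.reassemble(), "fn main()\n{ }");
    println!("{}: {}", row.file_name, row.fim_type);
}
```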
issue-22560.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
use std::ops::{Add, Sub};
type Test = Add +
//~^ ERROR the type parameter `RHS` must be explicitly specified in an object type because its default value `Self` references the type `Self`
//~^^ ERROR the value of the associated type `Output` (from the trait `core::ops::Add`) must be specified [E0191]
Sub;
//~^ ERROR only the builtin traits can be used as closure or object bounds
fn main()
|
{ }
|
identifier_body
|
|
word_index.rs
|
#![stable]
//! This module is used to index words and does the heavy lifting of our program.
/// A counted word.
///
/// This struct contains members for storing a word and the number of times it appeared. This
/// struct is intended for use with [`add_word`](fn.add_word.html).
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let indexed_word = word_index::IndexedWord{
/// word: "Text".to_string(),
/// appeared: 12
/// };
///
/// assert_eq!(indexed_word.word, "Text".to_string());
/// assert_eq!(indexed_word.appeared, 12i64);
/// ```
#[derive(Debug, PartialEq)]
#[stable]
pub struct IndexedWord {
/// The word that's indexed.
pub word: String,
/// The amount of times this word appeared.
pub appeared: i64
}
/// Add a word to a given index.
///
/// This function prevents duplicates and increments the count of the word appearances
/// automatically. The vector will be modified accordingly.
///
/// # Arguments
///
/// * `word` A string containing the word to add.
///
/// * `index` A reference to a vector containing all the indexed words.
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let mut index = Vec::new();
///
/// word_index::add_word("Hello".to_string(), &mut index);
/// word_index::add_word("hELLO".to_string(), &mut index);
/// word_index::add_word("World".to_string(), &mut index);
/// word_index::add_word("HELLO".to_string(), &mut index);
/// word_index::add_word("PFUDOR".to_string(), &mut index);
///
/// assert_eq!(index[0], word_index::IndexedWord{
/// word: "hello".to_string(),
/// appeared: 3
/// });
/// assert_eq!(index[1], word_index::IndexedWord{
/// word: "world".to_string(),
/// appeared: 1
/// });
/// assert_eq!(index[2], word_index::IndexedWord{
/// word: "pfudor".to_string(),
/// appeared: 1
/// });
/// ```
#[stable]
pub fn add_word(word: String, index: &mut Vec<IndexedWord>)
|
{
for indexed_word in index.iter_mut() {
if word.to_lowercase() == indexed_word.word {
indexed_word.appeared += 1;
return;
}
}
let new_word = IndexedWord{
word: word.to_lowercase(),
appeared: 1
};
index.push(new_word);
}
|
identifier_body
|
|
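A note on the design above: `add_word` scans the whole `Vec` and lowercases the candidate on every comparison, so each insert is linear in the number of distinct words, but it preserves first-seen order (the doc example relies on `index[0]` being `"hello"`). As a hedged aside, not part of word_index.rs, the same case-insensitive counting can be sketched with a `HashMap` when ordering does not matter:

```rust
use std::collections::HashMap;

/// Illustrative alternative to `add_word`: count case-insensitively
/// with a HashMap instead of scanning a Vec on every insert.
fn add_word_hashed(word: &str, index: &mut HashMap<String, i64>) {
    *index.entry(word.to_lowercase()).or_insert(0) += 1;
}

fn main() {
    let mut index = HashMap::new();
    for w in ["Hello", "hELLO", "World", "HELLO", "PFUDOR"] {
        add_word_hashed(w, &mut index);
    }
    assert_eq!(index["hello"], 3);
    assert_eq!(index["world"], 1);
    assert_eq!(index["pfudor"], 1);
}
```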
word_index.rs
|
#![stable]
//! This module is used to index words and does the heavy lifting of our program.
/// A counted word.
///
/// This struct contains members for storing a word and the number of times it appeared. This
/// struct is intended for use with [`add_word`](fn.add_word.html).
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let indexed_word = word_index::IndexedWord{
/// word: "Text".to_string(),
/// appeared: 12
/// };
///
/// assert_eq!(indexed_word.word, "Text".to_string());
/// assert_eq!(indexed_word.appeared, 12i64);
/// ```
#[derive(Debug, PartialEq)]
#[stable]
pub struct IndexedWord {
/// The word that's indexed.
pub word: String,
/// The amount of times this word appeared.
pub appeared: i64
}
/// Add a word to a given index.
///
/// This function prevents duplicates and increments the count of the word appearances
/// automatically. The vector will be modified accordingly.
///
/// # Arguments
///
/// * `word` A string containing the word to add.
///
/// * `index` A reference to a vector containing all the indexed words.
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let mut index = Vec::new();
///
/// word_index::add_word("Hello".to_string(), &mut index);
/// word_index::add_word("hELLO".to_string(), &mut index);
/// word_index::add_word("World".to_string(), &mut index);
/// word_index::add_word("HELLO".to_string(), &mut index);
/// word_index::add_word("PFUDOR".to_string(), &mut index);
///
/// assert_eq!(index[0], word_index::IndexedWord{
/// word: "hello".to_string(),
/// appeared: 3
/// });
/// assert_eq!(index[1], word_index::IndexedWord{
/// word: "world".to_string(),
/// appeared: 1
/// });
/// assert_eq!(index[2], word_index::IndexedWord{
/// word: "pfudor".to_string(),
/// appeared: 1
/// });
/// ```
#[stable]
pub fn add_word(word: String, index: &mut Vec<IndexedWord>) {
for indexed_word in index.iter_mut() {
if word.to_lowercase() == indexed_word.word
|
}
let new_word = IndexedWord{
word: word.to_lowercase(),
appeared: 1
};
index.push(new_word);
}
|
{
indexed_word.appeared += 1;
return;
}
|
conditional_block
|
word_index.rs
|
#![stable]
//! This module is used to index words and does the heavy lifting of our program.
/// A counted word.
///
/// This struct contains members for storing a word and the number of times it appeared. This
/// struct is intended for use with [`add_word`](fn.add_word.html).
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let indexed_word = word_index::IndexedWord{
/// word: "Text".to_string(),
/// appeared: 12
/// };
///
/// assert_eq!(indexed_word.word, "Text".to_string());
/// assert_eq!(indexed_word.appeared, 12i64);
/// ```
#[derive(Debug, PartialEq)]
#[stable]
pub struct IndexedWord {
/// The word that's indexed.
pub word: String,
/// The amount of times this word appeared.
pub appeared: i64
}
/// Add a word to a given index.
///
/// This function prevents duplicates and increments the count of the word appearances
/// automatically. The vector will be modified accordingly.
///
/// # Arguments
///
/// * `word` A string containing the word to add.
///
/// * `index` A reference to a vector containing all the indexed words.
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let mut index = Vec::new();
///
/// word_index::add_word("Hello".to_string(), &mut index);
/// word_index::add_word("hELLO".to_string(), &mut index);
/// word_index::add_word("World".to_string(), &mut index);
/// word_index::add_word("HELLO".to_string(), &mut index);
/// word_index::add_word("PFUDOR".to_string(), &mut index);
///
/// assert_eq!(index[0], word_index::IndexedWord{
/// word: "hello".to_string(),
/// appeared: 3
/// });
/// assert_eq!(index[1], word_index::IndexedWord{
/// word: "world".to_string(),
|
/// assert_eq!(index[2], word_index::IndexedWord{
/// word: "pfudor".to_string(),
/// appeared: 1
/// });
/// ```
#[stable]
pub fn add_word(word: String, index: &mut Vec<IndexedWord>) {
for indexed_word in index.iter_mut() {
if word.to_lowercase() == indexed_word.word {
indexed_word.appeared += 1;
return;
}
}
let new_word = IndexedWord{
word: word.to_lowercase(),
appeared: 1
};
index.push(new_word);
}
|
/// appeared: 1
/// });
|
random_line_split
|
word_index.rs
|
#![stable]
//! This module is used to index words and does the heavy lifting of our program.
/// A counted word.
///
/// This struct contains members for storing a word and the number of times it appeared. This
/// struct is intended for use with [`add_word`](fn.add_word.html).
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let indexed_word = word_index::IndexedWord{
/// word: "Text".to_string(),
/// appeared: 12
/// };
///
/// assert_eq!(indexed_word.word, "Text".to_string());
/// assert_eq!(indexed_word.appeared, 12i64);
/// ```
#[derive(Debug, PartialEq)]
#[stable]
pub struct
|
{
/// The word that's indexed.
pub word: String,
/// The amount of times this word appeared.
pub appeared: i64
}
/// Add a word to a given index.
///
/// This function prevents duplicates and increments the count of the word appearances
/// automatically. The vector will be modified accordingly.
///
/// # Arguments
///
/// * `word` A string containing the word to add.
///
/// * `index` A reference to a vector containing all the indexed words.
///
/// # Examples
///
/// ```
/// use lib_word_count::word_index;
///
/// let mut index = Vec::new();
///
/// word_index::add_word("Hello".to_string(), &mut index);
/// word_index::add_word("hELLO".to_string(), &mut index);
/// word_index::add_word("World".to_string(), &mut index);
/// word_index::add_word("HELLO".to_string(), &mut index);
/// word_index::add_word("PFUDOR".to_string(), &mut index);
///
/// assert_eq!(index[0], word_index::IndexedWord{
/// word: "hello".to_string(),
/// appeared: 3
/// });
/// assert_eq!(index[1], word_index::IndexedWord{
/// word: "world".to_string(),
/// appeared: 1
/// });
/// assert_eq!(index[2], word_index::IndexedWord{
/// word: "pfudor".to_string(),
/// appeared: 1
/// });
/// ```
#[stable]
pub fn add_word(word: String, index: &mut Vec<IndexedWord>) {
for indexed_word in index.iter_mut() {
if word.to_lowercase() == indexed_word.word {
indexed_word.appeared += 1;
return;
}
}
let new_word = IndexedWord{
word: word.to_lowercase(),
appeared: 1
};
index.push(new_word);
}
|
IndexedWord
|
identifier_name
|
constants.rs
|
use malachite_base::num::basic::traits::{NegativeOne, One, Two, Zero};
use malachite_nz::integer::Integer;
#[test]
fn test_zero() {
let zero = Integer::ZERO;
assert!(zero.is_valid());
assert_eq!(zero, 0);
assert_eq!(zero.to_string(), "0");
}
#[test]
fn test_one() {
|
#[test]
fn test_two() {
let two = Integer::TWO;
assert!(two.is_valid());
assert_eq!(two, 2);
assert_eq!(two.to_string(), "2");
}
#[test]
fn test_negative_one() {
let negative_one = Integer::NEGATIVE_ONE;
assert!(negative_one.is_valid());
assert_eq!(negative_one, -1);
assert_eq!(negative_one.to_string(), "-1");
}
|
let one = Integer::ONE;
assert!(one.is_valid());
assert_eq!(one, 1);
assert_eq!(one.to_string(), "1");
}
|
random_line_split
|
constants.rs
|
use malachite_base::num::basic::traits::{NegativeOne, One, Two, Zero};
use malachite_nz::integer::Integer;
#[test]
fn test_zero() {
let zero = Integer::ZERO;
assert!(zero.is_valid());
assert_eq!(zero, 0);
assert_eq!(zero.to_string(), "0");
}
#[test]
fn test_one() {
let one = Integer::ONE;
assert!(one.is_valid());
assert_eq!(one, 1);
assert_eq!(one.to_string(), "1");
}
#[test]
fn test_two() {
let two = Integer::TWO;
assert!(two.is_valid());
assert_eq!(two, 2);
assert_eq!(two.to_string(), "2");
}
#[test]
fn test_negative_one()
|
{
let negative_one = Integer::NEGATIVE_ONE;
assert!(negative_one.is_valid());
assert_eq!(negative_one, -1);
assert_eq!(negative_one.to_string(), "-1");
}
|
identifier_body
|
|
constants.rs
|
use malachite_base::num::basic::traits::{NegativeOne, One, Two, Zero};
use malachite_nz::integer::Integer;
#[test]
fn
|
() {
let zero = Integer::ZERO;
assert!(zero.is_valid());
assert_eq!(zero, 0);
assert_eq!(zero.to_string(), "0");
}
#[test]
fn test_one() {
let one = Integer::ONE;
assert!(one.is_valid());
assert_eq!(one, 1);
assert_eq!(one.to_string(), "1");
}
#[test]
fn test_two() {
let two = Integer::TWO;
assert!(two.is_valid());
assert_eq!(two, 2);
assert_eq!(two.to_string(), "2");
}
#[test]
fn test_negative_one() {
let negative_one = Integer::NEGATIVE_ONE;
assert!(negative_one.is_valid());
assert_eq!(negative_one, -1);
assert_eq!(negative_one.to_string(), "-1");
}
|
test_zero
|
identifier_name
|
level.rs
|
use super::dump::dump_tree;
use super::node::read_unchecked;
use super::node::write_node;
use super::node::NodeWriteGuard;
use super::prune::*;
use super::search::MutSearchResult;
use super::LevelTree;
use super::NodeCellRef;
use super::*;
use super::{external, BPlusTree};
use itertools::Itertools;
use std::fmt::Debug;
use std::sync::atomic::Ordering::Relaxed;
pub const LEVEL_PAGE_DIFF_MULTIPLIER: usize = 4;
pub const LEVEL_TREE_DEPTH: u32 = 2;
pub const LEVEL_M: usize = 32; // Smaller can be faster but more fragmented
pub const LEVEL_1: usize = LEVEL_M * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const LEVEL_2: usize = LEVEL_1 * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const NUM_LEVELS: usize = 3;
// Select left most leaf nodes and acquire their write guard
fn merge_prune<KS, PS>(
level: usize,
node: &NodeCellRef,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize, Vec<NodeWriteGuard<KS, PS>>)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let search = mut_first::<KS, PS>(node);
match search {
MutSearchResult::External => {
debug!("Processing external level {}", level);
let left_most_leaf_guards = select_ext_nodes(node);
let num_guards = left_most_leaf_guards.len();
let (altered, num_keys) =
merge_remove_empty_ext_nodes(left_most_leaf_guards, src_tree, dest_tree);
debug!("Merged {} keys, {} pages", num_keys, num_guards);
(altered, num_keys, vec![])
}
MutSearchResult::Internal(sub_node) => {
let (lower_altered, num_keys, _lower_guards) =
merge_prune(level + 1, &sub_node, src_tree, dest_tree);
debug!("Processing internal level {}, node {:?}", level, node);
let (altered, guards) = prune(&node, lower_altered, level);
(altered, num_keys, guards)
}
}
}
fn
|
<KS, PS>(
mut left_most_leaf_guards: Vec<NodeWriteGuard<KS, PS>>,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let num_keys_moved;
let left_most_id = left_most_leaf_guards.first().unwrap().ext_id();
let prune_bound = left_most_leaf_guards.last().unwrap().right_bound().clone();
debug!("Merge selected {} pages", left_most_leaf_guards.len());
debug!(
"Have {:?} pages after selection",
num_pages(&left_most_leaf_guards[0])
);
if cfg!(debug_assertions) {
if left_most_id != src_tree.head_page_id {
dump_tree(src_tree, "level_lsm_merge_failure_dump.json");
}
}
debug_assert_eq!(
left_most_id,
src_tree.head_page_id,
"{}",
left_most_leaf_guards.first().unwrap().type_name()
);
// merge to dest_tree
{
let deleted_keys = &src_tree.deleted;
let mut merged_deleted_keys = vec![];
let keys: Vec<EntryKey> = left_most_leaf_guards
.iter()
.map(|g| &g.keys()[..g.len()])
.flatten()
.filter(|&k| {
if deleted_keys.contains(k) {
merged_deleted_keys.push(k.clone());
false
} else {
true
}
})
.cloned()
.collect_vec();
num_keys_moved = keys.len();
debug!(
"Merging {} keys, have {}",
num_keys_moved,
src_tree.len.load(Relaxed)
);
dest_tree.merge_with_keys(box keys);
for rk in &merged_deleted_keys {
deleted_keys.remove(rk);
}
}
// adjust leaf left, right references
let mut removed_nodes = AlteredNodes {
removed: vec![],
key_modified: vec![],
};
{
let right_right_most = left_most_leaf_guards
.last()
.unwrap()
.right_ref()
.unwrap()
.clone();
let left_left_most = left_most_leaf_guards
.first()
.unwrap()
.left_ref()
.unwrap()
.clone();
debug_assert!(read_unchecked::<KS, PS>(&left_left_most).is_none());
debug_assert!(!read_unchecked::<KS, PS>(&right_right_most).is_none());
debug_assert!(read_unchecked::<KS, PS>(&right_right_most).is_ext());
debug_assert!(!right_right_most.ptr_eq(left_most_leaf_guards.last().unwrap().node_ref()));
for g in &mut left_most_leaf_guards {
external::make_deleted::<KS, PS>(&g.ext_id());
removed_nodes
.removed
.push((g.right_bound().clone(), g.node_ref().clone()));
g.make_empty_node(false);
g.left_ref_mut().map(|lr| *lr = NodeCellRef::default());
g.right_ref_mut().map(|rr| *rr = right_right_most.clone());
}
debug!(
"Have {:?} pages after removal",
num_pages(&left_most_leaf_guards[0])
);
trace!("Acquiring new first node");
let mut new_first_node = write_node::<KS, PS>(&right_right_most);
let mut new_first_node_ext = new_first_node.extnode_mut(src_tree);
debug!(
"Right most original id is {:?}, now is {:?}",
new_first_node_ext.id, src_tree.head_page_id
);
trace!(
"New first node right is {:?}",
read_unchecked::<KS, PS>(&new_first_node_ext.next).ext_id()
);
new_first_node_ext.id = src_tree.head_page_id;
new_first_node_ext.prev = NodeCellRef::default();
debug_assert!(&new_first_node_ext.right_bound > &prune_bound);
src_tree.len.fetch_sub(num_keys_moved, Relaxed);
(removed_nodes, num_keys_moved)
}
}
fn select_ext_nodes<KS, PS>(first_node: &NodeCellRef) -> Vec<NodeWriteGuard<KS, PS>>
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
trace!("Acquiring first node");
let first_node = write_node(first_node);
assert!(
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).is_none(),
"Left most is not none, have {}",
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).type_name()
);
let mut collected = vec![first_node];
let target_guards = if KS::slice_len() > LEVEL_M {
KS::slice_len()
} else {
KS::slice_len().pow(LEVEL_TREE_DEPTH - 1) >> 1 // merge half of the pages from memory
}; // pages to collect
while collected.len() < target_guards {
trace!("Acquiring select collection node");
let right = write_node(collected.last().unwrap().right_ref().unwrap());
if right.is_none() {
// Early break for reach the end of the linked list
// Should not be possible, will warn
warn!(
"Searching node to move and reach the end, maybe this is not the right parameter"
);
break;
} else {
debug_assert!(!right.is_empty(), "found empty node on selection!!!");
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_none());
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_empty_node());
collected.push(right);
}
}
return collected;
}
fn num_pages<KS, PS>(head_page: &NodeWriteGuard<KS, PS>) -> (usize, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let mut num = 0;
let mut non_empty = 0;
let mut node_ref = head_page.node_ref().clone();
loop {
let node = read_unchecked::<KS, PS>(&node_ref);
if node.is_none() {
break;
}
if !node.is_empty() {
non_empty += 1;
}
if let Some(node) = node.right_ref() {
node_ref = node.clone()
} else {
break;
}
num += 1;
}
(num, non_empty)
}
pub async fn level_merge<KS, PS>(
level: usize,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> usize
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
debug!("Merging LSM tree level {}", level);
let (_, num_keys, _) = merge_prune(0, &src_tree.get_root(), src_tree, dest_tree);
debug_assert!(verification::tree_has_no_empty_node(&src_tree));
debug_assert!(verification::is_tree_in_order(&src_tree, level));
debug!("Merge and pruned level {}, waiting for storage", level);
storage::wait_until_updated().await;
debug!("MERGE LEVEL {} COMPLETED", level);
return num_keys;
}
|
merge_remove_empty_ext_nodes
|
identifier_name
|
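One detail of level.rs worth spelling out: in `select_ext_nodes` the number of leaf pages collected per merge is `KS::slice_len()` when the slice is larger than `LEVEL_M`, and otherwise `KS::slice_len().pow(LEVEL_TREE_DEPTH - 1) >> 1`, i.e. half the pages, matching the inline comment. Below is a standalone worked sketch using the constants defined in this file (`LEVEL_M = 32`, `LEVEL_TREE_DEPTH = 2`); the free function is illustrative, not part of level.rs:

```rust
const LEVEL_M: usize = 32;
const LEVEL_TREE_DEPTH: u32 = 2;

/// Illustrative copy of the page-selection target used in `select_ext_nodes`.
fn target_guards(slice_len: usize) -> usize {
    if slice_len > LEVEL_M {
        slice_len
    } else {
        // With LEVEL_TREE_DEPTH = 2 this is slice_len^1 / 2,
        // i.e. "merge half of the pages from memory".
        slice_len.pow(LEVEL_TREE_DEPTH - 1) >> 1
    }
}

fn main() {
    assert_eq!(target_guards(32), 16);   // at or below LEVEL_M: half the pages
    assert_eq!(target_guards(8), 4);
    assert_eq!(target_guards(128), 128); // above LEVEL_M: every page in the slice
}
```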
level.rs
|
use super::dump::dump_tree;
use super::node::read_unchecked;
use super::node::write_node;
use super::node::NodeWriteGuard;
use super::prune::*;
use super::search::MutSearchResult;
use super::LevelTree;
use super::NodeCellRef;
use super::*;
use super::{external, BPlusTree};
use itertools::Itertools;
use std::fmt::Debug;
use std::sync::atomic::Ordering::Relaxed;
pub const LEVEL_PAGE_DIFF_MULTIPLIER: usize = 4;
pub const LEVEL_TREE_DEPTH: u32 = 2;
pub const LEVEL_M: usize = 32; // Smaller can be faster but more fragmented
pub const LEVEL_1: usize = LEVEL_M * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const LEVEL_2: usize = LEVEL_1 * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const NUM_LEVELS: usize = 3;
// Select left most leaf nodes and acquire their write guard
fn merge_prune<KS, PS>(
level: usize,
node: &NodeCellRef,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize, Vec<NodeWriteGuard<KS, PS>>)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let search = mut_first::<KS, PS>(node);
match search {
MutSearchResult::External => {
debug!("Processing external level {}", level);
let left_most_leaf_guards = select_ext_nodes(node);
let num_guards = left_most_leaf_guards.len();
let (altered, num_keys) =
merge_remove_empty_ext_nodes(left_most_leaf_guards, src_tree, dest_tree);
debug!("Merged {} keys, {} pages", num_keys, num_guards);
(altered, num_keys, vec![])
}
MutSearchResult::Internal(sub_node) => {
let (lower_altered, num_keys, _lower_guards) =
merge_prune(level + 1, &sub_node, src_tree, dest_tree);
debug!("Processing internal level {}, node {:?}", level, node);
let (altered, guards) = prune(&node, lower_altered, level);
(altered, num_keys, guards)
}
}
}
fn merge_remove_empty_ext_nodes<KS, PS>(
mut left_most_leaf_guards: Vec<NodeWriteGuard<KS, PS>>,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let num_keys_moved;
let left_most_id = left_most_leaf_guards.first().unwrap().ext_id();
let prune_bound = left_most_leaf_guards.last().unwrap().right_bound().clone();
debug!("Merge selected {} pages", left_most_leaf_guards.len());
debug!(
"Have {:?} pages after selection",
num_pages(&left_most_leaf_guards[0])
);
if cfg!(debug_assertions) {
if left_most_id != src_tree.head_page_id {
dump_tree(src_tree, "level_lsm_merge_failure_dump.json");
}
}
debug_assert_eq!(
left_most_id,
src_tree.head_page_id,
"{}",
left_most_leaf_guards.first().unwrap().type_name()
);
// merge to dest_tree
{
let deleted_keys = &src_tree.deleted;
let mut merged_deleted_keys = vec![];
let keys: Vec<EntryKey> = left_most_leaf_guards
.iter()
.map(|g| &g.keys()[..g.len()])
.flatten()
.filter(|&k| {
if deleted_keys.contains(k) {
merged_deleted_keys.push(k.clone());
false
} else {
true
}
})
.cloned()
.collect_vec();
num_keys_moved = keys.len();
debug!(
"Merging {} keys, have {}",
num_keys_moved,
src_tree.len.load(Relaxed)
);
dest_tree.merge_with_keys(box keys);
for rk in &merged_deleted_keys {
deleted_keys.remove(rk);
}
}
// adjust leaf left, right references
let mut removed_nodes = AlteredNodes {
removed: vec![],
key_modified: vec![],
};
{
let right_right_most = left_most_leaf_guards
.last()
.unwrap()
.right_ref()
.unwrap()
.clone();
let left_left_most = left_most_leaf_guards
.first()
.unwrap()
.left_ref()
.unwrap()
.clone();
debug_assert!(read_unchecked::<KS, PS>(&left_left_most).is_none());
debug_assert!(!read_unchecked::<KS, PS>(&right_right_most).is_none());
debug_assert!(read_unchecked::<KS, PS>(&right_right_most).is_ext());
debug_assert!(!right_right_most.ptr_eq(left_most_leaf_guards.last().unwrap().node_ref()));
for g in &mut left_most_leaf_guards {
external::make_deleted::<KS, PS>(&g.ext_id());
removed_nodes
.removed
.push((g.right_bound().clone(), g.node_ref().clone()));
g.make_empty_node(false);
g.left_ref_mut().map(|lr| *lr = NodeCellRef::default());
g.right_ref_mut().map(|rr| *rr = right_right_most.clone());
}
debug!(
"Have {:?} pages after removal",
num_pages(&left_most_leaf_guards[0])
);
trace!("Acquiring new first node");
let mut new_first_node = write_node::<KS, PS>(&right_right_most);
let mut new_first_node_ext = new_first_node.extnode_mut(src_tree);
debug!(
"Right most original id is {:?}, now is {:?}",
new_first_node_ext.id, src_tree.head_page_id
);
trace!(
"New first node right is {:?}",
read_unchecked::<KS, PS>(&new_first_node_ext.next).ext_id()
);
new_first_node_ext.id = src_tree.head_page_id;
new_first_node_ext.prev = NodeCellRef::default();
debug_assert!(&new_first_node_ext.right_bound > &prune_bound);
src_tree.len.fetch_sub(num_keys_moved, Relaxed);
(removed_nodes, num_keys_moved)
}
}
fn select_ext_nodes<KS, PS>(first_node: &NodeCellRef) -> Vec<NodeWriteGuard<KS, PS>>
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
trace!("Acquiring first node");
let first_node = write_node(first_node);
assert!(
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).is_none(),
"Left most is not none, have {}",
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).type_name()
);
let mut collected = vec![first_node];
let target_guards = if KS::slice_len() > LEVEL_M {
KS::slice_len()
} else {
KS::slice_len().pow(LEVEL_TREE_DEPTH - 1) >> 1 // merge half of the pages from memory
}; // pages to collect
while collected.len() < target_guards {
trace!("Acquiring select collection node");
let right = write_node(collected.last().unwrap().right_ref().unwrap());
if right.is_none() {
// Early break for reach the end of the linked list
// Should not be possible, will warn
warn!(
"Searching node to move and reach the end, maybe this is not the right parameter"
);
break;
} else {
debug_assert!(!right.is_empty(), "found empty node on selection!!!");
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_none());
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_empty_node());
collected.push(right);
}
}
return collected;
}
fn num_pages<KS, PS>(head_page: &NodeWriteGuard<KS, PS>) -> (usize, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let mut num = 0;
let mut non_empty = 0;
let mut node_ref = head_page.node_ref().clone();
loop {
let node = read_unchecked::<KS, PS>(&node_ref);
if node.is_none() {
break;
}
if !node.is_empty() {
non_empty += 1;
}
if let Some(node) = node.right_ref() {
node_ref = node.clone()
} else
|
num += 1;
}
(num, non_empty)
}
pub async fn level_merge<KS, PS>(
level: usize,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> usize
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
debug!("Merging LSM tree level {}", level);
let (_, num_keys, _) = merge_prune(0, &src_tree.get_root(), src_tree, dest_tree);
debug_assert!(verification::tree_has_no_empty_node(&src_tree));
debug_assert!(verification::is_tree_in_order(&src_tree, level));
debug!("Merge and pruned level {}, waiting for storage", level);
storage::wait_until_updated().await;
debug!("MERGE LEVEL {} COMPLETED", level);
return num_keys;
}
|
{
break;
}
|
conditional_block
|
level.rs
|
use super::dump::dump_tree;
use super::node::read_unchecked;
use super::node::write_node;
use super::node::NodeWriteGuard;
use super::prune::*;
use super::search::MutSearchResult;
use super::LevelTree;
use super::NodeCellRef;
use super::*;
use super::{external, BPlusTree};
use itertools::Itertools;
use std::fmt::Debug;
use std::sync::atomic::Ordering::Relaxed;
pub const LEVEL_PAGE_DIFF_MULTIPLIER: usize = 4;
pub const LEVEL_TREE_DEPTH: u32 = 2;
pub const LEVEL_M: usize = 32; // Smaller can be faster but more fragmented
pub const LEVEL_1: usize = LEVEL_M * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const LEVEL_2: usize = LEVEL_1 * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const NUM_LEVELS: usize = 3;
// Select left most leaf nodes and acquire their write guard
fn merge_prune<KS, PS>(
level: usize,
node: &NodeCellRef,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize, Vec<NodeWriteGuard<KS, PS>>)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
|
}
fn merge_remove_empty_ext_nodes<KS, PS>(
mut left_most_leaf_guards: Vec<NodeWriteGuard<KS, PS>>,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let num_keys_moved;
let left_most_id = left_most_leaf_guards.first().unwrap().ext_id();
let prune_bound = left_most_leaf_guards.last().unwrap().right_bound().clone();
debug!("Merge selected {} pages", left_most_leaf_guards.len());
debug!(
"Have {:?} pages after selection",
num_pages(&left_most_leaf_guards[0])
);
if cfg!(debug_assertions) {
if left_most_id != src_tree.head_page_id {
dump_tree(src_tree, "level_lsm_merge_failure_dump.json");
}
}
debug_assert_eq!(
left_most_id,
src_tree.head_page_id,
"{}",
left_most_leaf_guards.first().unwrap().type_name()
);
// merge to dest_tree
{
let deleted_keys = &src_tree.deleted;
let mut merged_deleted_keys = vec![];
let keys: Vec<EntryKey> = left_most_leaf_guards
.iter()
.map(|g| &g.keys()[..g.len()])
.flatten()
.filter(|&k| {
if deleted_keys.contains(k) {
merged_deleted_keys.push(k.clone());
false
} else {
true
}
})
.cloned()
.collect_vec();
num_keys_moved = keys.len();
debug!(
"Merging {} keys, have {}",
num_keys_moved,
src_tree.len.load(Relaxed)
);
dest_tree.merge_with_keys(box keys);
for rk in &merged_deleted_keys {
deleted_keys.remove(rk);
}
}
// adjust leaf left, right references
let mut removed_nodes = AlteredNodes {
removed: vec![],
key_modified: vec![],
};
{
let right_right_most = left_most_leaf_guards
.last()
.unwrap()
.right_ref()
.unwrap()
.clone();
let left_left_most = left_most_leaf_guards
.first()
.unwrap()
.left_ref()
.unwrap()
.clone();
debug_assert!(read_unchecked::<KS, PS>(&left_left_most).is_none());
debug_assert!(!read_unchecked::<KS, PS>(&right_right_most).is_none());
debug_assert!(read_unchecked::<KS, PS>(&right_right_most).is_ext());
debug_assert!(!right_right_most.ptr_eq(left_most_leaf_guards.last().unwrap().node_ref()));
for g in &mut left_most_leaf_guards {
external::make_deleted::<KS, PS>(&g.ext_id());
removed_nodes
.removed
.push((g.right_bound().clone(), g.node_ref().clone()));
g.make_empty_node(false);
g.left_ref_mut().map(|lr| *lr = NodeCellRef::default());
g.right_ref_mut().map(|rr| *rr = right_right_most.clone());
}
debug!(
"Have {:?} pages after removal",
num_pages(&left_most_leaf_guards[0])
);
trace!("Acquiring new first node");
let mut new_first_node = write_node::<KS, PS>(&right_right_most);
let mut new_first_node_ext = new_first_node.extnode_mut(src_tree);
debug!(
"Right most original id is {:?}, now is {:?}",
new_first_node_ext.id, src_tree.head_page_id
);
trace!(
"New first node right is {:?}",
read_unchecked::<KS, PS>(&new_first_node_ext.next).ext_id()
);
new_first_node_ext.id = src_tree.head_page_id;
new_first_node_ext.prev = NodeCellRef::default();
debug_assert!(&new_first_node_ext.right_bound > &prune_bound);
src_tree.len.fetch_sub(num_keys_moved, Relaxed);
(removed_nodes, num_keys_moved)
}
}
fn select_ext_nodes<KS, PS>(first_node: &NodeCellRef) -> Vec<NodeWriteGuard<KS, PS>>
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
trace!("Acquiring first node");
let first_node = write_node(first_node);
assert!(
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).is_none(),
"Left most is not none, have {}",
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).type_name()
);
let mut collected = vec![first_node];
let target_guards = if KS::slice_len() > LEVEL_M {
KS::slice_len()
} else {
KS::slice_len().pow(LEVEL_TREE_DEPTH - 1) >> 1 // merge half of the pages from memory
}; // pages to collect
while collected.len() < target_guards {
trace!("Acquiring select collection node");
let right = write_node(collected.last().unwrap().right_ref().unwrap());
if right.is_none() {
// Early break for reach the end of the linked list
// Should not be possible, will warn
warn!(
"Searching node to move and reach the end, maybe this is not the right parameter"
);
break;
} else {
debug_assert!(!right.is_empty(), "found empty node on selection!!!");
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_none());
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_empty_node());
collected.push(right);
}
}
return collected;
}
fn num_pages<KS, PS>(head_page: &NodeWriteGuard<KS, PS>) -> (usize, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let mut num = 0;
let mut non_empty = 0;
let mut node_ref = head_page.node_ref().clone();
loop {
let node = read_unchecked::<KS, PS>(&node_ref);
if node.is_none() {
break;
}
if !node.is_empty() {
non_empty += 1;
}
if let Some(node) = node.right_ref() {
node_ref = node.clone()
} else {
break;
}
num += 1;
}
(num, non_empty)
}
pub async fn level_merge<KS, PS>(
level: usize,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> usize
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
debug!("Merging LSM tree level {}", level);
let (_, num_keys, _) = merge_prune(0, &src_tree.get_root(), src_tree, dest_tree);
debug_assert!(verification::tree_has_no_empty_node(&src_tree));
debug_assert!(verification::is_tree_in_order(&src_tree, level));
debug!("Merge and pruned level {}, waiting for storage", level);
storage::wait_until_updated().await;
debug!("MERGE LEVEL {} COMPLETED", level);
return num_keys;
}
|
{
let search = mut_first::<KS, PS>(node);
match search {
MutSearchResult::External => {
debug!("Processing external level {}", level);
let left_most_leaf_guards = select_ext_nodes(node);
let num_guards = left_most_leaf_guards.len();
let (altered, num_keys) =
merge_remove_empty_ext_nodes(left_most_leaf_guards, src_tree, dest_tree);
debug!("Merged {} keys, {} pages", num_keys, num_guards);
(altered, num_keys, vec![])
}
MutSearchResult::Internal(sub_node) => {
let (lower_altered, num_keys, _lower_guards) =
merge_prune(level + 1, &sub_node, src_tree, dest_tree);
debug!("Processing internal level {}, node {:?}", level, node);
let (altered, guards) = prune(&node, lower_altered, level);
(altered, num_keys, guards)
}
}
|
identifier_body
|
level.rs
|
use super::dump::dump_tree;
use super::node::read_unchecked;
use super::node::write_node;
use super::node::NodeWriteGuard;
use super::prune::*;
use super::search::MutSearchResult;
use super::LevelTree;
use super::NodeCellRef;
use super::*;
use super::{external, BPlusTree};
use itertools::Itertools;
use std::fmt::Debug;
use std::sync::atomic::Ordering::Relaxed;
pub const LEVEL_PAGE_DIFF_MULTIPLIER: usize = 4;
pub const LEVEL_TREE_DEPTH: u32 = 2;
|
pub const LEVEL_M: usize = 32; // Smaller can be faster but more fragmented
pub const LEVEL_1: usize = LEVEL_M * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const LEVEL_2: usize = LEVEL_1 * LEVEL_PAGE_DIFF_MULTIPLIER;
pub const NUM_LEVELS: usize = 3;
// Select left most leaf nodes and acquire their write guard
fn merge_prune<KS, PS>(
level: usize,
node: &NodeCellRef,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize, Vec<NodeWriteGuard<KS, PS>>)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let search = mut_first::<KS, PS>(node);
match search {
MutSearchResult::External => {
debug!("Processing external level {}", level);
let left_most_leaf_guards = select_ext_nodes(node);
let num_guards = left_most_leaf_guards.len();
let (altered, num_keys) =
merge_remove_empty_ext_nodes(left_most_leaf_guards, src_tree, dest_tree);
debug!("Merged {} keys, {} pages", num_keys, num_guards);
(altered, num_keys, vec![])
}
MutSearchResult::Internal(sub_node) => {
let (lower_altered, num_keys, _lower_guards) =
merge_prune(level + 1, &sub_node, src_tree, dest_tree);
debug!("Processing internal level {}, node {:?}", level, node);
let (altered, guards) = prune(&node, lower_altered, level);
(altered, num_keys, guards)
}
}
}
fn merge_remove_empty_ext_nodes<KS, PS>(
mut left_most_leaf_guards: Vec<NodeWriteGuard<KS, PS>>,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> (AlteredNodes, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let num_keys_moved;
let left_most_id = left_most_leaf_guards.first().unwrap().ext_id();
let prune_bound = left_most_leaf_guards.last().unwrap().right_bound().clone();
debug!("Merge selected {} pages", left_most_leaf_guards.len());
debug!(
"Have {:?} pages after selection",
num_pages(&left_most_leaf_guards[0])
);
if cfg!(debug_assertions) {
if left_most_id != src_tree.head_page_id {
dump_tree(src_tree, "level_lsm_merge_failure_dump.json");
}
}
debug_assert_eq!(
left_most_id,
src_tree.head_page_id,
"{}",
left_most_leaf_guards.first().unwrap().type_name()
);
// merge to dest_tree
{
let deleted_keys = &src_tree.deleted;
let mut merged_deleted_keys = vec![];
let keys: Vec<EntryKey> = left_most_leaf_guards
.iter()
.map(|g| &g.keys()[..g.len()])
.flatten()
.filter(|&k| {
if deleted_keys.contains(k) {
merged_deleted_keys.push(k.clone());
false
} else {
true
}
})
.cloned()
.collect_vec();
num_keys_moved = keys.len();
debug!(
"Merging {} keys, have {}",
num_keys_moved,
src_tree.len.load(Relaxed)
);
dest_tree.merge_with_keys(box keys);
for rk in &merged_deleted_keys {
deleted_keys.remove(rk);
}
}
// adjust leaf left, right references
let mut removed_nodes = AlteredNodes {
removed: vec![],
key_modified: vec![],
};
{
let right_right_most = left_most_leaf_guards
.last()
.unwrap()
.right_ref()
.unwrap()
.clone();
let left_left_most = left_most_leaf_guards
.first()
.unwrap()
.left_ref()
.unwrap()
.clone();
debug_assert!(read_unchecked::<KS, PS>(&left_left_most).is_none());
debug_assert!(!read_unchecked::<KS, PS>(&right_right_most).is_none());
debug_assert!(read_unchecked::<KS, PS>(&right_right_most).is_ext());
debug_assert!(!right_right_most.ptr_eq(left_most_leaf_guards.last().unwrap().node_ref()));
for g in &mut left_most_leaf_guards {
external::make_deleted::<KS, PS>(&g.ext_id());
removed_nodes
.removed
.push((g.right_bound().clone(), g.node_ref().clone()));
g.make_empty_node(false);
g.left_ref_mut().map(|lr| *lr = NodeCellRef::default());
g.right_ref_mut().map(|rr| *rr = right_right_most.clone());
}
debug!(
"Have {:?} pages after removal",
num_pages(&left_most_leaf_guards[0])
);
trace!("Acquiring new first node");
let mut new_first_node = write_node::<KS, PS>(&right_right_most);
let mut new_first_node_ext = new_first_node.extnode_mut(src_tree);
debug!(
"Right most original id is {:?}, now is {:?}",
new_first_node_ext.id, src_tree.head_page_id
);
trace!(
"New first node right is {:?}",
read_unchecked::<KS, PS>(&new_first_node_ext.next).ext_id()
);
new_first_node_ext.id = src_tree.head_page_id;
new_first_node_ext.prev = NodeCellRef::default();
debug_assert!(&new_first_node_ext.right_bound > &prune_bound);
src_tree.len.fetch_sub(num_keys_moved, Relaxed);
(removed_nodes, num_keys_moved)
}
}
fn select_ext_nodes<KS, PS>(first_node: &NodeCellRef) -> Vec<NodeWriteGuard<KS, PS>>
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
trace!("Acquiring first node");
let first_node = write_node(first_node);
assert!(
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).is_none(),
"Left most is not none, have {}",
read_unchecked::<KS, PS>(first_node.left_ref().unwrap()).type_name()
);
let mut collected = vec![first_node];
let target_guards = if KS::slice_len() > LEVEL_M {
KS::slice_len()
} else {
KS::slice_len().pow(LEVEL_TREE_DEPTH - 1) >> 1 // merge half of the pages from memory
}; // pages to collect
while collected.len() < target_guards {
trace!("Acquiring select collection node");
let right = write_node(collected.last().unwrap().right_ref().unwrap());
if right.is_none() {
// Early break for reach the end of the linked list
// Should not be possible, will warn
warn!(
"Searching node to move and reach the end, maybe this is not the right parameter"
);
break;
} else {
debug_assert!(!right.is_empty(), "found empty node on selection!!!");
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_none());
debug_assert!(!read_unchecked::<KS, PS>(right.right_ref().unwrap()).is_empty_node());
collected.push(right);
}
}
return collected;
}
fn num_pages<KS, PS>(head_page: &NodeWriteGuard<KS, PS>) -> (usize, usize)
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
let mut num = 0;
let mut non_empty = 0;
let mut node_ref = head_page.node_ref().clone();
loop {
let node = read_unchecked::<KS, PS>(&node_ref);
if node.is_none() {
break;
}
if !node.is_empty() {
non_empty += 1;
}
if let Some(node) = node.right_ref() {
node_ref = node.clone()
} else {
break;
}
num += 1;
}
(num, non_empty)
}
pub async fn level_merge<KS, PS>(
level: usize,
src_tree: &BPlusTree<KS, PS>,
dest_tree: &dyn LevelTree,
) -> usize
where
KS: Slice<EntryKey> + Debug + 'static,
PS: Slice<NodeCellRef> + 'static,
{
debug!("Merging LSM tree level {}", level);
let (_, num_keys, _) = merge_prune(0, &src_tree.get_root(), src_tree, dest_tree);
debug_assert!(verification::tree_has_no_empty_node(&src_tree));
debug_assert!(verification::is_tree_in_order(&src_tree, level));
debug!("Merge and pruned level {}, waiting for storage", level);
storage::wait_until_updated().await;
debug!("MERGE LEVEL {} COMPLETED", level);
return num_keys;
}
|
random_line_split
|
|
main.rs
|
//
// test ownership
fn test0() {
let a1 = ["s1", "s2"];
let a2 = a1; //why move didn't happen here?
println!("iterate a1");
for i in a1.iter() {
println!("{}", i);
}
// let str1 = String::from("hello");
// let str2 = str1; //str1 stored on heap, so move happened here but not copy, after move str1 is invalid
// println!("str1={}", str1) //compile error;
//
let str1 = "apple";
let str2 = str1; //str1 stored on stack not heap, copy happen here but not move, so str1 is still valid
println!("str1={}", str1)
}
// test slice
fn test1() {
let mut a = [1, 4, 2, 5, 3];
// println!("a={}", a); //compile error: std::fmt::Display` is not implemented for `[{integer}; 5]
let slice = &a[0..2]; //rust slice, [0,2) //[1,4]
let s2 = &a[..];
println!("iterate slice");
for i in slice.iter() {
//slice.iter() returns an iterator for the (for.. in..) syntax
println!("{}", i);
}
println!("iterate s2");
for i in s2.iter() {
//slice.iter() returns an iterator for the (for.. in..) syntax
println!("{}", i);
}
a.sort();
println!("after sort, iterate a");
//cannot borrow `a` as mutable because it is also borrowed as immutable
// for i in slice.iter() {
for i in a.iter() {
//slice.iter() returns an iterator for the (for.. in..) syntax
println!("{}", i);
}
}
// test tuple
fn test2() {
struct Color(i32, i32, i32);
struct Point(i32, i32, i32);
let black = Color(0xff, 0xfe, 0xfd);
let origin = Point(0, 0, 0);
println!("color: {} - {} - {}", black.0, black.1, black.2);
let tup = (500, 6.4, 1);
let (x, y, z) = tup;
println!("x={}, y={}", x, y);
println!("tup.0={}, tup.1={}", tup.0, tup.1);
let t2 = tup; //move tup is ok!
println!("tag2; tup.0={}, tup.1={}", tup.0, tup.1);
}
fn test3() {
struct Rectangle {
width: u32,
height: u32,
}
let rect1 = Rectangle {
width: 30,
height: 50,
};
println!("width={}, height={}", rect1.width, rect1.height)
}
//struct member function
struct Circle {
r: f64,
}
impl Circle {
fn area(&self) -> f64 {
return self.r * self.r * std::f64::consts::PI;
}
fn len(&self) -> f64 {
return self.r * 2.0 * std::f64::consts::PI;
}
}
struct Rectangle {
width: u32,
height: u32,
}
impl Rectangle {
fn area(&self) -> u32 {
self.width * self.height
}
}
impl Rectangle {
fn create_squre(e: u32) -> Rectangle {
Rectangle {
width: e,
height: e,
}
}
fn print(&self) {
println!("print self; width={}, height={}", self.width, self.height);
|
fn test4() {
let rect1 = Rectangle {
width: 30,
height: 50,
};
println!(
"The area of the rectangle is {} square pixels.",
rect1.area()
);
let c1 = Circle { r: 2.0 };
println!(
"the radius of c1={}, the area of c1={}, length of c1={}",
c1.r,
c1.area(),
c1.len()
);
let squre_1 = Rectangle::create_squre(5);
squre_1.print();
}
enum Coin {
Penny,
Nickel,
Dime,
Quarter,
}
fn value_in_cents(coin: Coin) -> u8 {
match coin {
Coin::Penny => {
println!("Lucky penny!");
1
}
Coin::Nickel => 5,
Coin::Dime => 10,
Coin::Quarter => 25,
}
}
fn test5() {
let ret = value_in_cents(Coin::Penny);
println!("ret={}", ret)
}
fn test6() {
let v = vec![1, 2, 3];
v[99]; //out of boundary
}
fn largest(list: &[i32]) -> &i32 {
let mut largest = &list[0];
for item in list {
if item > largest {
largest = item;
}
}
largest
}
fn test7() {
let number_list = vec![34, 50, 25, 100, 65];
let result = largest(&number_list);
println!("The largest number is {}", result);
let number_list = vec![102, 34, 6000, 89, 54, 2, 43, 8];
let result = largest(&number_list);
println!("The largest number is {}", result);
}
fn test8() {
let r;
{
let x = 5;
r = &x;
}
println!("r: {}", r);
}
fn main() {
println!("main; -enter");
test8();
println!("main; -exit");
}
|
}
}
|
random_line_split
|
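The question in `test0` above ("why move didn't happen here?") has a short answer: `[&str; 2]` is an array of `Copy` elements, so the array itself is `Copy` and `let a2 = a1;` copies it, whereas `String` owns heap memory and is moved. A minimal standalone illustration (not part of main.rs):

```rust
fn main() {
    // Arrays of Copy elements are themselves Copy: `a2` gets a bitwise copy
    // and `a1` stays usable afterwards.
    let a1 = ["s1", "s2"];
    let a2 = a1;
    println!("{:?} {:?}", a1, a2);

    // String owns a heap buffer, so assignment moves ownership;
    // using `s1` afterwards would not compile.
    let s1 = String::from("hello");
    let s2 = s1;
    println!("{}", s2);
    // println!("{}", s1); // error: borrow of moved value: `s1`
}
```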
main.rs
|
//
// test ownership
fn test0() {
let a1 = ["s1", "s2"];
let a2 = a1; //why move didn't happen here?
println!("iterate a1");
for i in a1.iter() {
println!("{}", i);
}
// let str1 = String::from("hello");
// let str2 = str1; //str1 stored on heap, so move happened here but not copy, after move str1 is invalid
// println!("str1={}", str1) //compile error;
//
let str1 = "apple";
let str2 = str1; //str1 stored on stack not heap, copy happen here but not move, so str1 is still valid
println!("str1={}", str1)
}
// test slice
fn test1() {
let mut a = [1, 4, 2, 5, 3];
// println!("a={}", a); //compile error: std::fmt::Display` is not implemented for `[{integer}; 5]
let slice = &a[0..2]; //rust slice, [0,2) //[1,4]
let s2 = &a[..];
println!("iterate slice");
for i in slice.iter() {
//slice.iter() returns an iterator for the (for.. in..) syntax
println!("{}", i);
}
println!("iterate s2");
for i in s2.iter() {
//slice.iter() returns an iterator for the (for.. in..) syntax
println!("{}", i);
}
a.sort();
println!("after sort, iterate a");
//cannot borrow `a` as mutable because it is also borrowed as immutable
// for i in slice.iter() {
for i in a.iter() {
//slice.iter() returns an iterator for the (for.. in..) syntax
println!("{}", i);
}
}
// test tuple
fn test2() {
struct Color(i32, i32, i32);
struct Point(i32, i32, i32);
let black = Color(0xff, 0xfe, 0xfd);
let origin = Point(0, 0, 0);
println!("color: {} - {} - {}", black.0, black.1, black.2);
let tup = (500, 6.4, 1);
let (x, y, z) = tup;
println!("x={}, y={}", x, y);
println!("tup.0={}, tup.1={}", tup.0, tup.1);
let t2 = tup; //move tup is ok!
println!("tag2; tup.0={}, tup.1={}", tup.0, tup.1);
}
fn test3() {
struct Rectangle {
width: u32,
height: u32,
}
let rect1 = Rectangle {
width: 30,
height: 50,
};
println!("width={}, height={}", rect1.width, rect1.height)
}
//struct member function
struct Circle {
r: f64,
}
impl Circle {
fn area(&self) -> f64 {
return self.r * self.r * std::f64::consts::PI;
}
fn len(&self) -> f64 {
return self.r * 2.0 * std::f64::consts::PI;
}
}
struct Rectangle {
width: u32,
height: u32,
}
impl Rectangle {
fn area(&self) -> u32 {
self.width * self.height
}
}
impl Rectangle {
fn create_squre(e: u32) -> Rectangle {
Rectangle {
width: e,
height: e,
}
}
fn
|
(&self) {
println!("print self; width={}, height={}", self.width, self.height);
}
}
fn test4() {
let rect1 = Rectangle {
width: 30,
height: 50,
};
println!(
"The area of the rectangle is {} square pixels.",
rect1.area()
);
let c1 = Circle { r: 2.0 };
println!(
"the radius of c1={}, the area of c1={}, length of c1={}",
c1.r,
c1.area(),
c1.len()
);
let squre_1 = Rectangle::create_squre(5);
squre_1.print();
}
enum Coin {
Penny,
Nickel,
Dime,
Quarter,
}
fn value_in_cents(coin: Coin) -> u8 {
match coin {
Coin::Penny => {
println!("Lucky penny!");
1
}
Coin::Nickel => 5,
Coin::Dime => 10,
Coin::Quarter => 25,
}
}
fn test5() {
let ret = value_in_cents(Coin::Penny);
println!("ret={}", ret)
}
fn test6() {
let v = vec![1, 2, 3];
v[99]; //out of boundary
}
fn largest(list: &[i32]) -> &i32 {
let mut largest = &list[0];
for item in list {
if item > largest {
largest = item;
}
}
largest
}
fn test7() {
let number_list = vec![34, 50, 25, 100, 65];
let result = largest(&number_list);
println!("The largest number is {}", result);
let number_list = vec![102, 34, 6000, 89, 54, 2, 43, 8];
let result = largest(&number_list);
println!("The largest number is {}", result);
}
fn test8() {
let r;
{
let x = 5;
r = &x;
}
println!("r: {}", r);
}
fn main() {
println!("main; -enter");
test8();
println!("main; -exit");
}
|
print
|
identifier_name
|
per_sequence_gc_content.rs
|
use crate::trust_seq::gc_model::GCModel;
use crate::trust_seq::qc::{QCModule, QCReport, QCResult};
use crate::trust_seq::trust_seq::{TrustSeqConfig, TrustSeqErr};
use crate::trust_seq::utils::Sequence;
use serde_json::map::Map;
use serde_json::value;
use serde_json::value::Value;
use std::collections::hash_map::HashMap;
use std::f64;
use std::io::Write;
use std::slice::Iter;
pub struct PerSequenceGCContents<'a> {
config: &'a TrustSeqConfig,
gc_distribution: [f64; 101],
gc_models: HashMap<usize, Box<GCModel>>,
}
#[derive(Serialize)]
struct PerSequenceGCReport {
status: QCResult,
gc_distribution: Vec<f64>,
theoretical_distribution: Vec<f64>,
}
impl<'a> PerSequenceGCContents<'a> {
pub fn new(config: &'a TrustSeqConfig) -> PerSequenceGCContents {
let mut gc_distribution = Vec::new();
gc_distribution.resize(101, 0.0);
return PerSequenceGCContents {
config: config,
gc_distribution: [0f64; 101],
gc_models: HashMap::new(),
};
}
}
fn
|
(sequence: &[u8]) -> &[u8] {
if sequence.len() > 1000 {
let length = (sequence.len() / 1000) * 1000;
return &sequence[..length];
} else if sequence.len() > 100 {
let length = (sequence.len() / 100) * 100;
return &sequence[..length];
} else {
return sequence;
}
}
fn calc_zscore_for_value(mean: f64, stddev: f64, value: f64) -> f64 {
let lhs = (2f64 * f64::consts::PI * stddev * stddev).sqrt();
let rhs = f64::consts::E.powf(-1.0 * (value - mean).powi(2) / (2.0 * stddev * stddev));
rhs / lhs
}
fn calc_stddev_total(values: Iter<f64>, mode: f64) -> (f64, f64) {
let mut total_count = 0.0;
let mut stddev = 0.0;
for (i, v) in values.enumerate() {
stddev += (i as f64 - mode).powi(2) * (*v);
total_count += *v;
}
stddev /= total_count - 1f64;
(stddev.sqrt(), total_count)
}
impl QCReport for PerSequenceGCReport {
fn get_name(&self) -> &'static str {
return "Per sequence GC content";
}
fn get_status(&self) -> QCResult {
return self.status;
}
fn add_json(&self, map: &mut Map<String, Value>) -> Result<(), TrustSeqErr> {
map.insert(self.get_name().to_string(), value::to_value(self)?);
return Ok(());
}
fn print_text_report(&self, writer: &mut Write) -> Result<(), TrustSeqErr> {
writeln!(writer, "#GC Content\tCount")?;
for idx in 0..101 {
writeln!(writer, "{}\t{}", idx, self.gc_distribution[idx])?;
}
return Ok(());
}
}
impl<'a> QCModule for PerSequenceGCContents<'a> {
fn calculate(&self, reports: &mut Vec<Box<QCReport>>) -> Result<(), TrustSeqErr> {
let mode = self
.gc_distribution
.iter()
.enumerate()
.max_by(|a, b| (a.1).partial_cmp(b.1).unwrap())
.unwrap();
let mode_th = mode.1 * 0.90;
let mut mode_total = 0;
let mut mode_count: u32 = 0;
let mut fell_off_top = true;
let mut fell_off_bottom = true;
for idx in (mode.0)..self.gc_distribution.len() {
if self.gc_distribution[idx] > mode_th {
mode_total += idx;
mode_count += 1;
} else {
fell_off_top = false;
break;
}
}
for idx in (0..mode.0).rev() {
if self.gc_distribution[idx] > mode_th {
mode_total += idx;
mode_count += 1;
} else {
fell_off_bottom = false;
break;
}
}
let mode2: f64 = if fell_off_top || fell_off_bottom {
*mode.1
} else {
mode_total as f64 / mode_count as f64
};
let (stddev, total_count) = calc_stddev_total(self.gc_distribution.iter(), mode2);
let mut theoretical_distribution = [0.0 as f64; 101];
let mut deviation_percent = 0.0;
for (i, v) in theoretical_distribution.iter_mut().enumerate() {
*v = calc_zscore_for_value(mode2, stddev, i as f64) * total_count;
deviation_percent += (*v - self.gc_distribution[i]).abs();
}
deviation_percent = deviation_percent * 100.0 / total_count;
let error_th = self.config.module_config.get("gc_sequence:error");
let warn_th = self.config.module_config.get("gc_sequence:warn");
let status = if deviation_percent > error_th {
QCResult::Fail
} else if deviation_percent > warn_th {
QCResult::Warn
} else {
QCResult::Pass
};
reports.push(Box::new(PerSequenceGCReport {
status: status,
gc_distribution: self.gc_distribution.to_vec(),
theoretical_distribution: theoretical_distribution.to_vec(),
}));
return Ok(());
}
fn process_sequence(&mut self, seq: &Sequence) -> () {
let mut gc_count: usize = 0;
let sequence = truncate_sequence(seq.sequence);
for s in sequence {
let ch = *s as char;
let is_gc = match ch {
'G' => true,
'g' => true,
'c' => true,
'C' => true,
_ => false,
};
if is_gc {
gc_count += 1;
}
}
let seq_len = seq.sequence.len();
if !self.gc_models.contains_key(&seq_len) {
self.gc_models
.insert(seq_len, Box::new(GCModel::new(seq_len)));
}
match self.gc_models.get(&seq_len) {
Some(model) => model.add_value(gc_count, &mut self.gc_distribution),
None => (),
}
}
}
|
truncate_sequence
|
identifier_name
|
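Despite its name, `calc_zscore_for_value` above evaluates the normal probability density with the given mean and standard deviation, exp(-(value - mean)^2 / (2*stddev^2)) / sqrt(2*pi*stddev^2); `calculate` then scales it by `total_count` to build the theoretical distribution. A standalone sanity-check sketch (not part of per_sequence_gc_content.rs), using the fact that the density peaks at 1/(stddev*sqrt(2*pi)) when value equals the mean:

```rust
use std::f64;

/// Standalone copy of the density used by `calc_zscore_for_value`:
/// the normal PDF with the given mean and standard deviation.
fn normal_pdf(mean: f64, stddev: f64, value: f64) -> f64 {
    let lhs = (2f64 * f64::consts::PI * stddev * stddev).sqrt();
    let rhs = f64::consts::E.powf(-1.0 * (value - mean).powi(2) / (2.0 * stddev * stddev));
    rhs / lhs
}

fn main() {
    let (mean, stddev) = (50.0, 10.0);
    // At the mean the exponential term is 1, so the peak is 1 / (stddev * sqrt(2*pi)).
    let peak = normal_pdf(mean, stddev, mean);
    assert!((peak - 1.0 / (stddev * (2.0 * f64::consts::PI).sqrt())).abs() < 1e-12);
    // The theoretical distribution in `calculate` scales this density by total_count.
    println!("peak density = {peak}");
}
```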
per_sequence_gc_content.rs
|
use crate::trust_seq::gc_model::GCModel;
use crate::trust_seq::qc::{QCModule, QCReport, QCResult};
use crate::trust_seq::trust_seq::{TrustSeqConfig, TrustSeqErr};
use crate::trust_seq::utils::Sequence;
use serde_json::map::Map;
use serde_json::value;
use serde_json::value::Value;
use std::collections::hash_map::HashMap;
use std::f64;
use std::io::Write;
use std::slice::Iter;
pub struct PerSequenceGCContents<'a> {
config: &'a TrustSeqConfig,
gc_distribution: [f64; 101],
gc_models: HashMap<usize, Box<GCModel>>,
}
#[derive(Serialize)]
struct PerSequenceGCReport {
status: QCResult,
gc_distribution: Vec<f64>,
theoretical_distribution: Vec<f64>,
}
impl<'a> PerSequenceGCContents<'a> {
pub fn new(config: &'a TrustSeqConfig) -> PerSequenceGCContents {
let mut gc_distribution = Vec::new();
gc_distribution.resize(101, 0.0);
return PerSequenceGCContents {
config: config,
gc_distribution: [0f64; 101],
gc_models: HashMap::new(),
};
}
}
fn truncate_sequence(sequence: &[u8]) -> &[u8] {
if sequence.len() > 1000 {
let length = (sequence.len() / 1000) * 1000;
return &sequence[..length];
} else if sequence.len() > 100 {
let length = (sequence.len() / 100) * 100;
return &sequence[..length];
} else {
return sequence;
}
}
fn calc_zscore_for_value(mean: f64, stddev: f64, value: f64) -> f64 {
let lhs = (2f64 * f64::consts::PI * stddev * stddev).sqrt();
let rhs = f64::consts::E.powf(-1.0 * (value - mean).powi(2) / (2.0 * stddev * stddev));
rhs / lhs
}
fn calc_stddev_total(values: Iter<f64>, mode: f64) -> (f64, f64) {
let mut total_count = 0.0;
let mut stddev = 0.0;
for (i, v) in values.enumerate() {
stddev += (i as f64 - mode).powi(2) * (*v);
total_count += *v;
}
stddev /= total_count - 1f64;
(stddev.sqrt(), total_count)
}
impl QCReport for PerSequenceGCReport {
fn get_name(&self) -> &'static str {
return "Per sequence GC content";
}
fn get_status(&self) -> QCResult {
return self.status;
}
fn add_json(&self, map: &mut Map<String, Value>) -> Result<(), TrustSeqErr> {
map.insert(self.get_name().to_string(), value::to_value(self)?);
return Ok(());
}
fn print_text_report(&self, writer: &mut Write) -> Result<(), TrustSeqErr> {
writeln!(writer, "#GC Content\tCount")?;
for idx in 0..101 {
writeln!(writer, "{}\t{}", idx, self.gc_distribution[idx])?;
}
return Ok(());
}
}
impl<'a> QCModule for PerSequenceGCContents<'a> {
fn calculate(&self, reports: &mut Vec<Box<QCReport>>) -> Result<(), TrustSeqErr> {
let mode = self
.gc_distribution
.iter()
.enumerate()
.max_by(|a, b| (a.1).partial_cmp(b.1).unwrap())
.unwrap();
let mode_th = mode.1 * 0.90;
let mut mode_total = 0;
let mut mode_count: u32 = 0;
let mut fell_off_top = true;
let mut fell_off_bottom = true;
for idx in (mode.0)..self.gc_distribution.len() {
if self.gc_distribution[idx] > mode_th {
mode_total += idx;
mode_count += 1;
} else {
fell_off_top = false;
break;
}
}
for idx in (0..mode.0).rev() {
if self.gc_distribution[idx] > mode_th {
mode_total += idx;
mode_count += 1;
} else {
fell_off_bottom = false;
break;
}
}
let mode2: f64 = if fell_off_top || fell_off_bottom {
*mode.1
} else {
mode_total as f64 / mode_count as f64
};
let (stddev, total_count) = calc_stddev_total(self.gc_distribution.iter(), mode2);
let mut theoretical_distribution = [0.0 as f64; 101];
let mut deviation_percent = 0.0;
for (i, v) in theoretical_distribution.iter_mut().enumerate() {
*v = calc_zscore_for_value(mode2, stddev, i as f64) * total_count;
deviation_percent += (*v - self.gc_distribution[i]).abs();
}
deviation_percent = deviation_percent * 100.0 / total_count;
let error_th = self.config.module_config.get("gc_sequence:error");
let warn_th = self.config.module_config.get("gc_sequence:warn");
let status = if deviation_percent > error_th {
QCResult::Fail
} else if deviation_percent > warn_th {
|
reports.push(Box::new(PerSequenceGCReport {
status: status,
gc_distribution: self.gc_distribution.to_vec(),
theoretical_distribution: theoretical_distribution.to_vec(),
}));
return Ok(());
}
fn process_sequence(&mut self, seq: &Sequence) -> () {
let mut gc_count: usize = 0;
let sequence = truncate_sequence(seq.sequence);
for s in sequence {
let ch = *s as char;
let is_gc = match ch {
'G' => true,
'g' => true,
'c' => true,
'C' => true,
_ => false,
};
if is_gc {
gc_count += 1;
}
}
let seq_len = seq.sequence.len();
if !self.gc_models.contains_key(&seq_len) {
self.gc_models
.insert(seq_len, Box::new(GCModel::new(seq_len)));
}
match self.gc_models.get(&seq_len) {
Some(model) => model.add_value(gc_count, &mut self.gc_distribution),
None => (),
}
}
}
|
QCResult::Warn
} else {
QCResult::Pass
};
|
random_line_split
|
per_sequence_gc_content.rs
|
use crate::trust_seq::gc_model::GCModel;
use crate::trust_seq::qc::{QCModule, QCReport, QCResult};
use crate::trust_seq::trust_seq::{TrustSeqConfig, TrustSeqErr};
use crate::trust_seq::utils::Sequence;
use serde_json::map::Map;
use serde_json::value;
use serde_json::value::Value;
use std::collections::hash_map::HashMap;
use std::f64;
use std::io::Write;
use std::slice::Iter;
pub struct PerSequenceGCContents<'a> {
config: &'a TrustSeqConfig,
gc_distribution: [f64; 101],
gc_models: HashMap<usize, Box<GCModel>>,
}
#[derive(Serialize)]
struct PerSequenceGCReport {
status: QCResult,
gc_distribution: Vec<f64>,
theoretical_distribution: Vec<f64>,
}
impl<'a> PerSequenceGCContents<'a> {
pub fn new(config: &'a TrustSeqConfig) -> PerSequenceGCContents {
let mut gc_distribution = Vec::new();
gc_distribution.resize(101, 0.0);
return PerSequenceGCContents {
config: config,
gc_distribution: [0f64; 101],
gc_models: HashMap::new(),
};
}
}
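/// Rounds long reads down to a multiple of 1000 bases (or 100 bases for reads of
/// 101-1000 bases); reads of 100 bases or fewer are returned unchanged.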
fn truncate_sequence(sequence: &[u8]) -> &[u8] {
if sequence.len() > 1000 {
let length = (sequence.len() / 1000) * 1000;
return &sequence[..length];
} else if sequence.len() > 100 {
let length = (sequence.len() / 100) * 100;
return &sequence[..length];
} else {
return sequence;
}
}
fn calc_zscore_for_value(mean: f64, stddev: f64, value: f64) -> f64 {
let lhs = (2f64 * f64::consts::PI * stddev * stddev).sqrt();
let rhs = f64::consts::E.powf(-1.0 * (value - mean).powi(2) / (2.0 * stddev * stddev));
rhs / lhs
}
fn calc_stddev_total(values: Iter<f64>, mode: f64) -> (f64, f64) {
let mut total_count = 0.0;
let mut stddev = 0.0;
for (i, v) in values.enumerate() {
stddev += (i as f64 - mode).powi(2) * (*v);
total_count += *v;
}
stddev /= total_count - 1f64;
(stddev.sqrt(), total_count)
}
impl QCReport for PerSequenceGCReport {
fn get_name(&self) -> &'static str {
return "Per sequence GC content";
}
fn get_status(&self) -> QCResult {
return self.status;
}
fn add_json(&self, map: &mut Map<String, Value>) -> Result<(), TrustSeqErr> {
map.insert(self.get_name().to_string(), value::to_value(self)?);
return Ok(());
}
fn print_text_report(&self, writer: &mut Write) -> Result<(), TrustSeqErr> {
writeln!(writer, "#GC Content\tCount")?;
for idx in 0..101 {
writeln!(writer, "{}\t{}", idx, self.gc_distribution[idx])?;
}
return Ok(());
}
}
impl<'a> QCModule for PerSequenceGCContents<'a> {
fn calculate(&self, reports: &mut Vec<Box<QCReport>>) -> Result<(), TrustSeqErr> {
let mode = self
.gc_distribution
.iter()
.enumerate()
.max_by(|a, b| (a.1).partial_cmp(b.1).unwrap())
.unwrap();
let mode_th = mode.1 * 0.90;
let mut mode_total = 0;
let mut mode_count: u32 = 0;
let mut fell_off_top = true;
let mut fell_off_bottom = true;
for idx in (mode.0)..self.gc_distribution.len() {
if self.gc_distribution[idx] > mode_th {
mode_total += idx;
mode_count += 1;
} else {
fell_off_top = false;
break;
}
}
for idx in (0..mode.0).rev() {
if self.gc_distribution[idx] > mode_th {
mode_total += idx;
mode_count += 1;
} else {
fell_off_bottom = false;
break;
}
}
let mode2: f64 = if fell_off_top || fell_off_bottom {
*mode.1
} else {
mode_total as f64 / mode_count as f64
};
let (stddev, total_count) = calc_stddev_total(self.gc_distribution.iter(), mode2);
let mut theoretical_distribution = [0.0 as f64; 101];
let mut deviation_percent = 0.0;
for (i, v) in theoretical_distribution.iter_mut().enumerate() {
*v = calc_zscore_for_value(mode2, stddev, i as f64) * total_count;
deviation_percent += (*v - self.gc_distribution[i]).abs();
}
deviation_percent = deviation_percent * 100.0 / total_count;
let error_th = self.config.module_config.get("gc_sequence:error");
let warn_th = self.config.module_config.get("gc_sequence:warn");
let status = if deviation_percent > error_th {
QCResult::Fail
} else if deviation_percent > warn_th {
QCResult::Warn
} else
|
;
reports.push(Box::new(PerSequenceGCReport {
status: status,
gc_distribution: self.gc_distribution.to_vec(),
theoretical_distribution: theoretical_distribution.to_vec(),
}));
return Ok(());
}
fn process_sequence(&mut self, seq: &Sequence) -> () {
let mut gc_count: usize = 0;
let sequence = truncate_sequence(seq.sequence);
for s in sequence {
let ch = *s as char;
let is_gc = match ch {
'G' => true,
'g' => true,
'c' => true,
'C' => true,
_ => false,
};
if is_gc {
gc_count += 1;
}
}
let seq_len = seq.sequence.len();
        if !self.gc_models.contains_key(&seq_len) {
self.gc_models
.insert(seq_len, Box::new(GCModel::new(seq_len)));
}
match self.gc_models.get(&seq_len) {
Some(model) => model.add_value(gc_count, &mut self.gc_distribution),
None => (),
}
}
}
|
{
QCResult::Pass
}
|
conditional_block
|
geometry.rs
|
use std::fmt;
/// This is what the layout engine produces when it's finished.
#[derive(Default, Clone)]
pub struct Geometry {
// Content Box
pub position: Xyz,
pub dimensions: Xyz,
pub border: Spacing,
pub margin: Spacing,
pub padding: Spacing,
}
impl Geometry {
pub fn within_border_box(&self, point: &Xy) -> bool {
self.position.x - self.border.left - self.padding.left < point.x &&
self.position.x + self.border.right + self.padding.right + self.dimensions.x > point.x &&
self.position.y - self.border.top - self.padding.top < point.y &&
self.position.y + self.border.bottom + self.padding.bottom + self.dimensions.y > point.y
}
/// [x, y, w, h]
pub fn border_box(&self) -> [f64;4] {
let padding_box = self.padding_box();
[
padding_box[0] - self.border.left,
padding_box[1] - self.border.top,
padding_box[2] + self.border.left + self.border.right,
padding_box[3] + self.border.top + self.border.bottom,
]
}
/// [x, y, w, h]
pub fn padding_box(&self) -> [f64;4] {
[
self.position.x - self.padding.left,
self.position.y - self.padding.top,
self.padding.left + self.dimensions.x + self.padding.right,
self.padding.top + self.dimensions.y + self.padding.bottom,
]
}
pub fn bounding_dimensions(&self) -> Xy {
Xy{
x: self.margin.left + self.margin.right + self.padding.left + self.padding.right + self.border.left + self.border.right + self.dimensions.x,
y: self.margin.top + self.margin.bottom + self.padding.top + self.padding.bottom + self.border.top + self.border.bottom + self.dimensions.y,
}
}
pub fn set_bounding_position_x(&mut self, bounding_pos_x: f64) -> bool {
let content_position_x = bounding_pos_x + self.margin.left + self.border.left + self.padding.left;
        let changed = self.position.x != content_position_x;
self.position.x = content_position_x;
changed
|
}
pub fn set_bounding_position_y(&mut self, bounding_pos_y: f64) -> bool {
let content_position_y = bounding_pos_y + self.margin.top + self.border.top + self.padding.top;
        let changed = self.position.y != content_position_y;
self.position.y = content_position_y;
changed
}
}
impl fmt::Debug for Geometry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Geometry {{ position: {:?}, dimensions: {:?} }}", self.position, self.dimensions)
}
}
#[derive(Default, Clone, PartialEq, Debug)]
pub struct Xyz {
pub x: f64,
pub y: f64,
// Z-TODO
// pub z: f64,
}
#[derive(Default, Clone, PartialEq, Debug)]
pub struct Spacing {
pub top: f64,
pub bottom: f64,
pub right: f64,
pub left: f64,
// Z-TODO
// pub front: f64,
// pub back: f64,
}
#[derive(Default)]
pub struct Xy {
pub x: f64,
pub y: f64,
}
|
random_line_split
|
|
geometry.rs
|
use std::fmt;
/// This is what the layout engine produces when it's finished.
#[derive(Default, Clone)]
pub struct Geometry {
// Content Box
pub position: Xyz,
pub dimensions: Xyz,
pub border: Spacing,
pub margin: Spacing,
pub padding: Spacing,
}
impl Geometry {
pub fn
|
(&self, point: &Xy) -> bool {
self.position.x - self.border.left - self.padding.left < point.x &&
self.position.x + self.border.right + self.padding.right + self.dimensions.x > point.x &&
self.position.y - self.border.top - self.padding.top < point.y &&
self.position.y + self.border.bottom + self.padding.bottom + self.dimensions.y > point.y
}
/// [x, y, w, h]
pub fn border_box(&self) -> [f64;4] {
let padding_box = self.padding_box();
[
padding_box[0] - self.border.left,
padding_box[1] - self.border.top,
padding_box[2] + self.border.left + self.border.right,
padding_box[3] + self.border.top + self.border.bottom,
]
}
/// [x, y, w, h]
pub fn padding_box(&self) -> [f64;4] {
[
self.position.x - self.padding.left,
self.position.y - self.padding.top,
self.padding.left + self.dimensions.x + self.padding.right,
self.padding.top + self.dimensions.y + self.padding.bottom,
]
}
pub fn bounding_dimensions(&self) -> Xy {
Xy{
x: self.margin.left + self.margin.right + self.padding.left + self.padding.right + self.border.left + self.border.right + self.dimensions.x,
y: self.margin.top + self.margin.bottom + self.padding.top + self.padding.bottom + self.border.top + self.border.bottom + self.dimensions.y,
}
}
pub fn set_bounding_position_x(&mut self, bounding_pos_x: f64) -> bool {
let content_position_x = bounding_pos_x + self.margin.left + self.border.left + self.padding.left;
        let changed = self.position.x != content_position_x;
self.position.x = content_position_x;
changed
}
pub fn set_bounding_position_y(&mut self, bounding_pos_y: f64) -> bool {
let content_position_y = bounding_pos_y + self.margin.top + self.border.top + self.padding.top;
        let changed = self.position.y != content_position_y;
self.position.y = content_position_y;
changed
}
}
impl fmt::Debug for Geometry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Geometry {{ position: {:?}, dimensions: {:?} }}", self.position, self.dimensions)
}
}
#[derive(Default, Clone, PartialEq, Debug)]
pub struct Xyz {
pub x: f64,
pub y: f64,
// Z-TODO
// pub z: f64,
}
#[derive(Default, Clone, PartialEq, Debug)]
pub struct Spacing {
pub top: f64,
pub bottom: f64,
pub right: f64,
pub left: f64,
// Z-TODO
// pub front: f64,
// pub back: f64,
}
#[derive(Default)]
pub struct Xy {
pub x: f64,
pub y: f64,
}
|
within_border_box
|
identifier_name
|
geometry.rs
|
use std::fmt;
/// This is what the layout engine produces when it's finished.
#[derive(Default, Clone)]
pub struct Geometry {
// Content Box
pub position: Xyz,
pub dimensions: Xyz,
pub border: Spacing,
pub margin: Spacing,
pub padding: Spacing,
}
impl Geometry {
pub fn within_border_box(&self, point: &Xy) -> bool {
self.position.x - self.border.left - self.padding.left < point.x &&
self.position.x + self.border.right + self.padding.right + self.dimensions.x > point.x &&
self.position.y - self.border.top - self.padding.top < point.y &&
self.position.y + self.border.bottom + self.padding.bottom + self.dimensions.y > point.y
}
/// [x, y, w, h]
pub fn border_box(&self) -> [f64;4]
|
/// [x, y, w, h]
pub fn padding_box(&self) -> [f64;4] {
[
self.position.x - self.padding.left,
self.position.y - self.padding.top,
self.padding.left + self.dimensions.x + self.padding.right,
self.padding.top + self.dimensions.y + self.padding.bottom,
]
}
pub fn bounding_dimensions(&self) -> Xy {
Xy{
x: self.margin.left + self.margin.right + self.padding.left + self.padding.right + self.border.left + self.border.right + self.dimensions.x,
y: self.margin.top + self.margin.bottom + self.padding.top + self.padding.bottom + self.border.top + self.border.bottom + self.dimensions.y,
}
}
pub fn set_bounding_position_x(&mut self, bounding_pos_x: f64) -> bool {
let content_position_x = bounding_pos_x + self.margin.left + self.border.left + self.padding.left;
        let changed = self.position.x != content_position_x;
self.position.x = content_position_x;
changed
}
pub fn set_bounding_position_y(&mut self, bounding_pos_y: f64) -> bool {
let content_position_y = bounding_pos_y + self.margin.top + self.border.top + self.padding.top;
        let changed = self.position.y != content_position_y;
self.position.y = content_position_y;
changed
}
}
impl fmt::Debug for Geometry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Geometry {{ position: {:?}, dimensions: {:?} }}", self.position, self.dimensions)
}
}
#[derive(Default, Clone, PartialEq, Debug)]
pub struct Xyz {
pub x: f64,
pub y: f64,
// Z-TODO
// pub z: f64,
}
#[derive(Default, Clone, PartialEq, Debug)]
pub struct Spacing {
pub top: f64,
pub bottom: f64,
pub right: f64,
pub left: f64,
// Z-TODO
// pub front: f64,
// pub back: f64,
}
#[derive(Default)]
pub struct Xy {
pub x: f64,
pub y: f64,
}
|
{
let padding_box = self.padding_box();
[
padding_box[0] - self.border.left,
padding_box[1] - self.border.top,
padding_box[2] + self.border.left + self.border.right,
padding_box[3] + self.border.top + self.border.bottom,
]
}
|
identifier_body
|
lib.rs
|
//! This crate defines the type inference engine.
|
//! this code handles low-level equality and subtyping operations. The
//! type check pass in the compiler is found in the `rustc_typeck` crate.
//!
//! For more information about how rustc works, see the [rustc dev guide].
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/
//!
//! # Note
//!
//! This API is completely unstable and subject to change.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(bool_to_option)]
#![feature(box_patterns)]
#![feature(extend_one)]
#![feature(iter_zip)]
#![feature(never_type)]
#![feature(in_band_lifetimes)]
#![feature(control_flow_enum)]
#![feature(min_specialization)]
#![feature(label_break_value)]
#![recursion_limit = "512"] // For rustdoc
#[macro_use]
extern crate rustc_macros;
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
#[macro_use]
extern crate rustc_data_structures;
#[macro_use]
extern crate tracing;
#[macro_use]
extern crate rustc_middle;
pub mod infer;
pub mod traits;
|
//!
//! - **Type inference.** The type inference code can be found in the `infer` module;
|
random_line_split
|
read_texts.rs
|
extern crate quick_xml;
fn
|
() {
use quick_xml::events::Event;
use quick_xml::Reader;
let xml = "<tag1>text1</tag1><tag1>text2</tag1>\
<tag1>text3</tag1><tag1><tag2>text4</tag2></tag1>";
let mut reader = Reader::from_str(xml);
reader.trim_text(true);
let mut txt = Vec::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) if e.name() == b"tag2" => {
txt.push(
reader
.read_text(b"tag2", &mut Vec::new())
.expect("Cannot decode text value"),
);
println!("{:?}", txt);
}
Ok(Event::Eof) => break, // exits the loop when reaching end of file
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => (), // There are several other `Event`s we do not consider here
}
buf.clear();
}
}
|
main
|
identifier_name
|
read_texts.rs
|
extern crate quick_xml;
fn main()
|
);
println!("{:?}", txt);
}
Ok(Event::Eof) => break, // exits the loop when reaching end of file
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => (), // There are several other `Event`s we do not consider here
}
buf.clear();
}
}
|
{
use quick_xml::events::Event;
use quick_xml::Reader;
let xml = "<tag1>text1</tag1><tag1>text2</tag1>\
<tag1>text3</tag1><tag1><tag2>text4</tag2></tag1>";
let mut reader = Reader::from_str(xml);
reader.trim_text(true);
let mut txt = Vec::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) if e.name() == b"tag2" => {
txt.push(
reader
.read_text(b"tag2", &mut Vec::new())
.expect("Cannot decode text value"),
|
identifier_body
|
read_texts.rs
|
extern crate quick_xml;
fn main() {
use quick_xml::events::Event;
use quick_xml::Reader;
let xml = "<tag1>text1</tag1><tag1>text2</tag1>\
<tag1>text3</tag1><tag1><tag2>text4</tag2></tag1>";
let mut reader = Reader::from_str(xml);
reader.trim_text(true);
let mut txt = Vec::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) if e.name() == b"tag2" =>
|
Ok(Event::Eof) => break, // exits the loop when reaching end of file
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => (), // There are several other `Event`s we do not consider here
}
buf.clear();
}
}
|
{
txt.push(
reader
.read_text(b"tag2", &mut Vec::new())
.expect("Cannot decode text value"),
);
println!("{:?}", txt);
}
|
conditional_block
|
read_texts.rs
|
extern crate quick_xml;
fn main() {
use quick_xml::events::Event;
use quick_xml::Reader;
let xml = "<tag1>text1</tag1><tag1>text2</tag1>\
<tag1>text3</tag1><tag1><tag2>text4</tag2></tag1>";
|
let mut txt = Vec::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) if e.name() == b"tag2" => {
txt.push(
reader
.read_text(b"tag2", &mut Vec::new())
.expect("Cannot decode text value"),
);
println!("{:?}", txt);
}
Ok(Event::Eof) => break, // exits the loop when reaching end of file
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => (), // There are several other `Event`s we do not consider here
}
buf.clear();
}
}
|
let mut reader = Reader::from_str(xml);
reader.trim_text(true);
|
random_line_split
|
lib.rs
|
#![crate_type = "lib"]
#![crate_name = "comm"]
#![feature(box_syntax, core, alloc, oom, heap_api,
unsafe_no_drop_flag, filling_drop, wait_timeout, wait_timeout_with,
static_mutex, raw, nonzero, drain, num_bits_bytes)]
#![cfg_attr(test, feature(test, scoped))]
#![cfg_attr(test, allow(deprecated))]
#![allow(dead_code, trivial_casts, trivial_numeric_casts,
drop_with_repr_extern)]
//! Communication primitives.
//!
//! This library provides types for message passing between threads and polling.
//! Concretely, it provides
//!
//! - Single-producer single-consumer (SPSC),
//! - Single-producer multiple-consumers (SPMC),
//! - Multiple-producers single-consumer (MPSC), and
//! - Multiple-producers multiple-consumers (MPMC)
//!
//! channels of different flavors and a `Select` object which can poll the consuming ends
//! of these channels for readiness.
//!
//! ### Examples
//!
//! Simple usage:
//!
//! ```
//! use std::{thread};
//! use comm::{spsc};
//!
//! // Create a bounded SPSC channel.
//! let (send, recv) = spsc::bounded::new(10);
//! thread::spawn(move || {
//! send.send_sync(10).unwrap();
//! });
//! assert_eq!(recv.recv_sync().unwrap(), 10);
//! ```
//!
//! Shared usage:
//!
//! ```
//! use std::{thread};
//! use comm::{mpsc};
//!
//! // Create an unbounded MPSC channel.
//! let (send, recv) = mpsc::unbounded::new();
//! for i in 0..10 {
//! let send = send.clone();
//! thread::spawn(move || {
//! send.send(i).unwrap();
//! });
//! }
//! drop(send);
//! while let Ok(n) = recv.recv_sync() {
//! println!("{}", n);
//! }
//! ```
//!
//! Selecting:
//!
//! ```
//! #![feature(std_misc, thread_sleep)]
//!
//! use std::thread::{self, sleep_ms};
//! use comm::{spsc};
//! use comm::select::{Select, Selectable};
//!
//! let mut channels = vec!();
//! for i in 0..10 {
//! let (send, recv) = spsc::one_space::new();
//! channels.push(recv);
//! thread::spawn(move || {
//! sleep_ms(100);
//! send.send(i).ok();
//! });
//! }
//! let select = Select::new();
//! for recv in &channels {
//! select.add(recv);
//! }
//! let first_ready = select.wait(&mut [0])[0];
//! for recv in &channels {
//! if first_ready == recv.id() {
//! println!("First ready: {}", recv.recv_sync().unwrap());
//! return;
//! }
//! }
//! ```
extern crate core;
extern crate alloc;
#[cfg(test)] extern crate test;
pub use marker::{Sendable};
mod sortedvec;
mod marker;
pub mod arc;
pub mod select;
pub mod spsc;
pub mod spmc;
pub mod mpsc;
pub mod mpmc;
/// Errors that can happen during receiving and sending.
///
/// See the individual functions for a list of errors they can return and the specific
/// meaning.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum
|
{
Disconnected,
Full,
Empty,
Deadlock,
}
|
Error
|
identifier_name
|
lib.rs
|
#![crate_type = "lib"]
#![crate_name = "comm"]
#![feature(box_syntax, core, alloc, oom, heap_api,
unsafe_no_drop_flag, filling_drop, wait_timeout, wait_timeout_with,
static_mutex, raw, nonzero, drain, num_bits_bytes)]
#![cfg_attr(test, feature(test, scoped))]
#![cfg_attr(test, allow(deprecated))]
#![allow(dead_code, trivial_casts, trivial_numeric_casts,
drop_with_repr_extern)]
//! Communication primitives.
//!
//! This library provides types for message passing between threads and polling.
//! Concretely, it provides
//!
//! - Single-producer single-consumer (SPSC),
//! - Single-producer multiple-consumers (SPMC),
//! - Multiple-producers single-consumer (MPSC), and
//! - Multiple-producers multiple-consumers (MPMC)
//!
//! channels of different flavors and a `Select` object which can poll the consuming ends
//! of these channels for readiness.
//!
//! ### Examples
//!
//! Simple usage:
//!
//! ```
//! use std::{thread};
//! use comm::{spsc};
//!
//! // Create a bounded SPSC channel.
//! let (send, recv) = spsc::bounded::new(10);
//! thread::spawn(move || {
//! send.send_sync(10).unwrap();
//! });
//! assert_eq!(recv.recv_sync().unwrap(), 10);
//! ```
//!
//! Shared usage:
//!
//! ```
//! use std::{thread};
//! use comm::{mpsc};
//!
//! // Create an unbounded MPSC channel.
//! let (send, recv) = mpsc::unbounded::new();
//! for i in 0..10 {
//! let send = send.clone();
//! thread::spawn(move || {
//! send.send(i).unwrap();
//! });
//! }
//! drop(send);
//! while let Ok(n) = recv.recv_sync() {
//! println!("{}", n);
//! }
//! ```
//!
//! Selecting:
//!
//! ```
//! #![feature(std_misc, thread_sleep)]
//!
//! use std::thread::{self, sleep_ms};
//! use comm::{spsc};
//! use comm::select::{Select, Selectable};
//!
//! let mut channels = vec!();
//! for i in 0..10 {
//! let (send, recv) = spsc::one_space::new();
//! channels.push(recv);
//! thread::spawn(move || {
//! sleep_ms(100);
//! send.send(i).ok();
//! });
//! }
//! let select = Select::new();
//! for recv in &channels {
//! select.add(recv);
//! }
//! let first_ready = select.wait(&mut [0])[0];
//! for recv in &channels {
//! if first_ready == recv.id() {
//! println!("First ready: {}", recv.recv_sync().unwrap());
//! return;
//! }
//! }
//! ```
extern crate core;
extern crate alloc;
#[cfg(test)] extern crate test;
pub use marker::{Sendable};
|
mod sortedvec;
mod marker;
pub mod arc;
pub mod select;
pub mod spsc;
pub mod spmc;
pub mod mpsc;
pub mod mpmc;
/// Errors that can happen during receiving and sending.
///
/// See the individual functions for a list of errors they can return and the specific
/// meaning.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Error {
Disconnected,
Full,
Empty,
Deadlock,
}
|
random_line_split
|
|
test.rs
|
extern crate pem_parser;
extern crate openssl;
use std::io::prelude::*;
use std::fs::{File};
use std::path::{Path};
use std::process::Command;
use self::openssl::crypto::pkey::{PKey, EncryptionPadding};
const PLAINTEXT_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/original_data");
const PUBLIC_KEY_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/public_key");
const PRIVATE_KEY_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/private_key");
const RUST_ENCRYPTED_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/rust_encrypted_data");
const OPENSSL_CLI_ENCRYPTED_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/openssl_cli_encrypted_data");
fn read_binary_data<P: AsRef<Path>>(path: P) -> Vec<u8> {
let mut buffer = Vec::new();
File::open(path.as_ref()).and_then(|mut f| f.read_to_end(&mut buffer)).unwrap_or_else(|e| panic!("{}: {:?}", e, path.as_ref()));
buffer
}
fn read_string<P: AsRef<Path>>(path: P) -> String {
let mut string = String::new();
File::open(path.as_ref()).and_then(|mut f| f.read_to_string(&mut string)).unwrap();
string
}
fn openssl_cli_encrypt(plaintext_file_path: &str, encrypted_file_path: &str, private_key_path: &str) {
let _ = Command::new("openssl")
.arg("rsautl")
.arg("-encrypt")
.arg("-in")
.arg(plaintext_file_path)
.arg("-out")
.arg(encrypted_file_path)
.arg("-inkey")
.arg(private_key_path)
.output()
.unwrap();
}
fn openssl_cli_decrypt(encrypted_file_path: &str, private_key_path: &str) -> Vec<u8> {
let output = Command::new("openssl")
.arg("rsautl")
.arg("-decrypt")
.arg("-in")
.arg(encrypted_file_path)
.arg("-inkey")
.arg(private_key_path)
.output()
.unwrap();
output.stdout
}
#[test]
/// Assert data encrypted with the openssl CLI and decrypted from Rust stays the same.
fn test_private_key() {
openssl_cli_encrypt(PLAINTEXT_FILE_PATH, OPENSSL_CLI_ENCRYPTED_FILE_PATH, PRIVATE_KEY_PATH);
let encrypted_data: Vec<u8> = read_binary_data(OPENSSL_CLI_ENCRYPTED_FILE_PATH);
let pem_file_contents = read_string(PRIVATE_KEY_PATH);
let der_private_key = pem_parser::pem_to_der(&pem_file_contents);
|
EncryptionPadding::PKCS1v15 // PKCS is the default padding scheme.
);
let decrypted_data: String = String::from_utf8(decrypted_data).unwrap();
let original_data = read_string(PLAINTEXT_FILE_PATH);
assert_eq!(decrypted_data, original_data);
}
#[test]
/// Assert data encrypted from Rust and decrypted with the openssl CLI stays the same.
fn test_public_key() {
let public_key_pem_file_contents = read_string(PUBLIC_KEY_PATH);
let der_public_key = pem_parser::pem_to_der(&public_key_pem_file_contents);
let mut pkey = PKey::new();
pkey.load_pub(&der_public_key);
let original_data = read_binary_data(PLAINTEXT_FILE_PATH);
let encrypted_data = pkey.encrypt_with_padding(
&original_data,
EncryptionPadding::PKCS1v15
);
let mut f = File::create(RUST_ENCRYPTED_FILE_PATH).unwrap();
f.write_all(&encrypted_data).unwrap();
let decrypted_data = openssl_cli_decrypt(RUST_ENCRYPTED_FILE_PATH, PRIVATE_KEY_PATH);
assert_eq!(decrypted_data, original_data);
}
|
let mut pkey = PKey::new();
pkey.load_priv(&der_private_key);
let decrypted_data: Vec<u8> = pkey.decrypt_with_padding(
&encrypted_data,
|
random_line_split
|
test.rs
|
extern crate pem_parser;
extern crate openssl;
use std::io::prelude::*;
use std::fs::{File};
use std::path::{Path};
use std::process::Command;
use self::openssl::crypto::pkey::{PKey, EncryptionPadding};
const PLAINTEXT_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/original_data");
const PUBLIC_KEY_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/public_key");
const PRIVATE_KEY_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/private_key");
const RUST_ENCRYPTED_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/rust_encrypted_data");
const OPENSSL_CLI_ENCRYPTED_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/openssl_cli_encrypted_data");
fn read_binary_data<P: AsRef<Path>>(path: P) -> Vec<u8> {
let mut buffer = Vec::new();
File::open(path.as_ref()).and_then(|mut f| f.read_to_end(&mut buffer)).unwrap_or_else(|e| panic!("{}: {:?}", e, path.as_ref()));
buffer
}
fn
|
<P: AsRef<Path>>(path: P) -> String {
let mut string = String::new();
File::open(path.as_ref()).and_then(|mut f| f.read_to_string(&mut string)).unwrap();
string
}
fn openssl_cli_encrypt(plaintext_file_path: &str, encrypted_file_path: &str, private_key_path: &str) {
let _ = Command::new("openssl")
.arg("rsautl")
.arg("-encrypt")
.arg("-in")
.arg(plaintext_file_path)
.arg("-out")
.arg(encrypted_file_path)
.arg("-inkey")
.arg(private_key_path)
.output()
.unwrap();
}
fn openssl_cli_decrypt(encrypted_file_path: &str, private_key_path: &str) -> Vec<u8> {
let output = Command::new("openssl")
.arg("rsautl")
.arg("-decrypt")
.arg("-in")
.arg(encrypted_file_path)
.arg("-inkey")
.arg(private_key_path)
.output()
.unwrap();
output.stdout
}
#[test]
/// Assert data encrypted with the openssl CLI and decrypted from Rust stays the same.
fn test_private_key() {
openssl_cli_encrypt(PLAINTEXT_FILE_PATH, OPENSSL_CLI_ENCRYPTED_FILE_PATH, PRIVATE_KEY_PATH);
let encrypted_data: Vec<u8> = read_binary_data(OPENSSL_CLI_ENCRYPTED_FILE_PATH);
let pem_file_contents = read_string(PRIVATE_KEY_PATH);
let der_private_key = pem_parser::pem_to_der(&pem_file_contents);
let mut pkey = PKey::new();
pkey.load_priv(&der_private_key);
let decrypted_data: Vec<u8> = pkey.decrypt_with_padding(
&encrypted_data,
EncryptionPadding::PKCS1v15 // PKCS is the default padding scheme.
);
let decrypted_data: String = String::from_utf8(decrypted_data).unwrap();
let original_data = read_string(PLAINTEXT_FILE_PATH);
assert_eq!(decrypted_data, original_data);
}
#[test]
/// Assert data encrypted from Rust and decrypted with the openssl CLI stays the same.
fn test_public_key() {
let public_key_pem_file_contents = read_string(PUBLIC_KEY_PATH);
let der_public_key = pem_parser::pem_to_der(&public_key_pem_file_contents);
let mut pkey = PKey::new();
pkey.load_pub(&der_public_key);
let original_data = read_binary_data(PLAINTEXT_FILE_PATH);
let encrypted_data = pkey.encrypt_with_padding(
&original_data,
EncryptionPadding::PKCS1v15
);
let mut f = File::create(RUST_ENCRYPTED_FILE_PATH).unwrap();
f.write_all(&encrypted_data).unwrap();
let decrypted_data = openssl_cli_decrypt(RUST_ENCRYPTED_FILE_PATH, PRIVATE_KEY_PATH);
assert_eq!(decrypted_data, original_data);
}
|
read_string
|
identifier_name
|
test.rs
|
extern crate pem_parser;
extern crate openssl;
use std::io::prelude::*;
use std::fs::{File};
use std::path::{Path};
use std::process::Command;
use self::openssl::crypto::pkey::{PKey, EncryptionPadding};
const PLAINTEXT_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/original_data");
const PUBLIC_KEY_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/public_key");
const PRIVATE_KEY_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/private_key");
const RUST_ENCRYPTED_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/rust_encrypted_data");
const OPENSSL_CLI_ENCRYPTED_FILE_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/test_data/openssl_cli_encrypted_data");
fn read_binary_data<P: AsRef<Path>>(path: P) -> Vec<u8> {
let mut buffer = Vec::new();
File::open(path.as_ref()).and_then(|mut f| f.read_to_end(&mut buffer)).unwrap_or_else(|e| panic!("{}: {:?}", e, path.as_ref()));
buffer
}
fn read_string<P: AsRef<Path>>(path: P) -> String {
let mut string = String::new();
File::open(path.as_ref()).and_then(|mut f| f.read_to_string(&mut string)).unwrap();
string
}
fn openssl_cli_encrypt(plaintext_file_path: &str, encrypted_file_path: &str, private_key_path: &str) {
let _ = Command::new("openssl")
.arg("rsautl")
.arg("-encrypt")
.arg("-in")
.arg(plaintext_file_path)
.arg("-out")
.arg(encrypted_file_path)
.arg("-inkey")
.arg(private_key_path)
.output()
.unwrap();
}
fn openssl_cli_decrypt(encrypted_file_path: &str, private_key_path: &str) -> Vec<u8> {
let output = Command::new("openssl")
.arg("rsautl")
.arg("-decrypt")
.arg("-in")
.arg(encrypted_file_path)
.arg("-inkey")
.arg(private_key_path)
.output()
.unwrap();
output.stdout
}
#[test]
/// Assert data encrypted with the openssl CLI and decrypted from Rust stays the same.
fn test_private_key() {
openssl_cli_encrypt(PLAINTEXT_FILE_PATH, OPENSSL_CLI_ENCRYPTED_FILE_PATH, PRIVATE_KEY_PATH);
let encrypted_data: Vec<u8> = read_binary_data(OPENSSL_CLI_ENCRYPTED_FILE_PATH);
let pem_file_contents = read_string(PRIVATE_KEY_PATH);
let der_private_key = pem_parser::pem_to_der(&pem_file_contents);
let mut pkey = PKey::new();
pkey.load_priv(&der_private_key);
let decrypted_data: Vec<u8> = pkey.decrypt_with_padding(
&encrypted_data,
EncryptionPadding::PKCS1v15 // PKCS is the default padding scheme.
);
let decrypted_data: String = String::from_utf8(decrypted_data).unwrap();
let original_data = read_string(PLAINTEXT_FILE_PATH);
assert_eq!(decrypted_data, original_data);
}
#[test]
/// Assert data encrypted from Rust and decrypted with the openssl CLI stays the same.
fn test_public_key()
|
}
|
{
let public_key_pem_file_contents = read_string(PUBLIC_KEY_PATH);
let der_public_key = pem_parser::pem_to_der(&public_key_pem_file_contents);
let mut pkey = PKey::new();
pkey.load_pub(&der_public_key);
let original_data = read_binary_data(PLAINTEXT_FILE_PATH);
let encrypted_data = pkey.encrypt_with_padding(
&original_data,
EncryptionPadding::PKCS1v15
);
let mut f = File::create(RUST_ENCRYPTED_FILE_PATH).unwrap();
f.write_all(&encrypted_data).unwrap();
let decrypted_data = openssl_cli_decrypt(RUST_ENCRYPTED_FILE_PATH, PRIVATE_KEY_PATH);
assert_eq!(decrypted_data, original_data);
|
identifier_body
|
ejdb_bson.rs
|
//! Contains low-level utilities for conversion between Rust and EJDB BSON representations.
//!
//! This module is only public to facilitate direct usage of the `ejdb-sys` library, if such
//! need arises. The types provided here are useful for converting Rust BSON values
//! to EJDB ones and vice versa.
//!
//! Types from this module should not be used unless absolutely necessary.
use std::slice;
use bson::oid;
use bson::{self, DecoderResult, Document, EncoderResult};
use ejdb_sys;
pub struct EjdbBsonDocument(*mut ejdb_sys::bson);
impl EjdbBsonDocument {
pub fn empty() -> EjdbBsonDocument {
unsafe {
// TODO: check for alloc errors properly
let bson_ptr = ejdb_sys::bson_create();
if bson_ptr.is_null() {
panic!("Cannot allocate new BSON document");
}
ejdb_sys::bson_init(bson_ptr);
EjdbBsonDocument::from_ptr(bson_ptr)
}
}
#[inline]
pub unsafe fn from_ptr(ptr: *mut ejdb_sys::bson) -> EjdbBsonDocument {
EjdbBsonDocument(ptr)
}
#[inline]
pub fn from_buffer(buf: &[u8]) -> EjdbBsonDocument {
unsafe {
EjdbBsonDocument(ejdb_sys::bson_create_from_buffer(
buf.as_ptr() as *const _,
buf.len() as i32,
))
}
}
pub fn from_bson(bson: &Document) -> EncoderResult<EjdbBsonDocument> {
let mut buffer = Vec::new();
bson::encode_document(&mut buffer, bson).map(|_| EjdbBsonDocument::from_buffer(&buffer))
}
pub fn to_bson(&self) -> DecoderResult<Document> {
let buf_ptr = unsafe { ejdb_sys::bson_data(self.0 as *const _) as *const u8 };
let buf_size = unsafe { ejdb_sys::bson_size(self.0 as *const _) };
let mut buf = unsafe { slice::from_raw_parts(buf_ptr, buf_size as usize) };
bson::decode_document(&mut buf)
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson {
self.0 as *const _
}
#[inline]
pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson {
self.0 as *mut _
}
}
impl Drop for EjdbBsonDocument {
fn drop(&mut self) {
unsafe {
ejdb_sys::bson_del(self.0);
}
}
}
#[derive(Copy, Clone)]
pub struct EjdbObjectId(ejdb_sys::bson_oid_t);
impl EjdbObjectId {
#[inline]
pub fn empty() -> EjdbObjectId {
let empty_arr: [i8; 12] = [0; 12];
EjdbObjectId(ejdb_sys::bson_oid_t { bytes: empty_arr })
}
#[inline]
pub fn to_rust(self) -> oid::ObjectId
|
#[inline]
pub fn from_rust(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId(ejdb_sys::bson_oid_t {
bytes: to_i(oid.bytes()),
})
}
#[inline]
pub fn to_ejdb(self) -> ejdb_sys::bson_oid_t {
self.0
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson_oid_t {
&self.0
}
#[inline]
pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson_oid_t {
&mut self.0
}
}
impl From<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn from(oid: ejdb_sys::bson_oid_t) -> EjdbObjectId {
EjdbObjectId(oid)
}
}
impl From<oid::ObjectId> for EjdbObjectId {
#[inline]
fn from(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId::from_rust(oid)
}
}
impl Into<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn into(self) -> ejdb_sys::bson_oid_t {
self.to_ejdb()
}
}
impl Into<oid::ObjectId> for EjdbObjectId {
#[inline]
fn into(self) -> oid::ObjectId {
self.to_rust()
}
}
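// `bson_oid_t` stores its 12 bytes as `i8`, whereas the Rust `ObjectId` works with `u8`;
// the helpers below reinterpret each byte between those two representations.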
fn to_i(arr: [u8; 12]) -> [i8; 12] {
let mut result: [i8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as i8;
}
return result;
}
fn to_u(arr: [i8; 12]) -> [u8; 12] {
let mut result: [u8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as u8;
}
return result;
}
|
{
let bytes: [i8; 12];
unsafe {
bytes = (self.0).bytes;
}
oid::ObjectId::with_bytes(to_u(bytes))
}
|
identifier_body
|
ejdb_bson.rs
|
//! Contains low-level utilities for conversion between Rust and EJDB BSON representations.
//!
//! This module is only public to facilitate direct usage of the `ejdb-sys` library, if such
//! need arises. The types provided here are useful for converting Rust BSON values
//! to EJDB ones and vice versa.
//!
//! Types from this module should not be used unless absolutely necessary.
use std::slice;
use bson::oid;
use bson::{self, DecoderResult, Document, EncoderResult};
use ejdb_sys;
pub struct EjdbBsonDocument(*mut ejdb_sys::bson);
impl EjdbBsonDocument {
pub fn empty() -> EjdbBsonDocument {
unsafe {
// TODO: check for alloc errors properly
let bson_ptr = ejdb_sys::bson_create();
if bson_ptr.is_null() {
panic!("Cannot allocate new BSON document");
}
ejdb_sys::bson_init(bson_ptr);
EjdbBsonDocument::from_ptr(bson_ptr)
}
}
#[inline]
pub unsafe fn from_ptr(ptr: *mut ejdb_sys::bson) -> EjdbBsonDocument {
EjdbBsonDocument(ptr)
}
#[inline]
pub fn from_buffer(buf: &[u8]) -> EjdbBsonDocument {
unsafe {
EjdbBsonDocument(ejdb_sys::bson_create_from_buffer(
buf.as_ptr() as *const _,
buf.len() as i32,
))
}
}
pub fn from_bson(bson: &Document) -> EncoderResult<EjdbBsonDocument> {
let mut buffer = Vec::new();
bson::encode_document(&mut buffer, bson).map(|_| EjdbBsonDocument::from_buffer(&buffer))
}
pub fn to_bson(&self) -> DecoderResult<Document> {
let buf_ptr = unsafe { ejdb_sys::bson_data(self.0 as *const _) as *const u8 };
let buf_size = unsafe { ejdb_sys::bson_size(self.0 as *const _) };
let mut buf = unsafe { slice::from_raw_parts(buf_ptr, buf_size as usize) };
bson::decode_document(&mut buf)
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson {
self.0 as *const _
}
#[inline]
pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson {
self.0 as *mut _
}
}
impl Drop for EjdbBsonDocument {
fn drop(&mut self) {
unsafe {
ejdb_sys::bson_del(self.0);
}
}
}
#[derive(Copy, Clone)]
pub struct EjdbObjectId(ejdb_sys::bson_oid_t);
impl EjdbObjectId {
#[inline]
pub fn empty() -> EjdbObjectId {
let empty_arr: [i8; 12] = [0; 12];
EjdbObjectId(ejdb_sys::bson_oid_t { bytes: empty_arr })
}
#[inline]
pub fn to_rust(self) -> oid::ObjectId {
let bytes: [i8; 12];
unsafe {
bytes = (self.0).bytes;
}
oid::ObjectId::with_bytes(to_u(bytes))
}
#[inline]
pub fn from_rust(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId(ejdb_sys::bson_oid_t {
bytes: to_i(oid.bytes()),
})
}
#[inline]
pub fn to_ejdb(self) -> ejdb_sys::bson_oid_t {
self.0
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson_oid_t {
&self.0
}
#[inline]
pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson_oid_t {
&mut self.0
}
}
|
}
}
impl From<oid::ObjectId> for EjdbObjectId {
#[inline]
fn from(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId::from_rust(oid)
}
}
impl Into<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn into(self) -> ejdb_sys::bson_oid_t {
self.to_ejdb()
}
}
impl Into<oid::ObjectId> for EjdbObjectId {
#[inline]
fn into(self) -> oid::ObjectId {
self.to_rust()
}
}
fn to_i(arr: [u8; 12]) -> [i8; 12] {
let mut result: [i8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as i8;
}
return result;
}
fn to_u(arr: [i8; 12]) -> [u8; 12] {
let mut result: [u8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as u8;
}
return result;
}
|
impl From<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn from(oid: ejdb_sys::bson_oid_t) -> EjdbObjectId {
EjdbObjectId(oid)
|
random_line_split
|
ejdb_bson.rs
|
//! Contains low-level utilities for conversion between Rust and EJDB BSON representations.
//!
//! This module is only public to facilitate direct usage of the `ejdb-sys` library, if such
//! need arises. The types provided here are useful for converting Rust BSON values
//! to EJDB ones and vice versa.
//!
//! Types from this module should not be used unless absolutely necessary.
use std::slice;
use bson::oid;
use bson::{self, DecoderResult, Document, EncoderResult};
use ejdb_sys;
pub struct EjdbBsonDocument(*mut ejdb_sys::bson);
impl EjdbBsonDocument {
pub fn empty() -> EjdbBsonDocument {
unsafe {
// TODO: check for alloc errors properly
let bson_ptr = ejdb_sys::bson_create();
if bson_ptr.is_null() {
panic!("Cannot allocate new BSON document");
}
ejdb_sys::bson_init(bson_ptr);
EjdbBsonDocument::from_ptr(bson_ptr)
}
}
#[inline]
pub unsafe fn from_ptr(ptr: *mut ejdb_sys::bson) -> EjdbBsonDocument {
EjdbBsonDocument(ptr)
}
#[inline]
pub fn from_buffer(buf: &[u8]) -> EjdbBsonDocument {
unsafe {
EjdbBsonDocument(ejdb_sys::bson_create_from_buffer(
buf.as_ptr() as *const _,
buf.len() as i32,
))
}
}
pub fn from_bson(bson: &Document) -> EncoderResult<EjdbBsonDocument> {
let mut buffer = Vec::new();
bson::encode_document(&mut buffer, bson).map(|_| EjdbBsonDocument::from_buffer(&buffer))
}
pub fn to_bson(&self) -> DecoderResult<Document> {
let buf_ptr = unsafe { ejdb_sys::bson_data(self.0 as *const _) as *const u8 };
let buf_size = unsafe { ejdb_sys::bson_size(self.0 as *const _) };
let mut buf = unsafe { slice::from_raw_parts(buf_ptr, buf_size as usize) };
bson::decode_document(&mut buf)
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson {
self.0 as *const _
}
#[inline]
pub fn
|
(&mut self) -> *mut ejdb_sys::bson {
self.0 as *mut _
}
}
impl Drop for EjdbBsonDocument {
fn drop(&mut self) {
unsafe {
ejdb_sys::bson_del(self.0);
}
}
}
#[derive(Copy, Clone)]
pub struct EjdbObjectId(ejdb_sys::bson_oid_t);
impl EjdbObjectId {
#[inline]
pub fn empty() -> EjdbObjectId {
let empty_arr: [i8; 12] = [0; 12];
EjdbObjectId(ejdb_sys::bson_oid_t { bytes: empty_arr })
}
#[inline]
pub fn to_rust(self) -> oid::ObjectId {
let bytes: [i8; 12];
unsafe {
bytes = (self.0).bytes;
}
oid::ObjectId::with_bytes(to_u(bytes))
}
#[inline]
pub fn from_rust(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId(ejdb_sys::bson_oid_t {
bytes: to_i(oid.bytes()),
})
}
#[inline]
pub fn to_ejdb(self) -> ejdb_sys::bson_oid_t {
self.0
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson_oid_t {
&self.0
}
#[inline]
pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson_oid_t {
&mut self.0
}
}
impl From<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn from(oid: ejdb_sys::bson_oid_t) -> EjdbObjectId {
EjdbObjectId(oid)
}
}
impl From<oid::ObjectId> for EjdbObjectId {
#[inline]
fn from(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId::from_rust(oid)
}
}
impl Into<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn into(self) -> ejdb_sys::bson_oid_t {
self.to_ejdb()
}
}
impl Into<oid::ObjectId> for EjdbObjectId {
#[inline]
fn into(self) -> oid::ObjectId {
self.to_rust()
}
}
fn to_i(arr: [u8; 12]) -> [i8; 12] {
let mut result: [i8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as i8;
}
return result;
}
fn to_u(arr: [i8; 12]) -> [u8; 12] {
let mut result: [u8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as u8;
}
return result;
}
|
as_raw_mut
|
identifier_name
|
ejdb_bson.rs
|
//! Contains low-level utilities for conversion between Rust and EJDB BSON representations.
//!
//! This module is only public to facilitate direct usage of the `ejdb-sys` library, if such
//! need arises. The types provided here are useful for converting Rust BSON values
//! to EJDB ones and vice versa.
//!
//! Types from this module should not be used unless absolutely necessary.
use std::slice;
use bson::oid;
use bson::{self, DecoderResult, Document, EncoderResult};
use ejdb_sys;
pub struct EjdbBsonDocument(*mut ejdb_sys::bson);
impl EjdbBsonDocument {
pub fn empty() -> EjdbBsonDocument {
unsafe {
// TODO: check for alloc errors properly
let bson_ptr = ejdb_sys::bson_create();
if bson_ptr.is_null()
|
ejdb_sys::bson_init(bson_ptr);
EjdbBsonDocument::from_ptr(bson_ptr)
}
}
#[inline]
pub unsafe fn from_ptr(ptr: *mut ejdb_sys::bson) -> EjdbBsonDocument {
EjdbBsonDocument(ptr)
}
#[inline]
pub fn from_buffer(buf: &[u8]) -> EjdbBsonDocument {
unsafe {
EjdbBsonDocument(ejdb_sys::bson_create_from_buffer(
buf.as_ptr() as *const _,
buf.len() as i32,
))
}
}
pub fn from_bson(bson: &Document) -> EncoderResult<EjdbBsonDocument> {
let mut buffer = Vec::new();
bson::encode_document(&mut buffer, bson).map(|_| EjdbBsonDocument::from_buffer(&buffer))
}
pub fn to_bson(&self) -> DecoderResult<Document> {
let buf_ptr = unsafe { ejdb_sys::bson_data(self.0 as *const _) as *const u8 };
let buf_size = unsafe { ejdb_sys::bson_size(self.0 as *const _) };
let mut buf = unsafe { slice::from_raw_parts(buf_ptr, buf_size as usize) };
bson::decode_document(&mut buf)
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson {
self.0 as *const _
}
#[inline]
pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson {
self.0 as *mut _
}
}
impl Drop for EjdbBsonDocument {
fn drop(&mut self) {
unsafe {
ejdb_sys::bson_del(self.0);
}
}
}
#[derive(Copy, Clone)]
pub struct EjdbObjectId(ejdb_sys::bson_oid_t);
impl EjdbObjectId {
#[inline]
pub fn empty() -> EjdbObjectId {
let empty_arr: [i8; 12] = [0; 12];
EjdbObjectId(ejdb_sys::bson_oid_t { bytes: empty_arr })
}
#[inline]
pub fn to_rust(self) -> oid::ObjectId {
let bytes: [i8; 12];
unsafe {
bytes = (self.0).bytes;
}
oid::ObjectId::with_bytes(to_u(bytes))
}
#[inline]
pub fn from_rust(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId(ejdb_sys::bson_oid_t {
bytes: to_i(oid.bytes()),
})
}
#[inline]
pub fn to_ejdb(self) -> ejdb_sys::bson_oid_t {
self.0
}
#[inline]
pub fn as_raw(&self) -> *const ejdb_sys::bson_oid_t {
&self.0
}
#[inline]
pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson_oid_t {
&mut self.0
}
}
impl From<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn from(oid: ejdb_sys::bson_oid_t) -> EjdbObjectId {
EjdbObjectId(oid)
}
}
impl From<oid::ObjectId> for EjdbObjectId {
#[inline]
fn from(oid: oid::ObjectId) -> EjdbObjectId {
EjdbObjectId::from_rust(oid)
}
}
impl Into<ejdb_sys::bson_oid_t> for EjdbObjectId {
#[inline]
fn into(self) -> ejdb_sys::bson_oid_t {
self.to_ejdb()
}
}
impl Into<oid::ObjectId> for EjdbObjectId {
#[inline]
fn into(self) -> oid::ObjectId {
self.to_rust()
}
}
fn to_i(arr: [u8; 12]) -> [i8; 12] {
let mut result: [i8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as i8;
}
return result;
}
fn to_u(arr: [i8; 12]) -> [u8; 12] {
let mut result: [u8; 12] = [0; 12];
for i in 0..arr.len() {
result[i] = arr[i] as u8;
}
return result;
}
|
{
panic!("Cannot allocate new BSON document");
}
|
conditional_block
|
u256.rs
|
// Copyright Ethereum Classic Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Rust Bitcoin Library
// Written in 2014 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! # Big unsigned integer types
//!
//! Implementation of various large-but-fixed-size unsigned integer types.
//! The functions here are designed to be fast.
//!
// #![no_std]
use std::convert::{From, Into, AsRef};
use std::str::FromStr;
use std::ops::{Add, Sub, Not, Mul, Div, Shr, Shl, BitAnd, BitOr, BitXor, Rem};
use std::cmp::Ordering;
use std::fmt;
use super::{Sign, ParseHexError, read_hex};
use super::algorithms::{add2, mac3, from_signed, sub2_sign, big_digit};
pub const SIGN_BIT_MASK: U256 = U256([0b01111111111111111111111111111111u32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32, 0xffffffffu32]);
#[repr(C)]
#[derive(Eq, PartialEq, Debug, Copy, Clone, Hash)]
/// Represents an unsigned 256-bit integer.
pub struct U256([u32; 8]);
impl U256 {
/// Zero value of U256.
pub fn zero() -> U256 { 0u64.into() }
/// One value of U256.
pub fn one() -> U256 { 1u64.into() }
/// Maximum value of U256.
pub fn max_value() -> U256 {
!U256::zero()
}
/// Minimum value of U256.
pub fn min_value() -> U256 {
U256::zero()
}
/// Add two U256 with overflowing. The same as M256::add.
pub fn overflowing_add(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let carry = add2(a, b);
(U256(*a), if carry > 0 { true } else { false })
}
    /// Subtract two U256 with underflowing. The same as M256::sub.
pub fn underflowing_sub(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let sign = sub2_sign(a, b);
from_signed(sign, a);
(U256(*a), if sign == Sign::Minus { true } else { false })
}
/// Multiply two U256 with overflowing. The same as M256::mul.
pub fn overflowing_mul(mut self, other: U256) -> (U256, bool) {
let mut ret = [0u32; 8];
let U256(ref mut a) = self;
let U256(ref b) = other;
let mut carry = 0;
for (i, bi) in b.iter().rev().enumerate() {
carry = mac3(&mut ret[0..(8-i)], a, *bi);
}
(U256(ret), if carry > 0 { true } else { false })
}
/// Bits needed to represent this value.
pub fn bits(&self) -> usize {
let &U256(ref arr) = self;
let mut current_bits = 0;
for i in (0..8).rev() {
if arr[i] == 0 {
continue;
}
current_bits = (32 - arr[i].leading_zeros() as usize) + ((7 - i) * 32);
}
current_bits
}
/// Equals `floor(log2(*))`. This is always an integer.
pub fn log2floor(&self) -> usize {
        assert!(*self != U256::zero());
let mut l: usize = 256;
for i in 0..8 {
if self.0[i] == 0u32 {
l -= 32;
} else {
l -= self.0[i].leading_zeros() as usize;
if l == 0 {
return l
} else {
return l-1;
}
}
}
return l;
}
}
// Froms, Intos and Defaults
impl Default for U256 {
fn default() -> U256 {
U256::zero()
}
}
impl FromStr for U256 {
type Err = ParseHexError;
fn from_str(s: &str) -> Result<U256, ParseHexError> {
read_hex(s).map(|s| {
U256::from(s.as_ref())
})
}
}
impl From<bool> for U256 {
fn from(val: bool) -> U256 {
if val {
U256::one()
} else {
U256::zero()
}
}
}
impl From<u64> for U256 {
fn from(val: u64) -> U256 {
U256([0, 0, 0, 0, 0, 0, big_digit::get_hi(val), big_digit::get_lo(val)])
}
}
impl Into<u64> for U256 {
fn into(self) -> u64 {
        let p = self.0.iter().position(|s| *s != 0);
assert!(p.is_none() || p.unwrap() >= 6);
let lo = self.0[7] as u64;
let hi = self.0[6] as u64;
lo + (hi << 32)
}
}
impl From<usize> for U256 {
fn from(val: usize) -> U256 {
(val as u64).into()
}
}
impl Into<usize> for U256 {
fn into(self) -> usize {
let v64: u64 = self.into();
v64 as usize
}
}
impl<'a> From<&'a [u8]> for U256 {
fn from(val: &'a [u8]) -> U256 {
assert!(val.len() <= 256 / 8);
let mut r = [0u8; 32];
let reserved = 32 - val.len();
for i in 0..val.len() {
r[i + reserved] = val[i];
}
r.into()
}
}
impl From<[u8; 32]> for U256 {
fn from(val: [u8; 32]) -> U256 {
let mut r = [0u32; 8];
for i in 0..32 {
let pos = i / 4;
r[pos] += (val[i] as u32) << (8 * (3 - (i - (pos * 4))));
}
U256(r)
}
}
impl Into<[u8; 32]> for U256 {
fn into(self) -> [u8; 32] {
let mut r = [0u8; 32];
for i in 0..32 {
let pos = i / 4;
r[i] = (self.0[pos] >> (8 * (3 - (i - (pos * 4)))) & 0xFF) as u8;
}
r
}
}
impl Into<[u32; 8]> for U256 {
fn into(self) -> [u32; 8] {
self.0
}
}
impl From<[u32; 8]> for U256 {
fn from(val: [u32; 8]) -> U256 {
U256(val)
}
}
// Ord
impl Ord for U256 {
fn cmp(&self, other: &U256) -> Ordering {
let &U256(ref me) = self;
let &U256(ref you) = other;
let mut i = 0;
while i < 8 {
if me[i] < you[i] { return Ordering::Less; }
if me[i] > you[i] { return Ordering::Greater; }
i += 1;
}
Ordering::Equal
}
}
impl PartialOrd for U256 {
fn partial_cmp(&self, other: &U256) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl BitAnd<U256> for U256 {
type Output = U256;
fn bitand(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] & other.0[i];
}
r
}
}
impl BitOr<U256> for U256 {
type Output = U256;
fn bitor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] | other.0[i];
}
r
}
}
impl BitXor<U256> for U256 {
type Output = U256;
fn bitxor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] ^ other.0[i];
}
r
}
}
impl Shl<usize> for U256 {
type Output = U256;
fn shl(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i >= word_shift {
ret[i - word_shift] += original[i] << bit_shift;
}
// Carry
if bit_shift > 0 && i >= word_shift + 1 {
ret[i - word_shift - 1] += original[i] >> (32 - bit_shift);
}
}
U256(ret)
}
}
impl Shr<usize> for U256 {
type Output = U256;
fn shr(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i + word_shift < 8 {
ret[i + word_shift] += original[i] >> bit_shift;
}
// Carry
if bit_shift > 0 && i > 0 && i + word_shift < 8 {
ret[i + word_shift] += original[i - 1] << (32 - bit_shift);
}
}
U256(ret)
}
}
impl Add<U256> for U256 {
type Output = U256;
fn add(self, other: U256) -> U256
|
}
impl Sub<U256> for U256 {
type Output = U256;
fn sub(self, other: U256) -> U256 {
let (o, v) = self.underflowing_sub(other);
assert!(!v);
o
}
}
impl Mul<U256> for U256 {
type Output = U256;
fn mul(self, other: U256) -> U256 {
let (o, v) = self.overflowing_mul(other);
assert!(!v);
o
}
}
impl Div for U256 {
type Output = U256;
fn div(self, other: U256) -> U256 {
let mut sub_copy = self;
let mut shift_copy = other;
let mut ret = [0u32; 8];
let my_bits = self.bits();
let your_bits = other.bits();
// Check for division by 0
        assert!(your_bits != 0);
// Early return in case we are dividing by a larger number than us
if my_bits < your_bits {
return U256(ret);
}
// Bitwise long division
let mut shift = my_bits - your_bits;
shift_copy = shift_copy << shift;
loop {
if sub_copy >= shift_copy {
ret[7 - shift / 32] |= 1 << (shift % 32);
sub_copy = sub_copy - shift_copy;
}
shift_copy = shift_copy >> 1;
if shift == 0 { break; }
shift -= 1;
}
U256(ret)
}
}
impl Rem for U256 {
type Output = U256;
fn rem(self, other: U256) -> U256 {
let d = self / other;
self - (other * d)
}
}
impl Not for U256 {
type Output = U256;
fn not(self) -> U256 {
let U256(ref arr) = self;
let mut ret = [0u32; 8];
for i in 0..8 {
            ret[i] = !arr[i];
}
U256(ret)
}
}
impl fmt::LowerHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08x}", self.0[i])?;
}
Ok(())
}
}
impl fmt::UpperHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08X}", self.0[i])?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::U256;
#[test]
pub fn mul() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 2]) * U256([0, 0, 0, 0, 0, 0, 0, 3]),
U256([0, 0, 0, 0, 0, 0, 0, 6]));
assert_eq!(U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF]) *
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
#[test]
pub fn div() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 3]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256::one());
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 1000000001]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 500000000]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 0x7FFFFFFE]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0x7FFFFFFF, 0xFFFFFFFE]));
assert_eq!(U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
}
|
{
let (o, v) = self.overflowing_add(other);
assert!(!v);
o
}
|
identifier_body
|
u256.rs
|
// Copyright Ethereum Classic Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Rust Bitcoin Library
// Written in 2014 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! # Big unsigned integer types
//!
//! Implementation of various large-but-fixed-size unsigned integer types.
//! The functions here are designed to be fast.
//!
// #![no_std]
use std::convert::{From, Into, AsRef};
use std::str::FromStr;
use std::ops::{Add, Sub, Not, Mul, Div, Shr, Shl, BitAnd, BitOr, BitXor, Rem};
use std::cmp::Ordering;
use std::fmt;
use super::{Sign, ParseHexError, read_hex};
use super::algorithms::{add2, mac3, from_signed, sub2_sign, big_digit};
pub const SIGN_BIT_MASK: U256 = U256([0b01111111111111111111111111111111u32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32, 0xffffffffu32]);
#[repr(C)]
#[derive(Eq, PartialEq, Debug, Copy, Clone, Hash)]
/// Represents an unsigned 256-bit integer.
pub struct U256([u32; 8]);
impl U256 {
/// Zero value of U256.
pub fn zero() -> U256 { 0u64.into() }
/// One value of U256.
pub fn one() -> U256 { 1u64.into() }
/// Maximum value of U256.
pub fn max_value() -> U256 {
!U256::zero()
}
/// Minimum value of U256.
pub fn min_value() -> U256 {
U256::zero()
}
/// Add two U256 with overflowing. The same as M256::add.
pub fn overflowing_add(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let carry = add2(a, b);
(U256(*a), if carry > 0 { true } else { false })
}
/// Subtract two U256 with underflowing. The same as M256::sub.
pub fn underflowing_sub(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let sign = sub2_sign(a, b);
from_signed(sign, a);
(U256(*a), if sign == Sign::Minus { true } else { false })
}
/// Multiply two U256 with overflowing. The same as M256::mul.
pub fn overflowing_mul(mut self, other: U256) -> (U256, bool) {
let mut ret = [0u32; 8];
let U256(ref mut a) = self;
let U256(ref b) = other;
let mut carry = 0;
for (i, bi) in b.iter().rev().enumerate() {
carry = mac3(&mut ret[0..(8-i)], a, *bi);
}
(U256(ret), if carry > 0 { true } else { false })
}
/// Bits needed to represent this value.
pub fn bits(&self) -> usize {
let &U256(ref arr) = self;
let mut current_bits = 0;
for i in (0..8).rev() {
if arr[i] == 0 {
continue;
}
current_bits = (32 - arr[i].leading_zeros() as usize) + ((7 - i) * 32);
}
current_bits
}
/// Equals `floor(log2(*))`. This is always an integer.
pub fn log2floor(&self) -> usize {
assert!(*self != U256::zero());
let mut l: usize = 256;
for i in 0..8 {
if self.0[i] == 0u32 {
l -= 32;
} else {
l -= self.0[i].leading_zeros() as usize;
if l == 0
|
else {
return l-1;
}
}
}
return l;
}
}
// Froms, Intos and Defaults
impl Default for U256 {
fn default() -> U256 {
U256::zero()
}
}
impl FromStr for U256 {
type Err = ParseHexError;
fn from_str(s: &str) -> Result<U256, ParseHexError> {
read_hex(s).map(|s| {
U256::from(s.as_ref())
})
}
}
impl From<bool> for U256 {
fn from(val: bool) -> U256 {
if val {
U256::one()
} else {
U256::zero()
}
}
}
impl From<u64> for U256 {
fn from(val: u64) -> U256 {
U256([0, 0, 0, 0, 0, 0, big_digit::get_hi(val), big_digit::get_lo(val)])
}
}
impl Into<u64> for U256 {
fn into(self) -> u64 {
let p = self.0.iter().position(|s| *s != 0);
assert!(p.is_none() || p.unwrap() >= 6);
let lo = self.0[7] as u64;
let hi = self.0[6] as u64;
lo + (hi << 32)
}
}
impl From<usize> for U256 {
fn from(val: usize) -> U256 {
(val as u64).into()
}
}
impl Into<usize> for U256 {
fn into(self) -> usize {
let v64: u64 = self.into();
v64 as usize
}
}
impl<'a> From<&'a [u8]> for U256 {
fn from(val: &'a [u8]) -> U256 {
assert!(val.len() <= 256 / 8);
let mut r = [0u8; 32];
let reserved = 32 - val.len();
for i in 0..val.len() {
r[i + reserved] = val[i];
}
r.into()
}
}
impl From<[u8; 32]> for U256 {
fn from(val: [u8; 32]) -> U256 {
let mut r = [0u32; 8];
for i in 0..32 {
let pos = i / 4;
r[pos] += (val[i] as u32) << (8 * (3 - (i - (pos * 4))));
}
U256(r)
}
}
impl Into<[u8; 32]> for U256 {
fn into(self) -> [u8; 32] {
let mut r = [0u8; 32];
for i in 0..32 {
let pos = i / 4;
r[i] = (self.0[pos] >> (8 * (3 - (i - (pos * 4)))) & 0xFF) as u8;
}
r
}
}
impl Into<[u32; 8]> for U256 {
fn into(self) -> [u32; 8] {
self.0
}
}
impl From<[u32; 8]> for U256 {
fn from(val: [u32; 8]) -> U256 {
U256(val)
}
}
// Ord
impl Ord for U256 {
fn cmp(&self, other: &U256) -> Ordering {
let &U256(ref me) = self;
let &U256(ref you) = other;
let mut i = 0;
while i < 8 {
if me[i] < you[i] { return Ordering::Less; }
if me[i] > you[i] { return Ordering::Greater; }
i += 1;
}
Ordering::Equal
}
}
impl PartialOrd for U256 {
fn partial_cmp(&self, other: &U256) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl BitAnd<U256> for U256 {
type Output = U256;
fn bitand(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] & other.0[i];
}
r
}
}
impl BitOr<U256> for U256 {
type Output = U256;
fn bitor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] | other.0[i];
}
r
}
}
impl BitXor<U256> for U256 {
type Output = U256;
fn bitxor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] ^ other.0[i];
}
r
}
}
impl Shl<usize> for U256 {
type Output = U256;
fn shl(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i >= word_shift {
ret[i - word_shift] += original[i] << bit_shift;
}
// Carry
if bit_shift > 0 && i >= word_shift + 1 {
ret[i - word_shift - 1] += original[i] >> (32 - bit_shift);
}
}
U256(ret)
}
}
impl Shr<usize> for U256 {
type Output = U256;
fn shr(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i + word_shift < 8 {
ret[i + word_shift] += original[i] >> bit_shift;
}
// Carry
if bit_shift > 0 && i > 0 && i + word_shift < 8 {
ret[i + word_shift] += original[i - 1] << (32 - bit_shift);
}
}
U256(ret)
}
}
impl Add<U256> for U256 {
type Output = U256;
fn add(self, other: U256) -> U256 {
let (o, v) = self.overflowing_add(other);
assert!(!v);
o
}
}
impl Sub<U256> for U256 {
type Output = U256;
fn sub(self, other: U256) -> U256 {
let (o, v) = self.underflowing_sub(other);
assert!(!v);
o
}
}
impl Mul<U256> for U256 {
type Output = U256;
fn mul(self, other: U256) -> U256 {
let (o, v) = self.overflowing_mul(other);
assert!(!v);
o
}
}
impl Div for U256 {
type Output = U256;
fn div(self, other: U256) -> U256 {
let mut sub_copy = self;
let mut shift_copy = other;
let mut ret = [0u32; 8];
let my_bits = self.bits();
let your_bits = other.bits();
// Check for division by 0
assert!(your_bits != 0);
// Early return in case we are dividing by a larger number than us
if my_bits < your_bits {
return U256(ret);
}
// Bitwise long division
let mut shift = my_bits - your_bits;
shift_copy = shift_copy << shift;
loop {
if sub_copy >= shift_copy {
ret[7 - shift / 32] |= 1 << (shift % 32);
sub_copy = sub_copy - shift_copy;
}
shift_copy = shift_copy >> 1;
if shift == 0 { break; }
shift -= 1;
}
U256(ret)
}
}
impl Rem for U256 {
type Output = U256;
fn rem(self, other: U256) -> U256 {
let d = self / other;
self - (other * d)
}
}
impl Not for U256 {
type Output = U256;
fn not(self) -> U256 {
let U256(ref arr) = self;
let mut ret = [0u32; 8];
for i in 0..8 {
ret[i] = !arr[i];
}
U256(ret)
}
}
impl fmt::LowerHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08x}", self.0[i])?;
}
Ok(())
}
}
impl fmt::UpperHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08X}", self.0[i])?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::U256;
#[test]
pub fn mul() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 2]) * U256([0, 0, 0, 0, 0, 0, 0, 3]),
U256([0, 0, 0, 0, 0, 0, 0, 6]));
assert_eq!(U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF]) *
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
#[test]
pub fn div() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 3]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256::one());
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 1000000001]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 500000000]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 0x7FFFFFFE]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0x7FFFFFFF, 0xFFFFFFFE]));
assert_eq!(U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
}
|
{
return l
}
|
conditional_block
|
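The Shl/Shr impls above split a shift into a whole-word move plus a bit-level carry into the neighbouring word; the same decomposition sketched on a hypothetical two-limb u64 value (illustrative only, not taken from u256.rs):
/// Left-shift a 128-bit value stored as [low, high] u64 limbs.
fn shl128(limbs: [u64; 2], shift: u32) -> [u64; 2] {
    let word_shift = (shift / 64) as usize;
    let bit_shift = shift % 64;
    let mut ret = [0u64; 2];
    for i in 0..2 {
        // Whole-word part of the shift.
        if i + word_shift < 2 {
            ret[i + word_shift] |= limbs[i] << bit_shift;
        }
        // Bits carried into the next word up.
        if bit_shift > 0 && i + word_shift + 1 < 2 {
            ret[i + word_shift + 1] |= limbs[i] >> (64 - bit_shift);
        }
    }
    ret
}

fn main() {
    assert_eq!(shl128([1, 0], 65), [0, 2]); // crosses the limb boundary
    assert_eq!(shl128([0x8000_0000_0000_0000, 0], 1), [0, 1]);
}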
u256.rs
|
// Copyright Ethereum Classic Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Rust Bitcoin Library
// Written in 2014 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! # Big unsigned integer types
//!
//! Implementation of various large-but-fixed-size unsigned integer types.
//! The functions here are designed to be fast.
//!
// #![no_std]
use std::convert::{From, Into, AsRef};
use std::str::FromStr;
use std::ops::{Add, Sub, Not, Mul, Div, Shr, Shl, BitAnd, BitOr, BitXor, Rem};
use std::cmp::Ordering;
use std::fmt;
use super::{Sign, ParseHexError, read_hex};
use super::algorithms::{add2, mac3, from_signed, sub2_sign, big_digit};
pub const SIGN_BIT_MASK: U256 = U256([0b01111111111111111111111111111111u32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32, 0xffffffffu32]);
#[repr(C)]
#[derive(Eq, PartialEq, Debug, Copy, Clone, Hash)]
/// Represents an unsigned 256-bit integer.
pub struct U256([u32; 8]);
impl U256 {
/// Zero value of U256.
pub fn zero() -> U256 { 0u64.into() }
/// One value of U256.
pub fn one() -> U256 { 1u64.into() }
/// Maximum value of U256.
pub fn max_value() -> U256 {
!U256::zero()
}
/// Minimum value of U256.
pub fn min_value() -> U256 {
U256::zero()
}
/// Add two U256 with overflowing. The same as M256::add.
pub fn overflowing_add(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let carry = add2(a, b);
(U256(*a), if carry > 0 { true } else { false })
}
/// Subtract two U256 with underflowing. The same as M256::sub.
pub fn underflowing_sub(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let sign = sub2_sign(a, b);
from_signed(sign, a);
(U256(*a), if sign == Sign::Minus { true } else { false })
}
/// Multiply two U256 with overflowing. The same as M256::mul.
pub fn overflowing_mul(mut self, other: U256) -> (U256, bool) {
let mut ret = [0u32; 8];
let U256(ref mut a) = self;
let U256(ref b) = other;
let mut carry = 0;
for (i, bi) in b.iter().rev().enumerate() {
carry = mac3(&mut ret[0..(8-i)], a, *bi);
}
(U256(ret), if carry > 0 { true } else { false })
}
/// Bits needed to represent this value.
pub fn bits(&self) -> usize {
let &U256(ref arr) = self;
let mut current_bits = 0;
for i in (0..8).rev() {
if arr[i] == 0 {
continue;
}
current_bits = (32 - arr[i].leading_zeros() as usize) + ((7 - i) * 32);
}
current_bits
}
/// Equals `floor(log2(*))`. This is always an integer.
pub fn log2floor(&self) -> usize {
assert!(*self != U256::zero());
let mut l: usize = 256;
for i in 0..8 {
if self.0[i] == 0u32 {
l -= 32;
} else {
l -= self.0[i].leading_zeros() as usize;
if l == 0 {
return l
} else {
return l-1;
}
}
}
return l;
}
}
// Froms, Intos and Defaults
impl Default for U256 {
fn default() -> U256 {
U256::zero()
}
}
impl FromStr for U256 {
type Err = ParseHexError;
fn from_str(s: &str) -> Result<U256, ParseHexError> {
read_hex(s).map(|s| {
U256::from(s.as_ref())
})
}
}
impl From<bool> for U256 {
fn from(val: bool) -> U256 {
if val {
U256::one()
} else {
U256::zero()
}
}
}
impl From<u64> for U256 {
fn from(val: u64) -> U256 {
U256([0, 0, 0, 0, 0, 0, big_digit::get_hi(val), big_digit::get_lo(val)])
}
}
impl Into<u64> for U256 {
fn into(self) -> u64 {
let p = self.0.iter().position(|s| *s != 0);
assert!(p.is_none() || p.unwrap() >= 6);
let lo = self.0[7] as u64;
let hi = self.0[6] as u64;
lo + (hi << 32)
}
}
impl From<usize> for U256 {
fn from(val: usize) -> U256 {
(val as u64).into()
}
}
impl Into<usize> for U256 {
fn into(self) -> usize {
let v64: u64 = self.into();
v64 as usize
}
}
impl<'a> From<&'a [u8]> for U256 {
fn from(val: &'a [u8]) -> U256 {
assert!(val.len() <= 256 / 8);
let mut r = [0u8; 32];
let reserved = 32 - val.len();
for i in 0..val.len() {
r[i + reserved] = val[i];
}
r.into()
}
}
impl From<[u8; 32]> for U256 {
fn from(val: [u8; 32]) -> U256 {
let mut r = [0u32; 8];
for i in 0..32 {
let pos = i / 4;
r[pos] += (val[i] as u32) << (8 * (3 - (i - (pos * 4))));
}
U256(r)
}
}
impl Into<[u8; 32]> for U256 {
fn into(self) -> [u8; 32] {
let mut r = [0u8; 32];
for i in 0..32 {
let pos = i / 4;
r[i] = (self.0[pos] >> (8 * (3 - (i - (pos * 4)))) & 0xFF) as u8;
}
r
}
}
impl Into<[u32; 8]> for U256 {
fn into(self) -> [u32; 8] {
self.0
}
}
impl From<[u32; 8]> for U256 {
fn from(val: [u32; 8]) -> U256 {
U256(val)
}
}
// Ord
impl Ord for U256 {
fn cmp(&self, other: &U256) -> Ordering {
let &U256(ref me) = self;
let &U256(ref you) = other;
let mut i = 0;
while i < 8 {
if me[i] < you[i] { return Ordering::Less; }
if me[i] > you[i] { return Ordering::Greater; }
i += 1;
}
Ordering::Equal
}
}
impl PartialOrd for U256 {
fn partial_cmp(&self, other: &U256) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl BitAnd<U256> for U256 {
type Output = U256;
fn bitand(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] & other.0[i];
}
r
}
}
impl BitOr<U256> for U256 {
type Output = U256;
fn bitor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] | other.0[i];
}
r
}
}
impl BitXor<U256> for U256 {
type Output = U256;
fn bitxor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] ^ other.0[i];
}
r
}
}
impl Shl<usize> for U256 {
type Output = U256;
fn shl(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i >= word_shift {
ret[i - word_shift] += original[i] << bit_shift;
}
// Carry
if bit_shift > 0 && i >= word_shift + 1 {
ret[i - word_shift - 1] += original[i] >> (32 - bit_shift);
}
}
U256(ret)
}
}
impl Shr<usize> for U256 {
type Output = U256;
fn shr(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i + word_shift < 8 {
ret[i + word_shift] += original[i] >> bit_shift;
}
// Carry
if bit_shift > 0 && i > 0 && i + word_shift < 8 {
ret[i + word_shift] += original[i - 1] << (32 - bit_shift);
}
}
U256(ret)
}
}
impl Add<U256> for U256 {
type Output = U256;
fn add(self, other: U256) -> U256 {
let (o, v) = self.overflowing_add(other);
assert!(!v);
o
}
}
impl Sub<U256> for U256 {
type Output = U256;
fn sub(self, other: U256) -> U256 {
let (o, v) = self.underflowing_sub(other);
assert!(!v);
o
}
}
impl Mul<U256> for U256 {
type Output = U256;
fn
|
(self, other: U256) -> U256 {
let (o, v) = self.overflowing_mul(other);
assert!(!v);
o
}
}
impl Div for U256 {
type Output = U256;
fn div(self, other: U256) -> U256 {
let mut sub_copy = self;
let mut shift_copy = other;
let mut ret = [0u32; 8];
let my_bits = self.bits();
let your_bits = other.bits();
// Check for division by 0
assert!(your_bits != 0);
// Early return in case we are dividing by a larger number than us
if my_bits < your_bits {
return U256(ret);
}
// Bitwise long division
let mut shift = my_bits - your_bits;
shift_copy = shift_copy << shift;
loop {
if sub_copy >= shift_copy {
ret[7 - shift / 32] |= 1 << (shift % 32);
sub_copy = sub_copy - shift_copy;
}
shift_copy = shift_copy >> 1;
if shift == 0 { break; }
shift -= 1;
}
U256(ret)
}
}
impl Rem for U256 {
type Output = U256;
fn rem(self, other: U256) -> U256 {
let d = self / other;
self - (other * d)
}
}
impl Not for U256 {
type Output = U256;
fn not(self) -> U256 {
let U256(ref arr) = self;
let mut ret = [0u32; 8];
for i in 0..8 {
ret[i] = !arr[i];
}
U256(ret)
}
}
impl fmt::LowerHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08x}", self.0[i])?;
}
Ok(())
}
}
impl fmt::UpperHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08X}", self.0[i])?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::U256;
#[test]
pub fn mul() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 2]) * U256([0, 0, 0, 0, 0, 0, 0, 3]),
U256([0, 0, 0, 0, 0, 0, 0, 6]));
assert_eq!(U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF]) *
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
#[test]
pub fn div() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 3]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256::one());
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 1000000001]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 500000000]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 0x7FFFFFFE]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0x7FFFFFFF, 0xFFFFFFFE]));
assert_eq!(U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
}
|
mul
|
identifier_name
|
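The Div impl above is plain shift-and-subtract long division; the same loop sketched over a single u64 for comparison (hypothetical helper, not part of u256.rs):
/// Shift-and-subtract long division, mirroring the loop in U256::div above.
fn long_div(mut dividend: u64, divisor: u64) -> u64 {
    assert!(divisor != 0);
    let my_bits = 64 - dividend.leading_zeros();
    let your_bits = 64 - divisor.leading_zeros();
    // Early return when dividing by a larger number.
    if my_bits < your_bits {
        return 0;
    }
    let mut shift = my_bits - your_bits;
    let mut shifted = divisor << shift;
    let mut quotient = 0u64;
    loop {
        if dividend >= shifted {
            quotient |= 1u64 << shift;
            dividend -= shifted;
        }
        shifted >>= 1;
        if shift == 0 {
            break;
        }
        shift -= 1;
    }
    quotient
}

fn main() {
    assert_eq!(long_div(1_000_000_001, 2), 500_000_000);
    assert_eq!(long_div(3, 2), 1);
}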
u256.rs
|
// Copyright Ethereum Classic Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Rust Bitcoin Library
// Written in 2014 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! # Big unsigned integer types
//!
//! Implementation of various large-but-fixed-size unsigned integer types.
//! The functions here are designed to be fast.
//!
// #![no_std]
use std::convert::{From, Into, AsRef};
use std::str::FromStr;
use std::ops::{Add, Sub, Not, Mul, Div, Shr, Shl, BitAnd, BitOr, BitXor, Rem};
use std::cmp::Ordering;
use std::fmt;
use super::{Sign, ParseHexError, read_hex};
use super::algorithms::{add2, mac3, from_signed, sub2_sign, big_digit};
pub const SIGN_BIT_MASK: U256 = U256([0b01111111111111111111111111111111u32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32,
0xffffffffu32, 0xffffffffu32, 0xffffffffu32, 0xffffffffu32]);
#[repr(C)]
#[derive(Eq, PartialEq, Debug, Copy, Clone, Hash)]
/// Represents an unsigned 256-bit integer.
pub struct U256([u32; 8]);
impl U256 {
/// Zero value of U256.
pub fn zero() -> U256 { 0u64.into() }
/// One value of U256.
pub fn one() -> U256 { 1u64.into() }
/// Maximum value of U256.
pub fn max_value() -> U256 {
!U256::zero()
|
}
/// Minimum value of U256.
pub fn min_value() -> U256 {
U256::zero()
}
/// Add two U256 with overflowing. The same as M256::add.
pub fn overflowing_add(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let carry = add2(a, b);
(U256(*a), if carry > 0 { true } else { false })
}
/// Subtract two U256 with underflowing. The same as M256::sub.
pub fn underflowing_sub(mut self, other: U256) -> (U256, bool) {
let U256(ref mut a) = self;
let U256(ref b) = other;
let sign = sub2_sign(a, b);
from_signed(sign, a);
(U256(*a), if sign == Sign::Minus { true } else { false })
}
/// Multiply two U256 with overflowing. The same as M256::mul.
pub fn overflowing_mul(mut self, other: U256) -> (U256, bool) {
let mut ret = [0u32; 8];
let U256(ref mut a) = self;
let U256(ref b) = other;
let mut carry = 0;
for (i, bi) in b.iter().rev().enumerate() {
carry = mac3(&mut ret[0..(8-i)], a, *bi);
}
(U256(ret), if carry > 0 { true } else { false })
}
/// Bits needed to represent this value.
pub fn bits(&self) -> usize {
let &U256(ref arr) = self;
let mut current_bits = 0;
for i in (0..8).rev() {
if arr[i] == 0 {
continue;
}
current_bits = (32 - arr[i].leading_zeros() as usize) + ((7 - i) * 32);
}
current_bits
}
/// Equals `floor(log2(*))`. This is always an integer.
pub fn log2floor(&self) -> usize {
assert!(*self != U256::zero());
let mut l: usize = 256;
for i in 0..8 {
if self.0[i] == 0u32 {
l -= 32;
} else {
l -= self.0[i].leading_zeros() as usize;
if l == 0 {
return l
} else {
return l-1;
}
}
}
return l;
}
}
// Froms, Intos and Defaults
impl Default for U256 {
fn default() -> U256 {
U256::zero()
}
}
impl FromStr for U256 {
type Err = ParseHexError;
fn from_str(s: &str) -> Result<U256, ParseHexError> {
read_hex(s).map(|s| {
U256::from(s.as_ref())
})
}
}
impl From<bool> for U256 {
fn from(val: bool) -> U256 {
if val {
U256::one()
} else {
U256::zero()
}
}
}
impl From<u64> for U256 {
fn from(val: u64) -> U256 {
U256([0, 0, 0, 0, 0, 0, big_digit::get_hi(val), big_digit::get_lo(val)])
}
}
impl Into<u64> for U256 {
fn into(self) -> u64 {
let p = self.0.iter().position(|s| *s != 0);
assert!(p.is_none() || p.unwrap() >= 6);
let lo = self.0[7] as u64;
let hi = self.0[6] as u64;
lo + (hi << 32)
}
}
impl From<usize> for U256 {
fn from(val: usize) -> U256 {
(val as u64).into()
}
}
impl Into<usize> for U256 {
fn into(self) -> usize {
let v64: u64 = self.into();
v64 as usize
}
}
impl<'a> From<&'a [u8]> for U256 {
fn from(val: &'a [u8]) -> U256 {
assert!(val.len() <= 256 / 8);
let mut r = [0u8; 32];
let reserved = 32 - val.len();
for i in 0..val.len() {
r[i + reserved] = val[i];
}
r.into()
}
}
impl From<[u8; 32]> for U256 {
fn from(val: [u8; 32]) -> U256 {
let mut r = [0u32; 8];
for i in 0..32 {
let pos = i / 4;
r[pos] += (val[i] as u32) << (8 * (3 - (i - (pos * 4))));
}
U256(r)
}
}
impl Into<[u8; 32]> for U256 {
fn into(self) -> [u8; 32] {
let mut r = [0u8; 32];
for i in 0..32 {
let pos = i / 4;
r[i] = (self.0[pos] >> (8 * (3 - (i - (pos * 4)))) & 0xFF) as u8;
}
r
}
}
impl Into<[u32; 8]> for U256 {
fn into(self) -> [u32; 8] {
self.0
}
}
impl From<[u32; 8]> for U256 {
fn from(val: [u32; 8]) -> U256 {
U256(val)
}
}
// Ord
impl Ord for U256 {
fn cmp(&self, other: &U256) -> Ordering {
let &U256(ref me) = self;
let &U256(ref you) = other;
let mut i = 0;
while i < 8 {
if me[i] < you[i] { return Ordering::Less; }
if me[i] > you[i] { return Ordering::Greater; }
i += 1;
}
Ordering::Equal
}
}
impl PartialOrd for U256 {
fn partial_cmp(&self, other: &U256) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl BitAnd<U256> for U256 {
type Output = U256;
fn bitand(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] & other.0[i];
}
r
}
}
impl BitOr<U256> for U256 {
type Output = U256;
fn bitor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] | other.0[i];
}
r
}
}
impl BitXor<U256> for U256 {
type Output = U256;
fn bitxor(self, other: U256) -> U256 {
let mut r: U256 = self;
for i in 0..8 {
r.0[i] = r.0[i] ^ other.0[i];
}
r
}
}
impl Shl<usize> for U256 {
type Output = U256;
fn shl(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i >= word_shift {
ret[i - word_shift] += original[i] << bit_shift;
}
// Carry
if bit_shift > 0 && i >= word_shift + 1 {
ret[i - word_shift - 1] += original[i] >> (32 - bit_shift);
}
}
U256(ret)
}
}
impl Shr<usize> for U256 {
type Output = U256;
fn shr(self, shift: usize) -> U256 {
let U256(ref original) = self;
let mut ret = [0u32; 8];
let word_shift = shift / 32;
let bit_shift = shift % 32;
for i in (0..8).rev() {
// Shift
if i + word_shift < 8 {
ret[i + word_shift] += original[i] >> bit_shift;
}
// Carry
if bit_shift > 0 && i > 0 && i + word_shift < 8 {
ret[i + word_shift] += original[i - 1] << (32 - bit_shift);
}
}
U256(ret)
}
}
impl Add<U256> for U256 {
type Output = U256;
fn add(self, other: U256) -> U256 {
let (o, v) = self.overflowing_add(other);
assert!(!v);
o
}
}
impl Sub<U256> for U256 {
type Output = U256;
fn sub(self, other: U256) -> U256 {
let (o, v) = self.underflowing_sub(other);
assert!(!v);
o
}
}
impl Mul<U256> for U256 {
type Output = U256;
fn mul(self, other: U256) -> U256 {
let (o, v) = self.overflowing_mul(other);
assert!(!v);
o
}
}
impl Div for U256 {
type Output = U256;
fn div(self, other: U256) -> U256 {
let mut sub_copy = self;
let mut shift_copy = other;
let mut ret = [0u32; 8];
let my_bits = self.bits();
let your_bits = other.bits();
// Check for division by 0
assert!(your_bits != 0);
// Early return in case we are dividing by a larger number than us
if my_bits < your_bits {
return U256(ret);
}
// Bitwise long division
let mut shift = my_bits - your_bits;
shift_copy = shift_copy << shift;
loop {
if sub_copy >= shift_copy {
ret[7 - shift / 32] |= 1 << (shift % 32);
sub_copy = sub_copy - shift_copy;
}
shift_copy = shift_copy >> 1;
if shift == 0 { break; }
shift -= 1;
}
U256(ret)
}
}
impl Rem for U256 {
type Output = U256;
fn rem(self, other: U256) -> U256 {
let d = self / other;
self - (other * d)
}
}
impl Not for U256 {
type Output = U256;
fn not(self) -> U256 {
let U256(ref arr) = self;
let mut ret = [0u32; 8];
for i in 0..8 {
ret[i] = !arr[i];
}
U256(ret)
}
}
impl fmt::LowerHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08x}", self.0[i])?;
}
Ok(())
}
}
impl fmt::UpperHex for U256 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..8 {
write!(f, "{:08X}", self.0[i])?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::U256;
#[test]
pub fn mul() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 2]) * U256([0, 0, 0, 0, 0, 0, 0, 3]),
U256([0, 0, 0, 0, 0, 0, 0, 6]));
assert_eq!(U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF]) *
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
#[test]
pub fn div() {
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 3]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256::one());
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 1000000001]) / U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 500000000]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0, 0x7FFFFFFE]));
assert_eq!(U256([0, 0, 0, 0, 0, 0, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0, 0, 0, 0, 0, 0, 0x7FFFFFFF, 0xFFFFFFFE]));
assert_eq!(U256([0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFD]) /
U256([0, 0, 0, 0, 0, 0, 0, 2]),
U256([0x7FFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE]));
}
}
|
random_line_split
|
|
chacha.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The ChaCha random number generator.
use core::prelude::*;
use core::num::Int;
use {Rng, SeedableRng, Rand};
const KEY_WORDS : uint = 8; // 8 words for the 256-bit key
const STATE_WORDS : uint = 16;
const CHACHA_ROUNDS: uint = 20; // Cryptographically secure from 8 upwards as of this writing
/// A random number generator that uses the ChaCha20 algorithm [1].
///
/// The ChaCha algorithm is widely accepted as suitable for
/// cryptographic purposes, but this implementation has not been
/// verified as such. Prefer a generator like `OsRng` that defers to
/// the operating system for cases that need high security.
///
/// [1]: D. J. Bernstein, [*ChaCha, a variant of
/// Salsa20*](http://cr.yp.to/chacha.html)
pub struct ChaChaRng {
buffer: [u32,..STATE_WORDS], // Internal buffer of output
state: [u32,..STATE_WORDS], // Initial state
index: uint, // Index into state
}
impl Copy for ChaChaRng {}
static EMPTY: ChaChaRng = ChaChaRng {
buffer: [0,..STATE_WORDS],
state: [0,..STATE_WORDS],
index: STATE_WORDS
};
macro_rules! quarter_round{
($a: expr, $b: expr, $c: expr, $d: expr) => {{
$a += $b; $d ^= $a; $d = $d.rotate_left(16);
$c += $d; $b ^= $c; $b = $b.rotate_left(12);
$a += $b; $d ^= $a; $d = $d.rotate_left( 8);
$c += $d; $b ^= $c; $b = $b.rotate_left( 7);
}}
}
macro_rules! double_round{
($x: expr) => {{
// Column round
quarter_round!($x[ 0], $x[ 4], $x[ 8], $x[12]);
quarter_round!($x[ 1], $x[ 5], $x[ 9], $x[13]);
quarter_round!($x[ 2], $x[ 6], $x[10], $x[14]);
quarter_round!($x[ 3], $x[ 7], $x[11], $x[15]);
// Diagonal round
quarter_round!($x[ 0], $x[ 5], $x[10], $x[15]);
quarter_round!($x[ 1], $x[ 6], $x[11], $x[12]);
quarter_round!($x[ 2], $x[ 7], $x[ 8], $x[13]);
quarter_round!($x[ 3], $x[ 4], $x[ 9], $x[14]);
}}
}
#[inline]
fn core(output: &mut [u32,..STATE_WORDS], input: &[u32,..STATE_WORDS]) {
*output = *input;
for _ in range(0, CHACHA_ROUNDS / 2) {
double_round!(output);
}
for i in range(0, STATE_WORDS) {
output[i] += input[i];
}
}
impl ChaChaRng {
/// Create a ChaCha random number generator using the default
/// fixed key of 8 zero words.
pub fn new_unseeded() -> ChaChaRng {
let mut rng = EMPTY;
rng.init(&[0,..KEY_WORDS]);
rng
}
/// Sets the internal 128-bit ChaCha counter to
/// a user-provided value. This permits jumping
/// arbitrarily ahead (or backwards) in the pseudorandom stream.
///
/// Since the nonce words are used to extend the counter to 128 bits,
/// users wishing to obtain the conventional ChaCha pseudorandom stream
/// associated with a particular nonce can call this function with
/// arguments `0, desired_nonce`.
pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) {
self.state[12] = (counter_low >> 0) as u32;
self.state[13] = (counter_low >> 32) as u32;
self.state[14] = (counter_high >> 0) as u32;
self.state[15] = (counter_high >> 32) as u32;
self.index = STATE_WORDS; // force recomputation
}
/// Initializes `self.state` with the appropriate key and constants
///
/// We deviate slightly from the ChaCha specification regarding
/// the nonce, which is used to extend the counter to 128 bits.
/// This is provably as strong as the original cipher, though,
/// since any distinguishing attack on our variant also works
/// against ChaCha with a chosen-nonce. See the XSalsa20 [1]
/// security proof for a more involved example of this.
///
/// The modified word layout is:
/// ```text
/// constant constant constant constant
/// key key key key
/// key key key key
/// counter counter counter counter
/// ```
/// [1]: Daniel J. Bernstein. [*Extending the Salsa20
/// nonce.*](http://cr.yp.to/papers.html#xsalsa)
fn init(&mut self, key: &[u32,..KEY_WORDS]) {
self.state[0] = 0x61707865;
self.state[1] = 0x3320646E;
self.state[2] = 0x79622D32;
self.state[3] = 0x6B206574;
for i in range(0, KEY_WORDS) {
self.state[4+i] = key[i];
}
self.state[12] = 0;
self.state[13] = 0;
self.state[14] = 0;
self.state[15] = 0;
self.index = STATE_WORDS;
}
/// Refill the internal output buffer (`self.buffer`)
fn update(&mut self) {
core(&mut self.buffer, &self.state);
self.index = 0;
// update 128-bit counter
self.state[12] += 1;
if self.state[12] != 0 { return };
self.state[13] += 1;
if self.state[13] != 0 { return };
self.state[14] += 1;
if self.state[14] != 0
|
;
self.state[15] += 1;
}
}
impl Rng for ChaChaRng {
#[inline]
fn next_u32(&mut self) -> u32 {
if self.index == STATE_WORDS {
self.update();
}
let value = self.buffer[self.index % STATE_WORDS];
self.index += 1;
value
}
}
impl<'a> SeedableRng<&'a [u32]> for ChaChaRng {
fn reseed(&mut self, seed: &'a [u32]) {
// reset state
self.init(&[0u32,..KEY_WORDS]);
// set key in place
let key = self.state.slice_mut(4, 4+KEY_WORDS);
for (k, s) in key.iter_mut().zip(seed.iter()) {
*k = *s;
}
}
/// Create a ChaCha generator from a seed,
/// obtained from a variable-length u32 array.
/// Only up to 8 words are used; if less than 8
/// words are used, the remaining are set to zero.
fn from_seed(seed: &'a [u32]) -> ChaChaRng {
let mut rng = EMPTY;
rng.reseed(seed);
rng
}
}
impl Rand for ChaChaRng {
fn rand<R: Rng>(other: &mut R) -> ChaChaRng {
let mut key : [u32,..KEY_WORDS] = [0,..KEY_WORDS];
for word in key.iter_mut() {
*word = other.gen();
}
SeedableRng::from_seed(key.as_slice())
}
}
#[cfg(test)]
mod test {
use std::prelude::*;
use core::iter::order;
use {Rng, SeedableRng};
use super::ChaChaRng;
#[test]
fn test_rng_rand_seeded() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut ra: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let mut rb: ChaChaRng = SeedableRng::from_seed(s.as_slice());
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_seeded() {
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let mut rb: ChaChaRng = SeedableRng::from_seed(seed);
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_reseed() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut r: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let string1: String = r.gen_ascii_chars().take(100).collect();
r.reseed(s.as_slice());
let string2: String = r.gen_ascii_chars().take(100).collect();
assert_eq!(string1, string2);
}
#[test]
fn test_rng_true_values() {
// Test vectors 1 and 2 from
// http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04
let seed : &[_] = &[0u32,..8];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653,
0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b,
0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8,
0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2));
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73,
0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32,
0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874,
0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b));
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
// Store the 17*i-th 32-bit word,
// i.e., the i-th word of the i-th 16-word block
let mut v : Vec<u32> = Vec::new();
for _ in range(0u, 16) {
v.push(ra.next_u32());
for _ in range(0u, 16) {
ra.next_u32();
}
}
assert_eq!(v,
vec!(0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036,
0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384,
0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530,
0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4));
}
}
|
{ return }
|
conditional_block
|
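The conditional_block middle above belongs to the ripple-carry increment of the 128-bit counter in update(); the same idea as a standalone sketch over four u32 words (hypothetical helper, not part of chacha.rs):
/// Ripple-carry increment of a little-endian 128-bit counter held as four u32 words.
fn increment_counter(words: &mut [u32; 4]) {
    for w in words.iter_mut() {
        *w = w.wrapping_add(1);
        if *w != 0 {
            return; // no carry into the next word
        }
    }
}

fn main() {
    let mut c = [0xFFFF_FFFF, 0xFFFF_FFFF, 0, 0];
    increment_counter(&mut c);
    assert_eq!(c, [0, 0, 1, 0]);
}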
chacha.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The ChaCha random number generator.
use core::prelude::*;
use core::num::Int;
use {Rng, SeedableRng, Rand};
const KEY_WORDS : uint = 8; // 8 words for the 256-bit key
const STATE_WORDS : uint = 16;
const CHACHA_ROUNDS: uint = 20; // Cryptographically secure from 8 upwards as of this writing
/// A random number generator that uses the ChaCha20 algorithm [1].
///
/// The ChaCha algorithm is widely accepted as suitable for
/// cryptographic purposes, but this implementation has not been
/// verified as such. Prefer a generator like `OsRng` that defers to
/// the operating system for cases that need high security.
///
/// [1]: D. J. Bernstein, [*ChaCha, a variant of
/// Salsa20*](http://cr.yp.to/chacha.html)
pub struct ChaChaRng {
buffer: [u32,..STATE_WORDS], // Internal buffer of output
state: [u32,..STATE_WORDS], // Initial state
index: uint, // Index into state
}
impl Copy for ChaChaRng {}
static EMPTY: ChaChaRng = ChaChaRng {
buffer: [0,..STATE_WORDS],
state: [0,..STATE_WORDS],
index: STATE_WORDS
};
macro_rules! quarter_round{
($a: expr, $b: expr, $c: expr, $d: expr) => {{
$a += $b; $d ^= $a; $d = $d.rotate_left(16);
$c += $d; $b ^= $c; $b = $b.rotate_left(12);
$a += $b; $d ^= $a; $d = $d.rotate_left( 8);
$c += $d; $b ^= $c; $b = $b.rotate_left( 7);
}}
}
macro_rules! double_round{
($x: expr) => {{
// Column round
quarter_round!($x[ 0], $x[ 4], $x[ 8], $x[12]);
quarter_round!($x[ 1], $x[ 5], $x[ 9], $x[13]);
quarter_round!($x[ 2], $x[ 6], $x[10], $x[14]);
quarter_round!($x[ 3], $x[ 7], $x[11], $x[15]);
// Diagonal round
quarter_round!($x[ 0], $x[ 5], $x[10], $x[15]);
quarter_round!($x[ 1], $x[ 6], $x[11], $x[12]);
quarter_round!($x[ 2], $x[ 7], $x[ 8], $x[13]);
quarter_round!($x[ 3], $x[ 4], $x[ 9], $x[14]);
}}
}
#[inline]
fn core(output: &mut [u32,..STATE_WORDS], input: &[u32,..STATE_WORDS]) {
*output = *input;
for _ in range(0, CHACHA_ROUNDS / 2) {
double_round!(output);
}
for i in range(0, STATE_WORDS) {
output[i] += input[i];
}
}
impl ChaChaRng {
/// Create a ChaCha random number generator using the default
/// fixed key of 8 zero words.
pub fn new_unseeded() -> ChaChaRng {
let mut rng = EMPTY;
rng.init(&[0,..KEY_WORDS]);
rng
}
/// Sets the internal 128-bit ChaCha counter to
/// a user-provided value. This permits jumping
/// arbitrarily ahead (or backwards) in the pseudorandom stream.
///
/// Since the nonce words are used to extend the counter to 128 bits,
/// users wishing to obtain the conventional ChaCha pseudorandom stream
/// associated with a particular nonce can call this function with
/// arguments `0, desired_nonce`.
pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) {
self.state[12] = (counter_low >> 0) as u32;
self.state[13] = (counter_low >> 32) as u32;
self.state[14] = (counter_high >> 0) as u32;
self.state[15] = (counter_high >> 32) as u32;
self.index = STATE_WORDS; // force recomputation
}
/// Initializes `self.state` with the appropriate key and constants
///
/// We deviate slightly from the ChaCha specification regarding
/// the nonce, which is used to extend the counter to 128 bits.
/// This is provably as strong as the original cipher, though,
/// since any distinguishing attack on our variant also works
/// against ChaCha with a chosen-nonce. See the XSalsa20 [1]
/// security proof for a more involved example of this.
///
/// The modified word layout is:
/// ```text
/// constant constant constant constant
/// key key key key
/// key key key key
/// counter counter counter counter
/// ```
/// [1]: Daniel J. Bernstein. [*Extending the Salsa20
/// nonce.*](http://cr.yp.to/papers.html#xsalsa)
fn init(&mut self, key: &[u32,..KEY_WORDS]) {
self.state[0] = 0x61707865;
self.state[1] = 0x3320646E;
self.state[2] = 0x79622D32;
self.state[3] = 0x6B206574;
for i in range(0, KEY_WORDS) {
self.state[4+i] = key[i];
}
self.state[12] = 0;
self.state[13] = 0;
self.state[14] = 0;
self.state[15] = 0;
self.index = STATE_WORDS;
}
/// Refill the internal output buffer (`self.buffer`)
fn update(&mut self) {
core(&mut self.buffer, &self.state);
self.index = 0;
// update 128-bit counter
self.state[12] += 1;
if self.state[12] != 0 { return };
self.state[13] += 1;
if self.state[13] != 0 { return };
self.state[14] += 1;
if self.state[14] != 0 { return };
self.state[15] += 1;
}
}
impl Rng for ChaChaRng {
#[inline]
fn next_u32(&mut self) -> u32 {
if self.index == STATE_WORDS {
self.update();
}
let value = self.buffer[self.index % STATE_WORDS];
self.index += 1;
value
}
}
impl<'a> SeedableRng<&'a [u32]> for ChaChaRng {
fn reseed(&mut self, seed: &'a [u32]) {
// reset state
self.init(&[0u32,..KEY_WORDS]);
// set key in place
let key = self.state.slice_mut(4, 4+KEY_WORDS);
for (k, s) in key.iter_mut().zip(seed.iter()) {
*k = *s;
}
}
/// Create a ChaCha generator from a seed,
/// obtained from a variable-length u32 array.
/// Only up to 8 words are used; if less than 8
/// words are used, the remaining are set to zero.
fn from_seed(seed: &'a [u32]) -> ChaChaRng {
let mut rng = EMPTY;
rng.reseed(seed);
rng
}
}
impl Rand for ChaChaRng {
fn rand<R: Rng>(other: &mut R) -> ChaChaRng
|
}
#[cfg(test)]
mod test {
use std::prelude::*;
use core::iter::order;
use {Rng, SeedableRng};
use super::ChaChaRng;
#[test]
fn test_rng_rand_seeded() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut ra: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let mut rb: ChaChaRng = SeedableRng::from_seed(s.as_slice());
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_seeded() {
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let mut rb: ChaChaRng = SeedableRng::from_seed(seed);
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_reseed() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut r: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let string1: String = r.gen_ascii_chars().take(100).collect();
r.reseed(s.as_slice());
let string2: String = r.gen_ascii_chars().take(100).collect();
assert_eq!(string1, string2);
}
#[test]
fn test_rng_true_values() {
// Test vectors 1 and 2 from
// http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04
let seed : &[_] = &[0u32,..8];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653,
0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b,
0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8,
0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2));
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73,
0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32,
0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874,
0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b));
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
// Store the 17*i-th 32-bit word,
// i.e., the i-th word of the i-th 16-word block
let mut v : Vec<u32> = Vec::new();
for _ in range(0u, 16) {
v.push(ra.next_u32());
for _ in range(0u, 16) {
ra.next_u32();
}
}
assert_eq!(v,
vec!(0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036,
0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384,
0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530,
0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4));
}
}
|
{
let mut key : [u32, ..KEY_WORDS] = [0, ..KEY_WORDS];
for word in key.iter_mut() {
*word = other.gen();
}
SeedableRng::from_seed(key.as_slice())
}
|
identifier_body
|
chacha.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The ChaCha random number generator.
use core::prelude::*;
use core::num::Int;
use {Rng, SeedableRng, Rand};
const KEY_WORDS : uint = 8; // 8 words for the 256-bit key
const STATE_WORDS : uint = 16;
const CHACHA_ROUNDS: uint = 20; // Cryptographically secure from 8 upwards as of this writing
/// A random number generator that uses the ChaCha20 algorithm [1].
///
/// The ChaCha algorithm is widely accepted as suitable for
/// cryptographic purposes, but this implementation has not been
/// verified as such. Prefer a generator like `OsRng` that defers to
/// the operating system for cases that need high security.
///
/// [1]: D. J. Bernstein, [*ChaCha, a variant of
/// Salsa20*](http://cr.yp.to/chacha.html)
pub struct ChaChaRng {
buffer: [u32,..STATE_WORDS], // Internal buffer of output
state: [u32,..STATE_WORDS], // Initial state
index: uint, // Index into state
}
impl Copy for ChaChaRng {}
static EMPTY: ChaChaRng = ChaChaRng {
buffer: [0,..STATE_WORDS],
state: [0,..STATE_WORDS],
index: STATE_WORDS
};
macro_rules! quarter_round{
($a: expr, $b: expr, $c: expr, $d: expr) => {{
$a += $b; $d ^= $a; $d = $d.rotate_left(16);
$c += $d; $b ^= $c; $b = $b.rotate_left(12);
$a += $b; $d ^= $a; $d = $d.rotate_left( 8);
$c += $d; $b ^= $c; $b = $b.rotate_left( 7);
}}
}
macro_rules! double_round{
($x: expr) => {{
// Column round
quarter_round!($x[ 0], $x[ 4], $x[ 8], $x[12]);
quarter_round!($x[ 1], $x[ 5], $x[ 9], $x[13]);
quarter_round!($x[ 2], $x[ 6], $x[10], $x[14]);
quarter_round!($x[ 3], $x[ 7], $x[11], $x[15]);
// Diagonal round
quarter_round!($x[ 0], $x[ 5], $x[10], $x[15]);
quarter_round!($x[ 1], $x[ 6], $x[11], $x[12]);
quarter_round!($x[ 2], $x[ 7], $x[ 8], $x[13]);
quarter_round!($x[ 3], $x[ 4], $x[ 9], $x[14]);
}}
}
#[inline]
fn core(output: &mut [u32,..STATE_WORDS], input: &[u32,..STATE_WORDS]) {
*output = *input;
for _ in range(0, CHACHA_ROUNDS / 2) {
double_round!(output);
}
for i in range(0, STATE_WORDS) {
output[i] += input[i];
}
}
impl ChaChaRng {
/// Create a ChaCha random number generator using the default
/// fixed key of 8 zero words.
pub fn new_unseeded() -> ChaChaRng {
let mut rng = EMPTY;
rng.init(&[0,..KEY_WORDS]);
rng
}
/// Sets the internal 128-bit ChaCha counter to
/// a user-provided value. This permits jumping
/// arbitrarily ahead (or backwards) in the pseudorandom stream.
///
/// Since the nonce words are used to extend the counter to 128 bits,
/// users wishing to obtain the conventional ChaCha pseudorandom stream
/// associated with a particular nonce can call this function with
/// arguments `0, desired_nonce`.
pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) {
self.state[12] = (counter_low >> 0) as u32;
self.state[13] = (counter_low >> 32) as u32;
self.state[14] = (counter_high >> 0) as u32;
self.state[15] = (counter_high >> 32) as u32;
self.index = STATE_WORDS; // force recomputation
}
/// Initializes `self.state` with the appropriate key and constants
///
/// We deviate slightly from the ChaCha specification regarding
/// the nonce, which is used to extend the counter to 128 bits.
/// This is provably as strong as the original cipher, though,
/// since any distinguishing attack on our variant also works
/// against ChaCha with a chosen-nonce. See the XSalsa20 [1]
/// security proof for a more involved example of this.
///
/// The modified word layout is:
/// ```text
/// constant constant constant constant
/// key key key key
/// key key key key
/// counter counter counter counter
/// ```
/// [1]: Daniel J. Bernstein. [*Extending the Salsa20
/// nonce.*](http://cr.yp.to/papers.html#xsalsa)
fn init(&mut self, key: &[u32,..KEY_WORDS]) {
self.state[0] = 0x61707865;
self.state[1] = 0x3320646E;
self.state[2] = 0x79622D32;
self.state[3] = 0x6B206574;
for i in range(0, KEY_WORDS) {
self.state[4+i] = key[i];
}
self.state[12] = 0;
self.state[13] = 0;
self.state[14] = 0;
self.state[15] = 0;
self.index = STATE_WORDS;
}
/// Refill the internal output buffer (`self.buffer`)
fn update(&mut self) {
core(&mut self.buffer, &self.state);
self.index = 0;
// update 128-bit counter
self.state[12] += 1;
if self.state[12] != 0 { return };
self.state[13] += 1;
if self.state[13] != 0 { return };
self.state[14] += 1;
if self.state[14] != 0 { return };
self.state[15] += 1;
}
}
impl Rng for ChaChaRng {
#[inline]
fn next_u32(&mut self) -> u32 {
if self.index == STATE_WORDS {
self.update();
}
let value = self.buffer[self.index % STATE_WORDS];
self.index += 1;
value
}
}
impl<'a> SeedableRng<&'a [u32]> for ChaChaRng {
fn reseed(&mut self, seed: &'a [u32]) {
// reset state
self.init(&[0u32,..KEY_WORDS]);
// set key in place
let key = self.state.slice_mut(4, 4+KEY_WORDS);
for (k, s) in key.iter_mut().zip(seed.iter()) {
*k = *s;
}
}
/// Create a ChaCha generator from a seed,
/// obtained from a variable-length u32 array.
/// Only up to 8 words are used; if less than 8
/// words are used, the remaining are set to zero.
fn from_seed(seed: &'a [u32]) -> ChaChaRng {
let mut rng = EMPTY;
rng.reseed(seed);
rng
}
}
impl Rand for ChaChaRng {
fn
|
<R: Rng>(other: &mut R) -> ChaChaRng {
let mut key : [u32,..KEY_WORDS] = [0,..KEY_WORDS];
for word in key.iter_mut() {
*word = other.gen();
}
SeedableRng::from_seed(key.as_slice())
}
}
#[cfg(test)]
mod test {
use std::prelude::*;
use core::iter::order;
use {Rng, SeedableRng};
use super::ChaChaRng;
#[test]
fn test_rng_rand_seeded() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut ra: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let mut rb: ChaChaRng = SeedableRng::from_seed(s.as_slice());
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_seeded() {
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let mut rb: ChaChaRng = SeedableRng::from_seed(seed);
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_reseed() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut r: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let string1: String = r.gen_ascii_chars().take(100).collect();
r.reseed(s.as_slice());
let string2: String = r.gen_ascii_chars().take(100).collect();
assert_eq!(string1, string2);
}
#[test]
fn test_rng_true_values() {
// Test vectors 1 and 2 from
// http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04
let seed : &[_] = &[0u32,..8];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653,
0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b,
0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8,
0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2));
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73,
0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32,
0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874,
0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b));
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
// Store the 17*i-th 32-bit word,
// i.e., the i-th word of the i-th 16-word block
let mut v : Vec<u32> = Vec::new();
for _ in range(0u, 16) {
v.push(ra.next_u32());
for _ in range(0u, 16) {
ra.next_u32();
}
}
assert_eq!(v,
vec!(0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036,
0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384,
0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530,
0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4));
}
}
|
rand
|
identifier_name
|
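The quarter_round!/double_round! macros above encode the ChaCha round function; a plain-function version of one quarter round, checked against the RFC 7539 section 2.1.1 test vector (a sketch in current Rust, using wrapping_add in place of the macro's +=):
/// One ChaCha quarter round, applied in place to four words of the state.
fn quarter_round(state: &mut [u32; 16], a: usize, b: usize, c: usize, d: usize) {
    state[a] = state[a].wrapping_add(state[b]);
    state[d] ^= state[a];
    state[d] = state[d].rotate_left(16);
    state[c] = state[c].wrapping_add(state[d]);
    state[b] ^= state[c];
    state[b] = state[b].rotate_left(12);
    state[a] = state[a].wrapping_add(state[b]);
    state[d] ^= state[a];
    state[d] = state[d].rotate_left(8);
    state[c] = state[c].wrapping_add(state[d]);
    state[b] ^= state[c];
    state[b] = state[b].rotate_left(7);
}

fn main() {
    // Quarter-round test vector from RFC 7539, section 2.1.1.
    let mut s = [0u32; 16];
    s[0] = 0x11111111;
    s[1] = 0x01020304;
    s[2] = 0x9b8d6f43;
    s[3] = 0x01234567;
    quarter_round(&mut s, 0, 1, 2, 3);
    assert_eq!(s[0], 0xea2a92f4);
    assert_eq!(s[1], 0xcb1cf8ce);
    assert_eq!(s[2], 0x4581472e);
    assert_eq!(s[3], 0x5881c4bb);
}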
chacha.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The ChaCha random number generator.
use core::prelude::*;
use core::num::Int;
use {Rng, SeedableRng, Rand};
const KEY_WORDS : uint = 8; // 8 words for the 256-bit key
const STATE_WORDS : uint = 16;
const CHACHA_ROUNDS: uint = 20; // Cryptographically secure from 8 upwards as of this writing
/// A random number generator that uses the ChaCha20 algorithm [1].
///
/// The ChaCha algorithm is widely accepted as suitable for
/// cryptographic purposes, but this implementation has not been
/// verified as such. Prefer a generator like `OsRng` that defers to
/// the operating system for cases that need high security.
///
/// [1]: D. J. Bernstein, [*ChaCha, a variant of
/// Salsa20*](http://cr.yp.to/chacha.html)
pub struct ChaChaRng {
buffer: [u32,..STATE_WORDS], // Internal buffer of output
state: [u32,..STATE_WORDS], // Initial state
index: uint, // Index into state
}
impl Copy for ChaChaRng {}
static EMPTY: ChaChaRng = ChaChaRng {
buffer: [0,..STATE_WORDS],
state: [0,..STATE_WORDS],
index: STATE_WORDS
};
macro_rules! quarter_round{
($a: expr, $b: expr, $c: expr, $d: expr) => {{
$a += $b; $d ^= $a; $d = $d.rotate_left(16);
$c += $d; $b ^= $c; $b = $b.rotate_left(12);
$a += $b; $d ^= $a; $d = $d.rotate_left( 8);
$c += $d; $b ^= $c; $b = $b.rotate_left( 7);
}}
}
macro_rules! double_round{
($x: expr) => {{
// Column round
quarter_round!($x[ 0], $x[ 4], $x[ 8], $x[12]);
quarter_round!($x[ 1], $x[ 5], $x[ 9], $x[13]);
quarter_round!($x[ 2], $x[ 6], $x[10], $x[14]);
quarter_round!($x[ 3], $x[ 7], $x[11], $x[15]);
// Diagonal round
quarter_round!($x[ 0], $x[ 5], $x[10], $x[15]);
quarter_round!($x[ 1], $x[ 6], $x[11], $x[12]);
quarter_round!($x[ 2], $x[ 7], $x[ 8], $x[13]);
quarter_round!($x[ 3], $x[ 4], $x[ 9], $x[14]);
}}
}
#[inline]
fn core(output: &mut [u32,..STATE_WORDS], input: &[u32,..STATE_WORDS]) {
*output = *input;
for _ in range(0, CHACHA_ROUNDS / 2) {
double_round!(output);
}
for i in range(0, STATE_WORDS) {
output[i] += input[i];
}
}
impl ChaChaRng {
/// Create a ChaCha random number generator using the default
/// fixed key of 8 zero words.
pub fn new_unseeded() -> ChaChaRng {
let mut rng = EMPTY;
rng.init(&[0,..KEY_WORDS]);
rng
}
/// Sets the internal 128-bit ChaCha counter to
/// a user-provided value. This permits jumping
/// arbitrarily ahead (or backwards) in the pseudorandom stream.
///
/// Since the nonce words are used to extend the counter to 128 bits,
/// users wishing to obtain the conventional ChaCha pseudorandom stream
/// associated with a particular nonce can call this function with
/// arguments `0, desired_nonce`.
pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) {
self.state[12] = (counter_low >> 0) as u32;
self.state[13] = (counter_low >> 32) as u32;
self.state[14] = (counter_high >> 0) as u32;
self.state[15] = (counter_high >> 32) as u32;
self.index = STATE_WORDS; // force recomputation
}
/// Initializes `self.state` with the appropriate key and constants
///
/// We deviate slightly from the ChaCha specification regarding
/// the nonce, which is used to extend the counter to 128 bits.
/// This is provably as strong as the original cipher, though,
/// since any distinguishing attack on our variant also works
/// against ChaCha with a chosen-nonce. See the XSalsa20 [1]
/// security proof for a more involved example of this.
///
/// The modified word layout is:
/// ```text
/// constant constant constant constant
/// key key key key
/// key key key key
/// counter counter counter counter
/// ```
/// [1]: Daniel J. Bernstein. [*Extending the Salsa20
/// nonce.*](http://cr.yp.to/papers.html#xsalsa)
fn init(&mut self, key: &[u32,..KEY_WORDS]) {
self.state[0] = 0x61707865;
self.state[1] = 0x3320646E;
self.state[2] = 0x79622D32;
self.state[3] = 0x6B206574;
|
self.state[12] = 0;
self.state[13] = 0;
self.state[14] = 0;
self.state[15] = 0;
self.index = STATE_WORDS;
}
/// Refill the internal output buffer (`self.buffer`)
fn update(&mut self) {
core(&mut self.buffer, &self.state);
self.index = 0;
// update 128-bit counter
self.state[12] += 1;
if self.state[12] != 0 { return };
self.state[13] += 1;
if self.state[13] != 0 { return };
self.state[14] += 1;
if self.state[14] != 0 { return };
self.state[15] += 1;
}
}
impl Rng for ChaChaRng {
#[inline]
fn next_u32(&mut self) -> u32 {
if self.index == STATE_WORDS {
self.update();
}
let value = self.buffer[self.index % STATE_WORDS];
self.index += 1;
value
}
}
impl<'a> SeedableRng<&'a [u32]> for ChaChaRng {
fn reseed(&mut self, seed: &'a [u32]) {
// reset state
self.init(&[0u32, ..KEY_WORDS]);
// set key in place
let key = self.state.slice_mut(4, 4+KEY_WORDS);
for (k, s) in key.iter_mut().zip(seed.iter()) {
*k = *s;
}
}
/// Create a ChaCha generator from a seed,
/// obtained from a variable-length u32 array.
/// Only up to 8 words are used; if less than 8
/// words are used, the remaining are set to zero.
fn from_seed(seed: &'a [u32]) -> ChaChaRng {
let mut rng = EMPTY;
rng.reseed(seed);
rng
}
}
impl Rand for ChaChaRng {
fn rand<R: Rng>(other: &mut R) -> ChaChaRng {
let mut key : [u32, ..KEY_WORDS] = [0, ..KEY_WORDS];
for word in key.iter_mut() {
*word = other.gen();
}
SeedableRng::from_seed(key.as_slice())
}
}
#[cfg(test)]
mod test {
use std::prelude::*;
use core::iter::order;
use {Rng, SeedableRng};
use super::ChaChaRng;
#[test]
fn test_rng_rand_seeded() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut ra: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let mut rb: ChaChaRng = SeedableRng::from_seed(s.as_slice());
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_seeded() {
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let mut rb: ChaChaRng = SeedableRng::from_seed(seed);
assert!(order::equals(ra.gen_ascii_chars().take(100),
rb.gen_ascii_chars().take(100)));
}
#[test]
fn test_rng_reseed() {
let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>();
let mut r: ChaChaRng = SeedableRng::from_seed(s.as_slice());
let string1: String = r.gen_ascii_chars().take(100).collect();
r.reseed(s.as_slice());
let string2: String = r.gen_ascii_chars().take(100).collect();
assert_eq!(string1, string2);
}
#[test]
fn test_rng_true_values() {
// Test vectors 1 and 2 from
// http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04
let seed : &[_] = &[0u32, ..8];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653,
0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b,
0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8,
0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2));
let v = Vec::from_fn(16, |_| ra.next_u32());
assert_eq!(v,
vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73,
0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32,
0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874,
0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b));
let seed : &[_] = &[0,1,2,3,4,5,6,7];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
// Store the 17*i-th 32-bit word,
// i.e., the i-th word of the i-th 16-word block
let mut v : Vec<u32> = Vec::new();
for _ in range(0u, 16) {
v.push(ra.next_u32());
for _ in range(0u, 16) {
ra.next_u32();
}
}
assert_eq!(v,
vec!(0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036,
0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384,
0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530,
0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4));
}
}
|
for i in range(0, KEY_WORDS) {
self.state[4+i] = key[i];
}
|
random_line_split
|
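A minimal usage sketch (not part of the indexed source; the seed and nonce values below are made up) showing how the `set_counter` API documented above selects the conventional ChaCha keystream for a given nonce:

fn chacha_nonce_stream_sketch() {
    // Seed from up to eight explicit words, as `from_seed` documents.
    let seed: &[_] = &[1u32, 2, 3, 4, 5, 6, 7, 8];
    let mut rng: ChaChaRng = SeedableRng::from_seed(seed);
    // Block counter 0, nonce 42: the conventional stream for that nonce.
    rng.set_counter(0, 42);
    // Draw one 16-word output block from the keystream.
    let block = Vec::from_fn(16, |_| rng.next_u32());
    assert_eq!(block.len(), 16);
}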
regions-no-variance-from-fn-generics.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #12856: a lifetime formal binding introduced by a generic fn
// should not upset the variance inference for actual occurrences of
// that lifetime in type expressions.
pub trait HasLife<'a> {
fn dummy(&'a self)
|
// just to induce a variance on 'a
}
trait UseLife01 {
fn refs<'a, H: HasLife<'a>>(&'a self) -> H;
}
trait UseLife02 {
fn refs<'a, T, H: HasType<&'a T>>(&'a self) -> H;
}
pub trait HasType<T>
{
fn dummy(&self, t: T) -> T { panic!() }
}
trait UseLife03<T> {
fn refs<'a, H: HasType<&'a T>>(&'a self) -> H;
}
// (The functions below were not actually a problem observed during
// fixing of #12856; they just seem like natural tests to put in to
// cover a couple more points in the testing space)
pub fn top_refs_1<'a, H: HasLife<'a>>(_s: &'a ()) -> H {
unimplemented!()
}
pub fn top_refs_2<'a, T, H: HasType<&'a T>>(_s: &'a ()) -> H {
unimplemented!()
}
pub fn main() {}
|
{ }
|
identifier_body
|
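For illustration only (the `Holder` type below is hypothetical and not part of the test file): a type implementing `HasLife<'a>` via the defaulted `dummy` body, showing how the `'a` lifetime that this variance test exercises flows into an impl:

struct Holder<'a> {
    value: &'a u32,
}
// `dummy` has a default body, so an empty impl block is sufficient.
impl<'a> HasLife<'a> for Holder<'a> {}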
regions-no-variance-from-fn-generics.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #12856: a lifetime formal binding introduced by a generic fn
// should not upset the variance inference for actual occurrences of
// that lifetime in type expressions.
pub trait HasLife<'a> {
fn dummy(&'a self) { } // just to induce a variance on 'a
}
trait UseLife01 {
fn refs<'a, H: HasLife<'a>>(&'a self) -> H;
}
trait UseLife02 {
fn refs<'a, T, H: HasType<&'a T>>(&'a self) -> H;
}
pub trait HasType<T>
{
fn dummy(&self, t: T) -> T { panic!() }
}
trait UseLife03<T> {
fn refs<'a, H: HasType<&'a T>>(&'a self) -> H;
}
// (The functions below were not actually a problem observed during
// fixing of #12856; they just seem like natural tests to put in to
// cover a couple more points in the testing space)
pub fn
|
<'a, H: HasLife<'a>>(_s: &'a ()) -> H {
unimplemented!()
}
pub fn top_refs_2<'a, T, H: HasType<&'a T>>(_s: &'a ()) -> H {
unimplemented!()
}
pub fn main() {}
|
top_refs_1
|
identifier_name
|
regions-no-variance-from-fn-generics.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #12856: a lifetime formal binding introduced by a generic fn
// should not upset the variance inference for actual occurrences of
// that lifetime in type expressions.
pub trait HasLife<'a> {
fn dummy(&'a self) { } // just to induce a variance on 'a
}
trait UseLife01 {
fn refs<'a, H: HasLife<'a>>(&'a self) -> H;
}
trait UseLife02 {
fn refs<'a, T, H: HasType<&'a T>>(&'a self) -> H;
}
pub trait HasType<T>
{
fn dummy(&self, t: T) -> T { panic!() }
}
trait UseLife03<T> {
fn refs<'a, H: HasType<&'a T>>(&'a self) -> H;
}
// (The functions below were not actually a problem observed during
// fixing of #12856; they just seem like natural tests to put in to
// cover a couple more points in the testing space)
pub fn top_refs_1<'a, H: HasLife<'a>>(_s: &'a ()) -> H {
unimplemented!()
}
pub fn top_refs_2<'a, T, H: HasType<&'a T>>(_s: &'a ()) -> H {
unimplemented!()
|
}
pub fn main() {}
|
random_line_split
|
|
slave.rs
|
extern crate byteorder;
extern crate encoding;
use self::byteorder::{ ReadBytesExt, LittleEndian};
use std::fs::{File};
use std::io::{Write,Read,Result};
use std::path::Path;
|
use self::encoding::all::ISO_8859_1;
pub fn run_slave<T : Read>(stream : &mut T, cfg : SyncConfig) -> Result<()>{
loop {
println!("Waiting for a data");
let path_length : u16 = try!(stream.read_u16::<LittleEndian>());
if path_length == 0{
// ignore empty
continue;
}
println!("Got the file name size: {}", path_length);
let mut path_data = vec![0u8;0];
try!(stream.take(path_length as u64).read_to_end(&mut path_data));
let path = match ISO_8859_1.decode(&path_data[..],DecoderTrap::Strict){
Ok(s) => s,
Err(_) => "Error decoding".to_owned()
};
let data_length : u32 = try!(stream.read_u32::<LittleEndian>());
let mut data = vec![0u8;0];
try!(stream.take(data_length as u64).read_to_end(&mut data));
println!("Receiving file: {} of size: {}", path, data.len() );
// let full_path = Path::new(&cfg.path).join(&path);
println!("Writing data to {}",path);
let mut file = try!(File::create(path));
try!(file.write_all(&data[..]));
}
}
|
use sync::{SyncConfig};
use self::encoding::{Encoding, DecoderTrap};
|
random_line_split
|
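A hypothetical sender-side sketch (the `send_file` helper is not part of this module) of the wire format `run_slave` reads: a little-endian `u16` path length, the path bytes, a little-endian `u32` data length, then the file contents:

extern crate byteorder;

use byteorder::{WriteBytesExt, LittleEndian};
use std::io::{Write, Result};

fn send_file<T: Write>(stream: &mut T, path: &str, data: &[u8]) -> Result<()> {
    // Assumes an ASCII-only path, which ISO-8859-1 decodes unchanged.
    let path_bytes = path.as_bytes();
    try!(stream.write_u16::<LittleEndian>(path_bytes.len() as u16));
    try!(stream.write_all(path_bytes));
    try!(stream.write_u32::<LittleEndian>(data.len() as u32));
    stream.write_all(data)
}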
slave.rs
|
extern crate byteorder;
extern crate encoding;
use self::byteorder::{ ReadBytesExt, LittleEndian};
use std::fs::{File};
use std::io::{Write,Read,Result};
use std::path::Path;
use sync::{SyncConfig};
use self::encoding::{Encoding, DecoderTrap};
use self::encoding::all::ISO_8859_1;
pub fn
|
<T : Read>(stream : &mut T, cfg : SyncConfig) -> Result<()>{
loop {
println!("Waiting for a data");
let path_length : u16 = try!(stream.read_u16::<LittleEndian>());
if path_length == 0{
// ignore empty
continue;
}
println!("Got the file name size: {}", path_length);
let mut path_data = vec![0u8;0];
try!(stream.take(path_length as u64).read_to_end(&mut path_data));
let path = match ISO_8859_1.decode(&path_data[..],DecoderTrap::Strict){
Ok(s) => s,
Err(_) => "Error decoding".to_owned()
};
let data_length : u32 = try!(stream.read_u32::<LittleEndian>());
let mut data = vec![0u8;0];
try!(stream.take(data_length as u64).read_to_end(&mut data));
println!("Receiving file: {} of size: {}", path, data.len() );
// let full_path = Path::new(&cfg.path).join(&path);
println!("Writing data to {}",path);
let mut file = try!(File::create(path));
try!(file.write_all(&data[..]));
}
}
|
run_slave
|
identifier_name
|
slave.rs
|
extern crate byteorder;
extern crate encoding;
use self::byteorder::{ ReadBytesExt, LittleEndian};
use std::fs::{File};
use std::io::{Write,Read,Result};
use std::path::Path;
use sync::{SyncConfig};
use self::encoding::{Encoding, DecoderTrap};
use self::encoding::all::ISO_8859_1;
pub fn run_slave<T : Read>(stream : &mut T, cfg : SyncConfig) -> Result<()>
|
println!("Writing data to {}",path);
let mut file = try!(File::create(path));
try!(file.write_all(&data[..]));
}
}
|
{
loop {
println!("Waiting for a data");
let path_length : u16 = try!(stream.read_u16::<LittleEndian>());
if path_length == 0{
// ignore empty
continue;
}
println!("Got the file name size: {}", path_length);
let mut path_data = vec![0u8;0];
try!(stream.take(path_length as u64).read_to_end(&mut path_data));
let path = match ISO_8859_1.decode(&path_data[..],DecoderTrap::Strict){
Ok(s) => s,
Err(_) => "Error decoding".to_owned()
};
let data_length : u32 = try!(stream.read_u32::<LittleEndian>());
let mut data = vec![0u8;0];
try!(stream.take(data_length as u64).read_to_end(&mut data));
println!("Receiving file: {} of size: {}", path, data.len() );
// let full_path = Path::new(&cfg.path).join(&path);
|
identifier_body
|
methods.rs
|
// TODO: set request's origin to request's client's origin
unimplemented!()
}
// Step 3.
set_default_accept(request.type_, request.destination, &mut request.headers);
// Step 4.
set_default_accept_language(&mut request.headers);
// Step 5.
// TODO: figure out what a Priority object is.
// Step 6.
// TODO: handle client hints headers.
// Step 7.
if request.is_subresource_request() {
// TODO: handle client hints headers.
}
// Step 8.
main_fetch(request, cache, false, false, target, &mut None, &context);
}
/// [Main fetch](https://fetch.spec.whatwg.org/#concept-main-fetch)
pub fn main_fetch(request: &mut Request,
cache: &mut CorsCache,
cors_flag: bool,
recursive_flag: bool,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// Step 1.
let mut response = None;
// Step 2.
if request.local_urls_only {
if !matches!(request.current_url().scheme(), "about" | "blob" | "data" | "filesystem") {
response = Some(Response::network_error(NetworkError::Internal("Non-local scheme".into())));
}
}
// Step 3.
// TODO: handle content security policy violations.
// Step 4.
// TODO: handle upgrade to a potentially secure URL.
// Step 5.
if should_be_blocked_due_to_bad_port(&request.url()) {
response = Some(Response::network_error(NetworkError::Internal("Request attempted on bad port".into())));
}
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
// Step 6
// TODO: handle request's client's referrer policy.
// Step 7.
request.referrer_policy = request.referrer_policy.or(Some(ReferrerPolicy::NoReferrerWhenDowngrade));
// Step 8.
{
let referrer_url = match mem::replace(&mut request.referrer, Referrer::NoReferrer) {
Referrer::NoReferrer => None,
Referrer::Client => {
// FIXME(#14507): We should never get this value here; it should
// already have been handled in the script thread.
request.headers.remove::<RefererHeader>();
None
},
Referrer::ReferrerUrl(url) => {
request.headers.remove::<RefererHeader>();
let current_url = request.current_url().clone();
determine_request_referrer(&mut request.headers,
request.referrer_policy.unwrap(),
url,
current_url)
}
};
if let Some(referrer_url) = referrer_url {
request.referrer = Referrer::ReferrerUrl(referrer_url);
}
}
// Step 9.
// TODO: handle FTP URLs.
// Step 10.
context.state.hsts_list.read().unwrap().switch_known_hsts_host_domain_url_to_https(
request.current_url_mut());
// Step 11.
// Not applicable: see fetch_async.
// Step 12.
let mut response = response.unwrap_or_else(|| {
let current_url = request.current_url();
let same_origin = if let Origin::Origin(ref origin) = request.origin {
*origin == current_url.origin()
} else {
false
};
if (same_origin && !cors_flag ) ||
current_url.scheme() == "data" ||
current_url.scheme() == "file" || // FIXME: Fetch spec has already dropped filtering against file:
// and about: schemes, but CSS tests will break on loading Ahem
// since we load them through a file: URL.
current_url.scheme() == "about" ||
request.mode == RequestMode::Navigate {
// Substep 1.
request.response_tainting = ResponseTainting::Basic;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if request.mode == RequestMode::SameOrigin {
Response::network_error(NetworkError::Internal("Cross-origin response".into()))
} else if request.mode == RequestMode::NoCors {
// Substep 1.
request.response_tainting = ResponseTainting::Opaque;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if !matches!(current_url.scheme(), "http" | "https") {
Response::network_error(NetworkError::Internal("Non-http scheme".into()))
} else if request.use_cors_preflight ||
(request.unsafe_request &&
(!is_cors_safelisted_method(&request.method) ||
request.headers.iter().any(|h| !is_cors_safelisted_request_header(&h)))) {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
let response = http_fetch(request, cache, true, true, false,
target, done_chan, context);
// Substep 3.
if response.is_network_error() {
// TODO clear cache entries using request
}
// Substep 4.
response
} else {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
http_fetch(request, cache, true, false, false, target, done_chan, context)
}
});
// Step 13.
if recursive_flag {
return response;
}
// Step 14.
let mut response = if !response.is_network_error() && response.internal_response.is_none() {
// Substep 1.
if request.response_tainting == ResponseTainting::CorsTainting {
// Subsubstep 1.
let header_names = response.headers.get::<AccessControlExposeHeaders>();
match header_names {
// Subsubstep 2.
Some(list) if request.credentials_mode != CredentialsMode::Include => {
if list.len() == 1 && list[0] == "*" {
response.cors_exposed_header_name_list =
response.headers.iter().map(|h| h.name().to_owned()).collect();
}
},
// Subsubstep 3.
Some(list) => {
response.cors_exposed_header_name_list = list.iter().map(|h| (**h).clone()).collect();
},
_ => (),
}
}
// Substep 2.
let response_type = match request.response_tainting {
ResponseTainting::Basic => ResponseType::Basic,
ResponseTainting::CorsTainting => ResponseType::Cors,
ResponseTainting::Opaque => ResponseType::Opaque,
};
response.to_filtered(response_type)
} else {
response
};
let internal_error = {
// Tests for steps 17 and 18, before step 15 for borrowing concerns.
let response_is_network_error = response.is_network_error();
let should_replace_with_nosniff_error =
!response_is_network_error && should_be_blocked_due_to_nosniff(request.type_, &response.headers);
let should_replace_with_mime_type_error =
!response_is_network_error && should_be_blocked_due_to_mime_type(request.type_, &response.headers);
// Step 15.
let mut network_error_response = response.get_network_error().cloned().map(Response::network_error);
let internal_response = if let Some(error_response) = network_error_response.as_mut() {
error_response
} else {
response.actual_response_mut()
};
// Step 16.
if internal_response.url_list.is_empty() {
internal_response.url_list = request.url_list.clone();
}
// Step 17.
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
let blocked_error_response;
let internal_response =
if should_replace_with_nosniff_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by nosniff".into()));
&blocked_error_response
} else if should_replace_with_mime_type_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by mime type".into()));
&blocked_error_response
} else {
internal_response
};
// Step 18.
// We check `internal_response` since we did not mutate `response`
// in the previous step.
let not_network_error = !response_is_network_error && !internal_response.is_network_error();
if not_network_error && (is_null_body_status(&internal_response.status) ||
match request.method {
Method::Head | Method::Connect => true,
_ => false }) {
// when Fetch is used only asynchronously, we will need to make sure
// that nothing tries to write to the body at this point
let mut body = internal_response.body.lock().unwrap();
*body = ResponseBody::Empty;
}
internal_response.get_network_error().map(|e| e.clone())
};
// Execute deferred rebinding of response.
let response = if let Some(error) = internal_error {
Response::network_error(error)
} else {
response
};
// Step 19.
let mut response_loaded = false;
let response = if !response.is_network_error() && !request.integrity_metadata.is_empty() {
// Step 19.1.
wait_for_response(&response, target, done_chan);
response_loaded = true;
// Step 19.2.
let ref integrity_metadata = &request.integrity_metadata;
if response.termination_reason.is_none() &&
!is_response_integrity_valid(integrity_metadata, &response) {
Response::network_error(NetworkError::Internal("Subresource integrity validation failed".into()))
} else {
response
}
} else {
response
};
// Step 20.
if request.synchronous {
// process_response is not supposed to be used
// by sync fetch, but we overload it here for simplicity
target.process_response(&response);
if !response_loaded {
wait_for_response(&response, target, done_chan);
}
// overloaded similarly to process_response
target.process_response_eof(&response);
return response;
}
// Step 21.
if request.body.is_some() && matches!(request.current_url().scheme(), "http" | "https") {
// XXXManishearth: We actually should be calling process_request
// in http_network_fetch. However, we can't yet follow the request
// upload progress, so I'm keeping it here for now and pretending
// the body got sent in one chunk
target.process_request_body(&request);
target.process_request_eof(&request);
}
// Step 22.
target.process_response(&response);
// Step 23.
if !response_loaded {
wait_for_response(&response, target, done_chan);
}
// Step 24.
target.process_response_eof(&response);
// Steps 25-27.
// TODO: remove this line when only asynchronous fetches are used
response
}
fn wait_for_response(response: &Response, target: Target, done_chan: &mut DoneChannel) {
if let Some(ref ch) = *done_chan {
loop {
match ch.1.recv()
.expect("fetch worker should always send Done before terminating") {
Data::Payload(vec) => {
target.process_response_chunk(vec);
},
Data::Done => break,
}
}
} else {
let body = response.body.lock().unwrap();
if let ResponseBody::Done(ref vec) = *body {
// in case there was no channel to wait for, the body was
// obtained synchronously via scheme_fetch for data/file/about/etc
// We should still send the body across as a chunk
target.process_response_chunk(vec.clone());
} else {
assert!(*body == ResponseBody::Empty)
}
}
|
/// [Scheme fetch](https://fetch.spec.whatwg.org#scheme-fetch)
fn scheme_fetch(request: &mut Request,
cache: &mut CorsCache,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
let url = request.current_url();
match url.scheme() {
"about" if url.path() == "blank" => {
let mut response = Response::new(url);
response.headers.set(ContentType(mime!(Text / Html; Charset = Utf8)));
*response.body.lock().unwrap() = ResponseBody::Done(vec![]);
response
},
"http" | "https" => {
http_fetch(request, cache, false, false, false, target, done_chan, context)
},
"data" => {
match decode(&url) {
Ok((mime, bytes)) => {
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
Err(_) => Response::network_error(NetworkError::Internal("Decoding data URL failed".into()))
}
},
"file" => {
if request.method == Method::Get {
match url.to_file_path() {
Ok(file_path) => {
match File::open(file_path.clone()) {
Ok(mut file) => {
let mut bytes = vec![];
let _ = file.read_to_end(&mut bytes);
let mime = guess_mime_type(file_path);
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
_ => Response::network_error(NetworkError::Internal("Opening file failed".into())),
}
},
_ => Response::network_error(NetworkError::Internal("Constructing file path failed".into()))
}
} else {
Response::network_error(NetworkError::Internal("Unexpected method for file".into()))
}
},
"blob" => {
println!("Loading blob {}", url.as_str());
// Step 2.
if request.method != Method::Get {
return Response::network_error(NetworkError::Internal("Unexpected method for blob".into()));
}
match load_blob_sync(url.clone(), context.filemanager.clone()) {
Ok((headers, bytes)) => {
let mut response = Response::new(url);
response.headers = headers;
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response
},
Err(e) => {
debug!("Failed to load {}: {:?}", url, e);
Response::network_error(e)
},
}
},
"ftp" => {
debug!("ftp is not implemented");
Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
},
_ => Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
}
}
/// <https://fetch.spec.whatwg.org/#cors-safelisted-request-header>
pub fn is_cors_safelisted_request_header(h: &HeaderView) -> bool {
if h.is::<ContentType>() {
match h.value() {
Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
_ => false
}
} else {
h.is::<Accept>() || h.is::<AcceptLanguage>() || h.is::<ContentLanguage>()
}
}
/// <https://fetch.spec.whatwg.org/#cors-safelisted-method>
pub fn is_cors_safelisted_method(m: &Method) -> bool {
match *m {
Method::Get | Method::Head | Method::Post => true,
_ => false
}
}
fn is_null_body_status(status: &Option<StatusCode>) -> bool {
match *status {
Some(status) => match status {
StatusCode::SwitchingProtocols | StatusCode::NoContent |
StatusCode::ResetContent | StatusCode::NotModified => true,
_ => false
},
_ => false
}
}
/// <https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff?>
pub fn should_be_blocked_due_to_nosniff(request_type: Type, response_headers: &Headers) -> bool {
/// <https://fetch.spec.whatwg.org/#x-content-type-options-header>
/// This is needed to parse `X-Content-Type-Options` according to spec,
/// which requires that we inspect only the first value.
///
/// A [unit-like struct](https://doc.rust-lang.org/book/structs.html#unit-like-structs)
/// is sufficient since a valid header implies that we use `nosniff`.
#[derive(Clone, Copy, Debug)]
struct XContentTypeOptions;
impl Header for XContentTypeOptions {
fn header_name() -> &'static str {
"X-Content-Type-Options"
}
/// https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff%3F #2
fn parse_header(raw: &[Vec<u8>]) -> HyperResult<Self
|
}
|
random_line_split
|
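A small illustrative sketch (hypothetical assertions, assuming the hyper `Method` variants already used in this file) of the safelist helpers above: only GET, HEAD and POST avoid the CORS-preflight branch of `main_fetch` step 12:

fn cors_safelist_sketch() {
    assert!(is_cors_safelisted_method(&Method::Get));
    assert!(is_cors_safelisted_method(&Method::Head));
    assert!(is_cors_safelisted_method(&Method::Post));
    // Any other method on an unsafe request forces a CORS preflight.
    assert!(!is_cors_safelisted_method(&Method::Put));
    assert!(!is_cors_safelisted_method(&Method::Delete));
}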
methods.rs
|
// TODO: set request's origin to request's client's origin
unimplemented!()
}
// Step 3.
set_default_accept(request.type_, request.destination, &mut request.headers);
// Step 4.
set_default_accept_language(&mut request.headers);
// Step 5.
// TODO: figure out what a Priority object is.
// Step 6.
// TODO: handle client hints headers.
// Step 7.
if request.is_subresource_request() {
// TODO: handle client hints headers.
}
// Step 8.
main_fetch(request, cache, false, false, target, &mut None, &context);
}
/// [Main fetch](https://fetch.spec.whatwg.org/#concept-main-fetch)
pub fn main_fetch(request: &mut Request,
cache: &mut CorsCache,
cors_flag: bool,
recursive_flag: bool,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// Step 1.
let mut response = None;
// Step 2.
if request.local_urls_only {
if !matches!(request.current_url().scheme(), "about" | "blob" | "data" | "filesystem") {
response = Some(Response::network_error(NetworkError::Internal("Non-local scheme".into())));
}
}
// Step 3.
// TODO: handle content security policy violations.
// Step 4.
// TODO: handle upgrade to a potentially secure URL.
// Step 5.
if should_be_blocked_due_to_bad_port(&request.url()) {
response = Some(Response::network_error(NetworkError::Internal("Request attempted on bad port".into())));
}
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
// Step 6
// TODO: handle request's client's referrer policy.
// Step 7.
request.referrer_policy = request.referrer_policy.or(Some(ReferrerPolicy::NoReferrerWhenDowngrade));
// Step 8.
{
let referrer_url = match mem::replace(&mut request.referrer, Referrer::NoReferrer) {
Referrer::NoReferrer => None,
Referrer::Client => {
// FIXME(#14507): We should never get this value here; it should
// already have been handled in the script thread.
request.headers.remove::<RefererHeader>();
None
},
Referrer::ReferrerUrl(url) => {
request.headers.remove::<RefererHeader>();
let current_url = request.current_url().clone();
determine_request_referrer(&mut request.headers,
request.referrer_policy.unwrap(),
url,
current_url)
}
};
if let Some(referrer_url) = referrer_url {
request.referrer = Referrer::ReferrerUrl(referrer_url);
}
}
// Step 9.
// TODO: handle FTP URLs.
// Step 10.
context.state.hsts_list.read().unwrap().switch_known_hsts_host_domain_url_to_https(
request.current_url_mut());
// Step 11.
// Not applicable: see fetch_async.
// Step 12.
let mut response = response.unwrap_or_else(|| {
let current_url = request.current_url();
let same_origin = if let Origin::Origin(ref origin) = request.origin {
*origin == current_url.origin()
} else {
false
};
if (same_origin && !cors_flag ) ||
current_url.scheme() == "data" ||
current_url.scheme() == "file" || // FIXME: Fetch spec has already dropped filtering against file:
// and about: schemes, but CSS tests will break on loading Ahem
// since we load them through a file: URL.
current_url.scheme() == "about" ||
request.mode == RequestMode::Navigate {
// Substep 1.
request.response_tainting = ResponseTainting::Basic;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if request.mode == RequestMode::SameOrigin {
Response::network_error(NetworkError::Internal("Cross-origin response".into()))
} else if request.mode == RequestMode::NoCors {
// Substep 1.
request.response_tainting = ResponseTainting::Opaque;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if !matches!(current_url.scheme(), "http" | "https") {
Response::network_error(NetworkError::Internal("Non-http scheme".into()))
} else if request.use_cors_preflight ||
(request.unsafe_request &&
(!is_cors_safelisted_method(&request.method) ||
request.headers.iter().any(|h| !is_cors_safelisted_request_header(&h)))) {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
let response = http_fetch(request, cache, true, true, false,
target, done_chan, context);
// Substep 3.
if response.is_network_error() {
// TODO clear cache entries using request
}
// Substep 4.
response
} else {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
http_fetch(request, cache, true, false, false, target, done_chan, context)
}
});
// Step 13.
if recursive_flag {
return response;
}
// Step 14.
let mut response = if !response.is_network_error() && response.internal_response.is_none() {
// Substep 1.
if request.response_tainting == ResponseTainting::CorsTainting {
// Subsubstep 1.
let header_names = response.headers.get::<AccessControlExposeHeaders>();
match header_names {
// Subsubstep 2.
Some(list) if request.credentials_mode != CredentialsMode::Include => {
if list.len() == 1 && list[0] == "*" {
response.cors_exposed_header_name_list =
response.headers.iter().map(|h| h.name().to_owned()).collect();
}
},
// Subsubstep 3.
Some(list) => {
response.cors_exposed_header_name_list = list.iter().map(|h| (**h).clone()).collect();
},
_ => (),
}
}
// Substep 2.
let response_type = match request.response_tainting {
ResponseTainting::Basic => ResponseType::Basic,
ResponseTainting::CorsTainting => ResponseType::Cors,
ResponseTainting::Opaque => ResponseType::Opaque,
};
response.to_filtered(response_type)
} else {
response
};
let internal_error = {
// Tests for steps 17 and 18, before step 15 for borrowing concerns.
let response_is_network_error = response.is_network_error();
let should_replace_with_nosniff_error =
!response_is_network_error && should_be_blocked_due_to_nosniff(request.type_, &response.headers);
let should_replace_with_mime_type_error =
!response_is_network_error && should_be_blocked_due_to_mime_type(request.type_, &response.headers);
// Step 15.
let mut network_error_response = response.get_network_error().cloned().map(Response::network_error);
let internal_response = if let Some(error_response) = network_error_response.as_mut() {
error_response
} else {
response.actual_response_mut()
};
// Step 16.
if internal_response.url_list.is_empty() {
internal_response.url_list = request.url_list.clone();
}
// Step 17.
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
let blocked_error_response;
let internal_response =
if should_replace_with_nosniff_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by nosniff".into()));
&blocked_error_response
} else if should_replace_with_mime_type_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by mime type".into()));
&blocked_error_response
} else {
internal_response
};
// Step 18.
// We check `internal_response` since we did not mutate `response`
// in the previous step.
let not_network_error = !response_is_network_error && !internal_response.is_network_error();
if not_network_error && (is_null_body_status(&internal_response.status) ||
match request.method {
Method::Head | Method::Connect => true,
_ => false }) {
// when Fetch is used only asynchronously, we will need to make sure
// that nothing tries to write to the body at this point
let mut body = internal_response.body.lock().unwrap();
*body = ResponseBody::Empty;
}
internal_response.get_network_error().map(|e| e.clone())
};
// Execute deferred rebinding of response.
let response = if let Some(error) = internal_error {
Response::network_error(error)
} else {
response
};
// Step 19.
let mut response_loaded = false;
let response = if !response.is_network_error() && !request.integrity_metadata.is_empty() {
// Step 19.1.
wait_for_response(&response, target, done_chan);
response_loaded = true;
// Step 19.2.
let ref integrity_metadata = &request.integrity_metadata;
if response.termination_reason.is_none() &&
!is_response_integrity_valid(integrity_metadata, &response) {
Response::network_error(NetworkError::Internal("Subresource integrity validation failed".into()))
} else {
response
}
} else {
response
};
// Step 20.
if request.synchronous {
// process_response is not supposed to be used
// by sync fetch, but we overload it here for simplicity
target.process_response(&response);
if !response_loaded {
wait_for_response(&response, target, done_chan);
}
// overloaded similarly to process_response
target.process_response_eof(&response);
return response;
}
// Step 21.
if request.body.is_some() && matches!(request.current_url().scheme(), "http" | "https") {
// XXXManishearth: We actually should be calling process_request
// in http_network_fetch. However, we can't yet follow the request
// upload progress, so I'm keeping it here for now and pretending
// the body got sent in one chunk
target.process_request_body(&request);
target.process_request_eof(&request);
}
// Step 22.
target.process_response(&response);
// Step 23.
if !response_loaded {
wait_for_response(&response, target, done_chan);
}
// Step 24.
target.process_response_eof(&response);
// Steps 25-27.
// TODO: remove this line when only asynchronous fetches are used
response
}
fn wait_for_response(response: &Response, target: Target, done_chan: &mut DoneChannel) {
if let Some(ref ch) = *done_chan {
loop {
match ch.1.recv()
.expect("fetch worker should always send Done before terminating") {
Data::Payload(vec) => {
target.process_response_chunk(vec);
},
Data::Done => break,
}
}
} else {
let body = response.body.lock().unwrap();
if let ResponseBody::Done(ref vec) = *body {
// in case there was no channel to wait for, the body was
// obtained synchronously via scheme_fetch for data/file/about/etc
// We should still send the body across as a chunk
target.process_response_chunk(vec.clone());
} else
|
}
}
/// [Scheme fetch](https://fetch.spec.whatwg.org#scheme-fetch)
fn scheme_fetch(request: &mut Request,
cache: &mut CorsCache,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
let url = request.current_url();
match url.scheme() {
"about" if url.path() == "blank" => {
let mut response = Response::new(url);
response.headers.set(ContentType(mime!(Text / Html; Charset = Utf8)));
*response.body.lock().unwrap() = ResponseBody::Done(vec![]);
response
},
"http" | "https" => {
http_fetch(request, cache, false, false, false, target, done_chan, context)
},
"data" => {
match decode(&url) {
Ok((mime, bytes)) => {
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
Err(_) => Response::network_error(NetworkError::Internal("Decoding data URL failed".into()))
}
},
"file" => {
if request.method == Method::Get {
match url.to_file_path() {
Ok(file_path) => {
match File::open(file_path.clone()) {
Ok(mut file) => {
let mut bytes = vec![];
let _ = file.read_to_end(&mut bytes);
let mime = guess_mime_type(file_path);
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
_ => Response::network_error(NetworkError::Internal("Opening file failed".into())),
}
},
_ => Response::network_error(NetworkError::Internal("Constructing file path failed".into()))
}
} else {
Response::network_error(NetworkError::Internal("Unexpected method for file".into()))
}
},
"blob" => {
println!("Loading blob {}", url.as_str());
// Step 2.
if request.method != Method::Get {
return Response::network_error(NetworkError::Internal("Unexpected method for blob".into()));
}
match load_blob_sync(url.clone(), context.filemanager.clone()) {
Ok((headers, bytes)) => {
let mut response = Response::new(url);
response.headers = headers;
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response
},
Err(e) => {
debug!("Failed to load {}: {:?}", url, e);
Response::network_error(e)
},
}
},
"ftp" => {
debug!("ftp is not implemented");
Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
},
_ => Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
}
}
/// <https://fetch.spec.whatwg.org/#cors-safelisted-request-header>
pub fn is_cors_safelisted_request_header(h: &HeaderView) -> bool {
if h.is::<ContentType>() {
match h.value() {
Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
_ => false
}
} else {
h.is::<Accept>() || h.is::<AcceptLanguage>() || h.is::<ContentLanguage>()
}
}
/// <https://fetch.spec.whatwg.org/#cors-safelisted-method>
pub fn is_cors_safelisted_method(m: &Method) -> bool {
match *m {
Method::Get | Method::Head | Method::Post => true,
_ => false
}
}
fn is_null_body_status(status: &Option<StatusCode>) -> bool {
match *status {
Some(status) => match status {
StatusCode::SwitchingProtocols | StatusCode::NoContent |
StatusCode::ResetContent | StatusCode::NotModified => true,
_ => false
},
_ => false
}
}
/// <https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff?>
pub fn should_be_blocked_due_to_nosniff(request_type: Type, response_headers: &Headers) -> bool {
/// <https://fetch.spec.whatwg.org/#x-content-type-options-header>
/// This is needed to parse `X-Content-Type-Options` according to spec,
/// which requires that we inspect only the first value.
///
/// A [unit-like struct](https://doc.rust-lang.org/book/structs.html#unit-like-structs)
/// is sufficient since a valid header implies that we use `nosniff`.
#[derive(Clone, Copy, Debug)]
struct XContentTypeOptions;
impl Header for XContentTypeOptions {
fn header_name() -> &'static str {
"X-Content-Type-Options"
}
/// https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff%3F #2
fn parse_header(raw: &[Vec<u8>]) -> HyperResult
|
{
assert!(*body == ResponseBody::Empty)
}
|
conditional_block
|
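A standalone sketch of the deferred-rebinding pattern used around step 17 above: `blocked_error_response` is declared before the branch so that a reference to it can outlive the `if`/`else` that decides whether it is needed. The names below are illustrative only:

fn deferred_rebinding_sketch(primary: String, blocked: bool) -> String {
    let fallback;                         // declared but not yet initialised
    let chosen: &String = if blocked {
        fallback = "blocked".to_string(); // initialised only on this branch
        &fallback                         // the borrow outlives the `if`
    } else {
        &primary
    };
    chosen.clone()
}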
methods.rs
|
// TODO: set request's origin to request's client's origin
unimplemented!()
}
// Step 3.
set_default_accept(request.type_, request.destination, &mut request.headers);
// Step 4.
set_default_accept_language(&mut request.headers);
// Step 5.
// TODO: figure out what a Priority object is.
// Step 6.
// TODO: handle client hints headers.
// Step 7.
if request.is_subresource_request() {
// TODO: handle client hints headers.
}
// Step 8.
main_fetch(request, cache, false, false, target, &mut None, &context);
}
/// [Main fetch](https://fetch.spec.whatwg.org/#concept-main-fetch)
pub fn main_fetch(request: &mut Request,
cache: &mut CorsCache,
cors_flag: bool,
recursive_flag: bool,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// Step 1.
let mut response = None;
// Step 2.
if request.local_urls_only {
if !matches!(request.current_url().scheme(), "about" | "blob" | "data" | "filesystem") {
response = Some(Response::network_error(NetworkError::Internal("Non-local scheme".into())));
}
}
// Step 3.
// TODO: handle content security policy violations.
// Step 4.
// TODO: handle upgrade to a potentially secure URL.
// Step 5.
if should_be_blocked_due_to_bad_port(&request.url()) {
response = Some(Response::network_error(NetworkError::Internal("Request attempted on bad port".into())));
}
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
// Step 6
// TODO: handle request's client's referrer policy.
// Step 7.
request.referrer_policy = request.referrer_policy.or(Some(ReferrerPolicy::NoReferrerWhenDowngrade));
// Step 8.
{
let referrer_url = match mem::replace(&mut request.referrer, Referrer::NoReferrer) {
Referrer::NoReferrer => None,
Referrer::Client => {
// FIXME(#14507): We should never get this value here; it should
// already have been handled in the script thread.
request.headers.remove::<RefererHeader>();
None
},
Referrer::ReferrerUrl(url) => {
request.headers.remove::<RefererHeader>();
let current_url = request.current_url().clone();
determine_request_referrer(&mut request.headers,
request.referrer_policy.unwrap(),
url,
current_url)
}
};
if let Some(referrer_url) = referrer_url {
request.referrer = Referrer::ReferrerUrl(referrer_url);
}
}
// Step 9.
// TODO: handle FTP URLs.
// Step 10.
context.state.hsts_list.read().unwrap().switch_known_hsts_host_domain_url_to_https(
request.current_url_mut());
// Step 11.
// Not applicable: see fetch_async.
// Step 12.
let mut response = response.unwrap_or_else(|| {
let current_url = request.current_url();
let same_origin = if let Origin::Origin(ref origin) = request.origin {
*origin == current_url.origin()
} else {
false
};
if (same_origin && !cors_flag ) ||
current_url.scheme() == "data" ||
current_url.scheme() == "file" || // FIXME: Fetch spec has already dropped filtering against file:
// and about: schemes, but CSS tests will break on loading Ahem
// since we load them through a file: URL.
current_url.scheme() == "about" ||
request.mode == RequestMode::Navigate {
// Substep 1.
request.response_tainting = ResponseTainting::Basic;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if request.mode == RequestMode::SameOrigin {
Response::network_error(NetworkError::Internal("Cross-origin response".into()))
} else if request.mode == RequestMode::NoCors {
// Substep 1.
request.response_tainting = ResponseTainting::Opaque;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if !matches!(current_url.scheme(), "http" | "https") {
Response::network_error(NetworkError::Internal("Non-http scheme".into()))
} else if request.use_cors_preflight ||
(request.unsafe_request &&
(!is_cors_safelisted_method(&request.method) ||
request.headers.iter().any(|h| !is_cors_safelisted_request_header(&h)))) {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
let response = http_fetch(request, cache, true, true, false,
target, done_chan, context);
// Substep 3.
if response.is_network_error() {
// TODO clear cache entries using request
}
// Substep 4.
response
} else {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
http_fetch(request, cache, true, false, false, target, done_chan, context)
}
});
// Step 13.
if recursive_flag {
return response;
}
// Step 14.
let mut response = if !response.is_network_error() && response.internal_response.is_none() {
// Substep 1.
if request.response_tainting == ResponseTainting::CorsTainting {
// Subsubstep 1.
let header_names = response.headers.get::<AccessControlExposeHeaders>();
match header_names {
// Subsubstep 2.
Some(list) if request.credentials_mode != CredentialsMode::Include => {
if list.len() == 1 && list[0] == "*" {
response.cors_exposed_header_name_list =
response.headers.iter().map(|h| h.name().to_owned()).collect();
}
},
// Subsubstep 3.
Some(list) => {
response.cors_exposed_header_name_list = list.iter().map(|h| (**h).clone()).collect();
},
_ => (),
}
}
// Substep 2.
let response_type = match request.response_tainting {
ResponseTainting::Basic => ResponseType::Basic,
ResponseTainting::CorsTainting => ResponseType::Cors,
ResponseTainting::Opaque => ResponseType::Opaque,
};
response.to_filtered(response_type)
} else {
response
};
let internal_error = {
// Tests for steps 17 and 18, before step 15 for borrowing concerns.
let response_is_network_error = response.is_network_error();
let should_replace_with_nosniff_error =
!response_is_network_error && should_be_blocked_due_to_nosniff(request.type_, &response.headers);
let should_replace_with_mime_type_error =
!response_is_network_error && should_be_blocked_due_to_mime_type(request.type_, &response.headers);
// Step 15.
let mut network_error_response = response.get_network_error().cloned().map(Response::network_error);
let internal_response = if let Some(error_response) = network_error_response.as_mut() {
error_response
} else {
response.actual_response_mut()
};
// Step 16.
if internal_response.url_list.is_empty() {
internal_response.url_list = request.url_list.clone();
}
// Step 17.
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
let blocked_error_response;
let internal_response =
if should_replace_with_nosniff_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by nosniff".into()));
&blocked_error_response
} else if should_replace_with_mime_type_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by mime type".into()));
&blocked_error_response
} else {
internal_response
};
// Step 18.
// We check `internal_response` since we did not mutate `response`
// in the previous step.
let not_network_error = !response_is_network_error && !internal_response.is_network_error();
if not_network_error && (is_null_body_status(&internal_response.status) ||
match request.method {
Method::Head | Method::Connect => true,
_ => false }) {
// when Fetch is used only asynchronously, we will need to make sure
// that nothing tries to write to the body at this point
let mut body = internal_response.body.lock().unwrap();
*body = ResponseBody::Empty;
}
internal_response.get_network_error().map(|e| e.clone())
};
// Execute deferred rebinding of response.
let response = if let Some(error) = internal_error {
Response::network_error(error)
} else {
response
};
// Step 19.
let mut response_loaded = false;
let response = if !response.is_network_error() && !request.integrity_metadata.is_empty() {
// Step 19.1.
wait_for_response(&response, target, done_chan);
response_loaded = true;
// Step 19.2.
let ref integrity_metadata = &request.integrity_metadata;
if response.termination_reason.is_none() &&
!is_response_integrity_valid(integrity_metadata, &response) {
Response::network_error(NetworkError::Internal("Subresource integrity validation failed".into()))
} else {
response
}
} else {
response
};
// Step 20.
if request.synchronous {
// process_response is not supposed to be used
// by sync fetch, but we overload it here for simplicity
target.process_response(&response);
if !response_loaded {
wait_for_response(&response, target, done_chan);
}
// overloaded similarly to process_response
target.process_response_eof(&response);
return response;
}
// Step 21.
if request.body.is_some() && matches!(request.current_url().scheme(), "http" | "https") {
// XXXManishearth: We actually should be calling process_request
// in http_network_fetch. However, we can't yet follow the request
// upload progress, so I'm keeping it here for now and pretending
// the body got sent in one chunk
target.process_request_body(&request);
target.process_request_eof(&request);
}
// Step 22.
target.process_response(&response);
// Step 23.
if !response_loaded {
wait_for_response(&response, target, done_chan);
}
// Step 24.
target.process_response_eof(&response);
// Steps 25-27.
// TODO: remove this line when only asynchronous fetches are used
response
}
fn wait_for_response(response: &Response, target: Target, done_chan: &mut DoneChannel) {
if let Some(ref ch) = *done_chan {
loop {
match ch.1.recv()
.expect("fetch worker should always send Done before terminating") {
Data::Payload(vec) => {
target.process_response_chunk(vec);
},
Data::Done => break,
}
}
} else {
let body = response.body.lock().unwrap();
if let ResponseBody::Done(ref vec) = *body {
// in case there was no channel to wait for, the body was
// obtained synchronously via scheme_fetch for data/file/about/etc
// We should still send the body across as a chunk
target.process_response_chunk(vec.clone());
} else {
assert!(*body == ResponseBody::Empty)
}
}
}
/// [Scheme fetch](https://fetch.spec.whatwg.org#scheme-fetch)
fn scheme_fetch(request: &mut Request,
cache: &mut CorsCache,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
let url = request.current_url();
match url.scheme() {
"about" if url.path() == "blank" => {
let mut response = Response::new(url);
response.headers.set(ContentType(mime!(Text / Html; Charset = Utf8)));
*response.body.lock().unwrap() = ResponseBody::Done(vec![]);
response
},
"http" | "https" => {
http_fetch(request, cache, false, false, false, target, done_chan, context)
},
"data" => {
match decode(&url) {
Ok((mime, bytes)) => {
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
Err(_) => Response::network_error(NetworkError::Internal("Decoding data URL failed".into()))
}
},
"file" => {
if request.method == Method::Get {
match url.to_file_path() {
Ok(file_path) => {
match File::open(file_path.clone()) {
Ok(mut file) => {
let mut bytes = vec![];
let _ = file.read_to_end(&mut bytes);
let mime = guess_mime_type(file_path);
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
_ => Response::network_error(NetworkError::Internal("Opening file failed".into())),
}
},
_ => Response::network_error(NetworkError::Internal("Constructing file path failed".into()))
}
} else {
Response::network_error(NetworkError::Internal("Unexpected method for file".into()))
}
},
"blob" => {
println!("Loading blob {}", url.as_str());
// Step 2.
if request.method != Method::Get {
return Response::network_error(NetworkError::Internal("Unexpected method for blob".into()));
}
match load_blob_sync(url.clone(), context.filemanager.clone()) {
Ok((headers, bytes)) => {
let mut response = Response::new(url);
response.headers = headers;
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response
},
Err(e) => {
debug!("Failed to load {}: {:?}", url, e);
Response::network_error(e)
},
}
},
"ftp" => {
debug!("ftp is not implemented");
Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
},
_ => Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
}
}
/// <https://fetch.spec.whatwg.org/#cors-safelisted-request-header>
pub fn is_cors_safelisted_request_header(h: &HeaderView) -> bool
|
/// <https://fetch.spec.whatwg.org/#cors-safelisted-method>
pub fn is_cors_safelisted_method(m: &Method) -> bool {
match *m {
Method::Get | Method::Head | Method::Post => true,
_ => false
}
}
fn is_null_body_status(status: &Option<StatusCode>) -> bool {
match *status {
Some(status) => match status {
StatusCode::SwitchingProtocols | StatusCode::NoContent |
StatusCode::ResetContent | StatusCode::NotModified => true,
_ => false
},
_ => false
}
}
/// <https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff?>
pub fn should_be_blocked_due_to_nosniff(request_type: Type, response_headers: &Headers) -> bool {
/// <https://fetch.spec.whatwg.org/#x-content-type-options-header>
/// This is needed to parse `X-Content-Type-Options` according to spec,
/// which requires that we inspect only the first value.
///
/// A [unit-like struct](https://doc.rust-lang.org/book/structs.html#unit-like-structs)
/// is sufficient since a valid header implies that we use `nosniff`.
#[derive(Clone, Copy, Debug)]
struct XContentTypeOptions;
impl Header for XContentTypeOptions {
fn header_name() -> &'static str {
"X-Content-Type-Options"
}
/// https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff%3F #2
fn parse_header(raw: &[Vec<u8>]) -> HyperResult
|
{
if h.is::<ContentType>() {
match h.value() {
Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
_ => false
}
} else {
h.is::<Accept>() || h.is::<AcceptLanguage>() || h.is::<ContentLanguage>()
}
}
|
identifier_body
|
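Quick illustrative assertions (hypothetical, assuming the hyper `StatusCode` variants already imported in this file) for `is_null_body_status`, which decides when step 18 of `main_fetch` empties the response body:

fn null_body_status_sketch() {
    assert!(is_null_body_status(&Some(StatusCode::NoContent)));   // 204
    assert!(is_null_body_status(&Some(StatusCode::NotModified))); // 304
    assert!(!is_null_body_status(&Some(StatusCode::Ok)));         // 200
    assert!(!is_null_body_status(&None));
}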
methods.rs
|
// TODO: set request's origin to request's client's origin
unimplemented!()
}
// Step 3.
set_default_accept(request.type_, request.destination, &mut request.headers);
// Step 4.
set_default_accept_language(&mut request.headers);
// Step 5.
// TODO: figure out what a Priority object is.
// Step 6.
// TODO: handle client hints headers.
// Step 7.
if request.is_subresource_request() {
// TODO: handle client hints headers.
}
// Step 8.
main_fetch(request, cache, false, false, target, &mut None, &context);
}
/// [Main fetch](https://fetch.spec.whatwg.org/#concept-main-fetch)
pub fn main_fetch(request: &mut Request,
cache: &mut CorsCache,
cors_flag: bool,
recursive_flag: bool,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// Step 1.
let mut response = None;
// Step 2.
if request.local_urls_only {
if !matches!(request.current_url().scheme(), "about" | "blob" | "data" | "filesystem") {
response = Some(Response::network_error(NetworkError::Internal("Non-local scheme".into())));
}
}
// Step 3.
// TODO: handle content security policy violations.
// Step 4.
// TODO: handle upgrade to a potentially secure URL.
// Step 5.
if should_be_blocked_due_to_bad_port(&request.url()) {
response = Some(Response::network_error(NetworkError::Internal("Request attempted on bad port".into())));
}
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
// Step 6
// TODO: handle request's client's referrer policy.
// Step 7.
request.referrer_policy = request.referrer_policy.or(Some(ReferrerPolicy::NoReferrerWhenDowngrade));
// Step 8.
{
let referrer_url = match mem::replace(&mut request.referrer, Referrer::NoReferrer) {
Referrer::NoReferrer => None,
Referrer::Client => {
// FIXME(#14507): We should never get this value here; it should
// already have been handled in the script thread.
request.headers.remove::<RefererHeader>();
None
},
Referrer::ReferrerUrl(url) => {
request.headers.remove::<RefererHeader>();
let current_url = request.current_url().clone();
determine_request_referrer(&mut request.headers,
request.referrer_policy.unwrap(),
url,
current_url)
}
};
if let Some(referrer_url) = referrer_url {
request.referrer = Referrer::ReferrerUrl(referrer_url);
}
}
// Step 9.
// TODO: handle FTP URLs.
// Step 10.
context.state.hsts_list.read().unwrap().switch_known_hsts_host_domain_url_to_https(
request.current_url_mut());
// Step 11.
// Not applicable: see fetch_async.
// Step 12.
let mut response = response.unwrap_or_else(|| {
let current_url = request.current_url();
let same_origin = if let Origin::Origin(ref origin) = request.origin {
*origin == current_url.origin()
} else {
false
};
if (same_origin && !cors_flag ) ||
current_url.scheme() == "data" ||
current_url.scheme() == "file" || // FIXME: Fetch spec has already dropped filtering against file:
// and about: schemes, but CSS tests will break on loading Ahem
// since we load them through a file: URL.
current_url.scheme() == "about" ||
request.mode == RequestMode::Navigate {
// Substep 1.
request.response_tainting = ResponseTainting::Basic;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if request.mode == RequestMode::SameOrigin {
Response::network_error(NetworkError::Internal("Cross-origin response".into()))
} else if request.mode == RequestMode::NoCors {
// Substep 1.
request.response_tainting = ResponseTainting::Opaque;
// Substep 2.
scheme_fetch(request, cache, target, done_chan, context)
} else if !matches!(current_url.scheme(), "http" | "https") {
Response::network_error(NetworkError::Internal("Non-http scheme".into()))
} else if request.use_cors_preflight ||
(request.unsafe_request &&
(!is_cors_safelisted_method(&request.method) ||
request.headers.iter().any(|h| !is_cors_safelisted_request_header(&h)))) {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
let response = http_fetch(request, cache, true, true, false,
target, done_chan, context);
// Substep 3.
if response.is_network_error() {
// TODO clear cache entries using request
}
// Substep 4.
response
} else {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
http_fetch(request, cache, true, false, false, target, done_chan, context)
}
});
// Step 13.
if recursive_flag {
return response;
}
// Step 14.
let mut response = if!response.is_network_error() && response.internal_response.is_none() {
// Substep 1.
if request.response_tainting == ResponseTainting::CorsTainting {
// Subsubstep 1.
let header_names = response.headers.get::<AccessControlExposeHeaders>();
match header_names {
// Subsubstep 2.
Some(list) if request.credentials_mode!= CredentialsMode::Include => {
if list.len() == 1 && list[0] == "*" {
response.cors_exposed_header_name_list =
response.headers.iter().map(|h| h.name().to_owned()).collect();
}
},
// Subsubstep 3.
Some(list) => {
response.cors_exposed_header_name_list = list.iter().map(|h| (**h).clone()).collect();
},
_ => (),
}
}
// Substep 2.
let response_type = match request.response_tainting {
ResponseTainting::Basic => ResponseType::Basic,
ResponseTainting::CorsTainting => ResponseType::Cors,
ResponseTainting::Opaque => ResponseType::Opaque,
};
response.to_filtered(response_type)
} else {
response
};
let internal_error = {
// Tests for steps 17 and 18, before step 15 for borrowing concerns.
let response_is_network_error = response.is_network_error();
let should_replace_with_nosniff_error =
!response_is_network_error && should_be_blocked_due_to_nosniff(request.type_, &response.headers);
let should_replace_with_mime_type_error =
!response_is_network_error && should_be_blocked_due_to_mime_type(request.type_, &response.headers);
// Step 15.
let mut network_error_response = response.get_network_error().cloned().map(Response::network_error);
let internal_response = if let Some(error_response) = network_error_response.as_mut() {
error_response
} else {
response.actual_response_mut()
};
// Step 16.
if internal_response.url_list.is_empty() {
internal_response.url_list = request.url_list.clone();
}
// Step 17.
// TODO: handle blocking as mixed content.
// TODO: handle blocking by content security policy.
let blocked_error_response;
let internal_response =
if should_replace_with_nosniff_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by nosniff".into()));
&blocked_error_response
} else if should_replace_with_mime_type_error {
// Defer rebinding result
blocked_error_response = Response::network_error(NetworkError::Internal("Blocked by mime type".into()));
&blocked_error_response
} else {
internal_response
};
// Step 18.
// We check `internal_response` since we did not mutate `response`
// in the previous step.
let not_network_error =!response_is_network_error &&!internal_response.is_network_error();
if not_network_error && (is_null_body_status(&internal_response.status) ||
match request.method {
Method::Head | Method::Connect => true,
_ => false }) {
// when Fetch is used only asynchronously, we will need to make sure
// that nothing tries to write to the body at this point
let mut body = internal_response.body.lock().unwrap();
*body = ResponseBody::Empty;
}
internal_response.get_network_error().map(|e| e.clone())
};
// Execute deferred rebinding of response.
let response = if let Some(error) = internal_error {
Response::network_error(error)
} else {
response
};
// Step 19.
let mut response_loaded = false;
let response = if!response.is_network_error() &&!request.integrity_metadata.is_empty() {
// Step 19.1.
wait_for_response(&response, target, done_chan);
response_loaded = true;
// Step 19.2.
let ref integrity_metadata = &request.integrity_metadata;
if response.termination_reason.is_none() &&
!is_response_integrity_valid(integrity_metadata, &response) {
Response::network_error(NetworkError::Internal("Subresource integrity validation failed".into()))
} else {
response
}
} else {
response
};
// Step 20.
if request.synchronous {
// process_response is not supposed to be used
// by sync fetch, but we overload it here for simplicity
target.process_response(&response);
if!response_loaded {
wait_for_response(&response, target, done_chan);
}
// overloaded similarly to process_response
target.process_response_eof(&response);
return response;
}
// Step 21.
if request.body.is_some() && matches!(request.current_url().scheme(), "http" | "https") {
// XXXManishearth: We actually should be calling process_request
// in http_network_fetch. However, we can't yet follow the request
// upload progress, so I'm keeping it here for now and pretending
// the body got sent in one chunk
target.process_request_body(&request);
target.process_request_eof(&request);
}
// Step 22.
target.process_response(&response);
// Step 23.
if!response_loaded {
wait_for_response(&response, target, done_chan);
}
// Step 24.
target.process_response_eof(&response);
// Steps 25-27.
// TODO: remove this line when only asynchronous fetches are used
response
}
fn wait_for_response(response: &Response, target: Target, done_chan: &mut DoneChannel) {
if let Some(ref ch) = *done_chan {
loop {
match ch.1.recv()
.expect("fetch worker should always send Done before terminating") {
Data::Payload(vec) => {
target.process_response_chunk(vec);
},
Data::Done => break,
}
}
} else {
let body = response.body.lock().unwrap();
if let ResponseBody::Done(ref vec) = *body {
// in case there was no channel to wait for, the body was
// obtained synchronously via scheme_fetch for data/file/about/etc
// We should still send the body across as a chunk
target.process_response_chunk(vec.clone());
} else {
assert!(*body == ResponseBody::Empty)
}
}
}
/// [Scheme fetch](https://fetch.spec.whatwg.org#scheme-fetch)
fn scheme_fetch(request: &mut Request,
cache: &mut CorsCache,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
let url = request.current_url();
match url.scheme() {
"about" if url.path() == "blank" => {
let mut response = Response::new(url);
response.headers.set(ContentType(mime!(Text / Html; Charset = Utf8)));
*response.body.lock().unwrap() = ResponseBody::Done(vec![]);
response
},
"http" | "https" => {
http_fetch(request, cache, false, false, false, target, done_chan, context)
},
"data" => {
match decode(&url) {
Ok((mime, bytes)) => {
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
Err(_) => Response::network_error(NetworkError::Internal("Decoding data URL failed".into()))
}
},
"file" => {
if request.method == Method::Get {
match url.to_file_path() {
Ok(file_path) => {
match File::open(file_path.clone()) {
Ok(mut file) => {
let mut bytes = vec![];
let _ = file.read_to_end(&mut bytes);
let mime = guess_mime_type(file_path);
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response
},
_ => Response::network_error(NetworkError::Internal("Opening file failed".into())),
}
},
_ => Response::network_error(NetworkError::Internal("Constructing file path failed".into()))
}
} else {
Response::network_error(NetworkError::Internal("Unexpected method for file".into()))
}
},
"blob" => {
println!("Loading blob {}", url.as_str());
// Step 2.
if request.method!= Method::Get {
return Response::network_error(NetworkError::Internal("Unexpected method for blob".into()));
}
match load_blob_sync(url.clone(), context.filemanager.clone()) {
Ok((headers, bytes)) => {
let mut response = Response::new(url);
response.headers = headers;
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response
},
Err(e) => {
debug!("Failed to load {}: {:?}", url, e);
Response::network_error(e)
},
}
},
"ftp" => {
debug!("ftp is not implemented");
Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
},
_ => Response::network_error(NetworkError::Internal("Unexpected scheme".into()))
}
}
/// <https://fetch.spec.whatwg.org/#cors-safelisted-request-header>
pub fn
|
(h: &HeaderView) -> bool {
if h.is::<ContentType>() {
match h.value() {
Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
_ => false
}
} else {
h.is::<Accept>() || h.is::<AcceptLanguage>() || h.is::<ContentLanguage>()
}
}
/// <https://fetch.spec.whatwg.org/#cors-safelisted-method>
pub fn is_cors_safelisted_method(m: &Method) -> bool {
match *m {
Method::Get | Method::Head | Method::Post => true,
_ => false
}
}
fn is_null_body_status(status: &Option<StatusCode>) -> bool {
match *status {
Some(status) => match status {
StatusCode::SwitchingProtocols | StatusCode::NoContent |
StatusCode::ResetContent | StatusCode::NotModified => true,
_ => false
},
_ => false
}
}
/// <https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff?>
pub fn should_be_blocked_due_to_nosniff(request_type: Type, response_headers: &Headers) -> bool {
/// <https://fetch.spec.whatwg.org/#x-content-type-options-header>
/// This is needed to parse `X-Content-Type-Options` according to spec,
/// which requires that we inspect only the first value.
///
/// A [unit-like struct](https://doc.rust-lang.org/book/structs.html#unit-like-structs)
/// is sufficient since a valid header implies that we use `nosniff`.
#[derive(Clone, Copy, Debug)]
struct XContentTypeOptions;
impl Header for XContentTypeOptions {
fn header_name() -> &'static str {
"X-Content-Type-Options"
}
/// https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff%3F #2
fn parse_header(raw: &[Vec<u8>]) -> HyperResult
|
is_cors_safelisted_request_header
|
identifier_name
|
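The fetch.rs row above ends inside is_cors_safelisted_request_header, which whitelists only a handful of header/content-type combinations. As an aside, here is a minimal, self-contained sketch of the same whitelist over plain strings; the function name and arguments are illustrative stand-ins, not Servo's hyper-typed API.

// Simplified CORS-safelist check; a sketch only, not the typed version in the row above.
fn is_safelisted(name: &str, value: &str) -> bool {
    match name.to_ascii_lowercase().as_str() {
        "accept" | "accept-language" | "content-language" => true,
        "content-type" => {
            // Only the media type's "essence" (before any parameters) matters.
            let essence = value.split(';').next().unwrap_or("").trim().to_ascii_lowercase();
            essence == "text/plain"
                || essence == "application/x-www-form-urlencoded"
                || essence == "multipart/form-data"
        }
        _ => false,
    }
}

fn main() {
    assert!(is_safelisted("Content-Type", "text/plain; charset=utf-8"));
    assert!(!is_safelisted("Authorization", "Bearer token"));
    println!("safelist checks passed");
}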
procsrv.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
use std::run;
use std::str;
#[cfg(target_os = "win32")]
fn target_env(lib_path: &str, prog: &str) -> ~[(~str,~str)] {
let mut env = os::env();
// Make sure we include the aux directory in the path
assert!(prog.ends_with(".exe"));
let aux_path = prog.slice(0u, prog.len() - 4u).to_owned() + ".libaux";
env = do env.map() |pair| {
let (k,v) = copy *pair;
if k == ~"PATH" { (~"PATH", v + ";" + lib_path + ";" + aux_path) }
else { (k,v) }
};
if prog.ends_with("rustc.exe") {
env.push((~"RUST_THREADS", ~"1"));
}
return env;
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
fn target_env(_lib_path: &str, _prog: &str) -> ~[(~str,~str)] {
os::env()
}
pub struct Result {status: int, out: ~str, err: ~str}
pub fn
|
(lib_path: &str,
prog: &str,
args: &[~str],
env: ~[(~str, ~str)],
input: Option<~str>) -> Result {
let env = env + target_env(lib_path, prog);
let mut proc = run::Process::new(prog, args, run::ProcessOptions {
env: Some(env.slice(0, env.len())),
dir: None,
in_fd: None,
out_fd: None,
err_fd: None
});
for input.iter().advance |input| {
proc.input().write_str(*input);
}
let output = proc.finish_with_output();
Result {
status: output.status,
out: str::from_bytes(output.output),
err: str::from_bytes(output.error)
}
}
|
run
|
identifier_name
|
procsrv.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
use std::run;
use std::str;
#[cfg(target_os = "win32")]
fn target_env(lib_path: &str, prog: &str) -> ~[(~str,~str)] {
let mut env = os::env();
// Make sure we include the aux directory in the path
assert!(prog.ends_with(".exe"));
let aux_path = prog.slice(0u, prog.len() - 4u).to_owned() + ".libaux";
env = do env.map() |pair| {
let (k,v) = copy *pair;
if k == ~"PATH" { (~"PATH", v + ";" + lib_path + ";" + aux_path) }
else { (k,v) }
};
if prog.ends_with("rustc.exe") {
env.push((~"RUST_THREADS", ~"1"));
}
return env;
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
fn target_env(_lib_path: &str, _prog: &str) -> ~[(~str,~str)] {
os::env()
}
|
pub struct Result {status: int, out: ~str, err: ~str}
pub fn run(lib_path: &str,
prog: &str,
args: &[~str],
env: ~[(~str, ~str)],
input: Option<~str>) -> Result {
let env = env + target_env(lib_path, prog);
let mut proc = run::Process::new(prog, args, run::ProcessOptions {
env: Some(env.slice(0, env.len())),
dir: None,
in_fd: None,
out_fd: None,
err_fd: None
});
for input.iter().advance |input| {
proc.input().write_str(*input);
}
let output = proc.finish_with_output();
Result {
status: output.status,
out: str::from_bytes(output.output),
err: str::from_bytes(output.error)
}
}
|
random_line_split
|
|
procsrv.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
use std::run;
use std::str;
#[cfg(target_os = "win32")]
fn target_env(lib_path: &str, prog: &str) -> ~[(~str,~str)] {
let mut env = os::env();
// Make sure we include the aux directory in the path
assert!(prog.ends_with(".exe"));
let aux_path = prog.slice(0u, prog.len() - 4u).to_owned() + ".libaux";
env = do env.map() |pair| {
let (k,v) = copy *pair;
if k == ~"PATH" { (~"PATH", v + ";" + lib_path + ";" + aux_path) }
else
|
};
if prog.ends_with("rustc.exe") {
env.push((~"RUST_THREADS", ~"1"));
}
return env;
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
fn target_env(_lib_path: &str, _prog: &str) -> ~[(~str,~str)] {
os::env()
}
pub struct Result {status: int, out: ~str, err: ~str}
pub fn run(lib_path: &str,
prog: &str,
args: &[~str],
env: ~[(~str, ~str)],
input: Option<~str>) -> Result {
let env = env + target_env(lib_path, prog);
let mut proc = run::Process::new(prog, args, run::ProcessOptions {
env: Some(env.slice(0, env.len())),
dir: None,
in_fd: None,
out_fd: None,
err_fd: None
});
for input.iter().advance |input| {
proc.input().write_str(*input);
}
let output = proc.finish_with_output();
Result {
status: output.status,
out: str::from_bytes(output.output),
err: str::from_bytes(output.error)
}
}
|
{ (k,v) }
|
conditional_block
|
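The three procsrv.rs rows above drive a child compiler process with the pre-1.0 std::run API. For orientation only, roughly the same spawn, write-stdin, collect-output pattern with today's std::process::Command; the program name and arguments below are placeholders, not the test harness's real values.

use std::io::Write;
use std::process::{Command, Stdio};

// Sketch of the run-a-program-and-capture-output pattern with the current standard library.
fn run(prog: &str, args: &[&str], input: Option<&str>) -> std::io::Result<(i32, String, String)> {
    let mut child = Command::new(prog)
        .args(args)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()?;
    if let Some(text) = input {
        // Write to the child's stdin, then drop the handle so the child sees EOF.
        child.stdin.take().unwrap().write_all(text.as_bytes())?;
    }
    let out = child.wait_with_output()?;
    Ok((
        out.status.code().unwrap_or(-1),
        String::from_utf8_lossy(&out.stdout).into_owned(),
        String::from_utf8_lossy(&out.stderr).into_owned(),
    ))
}

fn main() -> std::io::Result<()> {
    let (status, stdout, _stderr) = run("echo", &["hello"], None)?;
    println!("status={} stdout={}", status, stdout.trim());
    Ok(())
}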
exchange.rs
|
//! This module contains Exchange enum.
use std::fmt::Debug;
use std::convert::Into;
use std::str::FromStr;
use crate::error::*;
use crate::types::*;
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Exchange {
Bitstamp,
Kraken,
Poloniex,
Bittrex,
Gdax,
}
impl Into<String> for Exchange {
fn into(self) -> String {
match self {
Exchange::Bitstamp => "Bitstamp".to_string(),
Exchange::Kraken => "Kraken".to_string(),
Exchange::Poloniex => "Poloniex".to_string(),
Exchange::Bittrex => "Bittrex".to_string(),
Exchange::Gdax => "Gdax".to_string(),
}
}
}
impl FromStr for Exchange {
type Err = Error;
fn from_str(input: &str) -> ::std::result::Result<Self, Self::Err>
|
}
pub trait ExchangeApi: Debug {
/// Return a Ticker for the Pair specified.
fn ticker(&mut self, pair: Pair) -> Result<Ticker>;
/// Return an Orderbook for the specified Pair.
fn orderbook(&mut self, pair: Pair) -> Result<Orderbook>;
/// Place an order directly to the exchange.
/// Quantity is in quote currency. So if you want to buy 1 Bitcoin for X€ (pair BTC_EUR),
/// base currency (right member in the pair) is EUR and quote/counter currency is BTC (left
/// member in the pair).
/// So quantity = 1.
///
/// A good practice is to store the return type (OrderInfo) somewhere since it can later be used
/// to modify or cancel the order.
fn add_order(&mut self,
order_type: OrderType,
pair: Pair,
quantity: Volume,
price: Option<Price>)
-> Result<OrderInfo>;
/// Retrieve the current amounts of all the currencies that the account holds
/// The amounts returned are available (not used to open an order)
fn balances(&mut self) -> Result<Balances>;
}
|
{
match input.to_lowercase().as_str() {
"bitstamp" => Ok(Exchange::Bitstamp),
"kraken" => Ok(Exchange::Kraken),
"poloniex" => Ok(Exchange::Poloniex),
"bittrex" => Ok(Exchange::Bittrex),
"gdax" => Ok(Exchange::Gdax),
_ => Err(ErrorKind::InvalidExchange(input.to_string()).into()),
}
}
|
identifier_body
|
exchange.rs
|
//! This module contains Exchange enum.
use std::fmt::Debug;
use std::convert::Into;
use std::str::FromStr;
|
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Exchange {
Bitstamp,
Kraken,
Poloniex,
Bittrex,
Gdax,
}
impl Into<String> for Exchange {
fn into(self) -> String {
match self {
Exchange::Bitstamp => "Bitstamp".to_string(),
Exchange::Kraken => "Kraken".to_string(),
Exchange::Poloniex => "Poloniex".to_string(),
Exchange::Bittrex => "Bittrex".to_string(),
Exchange::Gdax => "Gdax".to_string(),
}
}
}
impl FromStr for Exchange {
type Err = Error;
fn from_str(input: &str) -> ::std::result::Result<Self, Self::Err> {
match input.to_lowercase().as_str() {
"bitstamp" => Ok(Exchange::Bitstamp),
"kraken" => Ok(Exchange::Kraken),
"poloniex" => Ok(Exchange::Poloniex),
"bittrex" => Ok(Exchange::Bittrex),
"gdax" => Ok(Exchange::Gdax),
_ => Err(ErrorKind::InvalidExchange(input.to_string()).into()),
}
}
}
pub trait ExchangeApi: Debug {
/// Return a Ticker for the Pair specified.
fn ticker(&mut self, pair: Pair) -> Result<Ticker>;
/// Return an Orderbook for the specified Pair.
fn orderbook(&mut self, pair: Pair) -> Result<Orderbook>;
/// Place an order directly to the exchange.
/// Quantity is in quote currency. So if you want to buy 1 Bitcoin for X€ (pair BTC_EUR),
/// base currency (right member in the pair) is EUR and quote/counter currency is BTC (left
/// member in the pair).
/// So quantity = 1.
///
/// A good practice is to store the return type (OrderInfo) somewhere since it can later be used
/// to modify or cancel the order.
fn add_order(&mut self,
order_type: OrderType,
pair: Pair,
quantity: Volume,
price: Option<Price>)
-> Result<OrderInfo>;
/// Retrieve the current amounts of all the currencies that the account holds
/// The amounts returned are available (not used to open an order)
fn balances(&mut self) -> Result<Balances>;
}
|
use crate::error::*;
use crate::types::*;
|
random_line_split
|
exchange.rs
|
//! This module contains Exchange enum.
use std::fmt::Debug;
use std::convert::Into;
use std::str::FromStr;
use crate::error::*;
use crate::types::*;
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum
|
{
Bitstamp,
Kraken,
Poloniex,
Bittrex,
Gdax,
}
impl Into<String> for Exchange {
fn into(self) -> String {
match self {
Exchange::Bitstamp => "Bitstamp".to_string(),
Exchange::Kraken => "Kraken".to_string(),
Exchange::Poloniex => "Poloniex".to_string(),
Exchange::Bittrex => "Bittrex".to_string(),
Exchange::Gdax => "Gdax".to_string(),
}
}
}
impl FromStr for Exchange {
type Err = Error;
fn from_str(input: &str) -> ::std::result::Result<Self, Self::Err> {
match input.to_lowercase().as_str() {
"bitstamp" => Ok(Exchange::Bitstamp),
"kraken" => Ok(Exchange::Kraken),
"poloniex" => Ok(Exchange::Poloniex),
"bittrex" => Ok(Exchange::Bittrex),
"gdax" => Ok(Exchange::Gdax),
_ => Err(ErrorKind::InvalidExchange(input.to_string()).into()),
}
}
}
pub trait ExchangeApi: Debug {
/// Return a Ticker for the Pair specified.
fn ticker(&mut self, pair: Pair) -> Result<Ticker>;
/// Return an Orderbook for the specified Pair.
fn orderbook(&mut self, pair: Pair) -> Result<Orderbook>;
/// Place an order directly to the exchange.
/// Quantity is in quote currency. So if you want to buy 1 Bitcoin for X€ (pair BTC_EUR),
/// base currency (right member in the pair) is EUR and quote/counter currency is BTC (left
/// member in the pair).
/// So quantity = 1.
///
/// A good practice is to store the return type (OrderInfo) somewhere since it can later be used
/// to modify or cancel the order.
fn add_order(&mut self,
order_type: OrderType,
pair: Pair,
quantity: Volume,
price: Option<Price>)
-> Result<OrderInfo>;
/// Retrieve the current amounts of all the currencies that the account holds
/// The amounts returned are available (not used to open an order)
fn balances(&mut self) -> Result<Balances>;
}
|
Exchange
|
identifier_name
|
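The exchange.rs rows pair Into<String> with FromStr so an exchange name can round-trip through plain text. A self-contained sketch of that pattern with a made-up two-variant enum; implementing From<Venue> for String is the usual direction and supplies Into<String> for free.

use std::str::FromStr;

#[derive(Debug, PartialEq, Clone, Copy)]
enum Venue { Kraken, Bitstamp }

impl From<Venue> for String {
    fn from(v: Venue) -> String {
        match v {
            Venue::Kraken => "Kraken".to_string(),
            Venue::Bitstamp => "Bitstamp".to_string(),
        }
    }
}

impl FromStr for Venue {
    type Err = String;
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        // Case-insensitive lookup, mirroring the to_lowercase() match in the rows above.
        match input.to_lowercase().as_str() {
            "kraken" => Ok(Venue::Kraken),
            "bitstamp" => Ok(Venue::Bitstamp),
            other => Err(format!("unknown venue: {}", other)),
        }
    }
}

fn main() {
    let v: Venue = "KRAKEN".parse().unwrap();
    assert_eq!(v, Venue::Kraken);
    assert_eq!(String::from(v), "Kraken");
}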
print_table.rs
|
//! Directly executes an SQL query and prints the result set to standard out
//!
//! This example also offers an idea of how to set up error handling for your ODBC application.
extern crate odbc_safe;
use odbc_safe::*;
use std::str::from_utf8;
// Setup error handling
struct LastError(String);
type MyResult<T> = Result<T, LastError>;
impl<D: Diagnostics> From<D> for LastError {
fn from(source: D) -> Self {
let mut buffer = [0; 512];
match source.diagnostics(1, &mut buffer) {
ReturnOption::Success(dr) |
ReturnOption::Info(dr) => LastError(
from_utf8(&buffer[0..(dr.text_length as usize)])
.unwrap()
.to_owned(),
),
ReturnOption::Error(()) => panic!("Error during fetching diagnostic record"),
ReturnOption::NoData(()) => LastError("No Diagnostic Record present".to_owned()),
}
}
}
trait ExtReturn<T> {
fn into_result(self) -> MyResult<T>;
}
impl<T, D> ExtReturn<T> for Return<T, D>
where
D: Diagnostics,
{
fn into_result(self) -> MyResult<T> {
match self {
Success(v) | Info(v) => Ok(v),
Error(d) => Err(d.into()),
}
}
}
// Actual application
fn main()
|
fn run(env: &Environment<Odbc3>) -> MyResult<()> {
let conn = connect(&env)?;
let result_set = execute_query(&conn)?;
print_fields(result_set)
}
fn connect<V>(env: &Environment<V>) -> MyResult<Connection<impl AutocommitMode>>
where
V: Version,
{
let conn = DataSource::with_parent(env).unwrap();
conn.connect("TestDataSource", "", "").into_result()
}
fn execute_query<'a, AC: AutocommitMode>(conn: &'a Connection<AC>) -> MyResult<ResultSet<'a, 'a, 'a, Unprepared>> {
let stmt = Statement::with_parent(conn).unwrap();
match stmt.exec_direct("SELECT * FROM MOVIES") {
ReturnOption::Success(s) |
ReturnOption::Info(s) => Ok(s),
ReturnOption::NoData(_) => Err(LastError(
"Statement did not return a Result Set.".to_owned(),
)),
ReturnOption::Error(e) => Err(e.into()),
}
}
fn print_fields(result_set: ResultSet<Unprepared>) -> MyResult<()> {
let cols = result_set.num_result_cols().unwrap();
let mut buffer = [0u8; 512];
let mut cursor = match result_set.fetch() {
ReturnOption::Success(r) |
ReturnOption::Info(r) => r,
ReturnOption::NoData(_) => return Ok(()),
ReturnOption::Error(e) => return Err(e.into()),
};
loop {
for index in 1..(cols + 1) {
match cursor.get_data(index as u16, &mut buffer as &mut [u8]) {
ReturnOption::Success(ind) |
ReturnOption::Info(ind) => {
match ind {
Indicator::NoTotal => panic!("No Total"),
Indicator::Null => println!("NULL"),
Indicator::Length(l) => {
print!("{}", from_utf8(&buffer[0..l as usize]).unwrap());
}
}
}
ReturnOption::NoData(_) => panic!("No Field Data"),
ReturnOption::Error(_) => return Err(cursor.into()),
}
print!(" | ");
}
cursor = match cursor.fetch() {
ReturnOption::Success(r) |
ReturnOption::Info(r) => r,
ReturnOption::NoData(_) => break Ok(()),
ReturnOption::Error(e) => break Err(e.into()),
};
println!("");
}
}
|
{
let env = Environment::new().unwrap();
let env = env.declare_version_3().unwrap();
match run(&env) {
Ok(()) => (),
Err(LastError(message)) => println!("An error occurred: {}", message),
}
}
|
identifier_body
|
print_table.rs
|
//! Directly executes an SQL query and prints the result set to standard out
//!
//! This example also offers an idea of how to set up error handling for your ODBC application.
extern crate odbc_safe;
use odbc_safe::*;
use std::str::from_utf8;
// Setup error handling
struct LastError(String);
type MyResult<T> = Result<T, LastError>;
impl<D: Diagnostics> From<D> for LastError {
fn
|
(source: D) -> Self {
let mut buffer = [0; 512];
match source.diagnostics(1, &mut buffer) {
ReturnOption::Success(dr) |
ReturnOption::Info(dr) => LastError(
from_utf8(&buffer[0..(dr.text_length as usize)])
.unwrap()
.to_owned(),
),
ReturnOption::Error(()) => panic!("Error during fetching diagnostic record"),
ReturnOption::NoData(()) => LastError("No Diagnostic Record present".to_owned()),
}
}
}
trait ExtReturn<T> {
fn into_result(self) -> MyResult<T>;
}
impl<T, D> ExtReturn<T> for Return<T, D>
where
D: Diagnostics,
{
fn into_result(self) -> MyResult<T> {
match self {
Success(v) | Info(v) => Ok(v),
Error(d) => Err(d.into()),
}
}
}
// Actual application
fn main() {
let env = Environment::new().unwrap();
let env = env.declare_version_3().unwrap();
match run(&env) {
Ok(()) => (),
Err(LastError(message)) => println!("An error occurred: {}", message),
}
}
fn run(env: &Environment<Odbc3>) -> MyResult<()> {
let conn = connect(&env)?;
let result_set = execute_query(&conn)?;
print_fields(result_set)
}
fn connect<V>(env: &Environment<V>) -> MyResult<Connection<impl AutocommitMode>>
where
V: Version,
{
let conn = DataSource::with_parent(env).unwrap();
conn.connect("TestDataSource", "", "").into_result()
}
fn execute_query<'a, AC: AutocommitMode>(conn: &'a Connection<AC>) -> MyResult<ResultSet<'a, 'a, 'a, Unprepared>> {
let stmt = Statement::with_parent(conn).unwrap();
match stmt.exec_direct("SELECT * FROM MOVIES") {
ReturnOption::Success(s) |
ReturnOption::Info(s) => Ok(s),
ReturnOption::NoData(_) => Err(LastError(
"Statement did not return a Result Set.".to_owned(),
)),
ReturnOption::Error(e) => Err(e.into()),
}
}
fn print_fields(result_set: ResultSet<Unprepared>) -> MyResult<()> {
let cols = result_set.num_result_cols().unwrap();
let mut buffer = [0u8; 512];
let mut cursor = match result_set.fetch() {
ReturnOption::Success(r) |
ReturnOption::Info(r) => r,
ReturnOption::NoData(_) => return Ok(()),
ReturnOption::Error(e) => return Err(e.into()),
};
loop {
for index in 1..(cols + 1) {
match cursor.get_data(index as u16, &mut buffer as &mut [u8]) {
ReturnOption::Success(ind) |
ReturnOption::Info(ind) => {
match ind {
Indicator::NoTotal => panic!("No Total"),
Indicator::Null => println!("NULL"),
Indicator::Length(l) => {
print!("{}", from_utf8(&buffer[0..l as usize]).unwrap());
}
}
}
ReturnOption::NoData(_) => panic!("No Field Data"),
ReturnOption::Error(_) => return Err(cursor.into()),
}
print!(" | ");
}
cursor = match cursor.fetch() {
ReturnOption::Success(r) |
ReturnOption::Info(r) => r,
ReturnOption::NoData(_) => break Ok(()),
ReturnOption::Error(e) => break Err(e.into()),
};
println!("");
}
}
|
from
|
identifier_name
|
print_table.rs
|
//! Directly executes an SQL query and prints the result set to standard out
//!
//! This example also offers an idea of how to set up error handling for your ODBC application.
extern crate odbc_safe;
use odbc_safe::*;
use std::str::from_utf8;
// Setup error handling
struct LastError(String);
type MyResult<T> = Result<T, LastError>;
impl<D: Diagnostics> From<D> for LastError {
fn from(source: D) -> Self {
let mut buffer = [0; 512];
match source.diagnostics(1, &mut buffer) {
ReturnOption::Success(dr) |
ReturnOption::Info(dr) => LastError(
from_utf8(&buffer[0..(dr.text_length as usize)])
.unwrap()
.to_owned(),
),
ReturnOption::Error(()) => panic!("Error during fetching diagnostic record"),
ReturnOption::NoData(()) => LastError("No Diagnostic Record present".to_owned()),
}
}
}
trait ExtReturn<T> {
fn into_result(self) -> MyResult<T>;
}
impl<T, D> ExtReturn<T> for Return<T, D>
where
D: Diagnostics,
{
fn into_result(self) -> MyResult<T> {
match self {
Success(v) | Info(v) => Ok(v),
Error(d) => Err(d.into()),
}
}
}
|
fn main() {
let env = Environment::new().unwrap();
let env = env.declare_version_3().unwrap();
match run(&env) {
Ok(()) => (),
Err(LastError(message)) => println!("An error occurred: {}", message),
}
}
fn run(env: &Environment<Odbc3>) -> MyResult<()> {
let conn = connect(&env)?;
let result_set = execute_query(&conn)?;
print_fields(result_set)
}
fn connect<V>(env: &Environment<V>) -> MyResult<Connection<impl AutocommitMode>>
where
V: Version,
{
let conn = DataSource::with_parent(env).unwrap();
conn.connect("TestDataSource", "", "").into_result()
}
fn execute_query<'a, AC: AutocommitMode>(conn: &'a Connection<AC>) -> MyResult<ResultSet<'a, 'a, 'a, Unprepared>> {
let stmt = Statement::with_parent(conn).unwrap();
match stmt.exec_direct("SELECT * FROM MOVIES") {
ReturnOption::Success(s) |
ReturnOption::Info(s) => Ok(s),
ReturnOption::NoData(_) => Err(LastError(
"Statement did not return a Result Set.".to_owned(),
)),
ReturnOption::Error(e) => Err(e.into()),
}
}
fn print_fields(result_set: ResultSet<Unprepared>) -> MyResult<()> {
let cols = result_set.num_result_cols().unwrap();
let mut buffer = [0u8; 512];
let mut cursor = match result_set.fetch() {
ReturnOption::Success(r) |
ReturnOption::Info(r) => r,
ReturnOption::NoData(_) => return Ok(()),
ReturnOption::Error(e) => return Err(e.into()),
};
loop {
for index in 1..(cols + 1) {
match cursor.get_data(index as u16, &mut buffer as &mut [u8]) {
ReturnOption::Success(ind) |
ReturnOption::Info(ind) => {
match ind {
Indicator::NoTotal => panic!("No Total"),
Indicator::Null => println!("NULL"),
Indicator::Length(l) => {
print!("{}", from_utf8(&buffer[0..l as usize]).unwrap());
}
}
}
ReturnOption::NoData(_) => panic!("No Field Data"),
ReturnOption::Error(_) => return Err(cursor.into()),
}
print!(" | ");
}
cursor = match cursor.fetch() {
ReturnOption::Success(r) |
ReturnOption::Info(r) => r,
ReturnOption::NoData(_) => break Ok(()),
ReturnOption::Error(e) => break Err(e.into()),
};
println!("");
}
}
|
// Actual application
|
random_line_split
|
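print_table.rs folds odbc-safe's success/info/error returns into a Result through a small into_result extension trait so that ? can be used. The same adapter pattern in isolation, with a stand-in Ret enum instead of the library's Return/ReturnOption types.

// Stand-in for a library's success/info/error return type; illustrative only.
enum Ret<T, D> {
    Success(T),
    Info(T),
    Error(D),
}

trait IntoResult<T, D> {
    fn into_result(self) -> Result<T, D>;
}

impl<T, D> IntoResult<T, D> for Ret<T, D> {
    fn into_result(self) -> Result<T, D> {
        // Success and Info both carry a usable value; only Error is fatal.
        match self {
            Ret::Success(v) | Ret::Info(v) => Ok(v),
            Ret::Error(d) => Err(d),
        }
    }
}

fn main() {
    let ok: Ret<i32, String> = Ret::Info(7);
    let err: Ret<i32, String> = Ret::Error("diagnostic record".to_string());
    assert_eq!(ok.into_result(), Ok(7));
    assert!(err.into_result().is_err());
    println!("adapter works");
}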
animate.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use darling::util::IdentList;
use derive_common::cg;
use proc_macro2::TokenStream;
use quote::TokenStreamExt;
use syn::{DeriveInput, Path, WhereClause};
use synstructure::{Structure, VariantInfo};
pub fn
|
(mut input: DeriveInput) -> TokenStream {
let animation_input_attrs = cg::parse_input_attrs::<AnimationInputAttrs>(&input);
let no_bound = animation_input_attrs.no_bound.unwrap_or_default();
let mut where_clause = input.generics.where_clause.take();
for param in input.generics.type_params() {
if !no_bound.contains(&param.ident) {
cg::add_predicate(
&mut where_clause,
parse_quote!(#param: crate::values::animated::Animate),
);
}
}
let (mut match_body, append_error_clause) = {
let s = Structure::new(&input);
let mut append_error_clause = s.variants().len() > 1;
let match_body = s.variants().iter().fold(quote!(), |body, variant| {
let arm = match derive_variant_arm(variant, &mut where_clause) {
Ok(arm) => arm,
Err(()) => {
append_error_clause = true;
return body;
},
};
quote! { #body #arm }
});
(match_body, append_error_clause)
};
input.generics.where_clause = where_clause;
if append_error_clause {
let input_attrs = cg::parse_input_attrs::<AnimateInputAttrs>(&input);
if let Some(fallback) = input_attrs.fallback {
match_body.append_all(quote! {
(this, other) => #fallback(this, other, procedure)
});
} else {
match_body.append_all(quote! { _ => Err(()) });
}
}
let name = &input.ident;
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
quote! {
impl #impl_generics crate::values::animated::Animate for #name #ty_generics #where_clause {
#[allow(unused_variables, unused_imports)]
#[inline]
fn animate(
&self,
other: &Self,
procedure: crate::values::animated::Procedure,
) -> Result<Self, ()> {
match (self, other) {
#match_body
}
}
}
}
}
fn derive_variant_arm(
variant: &VariantInfo,
where_clause: &mut Option<WhereClause>,
) -> Result<TokenStream, ()> {
let variant_attrs = cg::parse_variant_attrs_from_ast::<AnimationVariantAttrs>(&variant.ast());
if variant_attrs.error {
return Err(());
}
let (this_pattern, this_info) = cg::ref_pattern(&variant, "this");
let (other_pattern, other_info) = cg::ref_pattern(&variant, "other");
let (result_value, result_info) = cg::value(&variant, "result");
let mut computations = quote!();
let iter = result_info.iter().zip(this_info.iter().zip(&other_info));
computations.append_all(iter.map(|(result, (this, other))| {
let field_attrs = cg::parse_field_attrs::<AnimationFieldAttrs>(&result.ast());
if field_attrs.field_bound {
let ty = &this.ast().ty;
cg::add_predicate(
where_clause,
parse_quote!(#ty: crate::values::animated::Animate),
);
}
if field_attrs.constant {
quote! {
if #this!= #other {
return Err(());
}
let #result = std::clone::Clone::clone(#this);
}
} else {
quote! {
let #result =
crate::values::animated::Animate::animate(#this, #other, procedure)?;
}
}
}));
Ok(quote! {
(&#this_pattern, &#other_pattern) => {
#computations
Ok(#result_value)
}
})
}
#[darling(attributes(animate), default)]
#[derive(Default, FromDeriveInput)]
struct AnimateInputAttrs {
fallback: Option<Path>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromDeriveInput)]
pub struct AnimationInputAttrs {
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromVariant)]
pub struct AnimationVariantAttrs {
pub error: bool,
// Only here because of structs, where the struct definition acts as a
// variant itself.
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromField)]
pub struct AnimationFieldAttrs {
pub constant: bool,
pub field_bound: bool,
}
|
derive
|
identifier_name
|
animate.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use darling::util::IdentList;
use derive_common::cg;
use proc_macro2::TokenStream;
use quote::TokenStreamExt;
use syn::{DeriveInput, Path, WhereClause};
use synstructure::{Structure, VariantInfo};
pub fn derive(mut input: DeriveInput) -> TokenStream {
let animation_input_attrs = cg::parse_input_attrs::<AnimationInputAttrs>(&input);
let no_bound = animation_input_attrs.no_bound.unwrap_or_default();
let mut where_clause = input.generics.where_clause.take();
for param in input.generics.type_params() {
if !no_bound.contains(&param.ident) {
cg::add_predicate(
&mut where_clause,
parse_quote!(#param: crate::values::animated::Animate),
);
}
}
let (mut match_body, append_error_clause) = {
let s = Structure::new(&input);
let mut append_error_clause = s.variants().len() > 1;
let match_body = s.variants().iter().fold(quote!(), |body, variant| {
let arm = match derive_variant_arm(variant, &mut where_clause) {
Ok(arm) => arm,
Err(()) => {
append_error_clause = true;
return body;
},
};
quote! { #body #arm }
});
(match_body, append_error_clause)
};
input.generics.where_clause = where_clause;
if append_error_clause {
let input_attrs = cg::parse_input_attrs::<AnimateInputAttrs>(&input);
if let Some(fallback) = input_attrs.fallback {
match_body.append_all(quote! {
(this, other) => #fallback(this, other, procedure)
});
} else {
match_body.append_all(quote! { _ => Err(()) });
}
}
let name = &input.ident;
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
quote! {
impl #impl_generics crate::values::animated::Animate for #name #ty_generics #where_clause {
#[allow(unused_variables, unused_imports)]
#[inline]
fn animate(
&self,
other: &Self,
procedure: crate::values::animated::Procedure,
) -> Result<Self, ()> {
match (self, other) {
#match_body
}
|
fn derive_variant_arm(
variant: &VariantInfo,
where_clause: &mut Option<WhereClause>,
) -> Result<TokenStream, ()> {
let variant_attrs = cg::parse_variant_attrs_from_ast::<AnimationVariantAttrs>(&variant.ast());
if variant_attrs.error {
return Err(());
}
let (this_pattern, this_info) = cg::ref_pattern(&variant, "this");
let (other_pattern, other_info) = cg::ref_pattern(&variant, "other");
let (result_value, result_info) = cg::value(&variant, "result");
let mut computations = quote!();
let iter = result_info.iter().zip(this_info.iter().zip(&other_info));
computations.append_all(iter.map(|(result, (this, other))| {
let field_attrs = cg::parse_field_attrs::<AnimationFieldAttrs>(&result.ast());
if field_attrs.field_bound {
let ty = &this.ast().ty;
cg::add_predicate(
where_clause,
parse_quote!(#ty: crate::values::animated::Animate),
);
}
if field_attrs.constant {
quote! {
if #this!= #other {
return Err(());
}
let #result = std::clone::Clone::clone(#this);
}
} else {
quote! {
let #result =
crate::values::animated::Animate::animate(#this, #other, procedure)?;
}
}
}));
Ok(quote! {
(&#this_pattern, &#other_pattern) => {
#computations
Ok(#result_value)
}
})
}
#[darling(attributes(animate), default)]
#[derive(Default, FromDeriveInput)]
struct AnimateInputAttrs {
fallback: Option<Path>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromDeriveInput)]
pub struct AnimationInputAttrs {
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromVariant)]
pub struct AnimationVariantAttrs {
pub error: bool,
// Only here because of structs, where the struct definition acts as a
// variant itself.
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromField)]
pub struct AnimationFieldAttrs {
pub constant: bool,
pub field_bound: bool,
}
|
}
}
}
}
|
random_line_split
|
animate.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use darling::util::IdentList;
use derive_common::cg;
use proc_macro2::TokenStream;
use quote::TokenStreamExt;
use syn::{DeriveInput, Path, WhereClause};
use synstructure::{Structure, VariantInfo};
pub fn derive(mut input: DeriveInput) -> TokenStream {
let animation_input_attrs = cg::parse_input_attrs::<AnimationInputAttrs>(&input);
let no_bound = animation_input_attrs.no_bound.unwrap_or_default();
let mut where_clause = input.generics.where_clause.take();
for param in input.generics.type_params() {
if !no_bound.contains(&param.ident) {
cg::add_predicate(
&mut where_clause,
parse_quote!(#param: crate::values::animated::Animate),
);
}
}
let (mut match_body, append_error_clause) = {
let s = Structure::new(&input);
let mut append_error_clause = s.variants().len() > 1;
let match_body = s.variants().iter().fold(quote!(), |body, variant| {
let arm = match derive_variant_arm(variant, &mut where_clause) {
Ok(arm) => arm,
Err(()) => {
append_error_clause = true;
return body;
},
};
quote! { #body #arm }
});
(match_body, append_error_clause)
};
input.generics.where_clause = where_clause;
if append_error_clause {
let input_attrs = cg::parse_input_attrs::<AnimateInputAttrs>(&input);
if let Some(fallback) = input_attrs.fallback
|
else {
match_body.append_all(quote! { _ => Err(()) });
}
}
let name = &input.ident;
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
quote! {
impl #impl_generics crate::values::animated::Animate for #name #ty_generics #where_clause {
#[allow(unused_variables, unused_imports)]
#[inline]
fn animate(
&self,
other: &Self,
procedure: crate::values::animated::Procedure,
) -> Result<Self, ()> {
match (self, other) {
#match_body
}
}
}
}
}
fn derive_variant_arm(
variant: &VariantInfo,
where_clause: &mut Option<WhereClause>,
) -> Result<TokenStream, ()> {
let variant_attrs = cg::parse_variant_attrs_from_ast::<AnimationVariantAttrs>(&variant.ast());
if variant_attrs.error {
return Err(());
}
let (this_pattern, this_info) = cg::ref_pattern(&variant, "this");
let (other_pattern, other_info) = cg::ref_pattern(&variant, "other");
let (result_value, result_info) = cg::value(&variant, "result");
let mut computations = quote!();
let iter = result_info.iter().zip(this_info.iter().zip(&other_info));
computations.append_all(iter.map(|(result, (this, other))| {
let field_attrs = cg::parse_field_attrs::<AnimationFieldAttrs>(&result.ast());
if field_attrs.field_bound {
let ty = &this.ast().ty;
cg::add_predicate(
where_clause,
parse_quote!(#ty: crate::values::animated::Animate),
);
}
if field_attrs.constant {
quote! {
if #this!= #other {
return Err(());
}
let #result = std::clone::Clone::clone(#this);
}
} else {
quote! {
let #result =
crate::values::animated::Animate::animate(#this, #other, procedure)?;
}
}
}));
Ok(quote! {
(&#this_pattern, &#other_pattern) => {
#computations
Ok(#result_value)
}
})
}
#[darling(attributes(animate), default)]
#[derive(Default, FromDeriveInput)]
struct AnimateInputAttrs {
fallback: Option<Path>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromDeriveInput)]
pub struct AnimationInputAttrs {
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromVariant)]
pub struct AnimationVariantAttrs {
pub error: bool,
// Only here because of structs, where the struct definition acts as a
// variant itself.
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromField)]
pub struct AnimationFieldAttrs {
pub constant: bool,
pub field_bound: bool,
}
|
{
match_body.append_all(quote! {
(this, other) => #fallback(this, other, procedure)
});
}
|
conditional_block
|
animate.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use darling::util::IdentList;
use derive_common::cg;
use proc_macro2::TokenStream;
use quote::TokenStreamExt;
use syn::{DeriveInput, Path, WhereClause};
use synstructure::{Structure, VariantInfo};
pub fn derive(mut input: DeriveInput) -> TokenStream {
let animation_input_attrs = cg::parse_input_attrs::<AnimationInputAttrs>(&input);
let no_bound = animation_input_attrs.no_bound.unwrap_or_default();
let mut where_clause = input.generics.where_clause.take();
for param in input.generics.type_params() {
if !no_bound.contains(&param.ident) {
cg::add_predicate(
&mut where_clause,
parse_quote!(#param: crate::values::animated::Animate),
);
}
}
let (mut match_body, append_error_clause) = {
let s = Structure::new(&input);
let mut append_error_clause = s.variants().len() > 1;
let match_body = s.variants().iter().fold(quote!(), |body, variant| {
let arm = match derive_variant_arm(variant, &mut where_clause) {
Ok(arm) => arm,
Err(()) => {
append_error_clause = true;
return body;
},
};
quote! { #body #arm }
});
(match_body, append_error_clause)
};
input.generics.where_clause = where_clause;
if append_error_clause {
let input_attrs = cg::parse_input_attrs::<AnimateInputAttrs>(&input);
if let Some(fallback) = input_attrs.fallback {
match_body.append_all(quote! {
(this, other) => #fallback(this, other, procedure)
});
} else {
match_body.append_all(quote! { _ => Err(()) });
}
}
let name = &input.ident;
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
quote! {
impl #impl_generics crate::values::animated::Animate for #name #ty_generics #where_clause {
#[allow(unused_variables, unused_imports)]
#[inline]
fn animate(
&self,
other: &Self,
procedure: crate::values::animated::Procedure,
) -> Result<Self, ()> {
match (self, other) {
#match_body
}
}
}
}
}
fn derive_variant_arm(
variant: &VariantInfo,
where_clause: &mut Option<WhereClause>,
) -> Result<TokenStream, ()>
|
quote! {
if #this!= #other {
return Err(());
}
let #result = std::clone::Clone::clone(#this);
}
} else {
quote! {
let #result =
crate::values::animated::Animate::animate(#this, #other, procedure)?;
}
}
}));
Ok(quote! {
(&#this_pattern, &#other_pattern) => {
#computations
Ok(#result_value)
}
})
}
#[darling(attributes(animate), default)]
#[derive(Default, FromDeriveInput)]
struct AnimateInputAttrs {
fallback: Option<Path>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromDeriveInput)]
pub struct AnimationInputAttrs {
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromVariant)]
pub struct AnimationVariantAttrs {
pub error: bool,
// Only here because of structs, where the struct definition acts as a
// variant itself.
pub no_bound: Option<IdentList>,
}
#[darling(attributes(animation), default)]
#[derive(Default, FromField)]
pub struct AnimationFieldAttrs {
pub constant: bool,
pub field_bound: bool,
}
|
{
let variant_attrs = cg::parse_variant_attrs_from_ast::<AnimationVariantAttrs>(&variant.ast());
if variant_attrs.error {
return Err(());
}
let (this_pattern, this_info) = cg::ref_pattern(&variant, "this");
let (other_pattern, other_info) = cg::ref_pattern(&variant, "other");
let (result_value, result_info) = cg::value(&variant, "result");
let mut computations = quote!();
let iter = result_info.iter().zip(this_info.iter().zip(&other_info));
computations.append_all(iter.map(|(result, (this, other))| {
let field_attrs = cg::parse_field_attrs::<AnimationFieldAttrs>(&result.ast());
if field_attrs.field_bound {
let ty = &this.ast().ty;
cg::add_predicate(
where_clause,
parse_quote!(#ty: crate::values::animated::Animate),
);
}
if field_attrs.constant {
|
identifier_body
|
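The animate.rs rows build an Animate impl by pairing up the fields of each variant and recursing on them. Hand-written, the generated code for a simple struct looks roughly like the sketch below; the Procedure and Animate definitions here are toy stand-ins, not Stylo's real types.

// Toy stand-ins for the animated-value types; purely illustrative.
#[derive(Clone, Copy, Debug)]
struct Procedure { progress: f64 }

trait Animate: Sized {
    fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()>;
}

impl Animate for f64 {
    fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
        Ok(self * (1.0 - procedure.progress) + other * procedure.progress)
    }
}

#[derive(Debug, PartialEq)]
struct Size { width: f64, height: f64 }

// Roughly the shape the derive emits: animate each field pair, propagate Err(()), rebuild the value.
impl Animate for Size {
    fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
        Ok(Size {
            width: self.width.animate(&other.width, procedure)?,
            height: self.height.animate(&other.height, procedure)?,
        })
    }
}

fn main() {
    let a = Size { width: 0.0, height: 10.0 };
    let b = Size { width: 100.0, height: 20.0 };
    let mid = a.animate(&b, Procedure { progress: 0.5 }).unwrap();
    assert_eq!(mid, Size { width: 50.0, height: 15.0 });
}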
error.rs
|
//! Messaging primitives for server errors.
use std::borrow::Cow;
use std::io::{self, Write};
use nom::IResult;
/// Error reported by the server and sent to the client.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ErrorResponse<'a> {
message: Cow<'a, str>,
}
impl<'a> ErrorResponse<'a> {
/// Create a new ErrorResponse.
pub fn new(message: &'a str) -> ErrorResponse<'a> {
ErrorResponse { message: Cow::Borrowed(message) }
}
/// Construct an ErrorResponse from the given bytes.
pub fn from_bytes(bytes: &'a [u8]) -> IResult<&'a [u8], ErrorResponse<'a>> {
map!(bytes, take_str!(bytes.len()), |m| ErrorResponse::new(m))
}
/// Write the ErrorResponse to the given writer.
pub fn write_bytes<W>(&self, mut writer: W) -> io::Result<()>
where W: Write
|
/// Message describing the error that occurred.
pub fn message(&self) -> &str {
&*self.message
}
/// Create an owned version of the ErrorResponse.
pub fn to_owned(&self) -> ErrorResponse<'static> {
ErrorResponse { message: Cow::Owned((*self.message).to_owned()) }
}
}
|
{
try!(writer.write_all(self.message.as_bytes()));
Ok(())
}
|
identifier_body
|
error.rs
|
//! Messaging primitives for server errors.
use std::borrow::Cow;
use std::io::{self, Write};
use nom::IResult;
/// Error reported by the server and sent to the client.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ErrorResponse<'a> {
message: Cow<'a, str>,
}
impl<'a> ErrorResponse<'a> {
/// Create a new ErrorResponse.
pub fn new(message: &'a str) -> ErrorResponse<'a> {
ErrorResponse { message: Cow::Borrowed(message) }
}
/// Construct an ErrorResponse from the given bytes.
pub fn
|
(bytes: &'a [u8]) -> IResult<&'a [u8], ErrorResponse<'a>> {
map!(bytes, take_str!(bytes.len()), |m| ErrorResponse::new(m))
}
/// Write the ErrorResponse to the given writer.
pub fn write_bytes<W>(&self, mut writer: W) -> io::Result<()>
where W: Write
{
try!(writer.write_all(self.message.as_bytes()));
Ok(())
}
/// Message describing the error that occurred.
pub fn message(&self) -> &str {
&*self.message
}
/// Create an owned version of the ErrorResponse.
pub fn to_owned(&self) -> ErrorResponse<'static> {
ErrorResponse { message: Cow::Owned((*self.message).to_owned()) }
}
}
|
from_bytes
|
identifier_name
|
error.rs
|
//! Messaging primitives for server errors.
use std::borrow::Cow;
use std::io::{self, Write};
|
use nom::IResult;
/// Error reported by the server and sent to the client.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ErrorResponse<'a> {
message: Cow<'a, str>,
}
impl<'a> ErrorResponse<'a> {
/// Create a new ErrorResponse.
pub fn new(message: &'a str) -> ErrorResponse<'a> {
ErrorResponse { message: Cow::Borrowed(message) }
}
/// Construct an ErrorResponse from the given bytes.
pub fn from_bytes(bytes: &'a [u8]) -> IResult<&'a [u8], ErrorResponse<'a>> {
map!(bytes, take_str!(bytes.len()), |m| ErrorResponse::new(m))
}
/// Write the ErrorResponse to the given writer.
pub fn write_bytes<W>(&self, mut writer: W) -> io::Result<()>
where W: Write
{
try!(writer.write_all(self.message.as_bytes()));
Ok(())
}
/// Message describing the error that occurred.
pub fn message(&self) -> &str {
&*self.message
}
/// Create an owned version of the ErrorResponse.
pub fn to_owned(&self) -> ErrorResponse<'static> {
ErrorResponse { message: Cow::Owned((*self.message).to_owned()) }
}
}
|
random_line_split
|
|
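error.rs keeps its message in a Cow<str> so parsing stays zero-copy while to_owned detaches the borrow. A small sketch of that borrow-then-own pattern; the ErrMsg type is invented for the example.

use std::borrow::Cow;

// Borrowed-by-default message that can be promoted to an owned, 'static copy.
#[derive(Debug, Clone, PartialEq, Eq)]
struct ErrMsg<'a> { message: Cow<'a, str> }

impl<'a> ErrMsg<'a> {
    fn new(message: &'a str) -> ErrMsg<'a> {
        ErrMsg { message: Cow::Borrowed(message) }
    }
    // Same move as the record's to_owned(): clone the text so the result no
    // longer borrows from the parse buffer.
    fn to_static(&self) -> ErrMsg<'static> {
        ErrMsg { message: Cow::Owned((*self.message).to_owned()) }
    }
}

fn main() {
    let buffer = String::from("relation does not exist");
    let owned: ErrMsg<'static> = ErrMsg::new(&buffer).to_static();
    drop(buffer); // the owned copy is independent of the original buffer
    assert_eq!(&*owned.message, "relation does not exist");
}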
sudoku.rs
|
#[macro_use]
extern crate kanren;
use kanren::core::{State, Unifier, Var, VarStore, VarRetrieve};
use kanren::iter::StateIter;
use kanren::list::List;
use kanren::list::{Pair, Nil};
use std::io::Read;
use std::fmt::{Formatter, Debug};
use kanren::finitedomain::Fd;
use kanren::constraints::AllDiffConstraint;
fn main() {
let mut state = State::new();
fresh!(state, orig_list);
let mut list = orig_list;
let mut colvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut rowvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut groupvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut puzstr = String::new();
let stdin = ::std::io::stdin();
stdin.lock().read_to_string(&mut puzstr).unwrap();
let puzzle: Vec<Option<u8>> = puzstr.chars().flat_map(|x| {
(if x.is_numeric() { Some(Some((x as u32 - '0' as u32) as u8)) }
else if x == '_' { Some(None) }
else { None })
.into_iter()
}).collect();
println!("Input: ");
display_output(puzzle.iter().map(|x| *x));
println!("");
assert!(puzzle.len() == 81);
let xy = (0..9).flat_map(|y| (0..9).map(move |x| (x,y)));
for ((x, y), &puz) in xy.zip(puzzle.iter()) {
fresh!(state, entry, tail);
let value = match puz {
Some(x) => Fd::new_single(x as usize),
None => Fd::new_values((1..10).collect()),
};
state.unify(entry, value);
colvars[x].push(entry);
rowvars[y].push(entry);
groupvars[x / 3 + (y / 3) * 3].push(entry);
state.unify(list, Pair(entry, tail));
list = tail;
}
state.unify(list, Nil);
//println!("colvars: {:?}, rowvars: {:?}, groupvars: {:?}", colvars, rowvars, groupvars);
for vars in colvars.into_iter().chain(rowvars).chain(groupvars) {
state.add_constraint(AllDiffConstraint::new(vars));
}
#[allow(unused_variables)]
fn get_fds(state: State, list: Var<List<Fd>>) -> StateIter {
//conde!(state, {
//state.unify(list, Nil);
//single(state)
//}, {
//fresh!(state, head, tail);
//state.unify(list, Pair(head, tail));
//}
::kanren::core::assign_all_values(state)
}
struct UnderscoreWriter<T>(Option<T>);
impl<T> Debug for UnderscoreWriter<T> where T: Debug {
fn fmt(&self, fmt: &mut Formatter) -> ::std::fmt::Result {
match self.0 {
Some(ref x) => write!(fmt, "{:?}", x),
None => write!(fmt, "_")
}
}
}
fn display_list(state: &mut State, list: Var<List<Fd>>) {
let list = state.get_value(list).unwrap();
display_output(list.iter(state).map(|x| x.and_then(Fd::single_value)))
}
fn display_output<I, T>(i: I) where I: IntoIterator<Item=Option<T>>, T: Debug
|
for (i, mut state) in get_fds(state, orig_list).into_iter().enumerate().take(100) {
//let reifier = Reifier::new(&state);
println!("solution {}:", i);
display_list(&mut state, orig_list);
println!("");
//println!("state: {:?}\n", state);
}
}
|
{
let items: Vec<_> = i.into_iter().map(|x| UnderscoreWriter(x)).collect();
for chunk in items.chunks(9) {
println!("{:?}", chunk);
}
}
|
identifier_body
|
sudoku.rs
|
#[macro_use]
extern crate kanren;
use kanren::core::{State, Unifier, Var, VarStore, VarRetrieve};
use kanren::iter::StateIter;
use kanren::list::List;
use kanren::list::{Pair, Nil};
use std::io::Read;
use std::fmt::{Formatter, Debug};
use kanren::finitedomain::Fd;
use kanren::constraints::AllDiffConstraint;
fn main() {
let mut state = State::new();
fresh!(state, orig_list);
let mut list = orig_list;
let mut colvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut rowvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut groupvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut puzstr = String::new();
let stdin = ::std::io::stdin();
stdin.lock().read_to_string(&mut puzstr).unwrap();
|
else { None })
.into_iter()
}).collect();
println!("Input: ");
display_output(puzzle.iter().map(|x| *x));
println!("");
assert!(puzzle.len() == 81);
let xy = (0..9).flat_map(|y| (0..9).map(move |x| (x,y)));
for ((x, y), &puz) in xy.zip(puzzle.iter()) {
fresh!(state, entry, tail);
let value = match puz {
Some(x) => Fd::new_single(x as usize),
None => Fd::new_values((1..10).collect()),
};
state.unify(entry, value);
colvars[x].push(entry);
rowvars[y].push(entry);
groupvars[x / 3 + (y / 3) * 3].push(entry);
state.unify(list, Pair(entry, tail));
list = tail;
}
state.unify(list, Nil);
//println!("colvars: {:?}, rowvars: {:?}, groupvars: {:?}", colvars, rowvars, groupvars);
for vars in colvars.into_iter().chain(rowvars).chain(groupvars) {
state.add_constraint(AllDiffConstraint::new(vars));
}
#[allow(unused_variables)]
fn get_fds(state: State, list: Var<List<Fd>>) -> StateIter {
//conde!(state, {
//state.unify(list, Nil);
//single(state)
//}, {
//fresh!(state, head, tail);
//state.unify(list, Pair(head, tail));
//}
::kanren::core::assign_all_values(state)
}
struct UnderscoreWriter<T>(Option<T>);
impl<T> Debug for UnderscoreWriter<T> where T: Debug {
fn fmt(&self, fmt: &mut Formatter) -> ::std::fmt::Result {
match self.0 {
Some(ref x) => write!(fmt, "{:?}", x),
None => write!(fmt, "_")
}
}
}
fn display_list(state: &mut State, list: Var<List<Fd>>) {
let list = state.get_value(list).unwrap();
display_output(list.iter(state).map(|x| x.and_then(Fd::single_value)))
}
fn display_output<I, T>(i: I) where I: IntoIterator<Item=Option<T>>, T: Debug {
let items: Vec<_> = i.into_iter().map(|x| UnderscoreWriter(x)).collect();
for chunk in items.chunks(9) {
println!("{:?}", chunk);
}
}
for (i, mut state) in get_fds(state, orig_list).into_iter().enumerate().take(100) {
//let reifier = Reifier::new(&state);
println!("solution {}:", i);
display_list(&mut state, orig_list);
println!("");
//println!("state: {:?}\n", state);
}
}
|
let puzzle: Vec<Option<u8>> = puzstr.chars().flat_map(|x| {
(if x.is_numeric() { Some(Some((x as u32 - '0' as u32) as u8)) }
else if x == '_' { Some(None) }
|
random_line_split
|
sudoku.rs
|
#[macro_use]
extern crate kanren;
use kanren::core::{State, Unifier, Var, VarStore, VarRetrieve};
use kanren::iter::StateIter;
use kanren::list::List;
use kanren::list::{Pair, Nil};
use std::io::Read;
use std::fmt::{Formatter, Debug};
use kanren::finitedomain::Fd;
use kanren::constraints::AllDiffConstraint;
fn main() {
let mut state = State::new();
fresh!(state, orig_list);
let mut list = orig_list;
let mut colvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut rowvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut groupvars: Vec<Vec<Var<Fd>>> = (0..9).map(|_| Vec::new()).collect();
let mut puzstr = String::new();
let stdin = ::std::io::stdin();
stdin.lock().read_to_string(&mut puzstr).unwrap();
let puzzle: Vec<Option<u8>> = puzstr.chars().flat_map(|x| {
(if x.is_numeric() { Some(Some((x as u32 - '0' as u32) as u8)) }
else if x == '_' { Some(None) }
else { None })
.into_iter()
}).collect();
println!("Input: ");
display_output(puzzle.iter().map(|x| *x));
println!("");
assert!(puzzle.len() == 81);
let xy = (0..9).flat_map(|y| (0..9).map(move |x| (x,y)));
for ((x, y), &puz) in xy.zip(puzzle.iter()) {
fresh!(state, entry, tail);
let value = match puz {
Some(x) => Fd::new_single(x as usize),
None => Fd::new_values((1..10).collect()),
};
state.unify(entry, value);
colvars[x].push(entry);
rowvars[y].push(entry);
groupvars[x / 3 + (y / 3) * 3].push(entry);
state.unify(list, Pair(entry, tail));
list = tail;
}
state.unify(list, Nil);
//println!("colvars: {:?}, rowvars: {:?}, groupvars: {:?}", colvars, rowvars, groupvars);
for vars in colvars.into_iter().chain(rowvars).chain(groupvars) {
state.add_constraint(AllDiffConstraint::new(vars));
}
#[allow(unused_variables)]
fn
|
(state: State, list: Var<List<Fd>>) -> StateIter {
//conde!(state, {
//state.unify(list, Nil);
//single(state)
//}, {
//fresh!(state, head, tail);
//state.unify(list, Pair(head, tail));
//}
::kanren::core::assign_all_values(state)
}
struct UnderscoreWriter<T>(Option<T>);
impl<T> Debug for UnderscoreWriter<T> where T: Debug {
fn fmt(&self, fmt: &mut Formatter) -> ::std::fmt::Result {
match self.0 {
Some(ref x) => write!(fmt, "{:?}", x),
None => write!(fmt, "_")
}
}
}
fn display_list(state: &mut State, list: Var<List<Fd>>) {
let list = state.get_value(list).unwrap();
display_output(list.iter(state).map(|x| x.and_then(Fd::single_value)))
}
fn display_output<I, T>(i: I) where I: IntoIterator<Item=Option<T>>, T: Debug {
let items: Vec<_> = i.into_iter().map(|x| UnderscoreWriter(x)).collect();
for chunk in items.chunks(9) {
println!("{:?}", chunk);
}
}
for (i, mut state) in get_fds(state, orig_list).into_iter().enumerate().take(100) {
//let reifier = Reifier::new(&state);
println!("solution {}:", i);
display_list(&mut state, orig_list);
println!("");
//println!("state: {:?}\n", state);
}
}
|
get_fds
|
identifier_name
|
generate.rs
|
use fact_table::FactTable;
use program::Program;
use rand;
use std::cmp;
use std::collections::hash_map::{Entry, HashMap};
use std::fmt;
use truth_value::TruthValue;
use types::{Clause, Constant, Literal, Predicate, Term, TermIndex};
pub struct Generator<R>
where R: rand::Rng
{
rng: R,
max_predicate: Predicate,
num_terms: HashMap<Predicate, usize>,
max_constant: HashMap<(Predicate, TermIndex), Constant>,
}
impl<R> Generator<R>
where R: rand::Rng
{
pub fn new<T>(rng: R, facts: &FactTable<T>, program: &Program<T>) -> Self
where T: TruthValue
{
let max_predicate = program.num_predicates();
let max_constant = HashMap::new();
let mut result = Generator {
rng: rng,
max_predicate: max_predicate,
num_terms: program.predicate_num_terms().clone(),
max_constant: max_constant,
};
result.update_max_constant(facts);
return result;
}
pub fn update_num_terms<T>(&mut self, program: &Program<T>)
where T: TruthValue
{
self.num_terms = program.predicate_num_terms().clone();
}
pub fn update_max_constant<T>(&mut self, facts: &FactTable<T>)
where T: TruthValue
{
self.max_constant = facts.max_constant_table();
}
fn gen_predicate(&mut self) -> Predicate {
// 0 through max, inclusive.
self.rng.gen_range(0, 1 + self.max_predicate)
}
fn gen_constant(&mut self, predicate: Predicate, term_index: usize) -> Constant {
// Might want to increase by one.
let max_constant = 1 +
self.max_constant
.get(&(predicate, term_index))
.cloned()
.unwrap_or(0usize);
// Want inclusive range.
self.rng.gen_range(0, 1 + max_constant)
}
fn get_num_terms(&mut self, predicate: Predicate, max_new: usize) -> usize {
match self.num_terms.entry(predicate) {
Entry::Occupied(pair) => *pair.get(),
Entry::Vacant(pair) => {
let n = self.rng.gen_range(0, 1 + max_new);
pair.insert(n);
n
}
}
}
fn gen_head(&mut self, max_new_predicate_terms: usize) -> (Literal, usize) {
let predicate = self.gen_predicate();
let num_terms = self.get_num_terms(predicate, max_new_predicate_terms);
let num_output_variables = if self.rng.gen_weighted_bool(8) {
// With 1/8 probability, select a random number of output variables.
self.rng.gen_range(0, num_terms + 1usize)
} else {
// Otherwise, use all output terms as output_variables.
num_terms
};
let mut num_remaining_output_variables = num_output_variables;
let mut head_terms = Vec::with_capacity(num_terms);
for (term_index, num_remaining_output_terms) in (1..(1 + num_terms)).rev().enumerate() {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_remaining_output_variables as f64 /
num_remaining_output_terms as f64;
// TODO(zentner): This never uses the same output variable multiple times, which is
// valid.
let next_output_variable = num_output_variables - num_remaining_output_variables;
if self.rng.next_f64() <= prob_should_use_output_variable {
head_terms.push(Term::Variable(next_output_variable));
num_remaining_output_variables -= 1;
} else {
head_terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
}
}
let head = Literal::new_from_vec(predicate, head_terms);
assert!(num_remaining_output_variables == 0);
return (head, num_output_variables);
}
pub fn gen_clause(&mut self, max_body_len: usize, max_new_predicate_terms: usize) -> Clause {
let (head, num_output_variables) = self.gen_head(max_new_predicate_terms);
let body_len = self.rng.gen_range(1, 1 + max_body_len);
let mut body = Vec::with_capacity(body_len);
let mut unused_output_variables: Vec<_> =
(0..num_output_variables).map(|v| Some(v)).collect();
self.rng.shuffle(&mut unused_output_variables);
let mut num_unused_output_variables = num_output_variables;
let mut num_variables = num_output_variables;
let mut num_total_body_terms = 0;
let mut predicates = Vec::with_capacity(body_len);
let mut num_terms = Vec::with_capacity(body_len);
while num_total_body_terms < num_output_variables || predicates.len() < body_len {
let predicate = self.gen_predicate();
predicates.push(predicate);
let term_count = self.get_num_terms(predicate, max_new_predicate_terms);
num_terms.push(term_count);
num_total_body_terms += term_count;
}
let mut num_remaining_output_terms: usize = num_total_body_terms;
for (&predicate, &num_terms) in predicates.iter().zip(num_terms.iter()) {
let mut terms = Vec::with_capacity(num_terms);
for term_index in 0..num_terms {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_unused_output_variables as f64 /
num_remaining_output_terms as f64;
if self.rng.next_f64() <= prob_should_use_output_variable {
let output_variable;
loop {
if let Some(variable) = unused_output_variables.pop().unwrap() {
output_variable = variable;
break;
}
}
terms.push(Term::Variable(output_variable));
num_unused_output_variables -= 1;
} else {
if self.rng.gen_weighted_bool(3) {
// With 1/3 probability, output a constant.
terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
} else {
// Might want to increase the number of variables by two.
let variable = self.rng.gen_range(0, 2 + num_variables);
num_variables = cmp::max(num_variables, variable);
if variable < num_output_variables {
// Mark the corresponding output variable as used.
if let Some(index) = unused_output_variables
.iter()
.position(|&v| v == Some(variable)) {
unused_output_variables[index] = None;
num_unused_output_variables -= 1;
}
}
terms.push(Term::Variable(variable));
}
}
num_remaining_output_terms -= 1;
}
let literal = Literal::new_from_vec(predicate, terms);
body.push(literal);
}
let clause = Clause::new_from_vec(head, body);
return clause;
}
}
impl<R> fmt::Debug for Generator<R>
where R: rand::Rng
{
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f,
"Generator {{ rng: *, max_predicate: {:?}, num_terms: {:?}, max_constant: {:?} }}",
self.max_predicate,
self.num_terms,
self.max_constant)
}
}
#[cfg(test)]
mod tests {
use super::Generator;
use parser::program;
use rand::SeedableRng;
use rand::XorShiftRng;
#[test]
fn can_generate_a_clause()
|
}
|
{
let rng = XorShiftRng::from_seed([0xde, 0xad, 0xbe, 0xef]);
let (facts, program, _) = program::<()>(r#"
a(0).
a(1).
a(X) :- b(X)
"#)
.unwrap()
.0;
let mut generator = Generator::new(rng, &facts, &program);
println!("generated_clause = {:?}", generator.gen_clause(4, 8));
println!("generated_clause = {:?}", generator.gen_clause(100, 8));
}
|
identifier_body
|
generate.rs
|
use fact_table::FactTable;
use program::Program;
use rand;
use std::cmp;
use std::collections::hash_map::{Entry, HashMap};
use std::fmt;
use truth_value::TruthValue;
use types::{Clause, Constant, Literal, Predicate, Term, TermIndex};
pub struct Generator<R>
where R: rand::Rng
{
rng: R,
max_predicate: Predicate,
num_terms: HashMap<Predicate, usize>,
max_constant: HashMap<(Predicate, TermIndex), Constant>,
}
impl<R> Generator<R>
where R: rand::Rng
{
pub fn new<T>(rng: R, facts: &FactTable<T>, program: &Program<T>) -> Self
where T: TruthValue
{
let max_predicate = program.num_predicates();
let max_constant = HashMap::new();
let mut result = Generator {
rng: rng,
max_predicate: max_predicate,
num_terms: program.predicate_num_terms().clone(),
max_constant: max_constant,
};
result.update_max_constant(facts);
return result;
}
pub fn update_num_terms<T>(&mut self, program: &Program<T>)
where T: TruthValue
{
self.num_terms = program.predicate_num_terms().clone();
}
pub fn update_max_constant<T>(&mut self, facts: &FactTable<T>)
where T: TruthValue
{
self.max_constant = facts.max_constant_table();
}
fn gen_predicate(&mut self) -> Predicate {
// 0 through max, inclusive.
self.rng.gen_range(0, 1 + self.max_predicate)
}
fn gen_constant(&mut self, predicate: Predicate, term_index: usize) -> Constant {
// Might want to increase by one.
let max_constant = 1 +
self.max_constant
.get(&(predicate, term_index))
.cloned()
.unwrap_or(0usize);
// Want inclusive range.
self.rng.gen_range(0, 1 + max_constant)
}
fn get_num_terms(&mut self, predicate: Predicate, max_new: usize) -> usize {
match self.num_terms.entry(predicate) {
Entry::Occupied(pair) => *pair.get(),
Entry::Vacant(pair) => {
let n = self.rng.gen_range(0, 1 + max_new);
pair.insert(n);
n
}
}
}
fn gen_head(&mut self, max_new_predicate_terms: usize) -> (Literal, usize) {
let predicate = self.gen_predicate();
let num_terms = self.get_num_terms(predicate, max_new_predicate_terms);
let num_output_variables = if self.rng.gen_weighted_bool(8) {
// With 1/8 probability, select a random number of output variables.
self.rng.gen_range(0, num_terms + 1usize)
} else {
// Otherwise, use all output terms as output_variables.
num_terms
};
let mut num_remaining_output_variables = num_output_variables;
let mut head_terms = Vec::with_capacity(num_terms);
for (term_index, num_remaining_output_terms) in (1..(1 + num_terms)).rev().enumerate() {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_remaining_output_variables as f64 /
num_remaining_output_terms as f64;
// TODO(zentner): This never uses the same output variable multiple times, which is
// valid.
let next_output_variable = num_output_variables - num_remaining_output_variables;
if self.rng.next_f64() <= prob_should_use_output_variable {
head_terms.push(Term::Variable(next_output_variable));
num_remaining_output_variables -= 1;
} else {
head_terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
}
}
let head = Literal::new_from_vec(predicate, head_terms);
assert!(num_remaining_output_variables == 0);
return (head, num_output_variables);
}
pub fn gen_clause(&mut self, max_body_len: usize, max_new_predicate_terms: usize) -> Clause {
let (head, num_output_variables) = self.gen_head(max_new_predicate_terms);
let body_len = self.rng.gen_range(1, 1 + max_body_len);
let mut body = Vec::with_capacity(body_len);
let mut unused_output_variables: Vec<_> =
(0..num_output_variables).map(|v| Some(v)).collect();
self.rng.shuffle(&mut unused_output_variables);
let mut num_unused_output_variables = num_output_variables;
let mut num_variables = num_output_variables;
let mut num_total_body_terms = 0;
let mut predicates = Vec::with_capacity(body_len);
let mut num_terms = Vec::with_capacity(body_len);
while num_total_body_terms < num_output_variables || predicates.len() < body_len {
let predicate = self.gen_predicate();
predicates.push(predicate);
let term_count = self.get_num_terms(predicate, max_new_predicate_terms);
num_terms.push(term_count);
num_total_body_terms += term_count;
}
let mut num_remaining_output_terms: usize = num_total_body_terms;
for (&predicate, &num_terms) in predicates.iter().zip(num_terms.iter()) {
let mut terms = Vec::with_capacity(num_terms);
for term_index in 0..num_terms {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_unused_output_variables as f64 /
num_remaining_output_terms as f64;
if self.rng.next_f64() <= prob_should_use_output_variable {
let output_variable;
loop {
if let Some(variable) = unused_output_variables.pop().unwrap() {
output_variable = variable;
break;
}
}
terms.push(Term::Variable(output_variable));
num_unused_output_variables -= 1;
} else
|
num_remaining_output_terms -= 1;
}
let literal = Literal::new_from_vec(predicate, terms);
body.push(literal);
}
let clause = Clause::new_from_vec(head, body);
return clause;
}
}
impl<R> fmt::Debug for Generator<R>
where R: rand::Rng
{
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f,
"Generator {{ rng: *, max_predicate: {:?}, num_terms: {:?}, max_constant: {:?} }}",
self.max_predicate,
self.num_terms,
self.max_constant)
}
}
#[cfg(test)]
mod tests {
use super::Generator;
use parser::program;
use rand::SeedableRng;
use rand::XorShiftRng;
#[test]
fn can_generate_a_clause() {
let rng = XorShiftRng::from_seed([0xde, 0xad, 0xbe, 0xef]);
let (facts, program, _) = program::<()>(r#"
a(0).
a(1).
a(X) :- b(X)
"#)
.unwrap()
.0;
let mut generator = Generator::new(rng, &facts, &program);
println!("generated_clause = {:?}", generator.gen_clause(4, 8));
println!("generated_clause = {:?}", generator.gen_clause(100, 8));
}
}
|
{
if self.rng.gen_weighted_bool(3) {
// With 1/3 probability, output a constant.
terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
} else {
// Might want to increase the number of variables by two.
let variable = self.rng.gen_range(0, 2 + num_variables);
num_variables = cmp::max(num_variables, variable);
if variable < num_output_variables {
// Mark the corresponding output variable as used.
if let Some(index) = unused_output_variables
.iter()
.position(|&v| v == Some(variable)) {
unused_output_variables[index] = None;
num_unused_output_variables -= 1;
}
}
terms.push(Term::Variable(variable));
}
}
|
conditional_block
|
generate.rs
|
use fact_table::FactTable;
use program::Program;
use rand;
use std::cmp;
use std::collections::hash_map::{Entry, HashMap};
use std::fmt;
use truth_value::TruthValue;
use types::{Clause, Constant, Literal, Predicate, Term, TermIndex};
pub struct Generator<R>
where R: rand::Rng
{
rng: R,
max_predicate: Predicate,
num_terms: HashMap<Predicate, usize>,
max_constant: HashMap<(Predicate, TermIndex), Constant>,
}
impl<R> Generator<R>
where R: rand::Rng
{
pub fn new<T>(rng: R, facts: &FactTable<T>, program: &Program<T>) -> Self
where T: TruthValue
{
let max_predicate = program.num_predicates();
let max_constant = HashMap::new();
let mut result = Generator {
rng: rng,
max_predicate: max_predicate,
num_terms: program.predicate_num_terms().clone(),
max_constant: max_constant,
};
result.update_max_constant(facts);
return result;
}
pub fn update_num_terms<T>(&mut self, program: &Program<T>)
where T: TruthValue
{
self.num_terms = program.predicate_num_terms().clone();
}
pub fn update_max_constant<T>(&mut self, facts: &FactTable<T>)
where T: TruthValue
{
self.max_constant = facts.max_constant_table();
}
fn gen_predicate(&mut self) -> Predicate {
// 0 through max, inclusive.
self.rng.gen_range(0, 1 + self.max_predicate)
}
fn gen_constant(&mut self, predicate: Predicate, term_index: usize) -> Constant {
// Might want to increase by one.
let max_constant = 1 +
self.max_constant
.get(&(predicate, term_index))
.cloned()
.unwrap_or(0usize);
// Want inclusive range.
self.rng.gen_range(0, 1 + max_constant)
}
fn get_num_terms(&mut self, predicate: Predicate, max_new: usize) -> usize {
match self.num_terms.entry(predicate) {
Entry::Occupied(pair) => *pair.get(),
Entry::Vacant(pair) => {
let n = self.rng.gen_range(0, 1 + max_new);
pair.insert(n);
n
}
}
}
fn
|
(&mut self, max_new_predicate_terms: usize) -> (Literal, usize) {
let predicate = self.gen_predicate();
let num_terms = self.get_num_terms(predicate, max_new_predicate_terms);
let num_output_variables = if self.rng.gen_weighted_bool(8) {
// With 1/8 probability, select a random number of output variables.
self.rng.gen_range(0, num_terms + 1usize)
} else {
// Otherwise, use all output terms as output_variables.
num_terms
};
let mut num_remaining_output_variables = num_output_variables;
let mut head_terms = Vec::with_capacity(num_terms);
for (term_index, num_remaining_output_terms) in (1..(1 + num_terms)).rev().enumerate() {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_remaining_output_variables as f64 /
num_remaining_output_terms as f64;
// TODO(zentner): This never uses the same output variable multiple times, which is
// valid.
let next_output_variable = num_output_variables - num_remaining_output_variables;
if self.rng.next_f64() <= prob_should_use_output_variable {
head_terms.push(Term::Variable(next_output_variable));
num_remaining_output_variables -= 1;
} else {
head_terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
}
}
let head = Literal::new_from_vec(predicate, head_terms);
assert!(num_remaining_output_variables == 0);
return (head, num_output_variables);
}
pub fn gen_clause(&mut self, max_body_len: usize, max_new_predicate_terms: usize) -> Clause {
let (head, num_output_variables) = self.gen_head(max_new_predicate_terms);
let body_len = self.rng.gen_range(1, 1 + max_body_len);
let mut body = Vec::with_capacity(body_len);
let mut unused_output_variables: Vec<_> =
(0..num_output_variables).map(|v| Some(v)).collect();
self.rng.shuffle(&mut unused_output_variables);
let mut num_unused_output_variables = num_output_variables;
let mut num_variables = num_output_variables;
let mut num_total_body_terms = 0;
let mut predicates = Vec::with_capacity(body_len);
let mut num_terms = Vec::with_capacity(body_len);
while num_total_body_terms < num_output_variables || predicates.len() < body_len {
let predicate = self.gen_predicate();
predicates.push(predicate);
let term_count = self.get_num_terms(predicate, max_new_predicate_terms);
num_terms.push(term_count);
num_total_body_terms += term_count;
}
let mut num_remaining_output_terms: usize = num_total_body_terms;
for (&predicate, &num_terms) in predicates.iter().zip(num_terms.iter()) {
let mut terms = Vec::with_capacity(num_terms);
for term_index in 0..num_terms {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_unused_output_variables as f64 /
num_remaining_output_terms as f64;
if self.rng.next_f64() <= prob_should_use_output_variable {
let output_variable;
loop {
if let Some(variable) = unused_output_variables.pop().unwrap() {
output_variable = variable;
break;
}
}
terms.push(Term::Variable(output_variable));
num_unused_output_variables -= 1;
} else {
if self.rng.gen_weighted_bool(3) {
// With 1/3 probability, output a constant.
terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
} else {
// Might want to increase the number of variables by two.
let variable = self.rng.gen_range(0, 2 + num_variables);
num_variables = cmp::max(num_variables, variable);
if variable < num_output_variables {
// Mark the corresponding output variable as used.
if let Some(index) = unused_output_variables
.iter()
.position(|&v| v == Some(variable)) {
unused_output_variables[index] = None;
num_unused_output_variables -= 1;
}
}
terms.push(Term::Variable(variable));
}
}
num_remaining_output_terms -= 1;
}
let literal = Literal::new_from_vec(predicate, terms);
body.push(literal);
}
let clause = Clause::new_from_vec(head, body);
return clause;
}
}
impl<R> fmt::Debug for Generator<R>
where R: rand::Rng
{
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f,
"Generator {{ rng: *, max_predicate: {:?}, num_terms: {:?}, max_constant: {:?} }}",
self.max_predicate,
self.num_terms,
self.max_constant)
}
}
#[cfg(test)]
mod tests {
use super::Generator;
use parser::program;
use rand::SeedableRng;
use rand::XorShiftRng;
#[test]
fn can_generate_a_clause() {
let rng = XorShiftRng::from_seed([0xde, 0xad, 0xbe, 0xef]);
let (facts, program, _) = program::<()>(r#"
a(0).
a(1).
a(X) :- b(X)
"#)
.unwrap()
.0;
let mut generator = Generator::new(rng, &facts, &program);
println!("generated_clause = {:?}", generator.gen_clause(4, 8));
println!("generated_clause = {:?}", generator.gen_clause(100, 8));
}
}
|
gen_head
|
identifier_name
|
generate.rs
|
use fact_table::FactTable;
use program::Program;
use rand;
use std::cmp;
use std::collections::hash_map::{Entry, HashMap};
use std::fmt;
use truth_value::TruthValue;
use types::{Clause, Constant, Literal, Predicate, Term, TermIndex};
pub struct Generator<R>
where R: rand::Rng
{
rng: R,
max_predicate: Predicate,
num_terms: HashMap<Predicate, usize>,
max_constant: HashMap<(Predicate, TermIndex), Constant>,
}
impl<R> Generator<R>
where R: rand::Rng
{
pub fn new<T>(rng: R, facts: &FactTable<T>, program: &Program<T>) -> Self
where T: TruthValue
{
let max_predicate = program.num_predicates();
let max_constant = HashMap::new();
let mut result = Generator {
rng: rng,
max_predicate: max_predicate,
num_terms: program.predicate_num_terms().clone(),
max_constant: max_constant,
};
result.update_max_constant(facts);
return result;
}
pub fn update_num_terms<T>(&mut self, program: &Program<T>)
where T: TruthValue
{
self.num_terms = program.predicate_num_terms().clone();
}
pub fn update_max_constant<T>(&mut self, facts: &FactTable<T>)
where T: TruthValue
{
self.max_constant = facts.max_constant_table();
}
fn gen_predicate(&mut self) -> Predicate {
// 0 through max, inclusive.
self.rng.gen_range(0, 1 + self.max_predicate)
}
fn gen_constant(&mut self, predicate: Predicate, term_index: usize) -> Constant {
// Might want to increase by one.
let max_constant = 1 +
self.max_constant
.get(&(predicate, term_index))
.cloned()
.unwrap_or(0usize);
// Want inclusive range.
self.rng.gen_range(0, 1 + max_constant)
}
fn get_num_terms(&mut self, predicate: Predicate, max_new: usize) -> usize {
match self.num_terms.entry(predicate) {
Entry::Occupied(pair) => *pair.get(),
Entry::Vacant(pair) => {
let n = self.rng.gen_range(0, 1 + max_new);
pair.insert(n);
n
}
}
}
fn gen_head(&mut self, max_new_predicate_terms: usize) -> (Literal, usize) {
let predicate = self.gen_predicate();
let num_terms = self.get_num_terms(predicate, max_new_predicate_terms);
let num_output_variables = if self.rng.gen_weighted_bool(8) {
// With 1/8 probability, select a random number of output variables.
self.rng.gen_range(0, num_terms + 1usize)
} else {
// Otherwise, use all output terms as output_variables.
num_terms
};
let mut num_remaining_output_variables = num_output_variables;
let mut head_terms = Vec::with_capacity(num_terms);
for (term_index, num_remaining_output_terms) in (1..(1 + num_terms)).rev().enumerate() {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_remaining_output_variables as f64 /
num_remaining_output_terms as f64;
// TODO(zentner): This never uses the same output variable multiple times, which is
// valid.
let next_output_variable = num_output_variables - num_remaining_output_variables;
if self.rng.next_f64() <= prob_should_use_output_variable {
head_terms.push(Term::Variable(next_output_variable));
num_remaining_output_variables -= 1;
} else {
head_terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
}
}
let head = Literal::new_from_vec(predicate, head_terms);
assert!(num_remaining_output_variables == 0);
return (head, num_output_variables);
}
pub fn gen_clause(&mut self, max_body_len: usize, max_new_predicate_terms: usize) -> Clause {
let (head, num_output_variables) = self.gen_head(max_new_predicate_terms);
let body_len = self.rng.gen_range(1, 1 + max_body_len);
let mut body = Vec::with_capacity(body_len);
let mut unused_output_variables: Vec<_> =
(0..num_output_variables).map(|v| Some(v)).collect();
self.rng.shuffle(&mut unused_output_variables);
let mut num_unused_output_variables = num_output_variables;
let mut num_variables = num_output_variables;
let mut num_total_body_terms = 0;
let mut predicates = Vec::with_capacity(body_len);
let mut num_terms = Vec::with_capacity(body_len);
while num_total_body_terms < num_output_variables || predicates.len() < body_len {
let predicate = self.gen_predicate();
predicates.push(predicate);
let term_count = self.get_num_terms(predicate, max_new_predicate_terms);
num_terms.push(term_count);
num_total_body_terms += term_count;
}
let mut num_remaining_output_terms: usize = num_total_body_terms;
for (&predicate, &num_terms) in predicates.iter().zip(num_terms.iter()) {
let mut terms = Vec::with_capacity(num_terms);
for term_index in 0..num_terms {
// If every remaining output term must be filled.
let prob_should_use_output_variable = num_unused_output_variables as f64 /
num_remaining_output_terms as f64;
if self.rng.next_f64() <= prob_should_use_output_variable {
let output_variable;
loop {
if let Some(variable) = unused_output_variables.pop().unwrap() {
output_variable = variable;
break;
}
}
terms.push(Term::Variable(output_variable));
num_unused_output_variables -= 1;
} else {
if self.rng.gen_weighted_bool(3) {
// With 1/3 probability, output a constant.
terms.push(Term::Constant(self.gen_constant(predicate, term_index)));
} else {
// Might want to increase the number of variables by two.
let variable = self.rng.gen_range(0, 2 + num_variables);
num_variables = cmp::max(num_variables, variable);
if variable < num_output_variables {
// Mark the corresponding output variable as used.
if let Some(index) = unused_output_variables
.iter()
.position(|&v| v == Some(variable)) {
unused_output_variables[index] = None;
num_unused_output_variables -= 1;
}
}
terms.push(Term::Variable(variable));
}
}
num_remaining_output_terms -= 1;
}
let literal = Literal::new_from_vec(predicate, terms);
body.push(literal);
}
let clause = Clause::new_from_vec(head, body);
return clause;
}
}
impl<R> fmt::Debug for Generator<R>
where R: rand::Rng
{
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f,
"Generator {{ rng: *, max_predicate: {:?}, num_terms: {:?}, max_constant: {:?} }}",
self.max_predicate,
|
self.max_constant)
}
}
#[cfg(test)]
mod tests {
use super::Generator;
use parser::program;
use rand::SeedableRng;
use rand::XorShiftRng;
#[test]
fn can_generate_a_clause() {
let rng = XorShiftRng::from_seed([0xde, 0xad, 0xbe, 0xef]);
let (facts, program, _) = program::<()>(r#"
a(0).
a(1).
a(X) :- b(X)
"#)
.unwrap()
.0;
let mut generator = Generator::new(rng, &facts, &program);
println!("generated_clause = {:?}", generator.gen_clause(4, 8));
println!("generated_clause = {:?}", generator.gen_clause(100, 8));
}
}
|
self.num_terms,
|
random_line_split
|
webglframebuffer.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use dom::bindings::codegen::Bindings::WebGLFramebufferBinding;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::webglobject::WebGLObject;
use canvas_traits::{CanvasMsg, CanvasWebGLMsg, WebGLFramebufferBindingRequest};
use ipc_channel::ipc::{self, IpcSender};
use std::cell::Cell;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct WebGLFramebuffer {
webgl_object: WebGLObject,
id: u32,
is_deleted: Cell<bool>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
renderer: IpcSender<CanvasMsg>,
}
impl WebGLFramebuffer {
fn
|
(renderer: IpcSender<CanvasMsg>, id: u32) -> WebGLFramebuffer {
WebGLFramebuffer {
webgl_object: WebGLObject::new_inherited(),
id: id,
is_deleted: Cell::new(false),
renderer: renderer,
}
}
pub fn maybe_new(global: GlobalRef, renderer: IpcSender<CanvasMsg>)
-> Option<Root<WebGLFramebuffer>> {
let (sender, receiver) = ipc::channel().unwrap();
renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::CreateFramebuffer(sender))).unwrap();
let result = receiver.recv().unwrap();
result.map(|fb_id| WebGLFramebuffer::new(global, renderer, *fb_id))
}
pub fn new(global: GlobalRef, renderer: IpcSender<CanvasMsg>, id: u32)
-> Root<WebGLFramebuffer> {
reflect_dom_object(box WebGLFramebuffer::new_inherited(renderer, id), global, WebGLFramebufferBinding::Wrap)
}
}
pub trait WebGLFramebufferHelpers {
fn id(self) -> u32;
fn bind(self, target: u32);
fn delete(self);
}
impl<'a> WebGLFramebufferHelpers for &'a WebGLFramebuffer {
fn id(self) -> u32 {
self.id
}
fn bind(self, target: u32) {
let cmd = CanvasWebGLMsg::BindFramebuffer(target, WebGLFramebufferBindingRequest::Explicit(self.id));
self.renderer.send(CanvasMsg::WebGL(cmd)).unwrap();
}
fn delete(self) {
if !self.is_deleted.get() {
self.is_deleted.set(true);
self.renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::DeleteFramebuffer(self.id))).unwrap();
}
}
}
|
new_inherited
|
identifier_name
|
webglframebuffer.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use dom::bindings::codegen::Bindings::WebGLFramebufferBinding;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::webglobject::WebGLObject;
use canvas_traits::{CanvasMsg, CanvasWebGLMsg, WebGLFramebufferBindingRequest};
use ipc_channel::ipc::{self, IpcSender};
use std::cell::Cell;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct WebGLFramebuffer {
webgl_object: WebGLObject,
id: u32,
is_deleted: Cell<bool>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
renderer: IpcSender<CanvasMsg>,
}
impl WebGLFramebuffer {
fn new_inherited(renderer: IpcSender<CanvasMsg>, id: u32) -> WebGLFramebuffer {
WebGLFramebuffer {
webgl_object: WebGLObject::new_inherited(),
id: id,
is_deleted: Cell::new(false),
renderer: renderer,
}
}
pub fn maybe_new(global: GlobalRef, renderer: IpcSender<CanvasMsg>)
-> Option<Root<WebGLFramebuffer>> {
let (sender, receiver) = ipc::channel().unwrap();
renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::CreateFramebuffer(sender))).unwrap();
let result = receiver.recv().unwrap();
result.map(|fb_id| WebGLFramebuffer::new(global, renderer, *fb_id))
}
pub fn new(global: GlobalRef, renderer: IpcSender<CanvasMsg>, id: u32)
-> Root<WebGLFramebuffer> {
reflect_dom_object(box WebGLFramebuffer::new_inherited(renderer, id), global, WebGLFramebufferBinding::Wrap)
}
}
pub trait WebGLFramebufferHelpers {
fn id(self) -> u32;
fn bind(self, target: u32);
fn delete(self);
}
impl<'a> WebGLFramebufferHelpers for &'a WebGLFramebuffer {
fn id(self) -> u32 {
self.id
}
fn bind(self, target: u32) {
let cmd = CanvasWebGLMsg::BindFramebuffer(target, WebGLFramebufferBindingRequest::Explicit(self.id));
self.renderer.send(CanvasMsg::WebGL(cmd)).unwrap();
}
fn delete(self) {
if !self.is_deleted.get()
|
}
}
|
{
self.is_deleted.set(true);
self.renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::DeleteFramebuffer(self.id))).unwrap();
}
|
conditional_block
|
webglframebuffer.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use dom::bindings::codegen::Bindings::WebGLFramebufferBinding;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::webglobject::WebGLObject;
use canvas_traits::{CanvasMsg, CanvasWebGLMsg, WebGLFramebufferBindingRequest};
use ipc_channel::ipc::{self, IpcSender};
use std::cell::Cell;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct WebGLFramebuffer {
webgl_object: WebGLObject,
id: u32,
is_deleted: Cell<bool>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
renderer: IpcSender<CanvasMsg>,
}
impl WebGLFramebuffer {
fn new_inherited(renderer: IpcSender<CanvasMsg>, id: u32) -> WebGLFramebuffer {
WebGLFramebuffer {
webgl_object: WebGLObject::new_inherited(),
id: id,
is_deleted: Cell::new(false),
renderer: renderer,
}
}
pub fn maybe_new(global: GlobalRef, renderer: IpcSender<CanvasMsg>)
-> Option<Root<WebGLFramebuffer>> {
let (sender, receiver) = ipc::channel().unwrap();
renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::CreateFramebuffer(sender))).unwrap();
let result = receiver.recv().unwrap();
result.map(|fb_id| WebGLFramebuffer::new(global, renderer, *fb_id))
}
pub fn new(global: GlobalRef, renderer: IpcSender<CanvasMsg>, id: u32)
-> Root<WebGLFramebuffer> {
reflect_dom_object(box WebGLFramebuffer::new_inherited(renderer, id), global, WebGLFramebufferBinding::Wrap)
}
}
pub trait WebGLFramebufferHelpers {
fn id(self) -> u32;
fn bind(self, target: u32);
fn delete(self);
}
impl<'a> WebGLFramebufferHelpers for &'a WebGLFramebuffer {
fn id(self) -> u32 {
self.id
}
fn bind(self, target: u32)
|
fn delete(self) {
if !self.is_deleted.get() {
self.is_deleted.set(true);
self.renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::DeleteFramebuffer(self.id))).unwrap();
}
}
}
|
{
let cmd = CanvasWebGLMsg::BindFramebuffer(target, WebGLFramebufferBindingRequest::Explicit(self.id));
self.renderer.send(CanvasMsg::WebGL(cmd)).unwrap();
}
|
identifier_body
|
webglframebuffer.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use dom::bindings::codegen::Bindings::WebGLFramebufferBinding;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::webglobject::WebGLObject;
use canvas_traits::{CanvasMsg, CanvasWebGLMsg, WebGLFramebufferBindingRequest};
use ipc_channel::ipc::{self, IpcSender};
use std::cell::Cell;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct WebGLFramebuffer {
webgl_object: WebGLObject,
id: u32,
is_deleted: Cell<bool>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
renderer: IpcSender<CanvasMsg>,
}
impl WebGLFramebuffer {
fn new_inherited(renderer: IpcSender<CanvasMsg>, id: u32) -> WebGLFramebuffer {
WebGLFramebuffer {
webgl_object: WebGLObject::new_inherited(),
id: id,
is_deleted: Cell::new(false),
renderer: renderer,
}
}
pub fn maybe_new(global: GlobalRef, renderer: IpcSender<CanvasMsg>)
-> Option<Root<WebGLFramebuffer>> {
let (sender, receiver) = ipc::channel().unwrap();
renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::CreateFramebuffer(sender))).unwrap();
let result = receiver.recv().unwrap();
result.map(|fb_id| WebGLFramebuffer::new(global, renderer, *fb_id))
}
pub fn new(global: GlobalRef, renderer: IpcSender<CanvasMsg>, id: u32)
-> Root<WebGLFramebuffer> {
|
pub trait WebGLFramebufferHelpers {
fn id(self) -> u32;
fn bind(self, target: u32);
fn delete(self);
}
impl<'a> WebGLFramebufferHelpers for &'a WebGLFramebuffer {
fn id(self) -> u32 {
self.id
}
fn bind(self, target: u32) {
let cmd = CanvasWebGLMsg::BindFramebuffer(target, WebGLFramebufferBindingRequest::Explicit(self.id));
self.renderer.send(CanvasMsg::WebGL(cmd)).unwrap();
}
fn delete(self) {
if !self.is_deleted.get() {
self.is_deleted.set(true);
self.renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::DeleteFramebuffer(self.id))).unwrap();
}
}
}
|
reflect_dom_object(box WebGLFramebuffer::new_inherited(renderer, id), global, WebGLFramebufferBinding::Wrap)
}
}
|
random_line_split
|
atomic_store_rel.rs
|
#![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::atomic_store_rel;
use core::cell::UnsafeCell;
use std::sync::Arc;
use std::thread;
// pub fn atomic_store_rel<T>(dst: *mut T, val: T);
struct A<T> {
v: UnsafeCell<T>
}
unsafe impl Sync for A<T> {}
impl<T> A<T> {
fn new(v: T) -> A<T> {
A { v: UnsafeCell::<T>::new(v) }
}
}
type T = usize;
macro_rules! atomic_store_rel_test {
($init:expr, $value:expr) => ({
let value: T = $init;
let a: A<T> = A::<T>::new(value);
let data: Arc<A<T>> = Arc::<A<T>>::new(a);
let clone: Arc<A<T>> = data.clone();
thread::spawn(move || {
let dst: *mut T = clone.v.get();
let val: T = $value;
unsafe { atomic_store_rel::<T>(dst, val) };
});
thread::sleep_ms(10);
let ptr: *mut T = data.v.get();
assert_eq!(unsafe { *ptr }, $value);
})
}
|
#[test]
fn atomic_store_rel_test1() {
atomic_store_rel_test!( 68, 500 );
}
}
|
random_line_split
|
|
atomic_store_rel.rs
|
#![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::atomic_store_rel;
use core::cell::UnsafeCell;
use std::sync::Arc;
use std::thread;
// pub fn atomic_store_rel<T>(dst: *mut T, val: T);
struct
|
<T> {
v: UnsafeCell<T>
}
unsafe impl Sync for A<T> {}
impl<T> A<T> {
fn new(v: T) -> A<T> {
A { v: UnsafeCell::<T>::new(v) }
}
}
type T = usize;
macro_rules! atomic_store_rel_test {
($init:expr, $value:expr) => ({
let value: T = $init;
let a: A<T> = A::<T>::new(value);
let data: Arc<A<T>> = Arc::<A<T>>::new(a);
let clone: Arc<A<T>> = data.clone();
thread::spawn(move || {
let dst: *mut T = clone.v.get();
let val: T = $value;
unsafe { atomic_store_rel::<T>(dst, val) };
});
thread::sleep_ms(10);
let ptr: *mut T = data.v.get();
assert_eq!(unsafe { *ptr }, $value);
})
}
#[test]
fn atomic_store_rel_test1() {
atomic_store_rel_test!( 68, 500 );
}
}
|
A
|
identifier_name
|
atomic_store_rel.rs
|
#![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::atomic_store_rel;
use core::cell::UnsafeCell;
use std::sync::Arc;
use std::thread;
// pub fn atomic_store_rel<T>(dst: *mut T, val: T);
struct A<T> {
v: UnsafeCell<T>
}
unsafe impl Sync for A<T> {}
impl<T> A<T> {
fn new(v: T) -> A<T> {
A { v: UnsafeCell::<T>::new(v) }
}
}
type T = usize;
macro_rules! atomic_store_rel_test {
($init:expr, $value:expr) => ({
let value: T = $init;
let a: A<T> = A::<T>::new(value);
let data: Arc<A<T>> = Arc::<A<T>>::new(a);
let clone: Arc<A<T>> = data.clone();
thread::spawn(move || {
let dst: *mut T = clone.v.get();
let val: T = $value;
unsafe { atomic_store_rel::<T>(dst, val) };
});
thread::sleep_ms(10);
let ptr: *mut T = data.v.get();
assert_eq!(unsafe { *ptr }, $value);
})
}
#[test]
fn atomic_store_rel_test1()
|
}
|
{
atomic_store_rel_test!( 68, 500 );
}
|
identifier_body
|
asm-in-out-operand.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(asm)]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
unsafe fn next_power_of_2(n: u32) -> u32 {
let mut tmp = n;
asm!("dec $0" : "+rm"(tmp) :: "cc");
let mut shift = 1_u32;
while shift <= 16 {
asm!(
"shr %cl, $2
or $2, $0
shl $$1, $1"
: "+&rm"(tmp), "+{ecx}"(shift) : "r"(tmp) : "cc"
);
}
asm!("inc $0" : "+rm"(tmp) :: "cc");
return tmp;
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub fn main() {
unsafe {
assert_eq!(64, next_power_of_2(37));
assert_eq!(2147483648, next_power_of_2(2147483647));
}
let mut y: isize = 5;
let x: isize;
unsafe {
// Treat the output as initialization.
asm!(
"shl $2, $1
add $3, $1
mov $1, $0"
: "=r"(x), "+r"(y) : "i"(3_usize), "ir"(7_usize) : "cc"
);
}
assert_eq!(x, 47);
assert_eq!(y, 47);
let mut x = x + 1;
assert_eq!(x, 48);
unsafe {
// Assignment to mutable.
// Early clobber "&":
// Forbids the use of a single register by both operands.
asm!("shr $$2, $1; add $1, $0" : "+&r"(x) : "r"(x) : "cc");
}
assert_eq!(x, 60);
}
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
pub fn
|
() {}
|
main
|
identifier_name
|
asm-in-out-operand.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(asm)]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
unsafe fn next_power_of_2(n: u32) -> u32 {
let mut tmp = n;
asm!("dec $0" : "+rm"(tmp) :: "cc");
let mut shift = 1_u32;
while shift <= 16 {
asm!(
"shr %cl, $2
or $2, $0
shl $$1, $1"
: "+&rm"(tmp), "+{ecx}"(shift) : "r"(tmp) : "cc"
);
}
asm!("inc $0" : "+rm"(tmp) :: "cc");
return tmp;
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub fn main() {
unsafe {
assert_eq!(64, next_power_of_2(37));
assert_eq!(2147483648, next_power_of_2(2147483647));
}
let mut y: isize = 5;
let x: isize;
unsafe {
// Treat the output as initialization.
asm!(
"shl $2, $1
add $3, $1
mov $1, $0"
: "=r"(x), "+r"(y) : "i"(3_usize), "ir"(7_usize) : "cc"
);
}
assert_eq!(x, 47);
assert_eq!(y, 47);
let mut x = x + 1;
assert_eq!(x, 48);
unsafe {
// Assignment to mutable.
// Early clobber "&":
// Forbids the use of a single register by both operands.
asm!("shr $$2, $1; add $1, $0" : "+&r"(x) : "r"(x) : "cc");
}
assert_eq!(x, 60);
}
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
pub fn main()
|
{}
|
identifier_body
|
|
asm-in-out-operand.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(asm)]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
unsafe fn next_power_of_2(n: u32) -> u32 {
let mut tmp = n;
asm!("dec $0" : "+rm"(tmp) :: "cc");
let mut shift = 1_u32;
while shift <= 16 {
asm!(
"shr %cl, $2
or $2, $0
shl $$1, $1"
: "+&rm"(tmp), "+{ecx}"(shift) : "r"(tmp) : "cc"
);
}
asm!("inc $0" : "+rm"(tmp) :: "cc");
return tmp;
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub fn main() {
unsafe {
assert_eq!(64, next_power_of_2(37));
assert_eq!(2147483648, next_power_of_2(2147483647));
}
let mut y: isize = 5;
let x: isize;
unsafe {
// Treat the output as initialization.
asm!(
"shl $2, $1
add $3, $1
mov $1, $0"
: "=r"(x), "+r"(y) : "i"(3_usize), "ir"(7_usize) : "cc"
);
}
|
assert_eq!(x, 47);
assert_eq!(y, 47);
let mut x = x + 1;
assert_eq!(x, 48);
unsafe {
// Assignment to mutable.
// Early clobber "&":
// Forbids the use of a single register by both operands.
asm!("shr $$2, $1; add $1, $0" : "+&r"(x) : "r"(x) : "cc");
}
assert_eq!(x, 60);
}
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
pub fn main() {}
|
random_line_split
|
|
common.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types, non_upper_case_globals)]
pub use self::astencode_tag::*;
use back::svh::Svh;
// RBML enum definitions and utils shared by the encoder and decoder
//
// 0x00..0x1f: reserved for RBML generic type tags
// 0x20..0xef: free for use, preferred for frequent tags
// 0xf0..0xff: internally used by RBML to encode 0x100..0xfff in two bytes
// 0x100..0xfff: free for use, preferred for infrequent tags
pub const tag_items: uint = 0x100; // top-level only
pub const tag_paths_data_name: uint = 0x20;
pub const tag_def_id: uint = 0x21;
pub const tag_items_data: uint = 0x22;
pub const tag_items_data_item: uint = 0x23;
pub const tag_items_data_item_family: uint = 0x24;
pub const tag_items_data_item_type: uint = 0x25;
pub const tag_items_data_item_symbol: uint = 0x26;
pub const tag_items_data_item_variant: uint = 0x27;
pub const tag_items_data_parent_item: uint = 0x28;
pub const tag_items_data_item_is_tuple_struct_ctor: uint = 0x29;
pub const tag_index: uint = 0x2a;
pub const tag_index_buckets: uint = 0x2b;
pub const tag_index_buckets_bucket: uint = 0x2c;
pub const tag_index_buckets_bucket_elt: uint = 0x2d;
pub const tag_index_table: uint = 0x2e;
pub const tag_meta_item_name_value: uint = 0x2f;
pub const tag_meta_item_name: uint = 0x30;
pub const tag_meta_item_value: uint = 0x31;
pub const tag_attributes: uint = 0x101; // top-level only
pub const tag_attribute: uint = 0x32;
pub const tag_meta_item_word: uint = 0x33;
pub const tag_meta_item_list: uint = 0x34;
// The list of crates that this crate depends on
pub const tag_crate_deps: uint = 0x102; // top-level only
// A single crate dependency
pub const tag_crate_dep: uint = 0x35;
pub const tag_crate_hash: uint = 0x103; // top-level only
pub const tag_crate_crate_name: uint = 0x104; // top-level only
pub const tag_crate_dep_crate_name: uint = 0x36;
pub const tag_crate_dep_hash: uint = 0x37;
pub const tag_mod_impl: uint = 0x38;
pub const tag_item_trait_item: uint = 0x39;
pub const tag_item_trait_ref: uint = 0x3a;
// discriminator value for variants
pub const tag_disr_val: uint = 0x3c;
// used to encode ast_map::PathElem
pub const tag_path: uint = 0x3d;
pub const tag_path_len: uint = 0x3e;
pub const tag_path_elem_mod: uint = 0x3f;
pub const tag_path_elem_name: uint = 0x40;
pub const tag_item_field: uint = 0x41;
pub const tag_item_field_origin: uint = 0x42;
pub const tag_item_variances: uint = 0x43;
/*
trait items contain tag_item_trait_item elements,
impl items contain tag_item_impl_item elements, and classes
have both. That's because some code treats classes like traits,
and other code treats them like impls. Because classes can contain
both, tag_item_trait_item and tag_item_impl_item have to be two
different tags.
*/
pub const tag_item_impl_item: uint = 0x44;
pub const tag_item_trait_method_explicit_self: uint = 0x45;
// Reexports are found within module tags. Each reexport contains def_ids
// and names.
pub const tag_items_data_item_reexport: uint = 0x46;
pub const tag_items_data_item_reexport_def_id: uint = 0x47;
pub const tag_items_data_item_reexport_name: uint = 0x48;
// used to encode crate_ctxt side tables
#[derive(Copy, PartialEq, FromPrimitive)]
#[repr(uint)]
pub enum astencode_tag { // Reserves 0x50 -- 0x6f
tag_ast = 0x50,
tag_tree = 0x51,
tag_id_range = 0x52,
tag_table = 0x53,
// GAP 0x54, 0x55
tag_table_def = 0x56,
tag_table_node_type = 0x57,
tag_table_item_subst = 0x58,
tag_table_freevars = 0x59,
tag_table_tcache = 0x5a,
tag_table_param_defs = 0x5b,
tag_table_mutbl = 0x5c,
tag_table_last_use = 0x5d,
tag_table_spill = 0x5e,
tag_table_method_map = 0x5f,
tag_table_vtable_map = 0x60,
tag_table_adjustments = 0x61,
tag_table_moves_map = 0x62,
tag_table_capture_map = 0x63,
tag_table_closure_tys = 0x64,
tag_table_closure_kinds = 0x65,
tag_table_upvar_capture_map = 0x66,
tag_table_capture_modes = 0x67,
tag_table_object_cast_map = 0x68,
tag_table_const_qualif = 0x69,
}
pub const tag_item_trait_item_sort: uint = 0x70;
pub const tag_item_trait_parent_sort: uint = 0x71;
pub const tag_item_impl_type_basename: uint = 0x72;
pub const tag_crate_triple: uint = 0x105; // top-level only
pub const tag_dylib_dependency_formats: uint = 0x106; // top-level only
// Language items are a top-level directory (for speed). Hierarchy:
//
// tag_lang_items
// - tag_lang_items_item
// - tag_lang_items_item_id: u32
// - tag_lang_items_item_node_id: u32
pub const tag_lang_items: uint = 0x107; // top-level only
pub const tag_lang_items_item: uint = 0x73;
pub const tag_lang_items_item_id: uint = 0x74;
pub const tag_lang_items_item_node_id: uint = 0x75;
pub const tag_lang_items_missing: uint = 0x76;
|
pub const tag_item_method_tps: uint = 0x79;
pub const tag_item_method_fty: uint = 0x7a;
pub const tag_mod_child: uint = 0x7b;
pub const tag_misc_info: uint = 0x108; // top-level only
pub const tag_misc_info_crate_items: uint = 0x7c;
pub const tag_item_method_provided_source: uint = 0x7d;
pub const tag_item_impl_vtables: uint = 0x7e;
pub const tag_impls: uint = 0x109; // top-level only
pub const tag_impls_impl: uint = 0x7f;
pub const tag_items_data_item_inherent_impl: uint = 0x80;
pub const tag_items_data_item_extension_impl: uint = 0x81;
pub const tag_native_libraries: uint = 0x10a; // top-level only
pub const tag_native_libraries_lib: uint = 0x82;
pub const tag_native_libraries_name: uint = 0x83;
pub const tag_native_libraries_kind: uint = 0x84;
pub const tag_plugin_registrar_fn: uint = 0x10b; // top-level only
pub const tag_method_argument_names: uint = 0x85;
pub const tag_method_argument_name: uint = 0x86;
pub const tag_reachable_extern_fns: uint = 0x10c; // top-level only
pub const tag_reachable_extern_fn_id: uint = 0x87;
pub const tag_items_data_item_stability: uint = 0x88;
pub const tag_items_data_item_repr: uint = 0x89;
#[derive(Clone, Debug)]
pub struct LinkMeta {
pub crate_name: String,
pub crate_hash: Svh,
}
pub const tag_struct_fields: uint = 0x10d; // top-level only
pub const tag_struct_field: uint = 0x8a;
pub const tag_struct_field_id: uint = 0x8b;
pub const tag_attribute_is_sugared_doc: uint = 0x8c;
pub const tag_items_data_region: uint = 0x8e;
pub const tag_region_param_def: uint = 0x8f;
pub const tag_region_param_def_ident: uint = 0x90;
pub const tag_region_param_def_def_id: uint = 0x91;
pub const tag_region_param_def_space: uint = 0x92;
pub const tag_region_param_def_index: uint = 0x93;
pub const tag_type_param_def: uint = 0x94;
pub const tag_item_generics: uint = 0x95;
pub const tag_method_ty_generics: uint = 0x96;
pub const tag_predicate: uint = 0x97;
pub const tag_predicate_space: uint = 0x98;
pub const tag_predicate_data: uint = 0x99;
pub const tag_unsafety: uint = 0x9a;
pub const tag_associated_type_names: uint = 0x9b;
pub const tag_associated_type_name: uint = 0x9c;
pub const tag_polarity: uint = 0x9d;
pub const tag_macro_defs: uint = 0x10e; // top-level only
pub const tag_macro_def: uint = 0x9e;
pub const tag_macro_def_body: uint = 0x9f;
pub const tag_paren_sugar: uint = 0xa0;
pub const tag_codemap: uint = 0xa1;
pub const tag_codemap_filemap: uint = 0xa2;
pub const tag_item_super_predicates: uint = 0xa3;
pub const tag_defaulted_trait: uint = 0xa4;
|
pub const tag_item_unnamed_field: uint = 0x77;
pub const tag_items_data_item_visibility: uint = 0x78;
|
random_line_split
|
common.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types, non_upper_case_globals)]
pub use self::astencode_tag::*;
use back::svh::Svh;
// RBML enum definitions and utils shared by the encoder and decoder
//
// 0x00..0x1f: reserved for RBML generic type tags
// 0x20..0xef: free for use, preferred for frequent tags
// 0xf0..0xff: internally used by RBML to encode 0x100..0xfff in two bytes
// 0x100..0xfff: free for use, preferred for infrequent tags
pub const tag_items: uint = 0x100; // top-level only
pub const tag_paths_data_name: uint = 0x20;
pub const tag_def_id: uint = 0x21;
pub const tag_items_data: uint = 0x22;
pub const tag_items_data_item: uint = 0x23;
pub const tag_items_data_item_family: uint = 0x24;
pub const tag_items_data_item_type: uint = 0x25;
pub const tag_items_data_item_symbol: uint = 0x26;
pub const tag_items_data_item_variant: uint = 0x27;
pub const tag_items_data_parent_item: uint = 0x28;
pub const tag_items_data_item_is_tuple_struct_ctor: uint = 0x29;
pub const tag_index: uint = 0x2a;
pub const tag_index_buckets: uint = 0x2b;
pub const tag_index_buckets_bucket: uint = 0x2c;
pub const tag_index_buckets_bucket_elt: uint = 0x2d;
pub const tag_index_table: uint = 0x2e;
pub const tag_meta_item_name_value: uint = 0x2f;
pub const tag_meta_item_name: uint = 0x30;
pub const tag_meta_item_value: uint = 0x31;
pub const tag_attributes: uint = 0x101; // top-level only
pub const tag_attribute: uint = 0x32;
pub const tag_meta_item_word: uint = 0x33;
pub const tag_meta_item_list: uint = 0x34;
// The list of crates that this crate depends on
pub const tag_crate_deps: uint = 0x102; // top-level only
// A single crate dependency
pub const tag_crate_dep: uint = 0x35;
pub const tag_crate_hash: uint = 0x103; // top-level only
pub const tag_crate_crate_name: uint = 0x104; // top-level only
pub const tag_crate_dep_crate_name: uint = 0x36;
pub const tag_crate_dep_hash: uint = 0x37;
pub const tag_mod_impl: uint = 0x38;
pub const tag_item_trait_item: uint = 0x39;
pub const tag_item_trait_ref: uint = 0x3a;
// discriminator value for variants
pub const tag_disr_val: uint = 0x3c;
// used to encode ast_map::PathElem
pub const tag_path: uint = 0x3d;
pub const tag_path_len: uint = 0x3e;
pub const tag_path_elem_mod: uint = 0x3f;
pub const tag_path_elem_name: uint = 0x40;
pub const tag_item_field: uint = 0x41;
pub const tag_item_field_origin: uint = 0x42;
pub const tag_item_variances: uint = 0x43;
/*
trait items contain tag_item_trait_item elements,
impl items contain tag_item_impl_item elements, and classes
have both. That's because some code treats classes like traits,
and other code treats them like impls. Because classes can contain
both, tag_item_trait_item and tag_item_impl_item have to be two
different tags.
*/
pub const tag_item_impl_item: uint = 0x44;
pub const tag_item_trait_method_explicit_self: uint = 0x45;
// Reexports are found within module tags. Each reexport contains def_ids
// and names.
pub const tag_items_data_item_reexport: uint = 0x46;
pub const tag_items_data_item_reexport_def_id: uint = 0x47;
pub const tag_items_data_item_reexport_name: uint = 0x48;
// used to encode crate_ctxt side tables
#[derive(Copy, PartialEq, FromPrimitive)]
#[repr(uint)]
pub enum
|
{ // Reserves 0x50 -- 0x6f
tag_ast = 0x50,
tag_tree = 0x51,
tag_id_range = 0x52,
tag_table = 0x53,
// GAP 0x54, 0x55
tag_table_def = 0x56,
tag_table_node_type = 0x57,
tag_table_item_subst = 0x58,
tag_table_freevars = 0x59,
tag_table_tcache = 0x5a,
tag_table_param_defs = 0x5b,
tag_table_mutbl = 0x5c,
tag_table_last_use = 0x5d,
tag_table_spill = 0x5e,
tag_table_method_map = 0x5f,
tag_table_vtable_map = 0x60,
tag_table_adjustments = 0x61,
tag_table_moves_map = 0x62,
tag_table_capture_map = 0x63,
tag_table_closure_tys = 0x64,
tag_table_closure_kinds = 0x65,
tag_table_upvar_capture_map = 0x66,
tag_table_capture_modes = 0x67,
tag_table_object_cast_map = 0x68,
tag_table_const_qualif = 0x69,
}
pub const tag_item_trait_item_sort: uint = 0x70;
pub const tag_item_trait_parent_sort: uint = 0x71;
pub const tag_item_impl_type_basename: uint = 0x72;
pub const tag_crate_triple: uint = 0x105; // top-level only
pub const tag_dylib_dependency_formats: uint = 0x106; // top-level only
// Language items are a top-level directory (for speed). Hierarchy:
//
// tag_lang_items
// - tag_lang_items_item
// - tag_lang_items_item_id: u32
// - tag_lang_items_item_node_id: u32
pub const tag_lang_items: uint = 0x107; // top-level only
pub const tag_lang_items_item: uint = 0x73;
pub const tag_lang_items_item_id: uint = 0x74;
pub const tag_lang_items_item_node_id: uint = 0x75;
pub const tag_lang_items_missing: uint = 0x76;
pub const tag_item_unnamed_field: uint = 0x77;
pub const tag_items_data_item_visibility: uint = 0x78;
pub const tag_item_method_tps: uint = 0x79;
pub const tag_item_method_fty: uint = 0x7a;
pub const tag_mod_child: uint = 0x7b;
pub const tag_misc_info: uint = 0x108; // top-level only
pub const tag_misc_info_crate_items: uint = 0x7c;
pub const tag_item_method_provided_source: uint = 0x7d;
pub const tag_item_impl_vtables: uint = 0x7e;
pub const tag_impls: uint = 0x109; // top-level only
pub const tag_impls_impl: uint = 0x7f;
pub const tag_items_data_item_inherent_impl: uint = 0x80;
pub const tag_items_data_item_extension_impl: uint = 0x81;
pub const tag_native_libraries: uint = 0x10a; // top-level only
pub const tag_native_libraries_lib: uint = 0x82;
pub const tag_native_libraries_name: uint = 0x83;
pub const tag_native_libraries_kind: uint = 0x84;
pub const tag_plugin_registrar_fn: uint = 0x10b; // top-level only
pub const tag_method_argument_names: uint = 0x85;
pub const tag_method_argument_name: uint = 0x86;
pub const tag_reachable_extern_fns: uint = 0x10c; // top-level only
pub const tag_reachable_extern_fn_id: uint = 0x87;
pub const tag_items_data_item_stability: uint = 0x88;
pub const tag_items_data_item_repr: uint = 0x89;
#[derive(Clone, Debug)]
pub struct LinkMeta {
pub crate_name: String,
pub crate_hash: Svh,
}
pub const tag_struct_fields: uint = 0x10d; // top-level only
pub const tag_struct_field: uint = 0x8a;
pub const tag_struct_field_id: uint = 0x8b;
pub const tag_attribute_is_sugared_doc: uint = 0x8c;
pub const tag_items_data_region: uint = 0x8e;
pub const tag_region_param_def: uint = 0x8f;
pub const tag_region_param_def_ident: uint = 0x90;
pub const tag_region_param_def_def_id: uint = 0x91;
pub const tag_region_param_def_space: uint = 0x92;
pub const tag_region_param_def_index: uint = 0x93;
pub const tag_type_param_def: uint = 0x94;
pub const tag_item_generics: uint = 0x95;
pub const tag_method_ty_generics: uint = 0x96;
pub const tag_predicate: uint = 0x97;
pub const tag_predicate_space: uint = 0x98;
pub const tag_predicate_data: uint = 0x99;
pub const tag_unsafety: uint = 0x9a;
pub const tag_associated_type_names: uint = 0x9b;
pub const tag_associated_type_name: uint = 0x9c;
pub const tag_polarity: uint = 0x9d;
pub const tag_macro_defs: uint = 0x10e; // top-level only
pub const tag_macro_def: uint = 0x9e;
pub const tag_macro_def_body: uint = 0x9f;
pub const tag_paren_sugar: uint = 0xa0;
pub const tag_codemap: uint = 0xa1;
pub const tag_codemap_filemap: uint = 0xa2;
pub const tag_item_super_predicates: uint = 0xa3;
pub const tag_defaulted_trait: uint = 0xa4;
|
astencode_tag
|
identifier_name
|
issue-61076.rs
|
// edition:2018
use core::future::Future;
use core::pin::Pin;
use core::task::{Context, Poll};
struct T;
struct Tuple(i32);
struct Struct {
a: i32
}
impl Struct {
fn method(&self) {}
}
impl Future for Struct {
type Output = Struct;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> { Poll::Pending }
}
impl Future for Tuple {
type Output = Tuple;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output>
|
}
impl Future for T {
type Output = Result<(), ()>;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {
Poll::Pending
}
}
async fn foo() -> Result<(), ()> {
Ok(())
}
async fn bar() -> Result<(), ()> {
foo()?; //~ ERROR the `?` operator can only be applied to values that implement `Try`
//~^ NOTE the `?` operator cannot be applied to type `impl Future<Output = Result<(), ()>>`
//~| HELP the trait `Try` is not implemented for `impl Future<Output = Result<(), ()>>`
//~| HELP consider `await`ing on the `Future`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
Ok(())
}
async fn struct_() -> Struct {
Struct { a: 1 }
}
async fn tuple() -> Tuple {
//~^ NOTE checked the `Output` of this `async fn`, expected opaque type
//~| NOTE while checking the return type of the `async fn`
//~| NOTE in this expansion of desugaring of `async` block or function
Tuple(1i32)
}
async fn baz() -> Result<(), ()> {
let t = T;
t?; //~ ERROR the `?` operator can only be applied to values that implement `Try`
//~^ NOTE the `?` operator cannot be applied to type `T`
//~| HELP the trait `Try` is not implemented for `T`
//~| HELP consider `await`ing on the `Future`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
let _: i32 = tuple().0; //~ ERROR no field `0`
//~^ HELP consider `await`ing on the `Future`
//~| NOTE field not available in `impl Future`
let _: i32 = struct_().a; //~ ERROR no field `a`
//~^ HELP consider `await`ing on the `Future`
//~| NOTE field not available in `impl Future`
struct_().method(); //~ ERROR no method named
//~^ NOTE method not found in `impl Future<Output = Struct>`
//~| HELP consider `await`ing on the `Future`
Ok(())
}
async fn match_() {
match tuple() { //~ HELP consider `await`ing on the `Future`
Tuple(_) => {} //~ ERROR mismatched types
//~^ NOTE expected opaque type, found struct `Tuple`
//~| NOTE expected opaque type `impl Future<Output = Tuple>`
}
}
fn main() {}
|
{ Poll::Pending }
|
identifier_body
|
issue-61076.rs
|
// edition:2018
use core::future::Future;
use core::pin::Pin;
use core::task::{Context, Poll};
struct T;
struct Tuple(i32);
struct Struct {
a: i32
}
impl Struct {
fn method(&self) {}
}
impl Future for Struct {
type Output = Struct;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> { Poll::Pending }
}
impl Future for Tuple {
type Output = Tuple;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> { Poll::Pending }
}
impl Future for T {
type Output = Result<(), ()>;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {
Poll::Pending
}
}
async fn foo() -> Result<(), ()> {
Ok(())
}
async fn bar() -> Result<(), ()> {
foo()?; //~ ERROR the `?` operator can only be applied to values that implement `Try`
//~^ NOTE the `?` operator cannot be applied to type `impl Future<Output = Result<(), ()>>`
//~| HELP the trait `Try` is not implemented for `impl Future<Output = Result<(), ()>>`
//~| HELP consider `await`ing on the `Future`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
Ok(())
}
async fn struct_() -> Struct {
Struct { a: 1 }
}
async fn tuple() -> Tuple {
//~^ NOTE checked the `Output` of this `async fn`, expected opaque type
//~| NOTE while checking the return type of the `async fn`
//~| NOTE in this expansion of desugaring of `async` block or function
Tuple(1i32)
}
async fn baz() -> Result<(), ()> {
let t = T;
t?; //~ ERROR the `?` operator can only be applied to values that implement `Try`
//~^ NOTE the `?` operator cannot be applied to type `T`
//~| HELP the trait `Try` is not implemented for `T`
//~| HELP consider `await`ing on the `Future`
|
let _: i32 = tuple().0; //~ ERROR no field `0`
//~^ HELP consider `await`ing on the `Future`
//~| NOTE field not available in `impl Future`
let _: i32 = struct_().a; //~ ERROR no field `a`
//~^ HELP consider `await`ing on the `Future`
//~| NOTE field not available in `impl Future`
struct_().method(); //~ ERROR no method named
//~^ NOTE method not found in `impl Future<Output = Struct>`
//~| HELP consider `await`ing on the `Future`
Ok(())
}
async fn match_() {
match tuple() { //~ HELP consider `await`ing on the `Future`
Tuple(_) => {} //~ ERROR mismatched types
//~^ NOTE expected opaque type, found struct `Tuple`
//~| NOTE expected opaque type `impl Future<Output = Tuple>`
}
}
fn main() {}
|
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
|
random_line_split
|
issue-61076.rs
|
// edition:2018
use core::future::Future;
use core::pin::Pin;
use core::task::{Context, Poll};
struct T;
struct Tuple(i32);
struct Struct {
a: i32
}
impl Struct {
fn method(&self) {}
}
impl Future for Struct {
type Output = Struct;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> { Poll::Pending }
}
impl Future for Tuple {
type Output = Tuple;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> { Poll::Pending }
}
impl Future for T {
type Output = Result<(), ()>;
fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {
Poll::Pending
}
}
async fn foo() -> Result<(), ()> {
Ok(())
}
async fn bar() -> Result<(), ()> {
foo()?; //~ ERROR the `?` operator can only be applied to values that implement `Try`
//~^ NOTE the `?` operator cannot be applied to type `impl Future<Output = Result<(), ()>>`
//~| HELP the trait `Try` is not implemented for `impl Future<Output = Result<(), ()>>`
//~| HELP consider `await`ing on the `Future`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
Ok(())
}
async fn struct_() -> Struct {
Struct { a: 1 }
}
async fn tuple() -> Tuple {
//~^ NOTE checked the `Output` of this `async fn`, expected opaque type
//~| NOTE while checking the return type of the `async fn`
//~| NOTE in this expansion of desugaring of `async` block or function
Tuple(1i32)
}
async fn baz() -> Result<(), ()> {
let t = T;
t?; //~ ERROR the `?` operator can only be applied to values that implement `Try`
//~^ NOTE the `?` operator cannot be applied to type `T`
//~| HELP the trait `Try` is not implemented for `T`
//~| HELP consider `await`ing on the `Future`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
//~| NOTE in this expansion of desugaring of operator `?`
let _: i32 = tuple().0; //~ ERROR no field `0`
//~^ HELP consider `await`ing on the `Future`
//~| NOTE field not available in `impl Future`
let _: i32 = struct_().a; //~ ERROR no field `a`
//~^ HELP consider `await`ing on the `Future`
//~| NOTE field not available in `impl Future`
struct_().method(); //~ ERROR no method named
//~^ NOTE method not found in `impl Future<Output = Struct>`
//~| HELP consider `await`ing on the `Future`
Ok(())
}
async fn
|
() {
match tuple() { //~ HELP consider `await`ing on the `Future`
Tuple(_) => {} //~ ERROR mismatched types
//~^ NOTE expected opaque type, found struct `Tuple`
//~| NOTE expected opaque type `impl Future<Output = Tuple>`
}
}
fn main() {}
|
match_
|
identifier_name
|
angle.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed angles.
use num_traits::Zero;
use std::f64::consts::PI;
use std::fmt::{self, Write};
use std::ops::Add;
use std::{f32, f64};
use style_traits::{CssWriter, ToCss};
use values::distance::{ComputeSquaredDistance, SquaredDistance};
use values::CSSFloat;
/// A computed angle in degrees.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd, ToAnimatedZero)]
pub struct Angle(CSSFloat);
impl ToCss for Angle {
fn
|
<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.degrees().to_css(dest)?;
dest.write_str("deg")
}
}
const RAD_PER_DEG: f64 = PI / 180.0;
impl Angle {
/// Creates a computed `Angle` value from a radian amount.
pub fn from_radians(radians: CSSFloat) -> Self {
Angle(radians / RAD_PER_DEG as f32)
}
/// Creates a computed `Angle` value from a degrees amount.
#[inline]
pub fn from_degrees(degrees: CSSFloat) -> Self {
Angle(degrees)
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(&self) -> CSSFloat {
self.radians64().min(f32::MAX as f64).max(f32::MIN as f64) as f32
}
/// Returns the amount of radians this angle represents as a `f64`.
///
/// Gecko stores angles as singles, but does this computation using doubles.
///
/// This is significant enough to mess up rounding to the nearest
/// quarter-turn for 225 degrees, for example.
#[inline]
pub fn radians64(&self) -> f64 {
self.0 as f64 * RAD_PER_DEG
}
/// Return the value in degrees.
#[inline]
pub fn degrees(&self) -> CSSFloat {
self.0
}
}
impl Add for Angle {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self {
Angle(self.0 + rhs.0)
}
}
impl Zero for Angle {
#[inline]
fn zero() -> Self {
Angle(0.0)
}
#[inline]
fn is_zero(&self) -> bool {
self.0 == 0.
}
}
impl ComputeSquaredDistance for Angle {
#[inline]
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
// Use the formula for calculating the distance between angles defined in SVG:
// https://www.w3.org/TR/SVG/animate.html#complexDistances
self.radians64()
.compute_squared_distance(&other.radians64())
}
}
|
to_css
|
identifier_name
|
angle.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed angles.
use num_traits::Zero;
use std::f64::consts::PI;
use std::fmt::{self, Write};
use std::ops::Add;
use std::{f32, f64};
use style_traits::{CssWriter, ToCss};
use values::distance::{ComputeSquaredDistance, SquaredDistance};
use values::CSSFloat;
/// A computed angle in degrees.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd, ToAnimatedZero)]
pub struct Angle(CSSFloat);
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.degrees().to_css(dest)?;
dest.write_str("deg")
}
}
const RAD_PER_DEG: f64 = PI / 180.0;
impl Angle {
/// Creates a computed `Angle` value from a radian amount.
pub fn from_radians(radians: CSSFloat) -> Self {
Angle(radians / RAD_PER_DEG as f32)
}
/// Creates a computed `Angle` value from a degrees amount.
#[inline]
pub fn from_degrees(degrees: CSSFloat) -> Self {
Angle(degrees)
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(&self) -> CSSFloat {
self.radians64().min(f32::MAX as f64).max(f32::MIN as f64) as f32
}
/// Returns the amount of radians this angle represents as a `f64`.
///
/// Gecko stores angles as singles, but does this computation using doubles.
///
/// This is significant enough to mess up rounding to the nearest
/// quarter-turn for 225 degrees, for example.
#[inline]
pub fn radians64(&self) -> f64 {
self.0 as f64 * RAD_PER_DEG
}
/// Return the value in degrees.
#[inline]
pub fn degrees(&self) -> CSSFloat {
self.0
}
}
impl Add for Angle {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self {
Angle(self.0 + rhs.0)
}
}
impl Zero for Angle {
#[inline]
fn zero() -> Self {
Angle(0.0)
}
#[inline]
fn is_zero(&self) -> bool {
self.0 == 0.
}
}
impl ComputeSquaredDistance for Angle {
#[inline]
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()>
|
}
|
{
// Use the formula for calculating the distance between angles defined in SVG:
// https://www.w3.org/TR/SVG/animate.html#complexDistances
self.radians64()
.compute_squared_distance(&other.radians64())
}
|
identifier_body
|
angle.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed angles.
use num_traits::Zero;
use std::f64::consts::PI;
use std::fmt::{self, Write};
use std::ops::Add;
use std::{f32, f64};
use style_traits::{CssWriter, ToCss};
use values::distance::{ComputeSquaredDistance, SquaredDistance};
use values::CSSFloat;
/// A computed angle in degrees.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd, ToAnimatedZero)]
pub struct Angle(CSSFloat);
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.degrees().to_css(dest)?;
dest.write_str("deg")
}
}
const RAD_PER_DEG: f64 = PI / 180.0;
impl Angle {
/// Creates a computed `Angle` value from a radian amount.
pub fn from_radians(radians: CSSFloat) -> Self {
Angle(radians / RAD_PER_DEG as f32)
}
/// Creates a computed `Angle` value from a degrees amount.
#[inline]
pub fn from_degrees(degrees: CSSFloat) -> Self {
Angle(degrees)
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(&self) -> CSSFloat {
self.radians64().min(f32::MAX as f64).max(f32::MIN as f64) as f32
}
/// Returns the amount of radians this angle represents as a `f64`.
///
/// Gecko stores angles as singles, but does this computation using doubles.
///
/// This is significant enough to mess up rounding to the nearest
/// quarter-turn for 225 degrees, for example.
#[inline]
pub fn radians64(&self) -> f64 {
self.0 as f64 * RAD_PER_DEG
}
|
}
}
impl Add for Angle {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self {
Angle(self.0 + rhs.0)
}
}
impl Zero for Angle {
#[inline]
fn zero() -> Self {
Angle(0.0)
}
#[inline]
fn is_zero(&self) -> bool {
self.0 == 0.
}
}
impl ComputeSquaredDistance for Angle {
#[inline]
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
// Use the formula for calculating the distance between angles defined in SVG:
// https://www.w3.org/TR/SVG/animate.html#complexDistances
self.radians64()
.compute_squared_distance(&other.radians64())
}
}
|
/// Return the value in degrees.
#[inline]
pub fn degrees(&self) -> CSSFloat {
self.0
|
random_line_split
|
connection.rs
|
use std::{
fmt::Display,
io::{self, BufRead, BufReader, Write},
net::ToSocketAddrs,
time::Duration,
};
use super::{ClientCodec, NetworkStream, TlsParameters};
use crate::{
address::Envelope,
transport::smtp::{
authentication::{Credentials, Mechanism},
commands::*,
error,
error::Error,
extension::{ClientId, Extension, MailBodyParameter, MailParameter, ServerInfo},
response::{parse_response, Response},
},
};
#[cfg(feature = "tracing")]
use super::escape_crlf;
macro_rules! try_smtp (
($err: expr, $client: ident) => ({
match $err {
Ok(val) => val,
Err(err) => {
$client.abort();
return Err(From::from(err))
},
}
})
);
/// Structure that implements the SMTP client
pub struct SmtpConnection {
/// TCP stream between client and server
/// Value is None before connection
stream: BufReader<NetworkStream>,
/// Panic state
panic: bool,
/// Information about the server
server_info: ServerInfo,
}
impl SmtpConnection {
pub fn server_info(&self) -> &ServerInfo {
&self.server_info
}
// FIXME add simple connect and rename this one
/// Connects to the configured server
///
/// Sends EHLO and parses server information
pub fn connect<A: ToSocketAddrs>(
server: A,
timeout: Option<Duration>,
hello_name: &ClientId,
tls_parameters: Option<&TlsParameters>,
) -> Result<SmtpConnection, Error> {
let stream = NetworkStream::connect(server, timeout, tls_parameters)?;
let stream = BufReader::new(stream);
let mut conn = SmtpConnection {
stream,
panic: false,
server_info: ServerInfo::default(),
};
conn.set_timeout(timeout).map_err(error::network)?;
// TODO log
let _response = conn.read_response()?;
conn.ehlo(hello_name)?;
// Print server information
#[cfg(feature = "tracing")]
tracing::debug!("server {}", conn.server_info);
Ok(conn)
}
pub fn send(&mut self, envelope: &Envelope, email: &[u8]) -> Result<Response, Error> {
// Mail
let mut mail_options = vec![];
// Internationalization handling
//
// * 8BITMIME: https://tools.ietf.org/html/rfc6152
// * SMTPUTF8: https://tools.ietf.org/html/rfc653
// Check for non-ascii addresses and use the SMTPUTF8 option if any.
if envelope.has_non_ascii_addresses() {
if !self.server_info().supports_feature(Extension::SmtpUtfEight)
|
mail_options.push(MailParameter::SmtpUtfEight);
}
// Check for non-ascii content in message
if !email.is_ascii() {
if !self.server_info().supports_feature(Extension::EightBitMime) {
return Err(error::client(
"Message contains non-ascii chars but server does not support 8BITMIME",
));
}
mail_options.push(MailParameter::Body(MailBodyParameter::EightBitMime));
}
try_smtp!(
self.command(Mail::new(envelope.from().cloned(), mail_options)),
self
);
// Recipient
for to_address in envelope.to() {
try_smtp!(self.command(Rcpt::new(to_address.clone(), vec![])), self);
}
// Data
try_smtp!(self.command(Data), self);
// Message content
let result = try_smtp!(self.message(email), self);
Ok(result)
}
pub fn has_broken(&self) -> bool {
self.panic
}
pub fn can_starttls(&self) -> bool {
!self.is_encrypted() && self.server_info.supports_feature(Extension::StartTls)
}
#[allow(unused_variables)]
pub fn starttls(
&mut self,
tls_parameters: &TlsParameters,
hello_name: &ClientId,
) -> Result<(), Error> {
if self.server_info.supports_feature(Extension::StartTls) {
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
{
try_smtp!(self.command(Starttls), self);
self.stream.get_mut().upgrade_tls(tls_parameters)?;
#[cfg(feature = "tracing")]
tracing::debug!("connection encrypted");
// Send EHLO again
try_smtp!(self.ehlo(hello_name), self);
Ok(())
}
#[cfg(not(any(feature = "native-tls", feature = "rustls-tls")))]
// This should never happen as `Tls` can only be created
// when a TLS library is enabled
unreachable!("TLS support required but not supported");
} else {
Err(error::client("STARTTLS is not supported on this server"))
}
}
/// Send EHLO and update server info
fn ehlo(&mut self, hello_name: &ClientId) -> Result<(), Error> {
let ehlo_response = try_smtp!(self.command(Ehlo::new(hello_name.clone())), self);
self.server_info = try_smtp!(ServerInfo::from_response(&ehlo_response), self);
Ok(())
}
pub fn quit(&mut self) -> Result<Response, Error> {
Ok(try_smtp!(self.command(Quit), self))
}
pub fn abort(&mut self) {
// Only try to quit if we are not already broken
if !self.panic {
self.panic = true;
let _ = self.command(Quit);
}
}
/// Sets the underlying stream
pub fn set_stream(&mut self, stream: NetworkStream) {
self.stream = BufReader::new(stream);
}
/// Tells if the underlying stream is currently encrypted
pub fn is_encrypted(&self) -> bool {
self.stream.get_ref().is_encrypted()
}
/// Set timeout
pub fn set_timeout(&mut self, duration: Option<Duration>) -> io::Result<()> {
self.stream.get_mut().set_read_timeout(duration)?;
self.stream.get_mut().set_write_timeout(duration)
}
/// Checks if the server is connected using the NOOP SMTP command
pub fn test_connected(&mut self) -> bool {
self.command(Noop).is_ok()
}
/// Sends an AUTH command with the given mechanism, and handles challenge if needed
pub fn auth(
&mut self,
mechanisms: &[Mechanism],
credentials: &Credentials,
) -> Result<Response, Error> {
let mechanism = self
.server_info
.get_auth_mechanism(mechanisms)
.ok_or_else(|| error::client("No compatible authentication mechanism was found"))?;
// Limit challenges to avoid blocking
let mut challenges = 10;
let mut response = self.command(Auth::new(mechanism, credentials.clone(), None)?)?;
while challenges > 0 && response.has_code(334) {
challenges -= 1;
response = try_smtp!(
self.command(Auth::new_from_response(
mechanism,
credentials.clone(),
&response,
)?),
self
);
}
if challenges == 0 {
Err(error::response("Unexpected number of challenges"))
} else {
Ok(response)
}
}
/// Sends the message content
pub fn message(&mut self, message: &[u8]) -> Result<Response, Error> {
let mut out_buf: Vec<u8> = vec![];
let mut codec = ClientCodec::new();
codec.encode(message, &mut out_buf);
self.write(out_buf.as_slice())?;
self.write(b"\r\n.\r\n")?;
self.read_response()
}
/// Sends an SMTP command
pub fn command<C: Display>(&mut self, command: C) -> Result<Response, Error> {
self.write(command.to_string().as_bytes())?;
self.read_response()
}
/// Writes a string to the server
fn write(&mut self, string: &[u8]) -> Result<(), Error> {
self.stream
.get_mut()
.write_all(string)
.map_err(error::network)?;
self.stream.get_mut().flush().map_err(error::network)?;
#[cfg(feature = "tracing")]
tracing::debug!("Wrote: {}", escape_crlf(&String::from_utf8_lossy(string)));
Ok(())
}
/// Gets the SMTP response
pub fn read_response(&mut self) -> Result<Response, Error> {
let mut buffer = String::with_capacity(100);
while self.stream.read_line(&mut buffer).map_err(error::network)? > 0 {
#[cfg(feature = "tracing")]
tracing::debug!("<< {}", escape_crlf(&buffer));
match parse_response(&buffer) {
Ok((_remaining, response)) => {
return if response.is_positive() {
Ok(response)
} else {
Err(error::code(response.code()))
};
}
Err(nom::Err::Failure(e)) => {
return Err(error::response(e.to_string()));
}
Err(nom::Err::Incomplete(_)) => { /* read more */ }
Err(nom::Err::Error(e)) => {
return Err(error::response(e.to_string()));
}
}
}
Err(error::response("incomplete response"))
}
}
|
{
// don't try to send non-ascii addresses (per RFC)
return Err(error::client(
"Envelope contains non-ascii chars but server does not support SMTPUTF8",
));
}
|
conditional_block
|
connection.rs
|
use std::{
fmt::Display,
io::{self, BufRead, BufReader, Write},
net::ToSocketAddrs,
time::Duration,
};
use super::{ClientCodec, NetworkStream, TlsParameters};
use crate::{
address::Envelope,
transport::smtp::{
authentication::{Credentials, Mechanism},
commands::*,
error,
error::Error,
extension::{ClientId, Extension, MailBodyParameter, MailParameter, ServerInfo},
response::{parse_response, Response},
},
};
#[cfg(feature = "tracing")]
use super::escape_crlf;
macro_rules! try_smtp (
($err: expr, $client: ident) => ({
match $err {
Ok(val) => val,
Err(err) => {
$client.abort();
return Err(From::from(err))
},
}
})
);
/// Structure that implements the SMTP client
pub struct SmtpConnection {
/// TCP stream between client and server
/// Value is None before connection
stream: BufReader<NetworkStream>,
/// Panic state
panic: bool,
/// Information about the server
server_info: ServerInfo,
}
impl SmtpConnection {
pub fn server_info(&self) -> &ServerInfo {
&self.server_info
}
// FIXME add simple connect and rename this one
/// Connects to the configured server
///
/// Sends EHLO and parses server information
pub fn connect<A: ToSocketAddrs>(
server: A,
timeout: Option<Duration>,
hello_name: &ClientId,
tls_parameters: Option<&TlsParameters>,
) -> Result<SmtpConnection, Error> {
let stream = NetworkStream::connect(server, timeout, tls_parameters)?;
let stream = BufReader::new(stream);
let mut conn = SmtpConnection {
stream,
panic: false,
server_info: ServerInfo::default(),
};
conn.set_timeout(timeout).map_err(error::network)?;
// TODO log
let _response = conn.read_response()?;
conn.ehlo(hello_name)?;
// Print server information
#[cfg(feature = "tracing")]
tracing::debug!("server {}", conn.server_info);
Ok(conn)
}
pub fn send(&mut self, envelope: &Envelope, email: &[u8]) -> Result<Response, Error> {
// Mail
let mut mail_options = vec![];
// Internationalization handling
//
// * 8BITMIME: https://tools.ietf.org/html/rfc6152
// * SMTPUTF8: https://tools.ietf.org/html/rfc653
// Check for non-ascii addresses and use the SMTPUTF8 option if any.
if envelope.has_non_ascii_addresses() {
if !self.server_info().supports_feature(Extension::SmtpUtfEight) {
// don't try to send non-ascii addresses (per RFC)
return Err(error::client(
"Envelope contains non-ascii chars but server does not support SMTPUTF8",
));
}
mail_options.push(MailParameter::SmtpUtfEight);
}
// Check for non-ascii content in message
if !email.is_ascii() {
if !self.server_info().supports_feature(Extension::EightBitMime) {
return Err(error::client(
"Message contains non-ascii chars but server does not support 8BITMIME",
));
}
mail_options.push(MailParameter::Body(MailBodyParameter::EightBitMime));
}
try_smtp!(
self.command(Mail::new(envelope.from().cloned(), mail_options)),
self
);
// Recipient
for to_address in envelope.to() {
try_smtp!(self.command(Rcpt::new(to_address.clone(), vec![])), self);
}
// Data
try_smtp!(self.command(Data), self);
// Message content
let result = try_smtp!(self.message(email), self);
Ok(result)
}
pub fn has_broken(&self) -> bool {
self.panic
}
pub fn can_starttls(&self) -> bool {
!self.is_encrypted() && self.server_info.supports_feature(Extension::StartTls)
}
#[allow(unused_variables)]
pub fn starttls(
&mut self,
tls_parameters: &TlsParameters,
hello_name: &ClientId,
) -> Result<(), Error> {
if self.server_info.supports_feature(Extension::StartTls) {
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
{
try_smtp!(self.command(Starttls), self);
self.stream.get_mut().upgrade_tls(tls_parameters)?;
#[cfg(feature = "tracing")]
tracing::debug!("connection encrypted");
// Send EHLO again
try_smtp!(self.ehlo(hello_name), self);
Ok(())
}
#[cfg(not(any(feature = "native-tls", feature = "rustls-tls")))]
// This should never happen as `Tls` can only be created
// when a TLS library is enabled
unreachable!("TLS support required but not supported");
} else {
Err(error::client("STARTTLS is not supported on this server"))
}
}
/// Send EHLO and update server info
fn ehlo(&mut self, hello_name: &ClientId) -> Result<(), Error> {
let ehlo_response = try_smtp!(self.command(Ehlo::new(hello_name.clone())), self);
self.server_info = try_smtp!(ServerInfo::from_response(&ehlo_response), self);
Ok(())
}
pub fn quit(&mut self) -> Result<Response, Error> {
Ok(try_smtp!(self.command(Quit), self))
}
pub fn abort(&mut self) {
// Only try to quit if we are not already broken
if !self.panic {
self.panic = true;
let _ = self.command(Quit);
}
}
/// Sets the underlying stream
pub fn set_stream(&mut self, stream: NetworkStream) {
self.stream = BufReader::new(stream);
}
/// Tells if the underlying stream is currently encrypted
pub fn is_encrypted(&self) -> bool {
self.stream.get_ref().is_encrypted()
}
/// Set timeout
pub fn
|
(&mut self, duration: Option<Duration>) -> io::Result<()> {
self.stream.get_mut().set_read_timeout(duration)?;
self.stream.get_mut().set_write_timeout(duration)
}
/// Checks if the server is connected using the NOOP SMTP command
pub fn test_connected(&mut self) -> bool {
self.command(Noop).is_ok()
}
/// Sends an AUTH command with the given mechanism, and handles challenge if needed
pub fn auth(
&mut self,
mechanisms: &[Mechanism],
credentials: &Credentials,
) -> Result<Response, Error> {
let mechanism = self
.server_info
.get_auth_mechanism(mechanisms)
.ok_or_else(|| error::client("No compatible authentication mechanism was found"))?;
// Limit challenges to avoid blocking
let mut challenges = 10;
let mut response = self.command(Auth::new(mechanism, credentials.clone(), None)?)?;
while challenges > 0 && response.has_code(334) {
challenges -= 1;
response = try_smtp!(
self.command(Auth::new_from_response(
mechanism,
credentials.clone(),
&response,
)?),
self
);
}
if challenges == 0 {
Err(error::response("Unexpected number of challenges"))
} else {
Ok(response)
}
}
/// Sends the message content
pub fn message(&mut self, message: &[u8]) -> Result<Response, Error> {
let mut out_buf: Vec<u8> = vec![];
let mut codec = ClientCodec::new();
codec.encode(message, &mut out_buf);
self.write(out_buf.as_slice())?;
self.write(b"\r\n.\r\n")?;
self.read_response()
}
/// Sends an SMTP command
pub fn command<C: Display>(&mut self, command: C) -> Result<Response, Error> {
self.write(command.to_string().as_bytes())?;
self.read_response()
}
/// Writes a string to the server
fn write(&mut self, string: &[u8]) -> Result<(), Error> {
self.stream
.get_mut()
.write_all(string)
.map_err(error::network)?;
self.stream.get_mut().flush().map_err(error::network)?;
#[cfg(feature = "tracing")]
tracing::debug!("Wrote: {}", escape_crlf(&String::from_utf8_lossy(string)));
Ok(())
}
/// Gets the SMTP response
pub fn read_response(&mut self) -> Result<Response, Error> {
let mut buffer = String::with_capacity(100);
while self.stream.read_line(&mut buffer).map_err(error::network)? > 0 {
#[cfg(feature = "tracing")]
tracing::debug!("<< {}", escape_crlf(&buffer));
match parse_response(&buffer) {
Ok((_remaining, response)) => {
return if response.is_positive() {
Ok(response)
} else {
Err(error::code(response.code()))
};
}
Err(nom::Err::Failure(e)) => {
return Err(error::response(e.to_string()));
}
Err(nom::Err::Incomplete(_)) => { /* read more */ }
Err(nom::Err::Error(e)) => {
return Err(error::response(e.to_string()));
}
}
}
Err(error::response("incomplete response"))
}
}
|
set_timeout
|
identifier_name
|
connection.rs
|
use std::{
fmt::Display,
io::{self, BufRead, BufReader, Write},
net::ToSocketAddrs,
time::Duration,
};
use super::{ClientCodec, NetworkStream, TlsParameters};
use crate::{
address::Envelope,
transport::smtp::{
authentication::{Credentials, Mechanism},
commands::*,
error,
error::Error,
extension::{ClientId, Extension, MailBodyParameter, MailParameter, ServerInfo},
response::{parse_response, Response},
},
};
#[cfg(feature = "tracing")]
use super::escape_crlf;
macro_rules! try_smtp (
($err: expr, $client: ident) => ({
match $err {
Ok(val) => val,
Err(err) => {
$client.abort();
return Err(From::from(err))
},
}
})
);
/// Structure that implements the SMTP client
pub struct SmtpConnection {
/// TCP stream between client and server
/// Value is None before connection
stream: BufReader<NetworkStream>,
/// Panic state
panic: bool,
/// Information about the server
server_info: ServerInfo,
}
impl SmtpConnection {
pub fn server_info(&self) -> &ServerInfo {
&self.server_info
}
// FIXME add simple connect and rename this one
/// Connects to the configured server
///
/// Sends EHLO and parses server information
pub fn connect<A: ToSocketAddrs>(
server: A,
timeout: Option<Duration>,
hello_name: &ClientId,
tls_parameters: Option<&TlsParameters>,
) -> Result<SmtpConnection, Error> {
let stream = NetworkStream::connect(server, timeout, tls_parameters)?;
let stream = BufReader::new(stream);
let mut conn = SmtpConnection {
stream,
panic: false,
server_info: ServerInfo::default(),
};
conn.set_timeout(timeout).map_err(error::network)?;
// TODO log
let _response = conn.read_response()?;
conn.ehlo(hello_name)?;
// Print server information
#[cfg(feature = "tracing")]
tracing::debug!("server {}", conn.server_info);
Ok(conn)
}
pub fn send(&mut self, envelope: &Envelope, email: &[u8]) -> Result<Response, Error> {
// Mail
let mut mail_options = vec![];
// Internationalization handling
//
// * 8BITMIME: https://tools.ietf.org/html/rfc6152
// * SMTPUTF8: https://tools.ietf.org/html/rfc653
// Check for non-ascii addresses and use the SMTPUTF8 option if any.
if envelope.has_non_ascii_addresses() {
if !self.server_info().supports_feature(Extension::SmtpUtfEight) {
// don't try to send non-ascii addresses (per RFC)
return Err(error::client(
"Envelope contains non-ascii chars but server does not support SMTPUTF8",
));
}
mail_options.push(MailParameter::SmtpUtfEight);
}
// Check for non-ascii content in message
if !email.is_ascii() {
if !self.server_info().supports_feature(Extension::EightBitMime) {
return Err(error::client(
"Message contains non-ascii chars but server does not support 8BITMIME",
));
}
mail_options.push(MailParameter::Body(MailBodyParameter::EightBitMime));
}
try_smtp!(
self.command(Mail::new(envelope.from().cloned(), mail_options)),
self
);
// Recipient
for to_address in envelope.to() {
try_smtp!(self.command(Rcpt::new(to_address.clone(), vec![])), self);
}
// Data
try_smtp!(self.command(Data), self);
// Message content
let result = try_smtp!(self.message(email), self);
Ok(result)
}
pub fn has_broken(&self) -> bool {
self.panic
}
pub fn can_starttls(&self) -> bool {
!self.is_encrypted() && self.server_info.supports_feature(Extension::StartTls)
}
#[allow(unused_variables)]
pub fn starttls(
&mut self,
tls_parameters: &TlsParameters,
hello_name: &ClientId,
) -> Result<(), Error> {
if self.server_info.supports_feature(Extension::StartTls) {
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
{
try_smtp!(self.command(Starttls), self);
self.stream.get_mut().upgrade_tls(tls_parameters)?;
#[cfg(feature = "tracing")]
tracing::debug!("connection encrypted");
// Send EHLO again
try_smtp!(self.ehlo(hello_name), self);
Ok(())
}
#[cfg(not(any(feature = "native-tls", feature = "rustls-tls")))]
// This should never happen as `Tls` can only be created
// when a TLS library is enabled
unreachable!("TLS support required but not supported");
} else {
Err(error::client("STARTTLS is not supported on this server"))
}
}
/// Send EHLO and update server info
fn ehlo(&mut self, hello_name: &ClientId) -> Result<(), Error> {
let ehlo_response = try_smtp!(self.command(Ehlo::new(hello_name.clone())), self);
self.server_info = try_smtp!(ServerInfo::from_response(&ehlo_response), self);
Ok(())
}
pub fn quit(&mut self) -> Result<Response, Error> {
Ok(try_smtp!(self.command(Quit), self))
}
pub fn abort(&mut self) {
// Only try to quit if we are not already broken
if !self.panic {
|
/// Sets the underlying stream
pub fn set_stream(&mut self, stream: NetworkStream) {
self.stream = BufReader::new(stream);
}
/// Tells if the underlying stream is currently encrypted
pub fn is_encrypted(&self) -> bool {
self.stream.get_ref().is_encrypted()
}
/// Set timeout
pub fn set_timeout(&mut self, duration: Option<Duration>) -> io::Result<()> {
self.stream.get_mut().set_read_timeout(duration)?;
self.stream.get_mut().set_write_timeout(duration)
}
/// Checks if the server is connected using the NOOP SMTP command
pub fn test_connected(&mut self) -> bool {
self.command(Noop).is_ok()
}
/// Sends an AUTH command with the given mechanism, and handles challenge if needed
pub fn auth(
&mut self,
mechanisms: &[Mechanism],
credentials: &Credentials,
) -> Result<Response, Error> {
let mechanism = self
.server_info
.get_auth_mechanism(mechanisms)
.ok_or_else(|| error::client("No compatible authentication mechanism was found"))?;
// Limit challenges to avoid blocking
let mut challenges = 10;
let mut response = self.command(Auth::new(mechanism, credentials.clone(), None)?)?;
while challenges > 0 && response.has_code(334) {
challenges -= 1;
response = try_smtp!(
self.command(Auth::new_from_response(
mechanism,
credentials.clone(),
&response,
)?),
self
);
}
if challenges == 0 {
Err(error::response("Unexpected number of challenges"))
} else {
Ok(response)
}
}
/// Sends the message content
pub fn message(&mut self, message: &[u8]) -> Result<Response, Error> {
let mut out_buf: Vec<u8> = vec![];
let mut codec = ClientCodec::new();
codec.encode(message, &mut out_buf);
self.write(out_buf.as_slice())?;
self.write(b"\r\n.\r\n")?;
self.read_response()
}
/// Sends an SMTP command
pub fn command<C: Display>(&mut self, command: C) -> Result<Response, Error> {
self.write(command.to_string().as_bytes())?;
self.read_response()
}
/// Writes a string to the server
fn write(&mut self, string: &[u8]) -> Result<(), Error> {
self.stream
.get_mut()
.write_all(string)
.map_err(error::network)?;
self.stream.get_mut().flush().map_err(error::network)?;
#[cfg(feature = "tracing")]
tracing::debug!("Wrote: {}", escape_crlf(&String::from_utf8_lossy(string)));
Ok(())
}
/// Gets the SMTP response
pub fn read_response(&mut self) -> Result<Response, Error> {
let mut buffer = String::with_capacity(100);
while self.stream.read_line(&mut buffer).map_err(error::network)? > 0 {
#[cfg(feature = "tracing")]
tracing::debug!("<< {}", escape_crlf(&buffer));
match parse_response(&buffer) {
Ok((_remaining, response)) => {
return if response.is_positive() {
Ok(response)
} else {
Err(error::code(response.code()))
};
}
Err(nom::Err::Failure(e)) => {
return Err(error::response(e.to_string()));
}
Err(nom::Err::Incomplete(_)) => { /* read more */ }
Err(nom::Err::Error(e)) => {
return Err(error::response(e.to_string()));
}
}
}
Err(error::response("incomplete response"))
}
}
|
self.panic = true;
let _ = self.command(Quit);
}
}
|
random_line_split
|
connection.rs
|
use std::{
fmt::Display,
io::{self, BufRead, BufReader, Write},
net::ToSocketAddrs,
time::Duration,
};
use super::{ClientCodec, NetworkStream, TlsParameters};
use crate::{
address::Envelope,
transport::smtp::{
authentication::{Credentials, Mechanism},
commands::*,
error,
error::Error,
extension::{ClientId, Extension, MailBodyParameter, MailParameter, ServerInfo},
response::{parse_response, Response},
},
};
#[cfg(feature = "tracing")]
use super::escape_crlf;
macro_rules! try_smtp (
($err: expr, $client: ident) => ({
match $err {
Ok(val) => val,
Err(err) => {
$client.abort();
return Err(From::from(err))
},
}
})
);
/// Structure that implements the SMTP client
pub struct SmtpConnection {
/// TCP stream between client and server
/// Value is None before connection
stream: BufReader<NetworkStream>,
/// Panic state
panic: bool,
/// Information about the server
server_info: ServerInfo,
}
impl SmtpConnection {
pub fn server_info(&self) -> &ServerInfo {
&self.server_info
}
// FIXME add simple connect and rename this one
/// Connects to the configured server
///
/// Sends EHLO and parses server information
pub fn connect<A: ToSocketAddrs>(
server: A,
timeout: Option<Duration>,
hello_name: &ClientId,
tls_parameters: Option<&TlsParameters>,
) -> Result<SmtpConnection, Error> {
let stream = NetworkStream::connect(server, timeout, tls_parameters)?;
let stream = BufReader::new(stream);
let mut conn = SmtpConnection {
stream,
panic: false,
server_info: ServerInfo::default(),
};
conn.set_timeout(timeout).map_err(error::network)?;
// TODO log
let _response = conn.read_response()?;
conn.ehlo(hello_name)?;
// Print server information
#[cfg(feature = "tracing")]
tracing::debug!("server {}", conn.server_info);
Ok(conn)
}
pub fn send(&mut self, envelope: &Envelope, email: &[u8]) -> Result<Response, Error> {
// Mail
let mut mail_options = vec![];
// Internationalization handling
//
// * 8BITMIME: https://tools.ietf.org/html/rfc6152
// * SMTPUTF8: https://tools.ietf.org/html/rfc653
// Check for non-ascii addresses and use the SMTPUTF8 option if any.
if envelope.has_non_ascii_addresses() {
if !self.server_info().supports_feature(Extension::SmtpUtfEight) {
// don't try to send non-ascii addresses (per RFC)
return Err(error::client(
"Envelope contains non-ascii chars but server does not support SMTPUTF8",
));
}
mail_options.push(MailParameter::SmtpUtfEight);
}
// Check for non-ascii content in message
if !email.is_ascii() {
if !self.server_info().supports_feature(Extension::EightBitMime) {
return Err(error::client(
"Message contains non-ascii chars but server does not support 8BITMIME",
));
}
mail_options.push(MailParameter::Body(MailBodyParameter::EightBitMime));
}
try_smtp!(
self.command(Mail::new(envelope.from().cloned(), mail_options)),
self
);
// Recipient
for to_address in envelope.to() {
try_smtp!(self.command(Rcpt::new(to_address.clone(), vec![])), self);
}
// Data
try_smtp!(self.command(Data), self);
// Message content
let result = try_smtp!(self.message(email), self);
Ok(result)
}
pub fn has_broken(&self) -> bool {
self.panic
}
pub fn can_starttls(&self) -> bool {
!self.is_encrypted() && self.server_info.supports_feature(Extension::StartTls)
}
#[allow(unused_variables)]
pub fn starttls(
&mut self,
tls_parameters: &TlsParameters,
hello_name: &ClientId,
) -> Result<(), Error> {
if self.server_info.supports_feature(Extension::StartTls) {
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
{
try_smtp!(self.command(Starttls), self);
self.stream.get_mut().upgrade_tls(tls_parameters)?;
#[cfg(feature = "tracing")]
tracing::debug!("connection encrypted");
// Send EHLO again
try_smtp!(self.ehlo(hello_name), self);
Ok(())
}
#[cfg(not(any(feature = "native-tls", feature = "rustls-tls")))]
// This should never happen as `Tls` can only be created
// when a TLS library is enabled
unreachable!("TLS support required but not supported");
} else {
Err(error::client("STARTTLS is not supported on this server"))
}
}
/// Send EHLO and update server info
fn ehlo(&mut self, hello_name: &ClientId) -> Result<(), Error> {
let ehlo_response = try_smtp!(self.command(Ehlo::new(hello_name.clone())), self);
self.server_info = try_smtp!(ServerInfo::from_response(&ehlo_response), self);
Ok(())
}
pub fn quit(&mut self) -> Result<Response, Error> {
Ok(try_smtp!(self.command(Quit), self))
}
pub fn abort(&mut self) {
// Only try to quit if we are not already broken
if !self.panic {
self.panic = true;
let _ = self.command(Quit);
}
}
/// Sets the underlying stream
pub fn set_stream(&mut self, stream: NetworkStream) {
self.stream = BufReader::new(stream);
}
/// Tells if the underlying stream is currently encrypted
pub fn is_encrypted(&self) -> bool {
self.stream.get_ref().is_encrypted()
}
/// Set timeout
pub fn set_timeout(&mut self, duration: Option<Duration>) -> io::Result<()> {
self.stream.get_mut().set_read_timeout(duration)?;
self.stream.get_mut().set_write_timeout(duration)
}
/// Checks if the server is connected using the NOOP SMTP command
pub fn test_connected(&mut self) -> bool {
self.command(Noop).is_ok()
}
/// Sends an AUTH command with the given mechanism, and handles challenge if needed
pub fn auth(
&mut self,
mechanisms: &[Mechanism],
credentials: &Credentials,
) -> Result<Response, Error> {
let mechanism = self
.server_info
.get_auth_mechanism(mechanisms)
.ok_or_else(|| error::client("No compatible authentication mechanism was found"))?;
// Limit challenges to avoid blocking
let mut challenges = 10;
let mut response = self.command(Auth::new(mechanism, credentials.clone(), None)?)?;
while challenges > 0 && response.has_code(334) {
challenges -= 1;
response = try_smtp!(
self.command(Auth::new_from_response(
mechanism,
credentials.clone(),
&response,
)?),
self
);
}
if challenges == 0 {
Err(error::response("Unexpected number of challenges"))
} else {
Ok(response)
}
}
/// Sends the message content
pub fn message(&mut self, message: &[u8]) -> Result<Response, Error> {
let mut out_buf: Vec<u8> = vec![];
let mut codec = ClientCodec::new();
codec.encode(message, &mut out_buf);
self.write(out_buf.as_slice())?;
self.write(b"\r\n.\r\n")?;
self.read_response()
}
/// Sends an SMTP command
pub fn command<C: Display>(&mut self, command: C) -> Result<Response, Error> {
self.write(command.to_string().as_bytes())?;
self.read_response()
}
/// Writes a string to the server
fn write(&mut self, string: &[u8]) -> Result<(), Error> {
self.stream
.get_mut()
.write_all(string)
.map_err(error::network)?;
self.stream.get_mut().flush().map_err(error::network)?;
#[cfg(feature = "tracing")]
tracing::debug!("Wrote: {}", escape_crlf(&String::from_utf8_lossy(string)));
Ok(())
}
/// Gets the SMTP response
pub fn read_response(&mut self) -> Result<Response, Error>
|
}
}
}
Err(error::response("incomplete response"))
}
}
|
{
let mut buffer = String::with_capacity(100);
while self.stream.read_line(&mut buffer).map_err(error::network)? > 0 {
#[cfg(feature = "tracing")]
tracing::debug!("<< {}", escape_crlf(&buffer));
match parse_response(&buffer) {
Ok((_remaining, response)) => {
return if response.is_positive() {
Ok(response)
} else {
Err(error::code(response.code()))
};
}
Err(nom::Err::Failure(e)) => {
return Err(error::response(e.to_string()));
}
Err(nom::Err::Incomplete(_)) => { /* read more */ }
Err(nom::Err::Error(e)) => {
return Err(error::response(e.to_string()));
|
identifier_body
|
smoke.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// These tests came from https://github.com/rust-lang/rust/blob/master/src/libstd/sys/common/remutex.rs
extern crate servo_remutex;
use servo_remutex::{ReentrantMutex, ReentrantMutexGuard};
use std::cell::RefCell;
use std::sync::Arc;
use std::thread;
#[test]
fn smoke() {
let m = ReentrantMutex::new(());
{
let a = m.lock().unwrap();
{
let b = m.lock().unwrap();
{
let c = m.lock().unwrap();
assert_eq!(*c, ());
}
assert_eq!(*b, ());
}
assert_eq!(*a, ());
}
}
#[test]
fn is_mutex() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));
let m2 = m.clone();
let lock = m.lock().unwrap();
let child = thread::spawn(move || {
let lock = m2.lock().unwrap();
assert_eq!(*lock.borrow(), 4950);
});
for i in 0..100 {
let lock = m.lock().unwrap();
*lock.borrow_mut() += i;
}
|
#[test]
fn trylock_works() {
let m = Arc::new(ReentrantMutex::new(()));
let m2 = m.clone();
let _lock = m.try_lock().unwrap();
let _lock2 = m.try_lock().unwrap();
thread::spawn(move || {
let lock = m2.try_lock();
assert!(lock.is_err());
}).join().unwrap();
let _lock3 = m.try_lock().unwrap();
}
pub struct Answer<'a>(pub ReentrantMutexGuard<'a, RefCell<u32>>);
impl<'a> Drop for Answer<'a> {
fn drop(&mut self) {
*self.0.borrow_mut() = 42;
}
}
#[test]
fn poison_works() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));
let mc = m.clone();
let result = thread::spawn(move || {
let lock = mc.lock().unwrap();
*lock.borrow_mut() = 1;
let lock2 = mc.lock().unwrap();
*lock.borrow_mut() = 2;
let _answer = Answer(lock2);
println!("Intentionally panicking.");
panic!("What the answer to my lifetimes dilemma is?");
}).join();
assert!(result.is_err());
let r = m.lock().err().unwrap().into_inner();
assert_eq!(*r.borrow(), 42);
}
|
drop(lock);
child.join().unwrap();
}
|
random_line_split
|
smoke.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// These tests came from https://github.com/rust-lang/rust/blob/master/src/libstd/sys/common/remutex.rs
extern crate servo_remutex;
use servo_remutex::{ReentrantMutex, ReentrantMutexGuard};
use std::cell::RefCell;
use std::sync::Arc;
use std::thread;
#[test]
fn smoke() {
let m = ReentrantMutex::new(());
{
let a = m.lock().unwrap();
{
let b = m.lock().unwrap();
{
let c = m.lock().unwrap();
assert_eq!(*c, ());
}
assert_eq!(*b, ());
}
assert_eq!(*a, ());
}
}
#[test]
fn is_mutex() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));
let m2 = m.clone();
let lock = m.lock().unwrap();
let child = thread::spawn(move || {
let lock = m2.lock().unwrap();
assert_eq!(*lock.borrow(), 4950);
});
for i in 0..100 {
let lock = m.lock().unwrap();
*lock.borrow_mut() += i;
}
drop(lock);
child.join().unwrap();
}
#[test]
fn trylock_works()
|
pub struct Answer<'a>(pub ReentrantMutexGuard<'a, RefCell<u32>>);
impl<'a> Drop for Answer<'a> {
fn drop(&mut self) {
*self.0.borrow_mut() = 42;
}
}
#[test]
fn poison_works() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));
let mc = m.clone();
let result = thread::spawn(move || {
let lock = mc.lock().unwrap();
*lock.borrow_mut() = 1;
let lock2 = mc.lock().unwrap();
*lock.borrow_mut() = 2;
let _answer = Answer(lock2);
println!("Intentionally panicking.");
panic!("What the answer to my lifetimes dilemma is?");
}).join();
assert!(result.is_err());
let r = m.lock().err().unwrap().into_inner();
assert_eq!(*r.borrow(), 42);
}
|
{
let m = Arc::new(ReentrantMutex::new(()));
let m2 = m.clone();
let _lock = m.try_lock().unwrap();
let _lock2 = m.try_lock().unwrap();
thread::spawn(move || {
let lock = m2.try_lock();
assert!(lock.is_err());
}).join().unwrap();
let _lock3 = m.try_lock().unwrap();
}
|
identifier_body
|
smoke.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// These tests came from https://github.com/rust-lang/rust/blob/master/src/libstd/sys/common/remutex.rs
extern crate servo_remutex;
use servo_remutex::{ReentrantMutex, ReentrantMutexGuard};
use std::cell::RefCell;
use std::sync::Arc;
use std::thread;
#[test]
fn smoke() {
let m = ReentrantMutex::new(());
{
let a = m.lock().unwrap();
{
let b = m.lock().unwrap();
{
let c = m.lock().unwrap();
assert_eq!(*c, ());
}
assert_eq!(*b, ());
}
assert_eq!(*a, ());
}
}
#[test]
fn is_mutex() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));
let m2 = m.clone();
let lock = m.lock().unwrap();
let child = thread::spawn(move || {
let lock = m2.lock().unwrap();
assert_eq!(*lock.borrow(), 4950);
});
for i in 0..100 {
let lock = m.lock().unwrap();
*lock.borrow_mut() += i;
}
drop(lock);
child.join().unwrap();
}
#[test]
fn trylock_works() {
let m = Arc::new(ReentrantMutex::new(()));
let m2 = m.clone();
let _lock = m.try_lock().unwrap();
let _lock2 = m.try_lock().unwrap();
thread::spawn(move || {
let lock = m2.try_lock();
assert!(lock.is_err());
}).join().unwrap();
let _lock3 = m.try_lock().unwrap();
}
pub struct Answer<'a>(pub ReentrantMutexGuard<'a, RefCell<u32>>);
impl<'a> Drop for Answer<'a> {
fn drop(&mut self) {
*self.0.borrow_mut() = 42;
}
}
#[test]
fn
|
() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));
let mc = m.clone();
let result = thread::spawn(move || {
let lock = mc.lock().unwrap();
*lock.borrow_mut() = 1;
let lock2 = mc.lock().unwrap();
*lock.borrow_mut() = 2;
let _answer = Answer(lock2);
println!("Intentionally panicking.");
panic!("What the answer to my lifetimes dilemma is?");
}).join();
assert!(result.is_err());
let r = m.lock().err().unwrap().into_inner();
assert_eq!(*r.borrow(), 42);
}
|
poison_works
|
identifier_name
|