file_name (large_string, lengths 4 to 69) | prefix (large_string, lengths 0 to 26.7k) | suffix (large_string, lengths 0 to 24.8k) | middle (large_string, lengths 0 to 2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
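Each row below is one fill-in-the-middle (FIM) example: a Rust source file is cut at a single hole, the removed span becomes `middle`, the surrounding text becomes `prefix` and `suffix`, and `fim_type` records which of the 4 classes the removed span belongs to (`identifier_name`, `identifier_body`, `conditional_block`, `random_line_split`). The sketch below shows how such a row maps back to the original file, assuming the conventional layout where prefix, then middle, then suffix reproduces the file; the `FimExample` struct and `reconstruct` helper are illustrative names, not part of the dataset.

```rust
/// One row of the dataset, mirroring the columns documented above.
/// Field and type names here are illustrative only.
struct FimExample {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String, // one of the 4 classes listed in the header
}

impl FimExample {
    /// Rebuild the original file text under the assumed convention:
    /// prefix, then the held-out middle, then suffix.
    fn reconstruct(&self) -> String {
        let mut out = String::with_capacity(
            self.prefix.len() + self.middle.len() + self.suffix.len(),
        );
        out.push_str(&self.prefix);
        out.push_str(&self.middle);
        out.push_str(&self.suffix);
        out
    }
}

fn main() {
    // Toy row in the spirit of the `identifier_name` examples below.
    let row = FimExample {
        file_name: "msg_pong.rs".to_string(),
        prefix: "pub struct ".to_string(),
        suffix: "\n{\n    pub nonce: u64,\n}\n".to_string(),
        middle: "PongMessage".to_string(),
        fim_type: "identifier_name".to_string(),
    };
    assert!(row.reconstruct().starts_with("pub struct PongMessage"));
    println!("{} ({})", row.file_name, row.fim_type);
}
```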
msg_pong.rs | use std;
use ::serialize::{self, Serializable};
use super::PingMessage;
use super::BIP0031_VERSION;
#[derive(Debug,Default,Clone)]
pub struct |
{
pub nonce: u64,
}
impl PongMessage {
pub fn new(ping:&PingMessage) -> PongMessage {
PongMessage{ nonce: ping.nonce }
}
}
impl super::Message for PongMessage {
fn get_command(&self) -> [u8; super::message_header::COMMAND_SIZE] {
super::message_header::COMMAND_PONG
}
}
impl std::fmt::Display for PongMessage {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Pong(nonce={})", self.nonce)
}
}
impl Serializable for PongMessage {
fn get_serialize_size(&self, ser:&serialize::SerializeParam) -> usize {
if BIP0031_VERSION < ser.version {
self.nonce.get_serialize_size(ser)
} else {
0usize
}
}
fn serialize(&self, io:&mut std::io::Write, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version {
self.nonce.serialize(io, ser)
} else {
Ok(0usize)
}
}
fn deserialize(&mut self, io:&mut std::io::Read, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version {
self.nonce.deserialize(io, ser)
} else {
Ok(0usize)
}
}
}
| PongMessage | identifier_name |
msg_pong.rs | use std;
use ::serialize::{self, Serializable};
use super::PingMessage;
use super::BIP0031_VERSION;
#[derive(Debug,Default,Clone)]
pub struct PongMessage
{
pub nonce: u64,
}
impl PongMessage {
pub fn new(ping:&PingMessage) -> PongMessage {
PongMessage{ nonce: ping.nonce }
}
}
impl super::Message for PongMessage {
fn get_command(&self) -> [u8; super::message_header::COMMAND_SIZE] {
super::message_header::COMMAND_PONG
}
}
impl std::fmt::Display for PongMessage {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Pong(nonce={})", self.nonce)
}
}
impl Serializable for PongMessage {
fn get_serialize_size(&self, ser:&serialize::SerializeParam) -> usize {
if BIP0031_VERSION < ser.version {
self.nonce.get_serialize_size(ser)
} else {
0usize
}
}
fn serialize(&self, io:&mut std::io::Write, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version {
self.nonce.serialize(io, ser)
} else {
Ok(0usize)
}
}
fn deserialize(&mut self, io:&mut std::io::Read, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version | else {
Ok(0usize)
}
}
}
| {
self.nonce.deserialize(io, ser)
} | conditional_block |
msg_pong.rs | use std;
use ::serialize::{self, Serializable};
use super::PingMessage;
use super::BIP0031_VERSION;
#[derive(Debug,Default,Clone)]
pub struct PongMessage
{
pub nonce: u64,
}
impl PongMessage {
pub fn new(ping:&PingMessage) -> PongMessage {
PongMessage{ nonce: ping.nonce }
}
}
impl super::Message for PongMessage {
fn get_command(&self) -> [u8; super::message_header::COMMAND_SIZE] {
super::message_header::COMMAND_PONG
}
}
impl std::fmt::Display for PongMessage {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Pong(nonce={})", self.nonce)
}
}
| if BIP0031_VERSION < ser.version {
self.nonce.get_serialize_size(ser)
} else {
0usize
}
}
fn serialize(&self, io:&mut std::io::Write, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version {
self.nonce.serialize(io, ser)
} else {
Ok(0usize)
}
}
fn deserialize(&mut self, io:&mut std::io::Read, ser:&serialize::SerializeParam) -> serialize::Result {
if BIP0031_VERSION < ser.version {
self.nonce.deserialize(io, ser)
} else {
Ok(0usize)
}
}
} | impl Serializable for PongMessage {
fn get_serialize_size(&self, ser:&serialize::SerializeParam) -> usize { | random_line_split |
benchmarks.rs | extern crate ostrov;
extern crate test;
use ostrov::runtime::Runtime;
use test::Bencher;
static NESTED_IFS: &'static str = "
(if
(if
(if
(if
(if
(if
(> 1 2 3 4 5 6 7 8 9 10)
(= 2 2 2 2 2 2 2 2 2 2) | #f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
2
3
)
";
#[bench]
fn nested_evaluation(b: &mut Bencher) {
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(runtime.eval_str(NESTED_IFS), runtime.eval_str("3"));
})
}
#[bench]
fn nested_evaluation_bytecode(b: &mut Bencher) {
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(runtime.eval_str(NESTED_IFS), runtime.eval_str("3"));
})
}
#[bench]
fn procedure_evaluation(b: &mut Bencher) {
let input = "
(define (fact n)
(if (= n 1)
1
(* n (fact (- n 1)))))
(fact 5)
";
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(
runtime.eval_str(input).unwrap()[1],
runtime.eval_str("120").unwrap()[0]
);
})
} | #f
)
(= 2 2 2 2 2 2 2 2 2 2) | random_line_split |
benchmarks.rs | extern crate ostrov;
extern crate test;
use ostrov::runtime::Runtime;
use test::Bencher;
static NESTED_IFS: &'static str = "
(if
(if
(if
(if
(if
(if
(> 1 2 3 4 5 6 7 8 9 10)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
2
3
)
";
#[bench]
fn | (b: &mut Bencher) {
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(runtime.eval_str(NESTED_IFS), runtime.eval_str("3"));
})
}
#[bench]
fn nested_evaluation_bytecode(b: &mut Bencher) {
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(runtime.eval_str(NESTED_IFS), runtime.eval_str("3"));
})
}
#[bench]
fn procedure_evaluation(b: &mut Bencher) {
let input = "
(define (fact n)
(if (= n 1)
1
(* n (fact (- n 1)))))
(fact 5)
";
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(
runtime.eval_str(input).unwrap()[1],
runtime.eval_str("120").unwrap()[0]
);
})
}
| nested_evaluation | identifier_name |
benchmarks.rs | extern crate ostrov;
extern crate test;
use ostrov::runtime::Runtime;
use test::Bencher;
static NESTED_IFS: &'static str = "
(if
(if
(if
(if
(if
(if
(> 1 2 3 4 5 6 7 8 9 10)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
(= 2 2 2 2 2 2 2 2 2 2)
#f
)
2
3
)
";
#[bench]
fn nested_evaluation(b: &mut Bencher) |
#[bench]
fn nested_evaluation_bytecode(b: &mut Bencher) {
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(runtime.eval_str(NESTED_IFS), runtime.eval_str("3"));
})
}
#[bench]
fn procedure_evaluation(b: &mut Bencher) {
let input = "
(define (fact n)
(if (= n 1)
1
(* n (fact (- n 1)))))
(fact 5)
";
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(
runtime.eval_str(input).unwrap()[1],
runtime.eval_str("120").unwrap()[0]
);
})
}
| {
let mut runtime = Runtime::new();
b.iter(|| {
assert_eq!(runtime.eval_str(NESTED_IFS), runtime.eval_str("3"));
})
} | identifier_body |
string_list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use libc::{c_int};
use std::mem;
use string::{cef_string_userfree_utf16_alloc,cef_string_userfree_utf16_free,cef_string_utf16_set};
use types::{cef_string_list_t,cef_string_t};
fn string_list_to_vec(lt: *mut cef_string_list_t) -> *mut Vec<*mut cef_string_t> {
lt as *mut Vec<*mut cef_string_t>
}
//cef_string_list
#[no_mangle]
pub extern "C" fn cef_string_list_alloc() -> *mut cef_string_list_t {
unsafe {
let lt: Box<Vec<*mut cef_string_t>> = box vec!();
mem::transmute(lt)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_size(lt: *mut cef_string_list_t) -> c_int {
unsafe {
if lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
(*v).len() as c_int
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_append(lt: *mut cef_string_list_t, value: *const cef_string_t) {
unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
let cs = cef_string_userfree_utf16_alloc();
cef_string_utf16_set(mem::transmute((*value).str), (*value).length, cs, 1);
(*v).push(cs);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_value(lt: *mut cef_string_list_t, index: c_int, value: *mut cef_string_t) -> c_int {
unsafe {
if index < 0 || lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
if index as usize > (*v).len() - 1 { return 0; }
let cs = (*v)[index as usize];
cef_string_utf16_set(mem::transmute((*cs).str), (*cs).length, value, 1)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_clear(lt: *mut cef_string_list_t) {
unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
if (*v).len() == 0 { return; }
let mut cs;
while (*v).len() != 0 {
cs = (*v).pop();
cef_string_userfree_utf16_free(cs.unwrap());
}
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_free(lt: *mut cef_string_list_t) {
unsafe {
if lt.is_null() |
let v: Box<Vec<*mut cef_string_t>> = mem::transmute(lt);
cef_string_list_clear(lt);
drop(v);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_copy(lt: *mut cef_string_list_t) -> *mut cef_string_list_t {
unsafe {
if lt.is_null() { return 0 as *mut cef_string_list_t; }
let v = string_list_to_vec(lt);
let lt2 = cef_string_list_alloc();
for cs in (*v).iter() {
cef_string_list_append(lt2, mem::transmute((*cs)));
}
lt2
}
}
| { return; } | conditional_block |
string_list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use libc::{c_int};
use std::mem;
use string::{cef_string_userfree_utf16_alloc,cef_string_userfree_utf16_free,cef_string_utf16_set};
use types::{cef_string_list_t,cef_string_t};
fn string_list_to_vec(lt: *mut cef_string_list_t) -> *mut Vec<*mut cef_string_t> {
lt as *mut Vec<*mut cef_string_t>
}
//cef_string_list
#[no_mangle]
pub extern "C" fn cef_string_list_alloc() -> *mut cef_string_list_t {
unsafe {
let lt: Box<Vec<*mut cef_string_t>> = box vec!();
mem::transmute(lt)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_size(lt: *mut cef_string_list_t) -> c_int {
unsafe {
if lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
(*v).len() as c_int
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_append(lt: *mut cef_string_list_t, value: *const cef_string_t) {
unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
let cs = cef_string_userfree_utf16_alloc();
cef_string_utf16_set(mem::transmute((*value).str), (*value).length, cs, 1);
(*v).push(cs);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_value(lt: *mut cef_string_list_t, index: c_int, value: *mut cef_string_t) -> c_int {
unsafe {
if index < 0 || lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
if index as usize > (*v).len() - 1 { return 0; }
let cs = (*v)[index as usize];
cef_string_utf16_set(mem::transmute((*cs).str), (*cs).length, value, 1)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_clear(lt: *mut cef_string_list_t) { | unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
if (*v).len() == 0 { return; }
let mut cs;
while (*v).len() != 0 {
cs = (*v).pop();
cef_string_userfree_utf16_free(cs.unwrap());
}
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_free(lt: *mut cef_string_list_t) {
unsafe {
if lt.is_null() { return; }
let v: Box<Vec<*mut cef_string_t>> = mem::transmute(lt);
cef_string_list_clear(lt);
drop(v);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_copy(lt: *mut cef_string_list_t) -> *mut cef_string_list_t {
unsafe {
if lt.is_null() { return 0 as *mut cef_string_list_t; }
let v = string_list_to_vec(lt);
let lt2 = cef_string_list_alloc();
for cs in (*v).iter() {
cef_string_list_append(lt2, mem::transmute((*cs)));
}
lt2
}
} | random_line_split |
|
string_list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use libc::{c_int};
use std::mem;
use string::{cef_string_userfree_utf16_alloc,cef_string_userfree_utf16_free,cef_string_utf16_set};
use types::{cef_string_list_t,cef_string_t};
fn string_list_to_vec(lt: *mut cef_string_list_t) -> *mut Vec<*mut cef_string_t> {
lt as *mut Vec<*mut cef_string_t>
}
//cef_string_list
#[no_mangle]
pub extern "C" fn cef_string_list_alloc() -> *mut cef_string_list_t {
unsafe {
let lt: Box<Vec<*mut cef_string_t>> = box vec!();
mem::transmute(lt)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_size(lt: *mut cef_string_list_t) -> c_int {
unsafe {
if lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
(*v).len() as c_int
}
}
#[no_mangle]
pub extern "C" fn | (lt: *mut cef_string_list_t, value: *const cef_string_t) {
unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
let cs = cef_string_userfree_utf16_alloc();
cef_string_utf16_set(mem::transmute((*value).str), (*value).length, cs, 1);
(*v).push(cs);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_value(lt: *mut cef_string_list_t, index: c_int, value: *mut cef_string_t) -> c_int {
unsafe {
if index < 0 || lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
if index as usize > (*v).len() - 1 { return 0; }
let cs = (*v)[index as usize];
cef_string_utf16_set(mem::transmute((*cs).str), (*cs).length, value, 1)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_clear(lt: *mut cef_string_list_t) {
unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
if (*v).len() == 0 { return; }
let mut cs;
while (*v).len() != 0 {
cs = (*v).pop();
cef_string_userfree_utf16_free(cs.unwrap());
}
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_free(lt: *mut cef_string_list_t) {
unsafe {
if lt.is_null() { return; }
let v: Box<Vec<*mut cef_string_t>> = mem::transmute(lt);
cef_string_list_clear(lt);
drop(v);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_copy(lt: *mut cef_string_list_t) -> *mut cef_string_list_t {
unsafe {
if lt.is_null() { return 0 as *mut cef_string_list_t; }
let v = string_list_to_vec(lt);
let lt2 = cef_string_list_alloc();
for cs in (*v).iter() {
cef_string_list_append(lt2, mem::transmute((*cs)));
}
lt2
}
}
| cef_string_list_append | identifier_name |
string_list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use libc::{c_int};
use std::mem;
use string::{cef_string_userfree_utf16_alloc,cef_string_userfree_utf16_free,cef_string_utf16_set};
use types::{cef_string_list_t,cef_string_t};
fn string_list_to_vec(lt: *mut cef_string_list_t) -> *mut Vec<*mut cef_string_t> {
lt as *mut Vec<*mut cef_string_t>
}
//cef_string_list
#[no_mangle]
pub extern "C" fn cef_string_list_alloc() -> *mut cef_string_list_t {
unsafe {
let lt: Box<Vec<*mut cef_string_t>> = box vec!();
mem::transmute(lt)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_size(lt: *mut cef_string_list_t) -> c_int {
unsafe {
if lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
(*v).len() as c_int
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_append(lt: *mut cef_string_list_t, value: *const cef_string_t) {
unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
let cs = cef_string_userfree_utf16_alloc();
cef_string_utf16_set(mem::transmute((*value).str), (*value).length, cs, 1);
(*v).push(cs);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_value(lt: *mut cef_string_list_t, index: c_int, value: *mut cef_string_t) -> c_int {
unsafe {
if index < 0 || lt.is_null() { return 0; }
let v = string_list_to_vec(lt);
if index as usize > (*v).len() - 1 { return 0; }
let cs = (*v)[index as usize];
cef_string_utf16_set(mem::transmute((*cs).str), (*cs).length, value, 1)
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_clear(lt: *mut cef_string_list_t) |
#[no_mangle]
pub extern "C" fn cef_string_list_free(lt: *mut cef_string_list_t) {
unsafe {
if lt.is_null() { return; }
let v: Box<Vec<*mut cef_string_t>> = mem::transmute(lt);
cef_string_list_clear(lt);
drop(v);
}
}
#[no_mangle]
pub extern "C" fn cef_string_list_copy(lt: *mut cef_string_list_t) -> *mut cef_string_list_t {
unsafe {
if lt.is_null() { return 0 as *mut cef_string_list_t; }
let v = string_list_to_vec(lt);
let lt2 = cef_string_list_alloc();
for cs in (*v).iter() {
cef_string_list_append(lt2, mem::transmute((*cs)));
}
lt2
}
}
| {
unsafe {
if lt.is_null() { return; }
let v = string_list_to_vec(lt);
if (*v).len() == 0 { return; }
let mut cs;
while (*v).len() != 0 {
cs = (*v).pop();
cef_string_userfree_utf16_free(cs.unwrap());
}
}
} | identifier_body |
deadlock.rs | // Zeming Lin
// For CS4414, probably generates a deadlock.
// Inspired by David Evans code from class 13 for CS4414, Fall '13
// This works on an AMD cpu running ubuntu LTS 12.04,
// but it fails on an Intel Atom netbook.
// Rust tasks also don't like to run concurrently on the same netbook
// so it probably isn't the code's fault.
type Semaphore = Option<uint> ; // either None (available) or owner
static mut count: uint = 0; // protected by lock
static mut lock1: Semaphore = None;
static mut lock2: Semaphore = None;
fn grab_lock(id: uint) {
unsafe {
println(fmt!("%u is grabbing lock...", id));
while (lock1.is_some() && lock2.is_some()) {
; // wait for lock
}
if !lock1.is_some() {
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
while (lock2.is_some()) {
;
}
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
}
else if !lock2.is_some() |
else { // Oops, another process grabbed the locks
grab_lock(id);
}
}
}
fn release_locks() {
unsafe {
lock1 = None;
lock2 = None
}
}
fn update_count(id: uint) {
unsafe {
grab_lock(id);
count += 1;
println(fmt!("Count updated by %?: %?", id, count));
release_locks();
}
}
fn main() {
for num in range(0u, 10) {
do spawn {
for _ in range(0u, 1000) {
update_count(num);
}
}
}
}
| {
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
while (lock1.is_some()) {
;
}
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
} | conditional_block |
deadlock.rs | // but it fails on an Intel Atom netbook.
// Rust tasks also don't like to run concurrently on the same netbook
// so it probably isn't the code's fault.
type Semaphore = Option<uint> ; // either None (available) or owner
static mut count: uint = 0; // protected by lock
static mut lock1: Semaphore = None;
static mut lock2: Semaphore = None;
fn grab_lock(id: uint) {
unsafe {
println(fmt!("%u is grabbing lock...", id));
while (lock1.is_some() && lock2.is_some()) {
; // wait for lock
}
if !lock1.is_some() {
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
while (lock2.is_some()) {
;
}
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
}
else if !lock2.is_some() {
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
while (lock1.is_some()) {
;
}
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
}
else { // Oops, another process grabbed the locks
grab_lock(id);
}
}
}
fn release_locks() {
unsafe {
lock1 = None;
lock2 = None
}
}
fn update_count(id: uint) {
unsafe {
grab_lock(id);
count += 1;
println(fmt!("Count updated by %?: %?", id, count));
release_locks();
}
}
fn main() {
for num in range(0u, 10) {
do spawn {
for _ in range(0u, 1000) {
update_count(num);
}
}
}
} | // Zeming Lin
// For CS4414, probably generates a deadlock.
// Inspired by David Evans code from class 13 for CS4414, Fall '13
// This works on an AMD cpu running ubuntu LTS 12.04, | random_line_split |
|
deadlock.rs | // Zeming Lin
// For CS4414, probably generates a deadlock.
// Inspired by David Evans code from class 13 for CS4414, Fall '13
// This works on an AMD cpu running ubuntu LTS 12.04,
// but it fails on an Intel Atom netbook.
// Rust tasks also don't like to run concurrently on the same netbook
// so it probably isn't the code's fault.
type Semaphore = Option<uint> ; // either None (available) or owner
static mut count: uint = 0; // protected by lock
static mut lock1: Semaphore = None;
static mut lock2: Semaphore = None;
fn grab_lock(id: uint) {
unsafe {
println(fmt!("%u is grabbing lock...", id));
while (lock1.is_some() && lock2.is_some()) {
; // wait for lock
}
if !lock1.is_some() {
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
while (lock2.is_some()) {
;
}
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
}
else if !lock2.is_some() {
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
while (lock1.is_some()) {
;
}
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
}
else { // Oops, another process grabbed the locks
grab_lock(id);
}
}
}
fn release_locks() {
unsafe {
lock1 = None;
lock2 = None
}
}
fn update_count(id: uint) |
fn main() {
for num in range(0u, 10) {
do spawn {
for _ in range(0u, 1000) {
update_count(num);
}
}
}
}
| {
unsafe {
grab_lock(id);
count += 1;
println(fmt!("Count updated by %?: %?", id, count));
release_locks();
}
} | identifier_body |
deadlock.rs | // Zeming Lin
// For CS4414, probably generates a deadlock.
// Inspired by David Evans code from class 13 for CS4414, Fall '13
// This works on an AMD cpu running ubuntu LTS 12.04,
// but it fails on an Intel Atom netbook.
// Rust tasks also don't like to run concurrently on the same netbook
// so it probably isn't the code's fault.
type Semaphore = Option<uint> ; // either None (available) or owner
static mut count: uint = 0; // protected by lock
static mut lock1: Semaphore = None;
static mut lock2: Semaphore = None;
fn grab_lock(id: uint) {
unsafe {
println(fmt!("%u is grabbing lock...", id));
while (lock1.is_some() && lock2.is_some()) {
; // wait for lock
}
if !lock1.is_some() {
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
while (lock2.is_some()) {
;
}
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
}
else if !lock2.is_some() {
lock2 = Some(id);
print(fmt!("Process %u grabbed lock2!\n", id));
while (lock1.is_some()) {
;
}
lock1 = Some(id);
print(fmt!("Process %u grabbed lock1!\n", id));
}
else { // Oops, another process grabbed the locks
grab_lock(id);
}
}
}
fn release_locks() {
unsafe {
lock1 = None;
lock2 = None
}
}
fn | (id: uint) {
unsafe {
grab_lock(id);
count += 1;
println(fmt!("Count updated by %?: %?", id, count));
release_locks();
}
}
fn main() {
for num in range(0u, 10) {
do spawn {
for _ in range(0u, 1000) {
update_count(num);
}
}
}
}
| update_count | identifier_name |
env.rs | // except according to those terms.
use std::collections::hashmap::HashMap;
use expr::{Expr, ExprResult};
use result::SchemerResult;
pub type EnvResult = SchemerResult<Env>;
pub type EnvSetResult = SchemerResult<()>;
#[deriving(Clone)]
pub struct Env {
entries: HashMap<String, Expr>,
outer: Option<Box<Env>>
}
impl Env {
pub fn new_empty() -> Env {
Env {
entries: HashMap::new(),
outer: None
}
}
pub fn new(
params: Option<Vec<String>>,
args: Option<Vec<Expr>>,
outer: Option<Env>) -> EnvResult {
let mut entries = HashMap::new();
if params.is_some() && args.is_some() {
let params = match params {
Some(p) => p,
None => return Err("Env::new(): params should be a value".to_string())
};
let args = match args {
Some(p) => p,
None => return Err("Env::new(): args should be a value".to_string())
};
if params.len() == args.len() {
for ctr in range(0,params.len()) {
let var_name = params[ctr].to_string();
let arg = args[ctr].clone();
entries.insert(var_name, arg);
}
} else {
return Err("Env::new(): params and args length doesn't match".to_string())
}
}
else if params.is_none() && args.is_none() {
if outer.is_none() {
return Ok(Env::new_empty())
}
} else {
return Err("Env::new(): cannot have params & args unset".to_string())
}
Ok(Env { entries: entries, outer: match outer { Some(e) => Some(box e), None => None } })
}
pub fn set(&mut self, symbol: String, val: Expr) -> EnvSetResult {
match self.entries.contains_key(&symbol) {
true => {
self.define(symbol, val);
Ok(())
},
false => match self.outer {
Some(ref mut outer_env) =>
outer_env.set(symbol, val),
None => Err(
format!("Env.set(): Symbol '{}' is not defined in this Env scope chain",
symbol))
}
}
}
pub fn define(&mut self, symbol: String, val: Expr) {
self.entries.insert(symbol, val);
}
pub fn find<'b>(&'b self, symbol: &String) -> ExprResult {
match self.entries.find(symbol) {
Some(v) => Ok(v.clone()),
None => {
match &self.outer {
&Some(ref outer_env) => outer_env.find(symbol),
&None => Err(format!("No variable named {} defined in the environment."
, *symbol))
}
}
}
}
pub fn enclose(&mut self, base: Env) {
match self.outer {
Some(ref mut outer) => outer.enclose(base),
None => self.outer = Some(box base)
}
}
pub fn into_parent(self) -> Option<Env> {
self.outer.map(|x| *x)
}
} | // Copyright 2014 Jeffery Olson
//
// Licensed under the 3-Clause BSD License, see LICENSE.txt
// at the top-level of this repository.
// This file may not be copied, modified, or distributed | random_line_split |
|
env.rs | // Copyright 2014 Jeffery Olson
//
// Licensed under the 3-Clause BSD License, see LICENSE.txt
// at the top-level of this repository.
// This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::hashmap::HashMap;
use expr::{Expr, ExprResult};
use result::SchemerResult;
pub type EnvResult = SchemerResult<Env>;
pub type EnvSetResult = SchemerResult<()>;
#[deriving(Clone)]
pub struct Env {
entries: HashMap<String, Expr>,
outer: Option<Box<Env>>
}
impl Env {
pub fn new_empty() -> Env {
Env {
entries: HashMap::new(),
outer: None
}
}
pub fn new(
params: Option<Vec<String>>,
args: Option<Vec<Expr>>,
outer: Option<Env>) -> EnvResult {
let mut entries = HashMap::new();
if params.is_some() && args.is_some() {
let params = match params {
Some(p) => p,
None => return Err("Env::new(): params should be a value".to_string())
};
let args = match args {
Some(p) => p,
None => return Err("Env::new(): args should be a value".to_string())
};
if params.len() == args.len() {
for ctr in range(0,params.len()) {
let var_name = params[ctr].to_string();
let arg = args[ctr].clone();
entries.insert(var_name, arg);
}
} else {
return Err("Env::new(): params and args length doesn't match".to_string())
}
}
else if params.is_none() && args.is_none() {
if outer.is_none() {
return Ok(Env::new_empty())
}
} else {
return Err("Env::new(): cannot have params & args unset".to_string())
}
Ok(Env { entries: entries, outer: match outer { Some(e) => Some(box e), None => None } })
}
pub fn set(&mut self, symbol: String, val: Expr) -> EnvSetResult {
match self.entries.contains_key(&symbol) {
true => {
self.define(symbol, val);
Ok(())
},
false => match self.outer {
Some(ref mut outer_env) =>
outer_env.set(symbol, val),
None => Err(
format!("Env.set(): Symbol '{}' is not defined in this Env scope chain",
symbol))
}
}
}
pub fn define(&mut self, symbol: String, val: Expr) {
self.entries.insert(symbol, val);
}
pub fn find<'b>(&'b self, symbol: &String) -> ExprResult |
pub fn enclose(&mut self, base: Env) {
match self.outer {
Some(ref mut outer) => outer.enclose(base),
None => self.outer = Some(box base)
}
}
pub fn into_parent(self) -> Option<Env> {
self.outer.map(|x| *x)
}
}
| {
match self.entries.find(symbol) {
Some(v) => Ok(v.clone()),
None => {
match &self.outer {
&Some(ref outer_env) => outer_env.find(symbol),
&None => Err(format!("No variable named {} defined in the environment."
, *symbol))
}
}
}
} | identifier_body |
env.rs | // Copyright 2014 Jeffery Olson
//
// Licensed under the 3-Clause BSD License, see LICENSE.txt
// at the top-level of this repository.
// This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::hashmap::HashMap;
use expr::{Expr, ExprResult};
use result::SchemerResult;
pub type EnvResult = SchemerResult<Env>;
pub type EnvSetResult = SchemerResult<()>;
#[deriving(Clone)]
pub struct Env {
entries: HashMap<String, Expr>,
outer: Option<Box<Env>>
}
impl Env {
pub fn new_empty() -> Env {
Env {
entries: HashMap::new(),
outer: None
}
}
pub fn new(
params: Option<Vec<String>>,
args: Option<Vec<Expr>>,
outer: Option<Env>) -> EnvResult {
let mut entries = HashMap::new();
if params.is_some() && args.is_some() {
let params = match params {
Some(p) => p,
None => return Err("Env::new(): params should be a value".to_string())
};
let args = match args {
Some(p) => p,
None => return Err("Env::new(): args should be a value".to_string())
};
if params.len() == args.len() {
for ctr in range(0,params.len()) {
let var_name = params[ctr].to_string();
let arg = args[ctr].clone();
entries.insert(var_name, arg);
}
} else {
return Err("Env::new(): params and args length doesn't match".to_string())
}
}
else if params.is_none() && args.is_none() {
if outer.is_none() {
return Ok(Env::new_empty())
}
} else {
return Err("Env::new(): cannot have params & args unset".to_string())
}
Ok(Env { entries: entries, outer: match outer { Some(e) => Some(box e), None => None } })
}
pub fn set(&mut self, symbol: String, val: Expr) -> EnvSetResult {
match self.entries.contains_key(&symbol) {
true => {
self.define(symbol, val);
Ok(())
},
false => match self.outer {
Some(ref mut outer_env) =>
outer_env.set(symbol, val),
None => Err(
format!("Env.set(): Symbol '{}' is not defined in this Env scope chain",
symbol))
}
}
}
pub fn define(&mut self, symbol: String, val: Expr) {
self.entries.insert(symbol, val);
}
pub fn | <'b>(&'b self, symbol: &String) -> ExprResult {
match self.entries.find(symbol) {
Some(v) => Ok(v.clone()),
None => {
match &self.outer {
&Some(ref outer_env) => outer_env.find(symbol),
&None => Err(format!("No variable named {} defined in the environment."
, *symbol))
}
}
}
}
pub fn enclose(&mut self, base: Env) {
match self.outer {
Some(ref mut outer) => outer.enclose(base),
None => self.outer = Some(box base)
}
}
pub fn into_parent(self) -> Option<Env> {
self.outer.map(|x| *x)
}
}
| find | identifier_name |
main.rs | extern crate osmpbfreader;
extern crate serde;
#[macro_use] extern crate serde_derive;
#[macro_use(bson, doc)] extern crate bson;
extern crate mongodb;
use std::collections::HashMap;
use std::env::args;
use mongodb::coll::options::WriteModel;
use mongodb::{Client, ThreadedClient};
use mongodb::db::ThreadedDatabase;
use std::mem;
// Structure holding a river, used for bson serialization
#[derive(Serialize, Deserialize, Debug)]
pub struct River {
pub paths: Vec<(f64,f64)>
}
fn process_file(filename : &String, rivers_coll : &mongodb::coll::Collection) {
let r = std::fs::File::open(&std::path::Path::new(filename)).unwrap();
println!("Parsing {:?}...",filename);
let mut pbf = osmpbfreader::OsmPbfReader::new(r);
let objs = pbf.get_objs_and_deps(|obj| {
obj.is_way() && obj.tags().contains_key("waterway") && ( obj.tags().get("waterway").unwrap()=="river" || obj.tags().get("waterway").unwrap()=="stream" || obj.tags().get("waterway").unwrap()=="canal" ) && obj.tags().contains_key("name") && obj.tags().get("name").unwrap().len()>0
}).unwrap();
println!("Objs got");
let mut nodes = HashMap::new();
let mut bulk = Vec::new();
for (_id, obj) in &objs {
match obj {
osmpbfreader::OsmObj::Node(n) => |
osmpbfreader::OsmObj::Way(w) => {
let mut path = Vec::new();
for node_id in &w.nodes {
match nodes.get(&node_id) {
Some(node) => path.push((node.0,node.1)),
None => { panic!(); }
}
}
// Insert into MongoDB
let river = River {
paths: path
};
let names : Vec<String> = obj.tags().iter().filter(|tag| tag.0.starts_with("name:")).map(|tag| tag.1.clone()).collect();
let serialized_river = bson::to_bson(&river); // Serialize
match serialized_river {
Ok(sr) => {
if let bson::Bson::Document(docu) = sr { // Documentize
bulk.push(WriteModel::UpdateOne { filter: doc!{"_id": obj.tags().get("name").unwrap().to_string()},
update: doc!{"$push": docu, "$addToSet": {"names": {"$each": bson::to_bson(&names).unwrap()}}},
upsert: Some(true) });
if bulk.len()>100 {
println!("Insert into db... {}",bulk.len());
let mut bulk2 = Vec::new(); // create new empty bulk
mem::swap(&mut bulk, &mut bulk2); // bulk <-> bulk2
let result = rivers_coll.bulk_write(bulk2, true); // send full bulk
println!("Number of rivers inserted: ins:{} match:{} modif:{} del:{} upset:{}",result.inserted_count,result.matched_count,result.modified_count,result.deleted_count,result.upserted_count);
match result.bulk_write_exception {
Some(exception) => {
if exception.message.len()>0 {
println!("ERROR: {}",exception.message);
}
}
None => ()
}
//bulk.clear(); // bulk is now a new empty bulk thanks to swaping, clear is unecessary
} // Compiler will drop bulk2 (the full bulk) at this point
} else {
println!("Error converting the BSON object into a MongoDB document");
}
},
Err(_) => println!("Error serializing the River as a BSON object")
}
},
osmpbfreader::OsmObj::Relation(_) => ()
}
}
if bulk.len()>0 {
println!("Insert into db... {} river(s)",bulk.len());
let result = rivers_coll.bulk_write(bulk, true); // send remaining bulk
println!("Number of rivers inserted: match:{} ins:{} modif:{} del:{} upsert:{}",result.matched_count,result.inserted_count,result.modified_count,result.deleted_count,result.upserted_count);
match result.bulk_write_exception {
Some(exception) => {
if exception.message.len()>0 {
println!("ERROR(s): {}",exception.message);
}
}
None => ()
}
}
}
fn main() {
// Connect to MongoDB client and select collection
let client = Client::connect("localhost", 27017).ok().expect("Failed to initialize client.");
let rivers_coll = client.db("wwsupdb").collection("osm");
match rivers_coll.drop() {
Ok(_) => println!("Collection droped"),
Err(_) => panic!()
}
for arg in args().skip(1) {
process_file(&arg, &rivers_coll);
}
}
| {
nodes.insert( n.id, (n.lat(),n.lon()) );
} | conditional_block |
main.rs | extern crate osmpbfreader;
extern crate serde;
#[macro_use] extern crate serde_derive;
#[macro_use(bson, doc)] extern crate bson;
extern crate mongodb;
use std::collections::HashMap;
use std::env::args;
use mongodb::coll::options::WriteModel;
use mongodb::{Client, ThreadedClient};
use mongodb::db::ThreadedDatabase;
use std::mem;
// Structure holding a river, used for bson serialization
#[derive(Serialize, Deserialize, Debug)]
pub struct River {
pub paths: Vec<(f64,f64)>
}
fn process_file(filename : &String, rivers_coll : &mongodb::coll::Collection) {
let r = std::fs::File::open(&std::path::Path::new(filename)).unwrap();
println!("Parsing {:?}...",filename);
let mut pbf = osmpbfreader::OsmPbfReader::new(r);
let objs = pbf.get_objs_and_deps(|obj| {
obj.is_way() && obj.tags().contains_key("waterway") && ( obj.tags().get("waterway").unwrap()=="river" || obj.tags().get("waterway").unwrap()=="stream" || obj.tags().get("waterway").unwrap()=="canal" ) && obj.tags().contains_key("name") && obj.tags().get("name").unwrap().len()>0
}).unwrap();
println!("Objs got");
let mut nodes = HashMap::new();
let mut bulk = Vec::new();
for (_id, obj) in &objs {
match obj {
osmpbfreader::OsmObj::Node(n) => {
nodes.insert( n.id, (n.lat(),n.lon()) );
}
osmpbfreader::OsmObj::Way(w) => {
let mut path = Vec::new();
for node_id in &w.nodes {
match nodes.get(&node_id) {
Some(node) => path.push((node.0,node.1)),
None => { panic!(); }
}
}
// Insert into MongoDB
let river = River {
paths: path
};
let names : Vec<String> = obj.tags().iter().filter(|tag| tag.0.starts_with("name:")).map(|tag| tag.1.clone()).collect();
let serialized_river = bson::to_bson(&river); // Serialize
match serialized_river {
Ok(sr) => {
if let bson::Bson::Document(docu) = sr { // Documentize
bulk.push(WriteModel::UpdateOne { filter: doc!{"_id": obj.tags().get("name").unwrap().to_string()},
update: doc!{"$push": docu, "$addToSet": {"names": {"$each": bson::to_bson(&names).unwrap()}}},
upsert: Some(true) });
if bulk.len()>100 {
println!("Insert into db... {}",bulk.len());
let mut bulk2 = Vec::new(); // create new empty bulk
mem::swap(&mut bulk, &mut bulk2); // bulk <-> bulk2
let result = rivers_coll.bulk_write(bulk2, true); // send full bulk
println!("Number of rivers inserted: ins:{} match:{} modif:{} del:{} upset:{}",result.inserted_count,result.matched_count,result.modified_count,result.deleted_count,result.upserted_count);
match result.bulk_write_exception {
Some(exception) => {
if exception.message.len()>0 {
println!("ERROR: {}",exception.message);
}
}
None => ()
}
//bulk.clear(); // bulk is now a new empty bulk thanks to swaping, clear is unecessary
} // Compiler will drop bulk2 (the full bulk) at this point
} else {
println!("Error converting the BSON object into a MongoDB document");
}
},
Err(_) => println!("Error serializing the River as a BSON object")
}
},
osmpbfreader::OsmObj::Relation(_) => ()
}
}
if bulk.len()>0 {
println!("Insert into db... {} river(s)",bulk.len());
let result = rivers_coll.bulk_write(bulk, true); // send remaining bulk
println!("Number of rivers inserted: match:{} ins:{} modif:{} del:{} upsert:{}",result.matched_count,result.inserted_count,result.modified_count,result.deleted_count,result.upserted_count);
match result.bulk_write_exception {
Some(exception) => { | if exception.message.len()>0 {
println!("ERROR(s): {}",exception.message);
}
}
None => ()
}
}
}
fn main() {
// Connect to MongoDB client and select collection
let client = Client::connect("localhost", 27017).ok().expect("Failed to initialize client.");
let rivers_coll = client.db("wwsupdb").collection("osm");
match rivers_coll.drop() {
Ok(_) => println!("Collection droped"),
Err(_) => panic!()
}
for arg in args().skip(1) {
process_file(&arg, &rivers_coll);
}
} | random_line_split |
|
main.rs | extern crate osmpbfreader;
extern crate serde;
#[macro_use] extern crate serde_derive;
#[macro_use(bson, doc)] extern crate bson;
extern crate mongodb;
use std::collections::HashMap;
use std::env::args;
use mongodb::coll::options::WriteModel;
use mongodb::{Client, ThreadedClient};
use mongodb::db::ThreadedDatabase;
use std::mem;
// Structure holding a river, used for bson serialization
#[derive(Serialize, Deserialize, Debug)]
pub struct River {
pub paths: Vec<(f64,f64)>
}
fn process_file(filename : &String, rivers_coll : &mongodb::coll::Collection) {
let r = std::fs::File::open(&std::path::Path::new(filename)).unwrap();
println!("Parsing {:?}...",filename);
let mut pbf = osmpbfreader::OsmPbfReader::new(r);
let objs = pbf.get_objs_and_deps(|obj| {
obj.is_way() && obj.tags().contains_key("waterway") && ( obj.tags().get("waterway").unwrap()=="river" || obj.tags().get("waterway").unwrap()=="stream" || obj.tags().get("waterway").unwrap()=="canal" ) && obj.tags().contains_key("name") && obj.tags().get("name").unwrap().len()>0
}).unwrap();
println!("Objs got");
let mut nodes = HashMap::new();
let mut bulk = Vec::new();
for (_id, obj) in &objs {
match obj {
osmpbfreader::OsmObj::Node(n) => {
nodes.insert( n.id, (n.lat(),n.lon()) );
}
osmpbfreader::OsmObj::Way(w) => {
let mut path = Vec::new();
for node_id in &w.nodes {
match nodes.get(&node_id) {
Some(node) => path.push((node.0,node.1)),
None => { panic!(); }
}
}
// Insert into MongoDB
let river = River {
paths: path
};
let names : Vec<String> = obj.tags().iter().filter(|tag| tag.0.starts_with("name:")).map(|tag| tag.1.clone()).collect();
let serialized_river = bson::to_bson(&river); // Serialize
match serialized_river {
Ok(sr) => {
if let bson::Bson::Document(docu) = sr { // Documentize
bulk.push(WriteModel::UpdateOne { filter: doc!{"_id": obj.tags().get("name").unwrap().to_string()},
update: doc!{"$push": docu, "$addToSet": {"names": {"$each": bson::to_bson(&names).unwrap()}}},
upsert: Some(true) });
if bulk.len()>100 {
println!("Insert into db... {}",bulk.len());
let mut bulk2 = Vec::new(); // create new empty bulk
mem::swap(&mut bulk, &mut bulk2); // bulk <-> bulk2
let result = rivers_coll.bulk_write(bulk2, true); // send full bulk
println!("Number of rivers inserted: ins:{} match:{} modif:{} del:{} upset:{}",result.inserted_count,result.matched_count,result.modified_count,result.deleted_count,result.upserted_count);
match result.bulk_write_exception {
Some(exception) => {
if exception.message.len()>0 {
println!("ERROR: {}",exception.message);
}
}
None => ()
}
//bulk.clear(); // bulk is now a new empty bulk thanks to swaping, clear is unecessary
} // Compiler will drop bulk2 (the full bulk) at this point
} else {
println!("Error converting the BSON object into a MongoDB document");
}
},
Err(_) => println!("Error serializing the River as a BSON object")
}
},
osmpbfreader::OsmObj::Relation(_) => ()
}
}
if bulk.len()>0 {
println!("Insert into db... {} river(s)",bulk.len());
let result = rivers_coll.bulk_write(bulk, true); // send remaining bulk
println!("Number of rivers inserted: match:{} ins:{} modif:{} del:{} upsert:{}",result.matched_count,result.inserted_count,result.modified_count,result.deleted_count,result.upserted_count);
match result.bulk_write_exception {
Some(exception) => {
if exception.message.len()>0 {
println!("ERROR(s): {}",exception.message);
}
}
None => ()
}
}
}
fn | () {
// Connect to MongoDB client and select collection
let client = Client::connect("localhost", 27017).ok().expect("Failed to initialize client.");
let rivers_coll = client.db("wwsupdb").collection("osm");
match rivers_coll.drop() {
Ok(_) => println!("Collection droped"),
Err(_) => panic!()
}
for arg in args().skip(1) {
process_file(&arg, &rivers_coll);
}
}
| main | identifier_name |
mod.rs | use anyhow::Result;
use pueue_lib::network::message::Message;
use pueue_lib::network::protocol::*;
use pueue_lib::state::State;
mod edit;
mod format_state;
mod local_follow;
mod restart;
mod wait;
pub use edit::edit;
pub use format_state::format_state;
pub use local_follow::local_follow;
pub use restart::restart;
pub use wait::wait;
// This is a helper function for easy retrieval of the current daemon state.
// The current daemon state is often needed in more complex commands.
pub async fn get_state(stream: &mut GenericStream) -> Result<State> | {
// Create the message payload and send it to the daemon.
send_message(Message::Status, stream).await?;
// Check if we can receive the response from the daemon
let message = receive_message(stream).await?;
match message {
Message::StatusResponse(state) => Ok(*state),
_ => unreachable!(),
}
} | identifier_body |
|
mod.rs | use pueue_lib::state::State;
mod edit;
mod format_state;
mod local_follow;
mod restart;
mod wait;
pub use edit::edit;
pub use format_state::format_state;
pub use local_follow::local_follow;
pub use restart::restart;
pub use wait::wait;
// This is a helper function for easy retrieval of the current daemon state.
// The current daemon state is often needed in more complex commands.
pub async fn get_state(stream: &mut GenericStream) -> Result<State> {
// Create the message payload and send it to the daemon.
send_message(Message::Status, stream).await?;
// Check if we can receive the response from the daemon
let message = receive_message(stream).await?;
match message {
Message::StatusResponse(state) => Ok(*state),
_ => unreachable!(),
}
} | use anyhow::Result;
use pueue_lib::network::message::Message;
use pueue_lib::network::protocol::*; | random_line_split |
|
mod.rs | use anyhow::Result;
use pueue_lib::network::message::Message;
use pueue_lib::network::protocol::*;
use pueue_lib::state::State;
mod edit;
mod format_state;
mod local_follow;
mod restart;
mod wait;
pub use edit::edit;
pub use format_state::format_state;
pub use local_follow::local_follow;
pub use restart::restart;
pub use wait::wait;
// This is a helper function for easy retrieval of the current daemon state.
// The current daemon state is often needed in more complex commands.
pub async fn | (stream: &mut GenericStream) -> Result<State> {
// Create the message payload and send it to the daemon.
send_message(Message::Status, stream).await?;
// Check if we can receive the response from the daemon
let message = receive_message(stream).await?;
match message {
Message::StatusResponse(state) => Ok(*state),
_ => unreachable!(),
}
}
| get_state | identifier_name |
du.rs | #![crate_name = "du"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc, unicode)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Derek Chiang <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#![allow(non_snake_case)]
extern crate getopts;
extern crate libc;
extern crate time;
use std::old_io::{stderr, fs, FileStat, FileType};
use std::num::Float;
use std::option::Option;
use std::old_path::Path;
use std::sync::{Arc, Future};
use time::Timespec;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "du";
static VERSION: &'static str = "1.0.0";
struct Options {
all: bool,
program_name: String,
max_depth: Option<usize>,
total: bool,
separate_dirs: bool,
}
struct Stat {
path: Path,
fstat: FileStat,
}
// this takes `my_stat` to avoid having to stat files multiple times.
fn du(path: &Path, mut my_stat: Stat,
options: Arc<Options>, depth: usize) -> Vec<Arc<Stat>> | my_stat.fstat.size += this_stat.fstat.size;
my_stat.fstat.unstable.blocks += this_stat.fstat.unstable.blocks;
if options.all {
stats.push(Arc::new(this_stat))
}
}
}
}
for future in futures.iter_mut() {
for stat in future.get().into_iter().rev() {
if !options.separate_dirs && stat.path.dir_path() == my_stat.path {
my_stat.fstat.size += stat.fstat.size;
my_stat.fstat.unstable.blocks += stat.fstat.unstable.blocks;
}
if options.max_depth == None || depth < options.max_depth.unwrap() {
stats.push(stat.clone());
}
}
}
stats.push(Arc::new(my_stat));
stats
}
pub fn uumain(args: Vec<String>) -> i32 {
let program = args[0].as_slice();
let opts = [
// In task
getopts::optflag("a", "all", " write counts for all files, not just directories"),
// In main
getopts::optflag("", "apparent-size", "print apparent sizes, rather than disk usage;
although the apparent size is usually smaller, it may be larger due to holes
in ('sparse') files, internal fragmentation, indirect blocks, and the like"),
// In main
getopts::optopt("B", "block-size", "scale sizes by SIZE before printing them.
E.g., '-BM' prints sizes in units of 1,048,576 bytes. See SIZE format below.",
"SIZE"),
// In main
getopts::optflag("b", "bytes", "equivalent to '--apparent-size --block-size=1'"),
// In main
getopts::optflag("c", "total", "produce a grand total"),
// In task
// getopts::optflag("D", "dereference-args", "dereference only symlinks that are listed
// on the command line"),
// In main
// getopts::optopt("", "files0-from", "summarize disk usage of the NUL-terminated file
// names specified in file F;
// If F is - then read names from standard input", "F"),
// // In task
// getopts::optflag("H", "", "equivalent to --dereference-args (-D)"),
// In main
getopts::optflag("h", "human-readable", "print sizes in human readable format (e.g., 1K 234M 2G)"),
// In main
getopts::optflag("", "si", "like -h, but use powers of 1000 not 1024"),
// In main
getopts::optflag("k", "", "like --block-size=1K"),
// In task
getopts::optflag("l", "count-links", "count sizes many times if hard linked"),
// // In main
getopts::optflag("m", "", "like --block-size=1M"),
// // In task
// getopts::optflag("L", "dereference", "dereference all symbolic links"),
// // In task
// getopts::optflag("P", "no-dereference", "don't follow any symbolic links (this is the default)"),
// // In main
getopts::optflag("0", "null", "end each output line with 0 byte rather than newline"),
// In main
getopts::optflag("S", "separate-dirs", "do not include size of subdirectories"),
// In main
getopts::optflag("s", "summarize", "display only a total for each argument"),
// // In task
// getopts::optflag("x", "one-file-system", "skip directories on different file systems"),
// // In task
// getopts::optopt("X", "exclude-from", "exclude files that match any pattern in FILE", "FILE"),
// // In task
// getopts::optopt("", "exclude", "exclude files that match PATTERN", "PATTERN"),
// In main
getopts::optopt("d", "max-depth", "print the total for a directory (or file, with --all)
only if it is N or fewer levels below the command
line argument; --max-depth=0 is the same as --summarize", "N"),
// In main
getopts::optflagopt("", "time", "show time of the last modification of any file in the
directory, or any of its subdirectories. If WORD is given, show time as WORD instead of modification time:
atime, access, use, ctime or status", "WORD"),
// In main
getopts::optopt("", "time-style", "show times using style STYLE:
full-iso, long-iso, iso, +FORMAT FORMAT is interpreted like 'date'", "STYLE"),
getopts::optflag("", "help", "display this help and exit"),
getopts::optflag("V", "version", "output version information and exit"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => m,
Err(f) => {
show_error!("Invalid options\n{}", f);
return 1;
}
};
if matches.opt_present("help") {
println!("{program} {version} - estimate file space usage
Usage
{program} [OPTION]... [FILE]...
{program} [OPTION]... --files0-from=F
{usage}
Display values are in units of the first available SIZE from
--block-size, and the DU_BLOCK_SIZE, BLOCK_SIZE and BLOCKSIZE environ‐
ment variables. Otherwise, units default to 1024 bytes (or 512 if
POSIXLY_CORRECT is set).
SIZE is an integer and optional unit (example: 10M is 10*1024*1024).
Units are K, M, G, T, P, E, Z, Y (powers of 1024) or KB, MB,... (pow‐
ers of 1000).",
program = program,
version = VERSION,
usage = getopts::usage("Summarize disk usage of each FILE, recursively for directories.", &opts));
return 0;
} else if matches.opt_present("version") {
println!("{} version: {}", program, VERSION);
return 0;
}
let summarize = matches.opt_present("summarize");
let max_depth_str = matches.opt_str("max-depth");
let max_depth = max_depth_str.as_ref().and_then(|s| s.parse::<usize>().ok());
match (max_depth_str, max_depth) {
(Some(ref s), _) if summarize => {
show_error!("summarizing conflicts with --max-depth={}", *s);
return 1;
}
(Some(ref s), None) => {
show_error!("invalid maximum depth '{}'", *s);
return 1;
}
(Some(_), Some(_)) | (None, _) => { /* valid */ }
}
let options = Options {
all: matches.opt_present("all"),
program_name: program.to_string(),
max_depth: max_depth,
total: matches.opt_present("total"),
separate_dirs: matches.opt_present("S"),
};
let strs = if matches.free.is_empty() {vec!("./".to_string())} else {matches.free.clone()};
let options_arc = Arc::new(options);
let MB = match matches.opt_present("si") {
true => 1000 * 1000,
false => 1024 * 1024,
};
let KB = match matches.opt_present("si") {
true => 1000,
false => 1024,
};
let block_size = match matches.opt_str("block-size") {
Some(s) => {
let mut found_number = false;
let mut found_letter = false;
let mut numbers = String::new();
let mut letters = String::new();
for c in s.as_slice().chars() {
if found_letter && c.is_digit(10) || !found_number && !c.is_digit(10) {
show_error!("invalid --block-size argument '{}'", s);
return 1;
} else if c.is_digit(10) {
found_number = true;
numbers.push(c);
} else if c.is_alphabetic() {
found_letter = true;
letters.push(c);
}
}
let number = numbers.parse::<usize>().unwrap();
let multiple = match letters.as_slice() {
"K" => 1024, "M" => 1024 * 1024, "G" => 1024 * 1024 * 1024,
"T" => 1024 * 1024 * 1024 * 1024, "P" => 1024 * 1024 * 1024 * 1024 * 1024,
"E" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Z" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Y" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"KB" => 1000, "MB" => 1000 * 1000, "GB" => 1000 * 1000 * 1000,
"TB" => 1000 * 1000 * 1000 * 1000, "PB" => 1000 * 1000 * 1000 * 1000 * 1000,
"EB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"ZB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"YB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
_ => {
show_error!("invalid --block-size argument '{}'", s);
return 1;
}
};
number * multiple
},
None => 1024
};
let convert_size = |size: u64| -> String {
if matches.opt_present("human-readable") || matches.opt_present("si") {
if size >= MB {
format!("{:.1}M", (size as f64) / (MB as f64))
} else if size >= KB {
format!("{:.1}K", (size as f64) / (KB as f64))
} else {
format!("{}B", size)
}
} else if matches.opt_present("k") {
format!("{}", ((size as f64) / (KB as f64)).ceil())
} else if matches.opt_present("m") {
format!("{}", ((size as f64) / (MB as f64)).ceil())
} else {
format!("{}", ((size as f64) / (block_size as f64)).ceil())
}
};
let time_format_str = match matches.opt_str("time-style") {
Some(s) => {
match s.as_slice() {
"full-iso" => "%Y-%m-%d %H:%M:%S.%f %z",
"long-iso" => "%Y-%m-%d %H:%M",
"iso" => "%Y-%m-%d",
_ => {
show_error!("invalid argument '{}' for 'time style'
Valid arguments are:
- 'full-iso'
- 'long-iso'
- 'iso'
Try '{} --help' for more information.", s, program);
return 1;
}
}
},
None => "%Y-%m-%d %H:%M"
};
let line_separator = match matches.opt_present("0") {
true => "\0",
false => "\n",
};
let mut grand_total = 0;
for path_str in strs.into_iter() {
let path = Path::new(path_str);
let stat = safe_unwrap!(fs::lstat(&path));
let iter = du(&path, Stat{path: path.clone(), fstat: stat}, options_arc.clone(), 0).into_iter();
let (_, len) = iter.size_hint();
let len = len.unwrap();
for (index, stat) in iter.enumerate() {
let size = match matches.opt_present("apparent-size") {
true => stat.fstat.unstable.nlink * stat.fstat.size,
// C's stat is such that each block is assume to be 512 bytes
// See: http://linux.die.net/man/2/stat
false => stat.fstat.unstable.blocks * 512,
};
if matches.opt_present("time") {
let tm = {
let (secs, nsecs) = {
let time = match matches.opt_str("time") {
Some(s) => match s.as_slice() {
"accessed" => stat.fstat.accessed,
"created" => stat.fstat.created,
"modified" => stat.fstat.modified,
_ => {
show_error!("invalid argument 'modified' for '--time'
Valid arguments are:
- 'accessed', 'created', 'modified'
Try '{} --help' for more information.", program);
return 1;
}
},
None => stat.fstat.modified
};
((time / 1000) as i64, (time % 1000 * 1000000) as i32)
};
time::at(Timespec::new(secs, nsecs))
};
if !summarize || (summarize && index == len-1) {
let time_str = tm.strftime(time_format_str).unwrap();
print!("{}\t{}\t{}{}", convert_size(size), time_str, stat.path.display(), line_separator);
}
} else {
if !summarize || (summarize && index == len-1) {
print!("{}\t{}{}", convert_size(size), stat.path.display(), line_separator);
}
}
if options_arc.total && index == (len - 1) {
// The last element will be the total size of the the path under
// path_str. We add it to the grand total.
grand_total += size;
}
}
}
if options_arc.total {
print!("{}\ttotal", convert_size(grand_total));
print!("{}", line_separator);
}
0
}
| {
let mut stats = vec!();
let mut futures = vec!();
if my_stat.fstat.kind == FileType::Directory {
let read = match fs::readdir(path) {
Ok(read) => read,
Err(e) => {
safe_writeln!(&mut stderr(), "{}: cannot read directory ‘{}‘: {}",
options.program_name, path.display(), e);
return vec!(Arc::new(my_stat))
}
};
for f in read.into_iter() {
let this_stat = Stat{path: f.clone(), fstat: safe_unwrap!(fs::lstat(&f))};
if this_stat.fstat.kind == FileType::Directory {
let oa_clone = options.clone();
futures.push(Future::spawn(move || { du(&f, this_stat, oa_clone, depth + 1) }))
} else { | identifier_body |
du.rs | #![crate_name = "du"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc, unicode)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Derek Chiang <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#![allow(non_snake_case)]
extern crate getopts;
extern crate libc;
extern crate time;
use std::old_io::{stderr, fs, FileStat, FileType};
use std::num::Float;
use std::option::Option;
use std::old_path::Path;
use std::sync::{Arc, Future};
use time::Timespec;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "du";
static VERSION: &'static str = "1.0.0";
struct Options {
all: bool,
program_name: String,
max_depth: Option<usize>,
total: bool,
separate_dirs: bool,
}
struct Stat {
path: Path,
fstat: FileStat,
}
// this takes `my_stat` to avoid having to stat files multiple times.
fn du(path: &Path, mut my_stat: Stat,
options: Arc<Options>, depth: usize) -> Vec<Arc<Stat>> {
let mut stats = vec!();
let mut futures = vec!();
if my_stat.fstat.kind == FileType::Directory {
let read = match fs::readdir(path) {
Ok(read) => read,
Err(e) => {
safe_writeln!(&mut stderr(), "{}: cannot read directory ‘{}‘: {}",
options.program_name, path.display(), e);
return vec!(Arc::new(my_stat))
}
};
for f in read.into_iter() {
let this_stat = Stat{path: f.clone(), fstat: safe_unwrap!(fs::lstat(&f))};
if this_stat.fstat.kind == FileType::Directory {
let oa_clone = options.clone();
futures.push(Future::spawn(move || { du(&f, this_stat, oa_clone, depth + 1) }))
} else {
my_stat.fstat.size += this_stat.fstat.size;
my_stat.fstat.unstable.blocks += this_stat.fstat.unstable.blocks;
if options.all {
stats.push(Arc::new(this_stat))
}
}
}
}
for future in futures.iter_mut() {
for stat in future.get().into_iter().rev() {
if !options.separate_dirs && stat.path.dir_path() == my_stat.path {
my_stat.fstat.size += stat.fstat.size;
my_stat.fstat.unstable.blocks += stat.fstat.unstable.blocks;
}
if options.max_depth == None || depth < options.max_depth.unwrap() {
stats.push(stat.clone());
}
}
}
stats.push(Arc::new(my_stat));
stats
}
pub fn uumain(args: Vec<String>) -> i32 {
let program = args[0].as_slice();
let opts = [
// In task
getopts::optflag("a", "all", " write counts for all files, not just directories"),
// In main
getopts::optflag("", "apparent-size", "print apparent sizes, rather than disk usage;
although the apparent size is usually smaller, it may be larger due to holes
in ('sparse') files, internal fragmentation, indirect blocks, and the like"),
// In main
getopts::optopt("B", "block-size", "scale sizes by SIZE before printing them.
E.g., '-BM' prints sizes in units of 1,048,576 bytes. See SIZE format below.",
"SIZE"),
// In main
getopts::optflag("b", "bytes", "equivalent to '--apparent-size --block-size=1'"),
// In main
getopts::optflag("c", "total", "produce a grand total"),
// In task
// getopts::optflag("D", "dereference-args", "dereference only symlinks that are listed
// on the command line"),
// In main
// getopts::optopt("", "files0-from", "summarize disk usage of the NUL-terminated file
// names specified in file F;
// If F is - then read names from standard input", "F"),
// // In task
// getopts::optflag("H", "", "equivalent to --dereference-args (-D)"),
// In main
getopts::optflag("h", "human-readable", "print sizes in human readable format (e.g., 1K 234M 2G)"),
// In main
getopts::optflag("", "si", "like -h, but use powers of 1000 not 1024"),
// In main
getopts::optflag("k", "", "like --block-size=1K"),
// In task
getopts::optflag("l", "count-links", "count sizes many times if hard linked"),
// // In main
getopts::optflag("m", "", "like --block-size=1M"),
// // In task
// getopts::optflag("L", "dereference", "dereference all symbolic links"),
// // In task
// getopts::optflag("P", "no-dereference", "don't follow any symbolic links (this is the default)"),
// // In main
getopts::optflag("0", "null", "end each output line with 0 byte rather than newline"),
// In main
getopts::optflag("S", "separate-dirs", "do not include size of subdirectories"),
// In main
getopts::optflag("s", "summarize", "display only a total for each argument"),
// // In task
// getopts::optflag("x", "one-file-system", "skip directories on different file systems"),
// // In task
// getopts::optopt("X", "exclude-from", "exclude files that match any pattern in FILE", "FILE"),
// // In task
// getopts::optopt("", "exclude", "exclude files that match PATTERN", "PATTERN"),
// In main
getopts::optopt("d", "max-depth", "print the total for a directory (or file, with --all)
only if it is N or fewer levels below the command
line argument; --max-depth=0 is the same as --summarize", "N"),
// In main
getopts::optflagopt("", "time", "show time of the last modification of any file in the
directory, or any of its subdirectories. If WORD is given, show time as WORD instead of modification time:
atime, access, use, ctime or status", "WORD"),
// In main
getopts::optopt("", "time-style", "show times using style STYLE:
full-iso, long-iso, iso, +FORMAT FORMAT is interpreted like 'date'", "STYLE"),
getopts::optflag("", "help", "display this help and exit"),
getopts::optflag("V", "version", "output version information and exit"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => m,
Err(f) => {
show_error!("Invalid options\n{}", f);
return 1;
}
};
if matches.opt_present("help") {
println!("{program} {version} - estimate file space usage
Usage
{program} [OPTION]... [FILE]...
{program} [OPTION]... --files0-from=F
{usage}
Display values are in units of the first available SIZE from
--block-size, and the DU_BLOCK_SIZE, BLOCK_SIZE and BLOCKSIZE environ‐
ment variables. Otherwise, units default to 1024 bytes (or 512 if
POSIXLY_CORRECT is set).
SIZE is an integer and optional unit (example: 10M is 10*1024*1024).
Units are K, M, G, T, P, E, Z, Y (powers of 1024) or KB, MB,... (pow‐
ers of 1000).",
program = program,
version = VERSION,
usage = getopts::usage("Summarize disk usage of each FILE, recursively for directories.", &opts));
return 0;
} else if matches.opt_present("version") {
println!("{} version: {}", program, VERSION);
return 0;
}
let summarize = matches.opt_present("summarize");
let max_depth_str = matches.opt_str("max-depth");
let max_depth = max_depth_str.as_ref().and_then(|s| s.parse::<usize>().ok());
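// --max-depth must parse as a non-negative integer and cannot be combined with --summarize; both error cases are rejected below.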
match (max_depth_str, max_depth) {
(Some(ref s), _) if summarize => {
show_error!("summarizing conflicts with --max-depth={}", *s);
return 1;
}
(Some(ref s), None) => {
show_error!("invalid maximum depth '{}'", *s);
return 1;
}
(Some(_), Some(_)) | (None, _) => { /* valid */ }
}
let options = Options {
all: matches.opt_present("all"),
program_name: program.to_string(),
max_depth: max_depth,
total: matches.opt_present("total"),
separate_dirs: matches.opt_present("S"),
};
let strs = if matches.free.is_empty() {vec!("./".to_string())} else {matches.free.clone()};
let options_arc = Arc::new(options);
let MB = match matches.opt_present("si") {
true => 1000 * 1000,
false => 1024 * 1024,
};
let KB = match matches.opt_present("si") {
true => 1000,
false => 1024,
};
let block_size = match matches.opt_str("block-size") {
Some(s) => {
let mut found_number = false;
let mut found_letter = false; | let mut numbers = String::new();
let mut letters = String::new();
for c in s.as_slice().chars() {
if found_letter && c.is_digit(10) || !found_number && !c.is_digit(10) {
show_error!("invalid --block-size argument '{}'", s);
return 1;
} else if c.is_digit(10) {
found_number = true;
numbers.push(c);
} else if c.is_alphabetic() {
found_letter = true;
letters.push(c);
}
}
let number = numbers.parse::<usize>().unwrap();
let multiple = match letters.as_slice() {
"K" => 1024, "M" => 1024 * 1024, "G" => 1024 * 1024 * 1024,
"T" => 1024 * 1024 * 1024 * 1024, "P" => 1024 * 1024 * 1024 * 1024 * 1024,
"E" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Z" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Y" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"KB" => 1000, "MB" => 1000 * 1000, "GB" => 1000 * 1000 * 1000,
"TB" => 1000 * 1000 * 1000 * 1000, "PB" => 1000 * 1000 * 1000 * 1000 * 1000,
"EB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"ZB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"YB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
_ => {
show_error!("invalid --block-size argument '{}'", s);
return 1;
}
};
number * multiple
},
None => 1024
};
let convert_size = |size: u64| -> String {
if matches.opt_present("human-readable") || matches.opt_present("si") {
if size >= MB {
format!("{:.1}M", (size as f64) / (MB as f64))
} else if size >= KB {
format!("{:.1}K", (size as f64) / (KB as f64))
} else {
format!("{}B", size)
}
} else if matches.opt_present("k") {
format!("{}", ((size as f64) / (KB as f64)).ceil())
} else if matches.opt_present("m") {
format!("{}", ((size as f64) / (MB as f64)).ceil())
} else {
format!("{}", ((size as f64) / (block_size as f64)).ceil())
}
};
let time_format_str = match matches.opt_str("time-style") {
Some(s) => {
match s.as_slice() {
"full-iso" => "%Y-%m-%d %H:%M:%S.%f %z",
"long-iso" => "%Y-%m-%d %H:%M",
"iso" => "%Y-%m-%d",
_ => {
show_error!("invalid argument '{}' for 'time style'
Valid arguments are:
- 'full-iso'
- 'long-iso'
- 'iso'
Try '{} --help' for more information.", s, program);
return 1;
}
}
},
None => "%Y-%m-%d %H:%M"
};
let line_separator = match matches.opt_present("0") {
true => "\0",
false => "\n",
};
let mut grand_total = 0;
for path_str in strs.into_iter() {
let path = Path::new(path_str);
let stat = safe_unwrap!(fs::lstat(&path));
let iter = du(&path, Stat{path: path.clone(), fstat: stat}, options_arc.clone(), 0).into_iter();
let (_, len) = iter.size_hint();
let len = len.unwrap();
for (index, stat) in iter.enumerate() {
let size = match matches.opt_present("apparent-size") {
true => stat.fstat.unstable.nlink * stat.fstat.size,
// C's stat is such that each block is assumed to be 512 bytes
// See: http://linux.die.net/man/2/stat
false => stat.fstat.unstable.blocks * 512,
};
if matches.opt_present("time") {
let tm = {
let (secs, nsecs) = {
let time = match matches.opt_str("time") {
Some(s) => match s.as_slice() {
"accessed" => stat.fstat.accessed,
"created" => stat.fstat.created,
"modified" => stat.fstat.modified,
_ => {
show_error!("invalid argument'modified' for '--time'
Valid arguments are:
- 'accessed', 'created','modified'
Try '{} --help' for more information.", program);
return 1;
}
},
None => stat.fstat.modified
};
((time / 1000) as i64, (time % 1000 * 1000000) as i32)
};
time::at(Timespec::new(secs, nsecs))
};
if !summarize || (summarize && index == len - 1) {
let time_str = tm.strftime(time_format_str).unwrap();
print!("{}\t{}\t{}{}", convert_size(size), time_str, stat.path.display(), line_separator);
}
} else {
if !summarize || (summarize && index == len - 1) {
print!("{}\t{}{}", convert_size(size), stat.path.display(), line_separator);
}
}
if options_arc.total && index == (len - 1) {
// The last element will be the total size of the path under
// path_str. We add it to the grand total.
grand_total += size;
}
}
}
if options_arc.total {
print!("{}\ttotal", convert_size(grand_total));
print!("{}", line_separator);
}
0
} | random_line_split |
|
du.rs | #![crate_name = "du"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc, unicode)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Derek Chiang <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#![allow(non_snake_case)]
extern crate getopts;
extern crate libc;
extern crate time;
use std::old_io::{stderr, fs, FileStat, FileType};
use std::num::Float;
use std::option::Option;
use std::old_path::Path;
use std::sync::{Arc, Future};
use time::Timespec;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "du";
static VERSION: &'static str = "1.0.0";
struct Options {
all: bool,
program_name: String,
max_depth: Option<usize>,
total: bool,
separate_dirs: bool,
}
struct Stat {
path: Path,
fstat: FileStat,
}
// this takes `my_stat` to avoid having to stat files multiple times.
fn du(path: &Path, mut my_stat: Stat,
options: Arc<Options>, depth: usize) -> Vec<Arc<Stat>> {
let mut stats = vec!();
let mut futures = vec!();
if my_stat.fstat.kind == FileType::Directory | }
}
}
}
for future in futures.iter_mut() {
for stat in future.get().into_iter().rev() {
if !options.separate_dirs && stat.path.dir_path() == my_stat.path {
my_stat.fstat.size += stat.fstat.size;
my_stat.fstat.unstable.blocks += stat.fstat.unstable.blocks;
}
if options.max_depth == None || depth < options.max_depth.unwrap() {
stats.push(stat.clone());
}
}
}
stats.push(Arc::new(my_stat));
stats
}
pub fn uumain(args: Vec<String>) -> i32 {
let program = args[0].as_slice();
let opts = [
// In task
getopts::optflag("a", "all", " write counts for all files, not just directories"),
// In main
getopts::optflag("", "apparent-size", "print apparent sizes, rather than disk usage;
although the apparent size is usually smaller, it may be larger due to holes
in ('sparse') files, internal fragmentation, indirect blocks, and the like"),
// In main
getopts::optopt("B", "block-size", "scale sizes by SIZE before printing them.
E.g., '-BM' prints sizes in units of 1,048,576 bytes. See SIZE format below.",
"SIZE"),
// In main
getopts::optflag("b", "bytes", "equivalent to '--apparent-size --block-size=1'"),
// In main
getopts::optflag("c", "total", "produce a grand total"),
// In task
// getopts::optflag("D", "dereference-args", "dereference only symlinks that are listed
// on the command line"),
// In main
// getopts::optopt("", "files0-from", "summarize disk usage of the NUL-terminated file
// names specified in file F;
// If F is - then read names from standard input", "F"),
// // In task
// getopts::optflag("H", "", "equivalent to --dereference-args (-D)"),
// In main
getopts::optflag("h", "human-readable", "print sizes in human readable format (e.g., 1K 234M 2G)"),
// In main
getopts::optflag("", "si", "like -h, but use powers of 1000 not 1024"),
// In main
getopts::optflag("k", "", "like --block-size=1K"),
// In task
getopts::optflag("l", "count-links", "count sizes many times if hard linked"),
// // In main
getopts::optflag("m", "", "like --block-size=1M"),
// // In task
// getopts::optflag("L", "dereference", "dereference all symbolic links"),
// // In task
// getopts::optflag("P", "no-dereference", "don't follow any symbolic links (this is the default)"),
// // In main
getopts::optflag("0", "null", "end each output line with 0 byte rather than newline"),
// In main
getopts::optflag("S", "separate-dirs", "do not include size of subdirectories"),
// In main
getopts::optflag("s", "summarize", "display only a total for each argument"),
// // In task
// getopts::optflag("x", "one-file-system", "skip directories on different file systems"),
// // In task
// getopts::optopt("X", "exclude-from", "exclude files that match any pattern in FILE", "FILE"),
// // In task
// getopts::optopt("", "exclude", "exclude files that match PATTERN", "PATTERN"),
// In main
getopts::optopt("d", "max-depth", "print the total for a directory (or file, with --all)
only if it is N or fewer levels below the command
line argument; --max-depth=0 is the same as --summarize", "N"),
// In main
getopts::optflagopt("", "time", "show time of the last modification of any file in the
directory, or any of its subdirectories. If WORD is given, show time as WORD instead of modification time:
atime, access, use, ctime or status", "WORD"),
// In main
getopts::optopt("", "time-style", "show times using style STYLE:
full-iso, long-iso, iso, +FORMAT FORMAT is interpreted like 'date'", "STYLE"),
getopts::optflag("", "help", "display this help and exit"),
getopts::optflag("V", "version", "output version information and exit"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => m,
Err(f) => {
show_error!("Invalid options\n{}", f);
return 1;
}
};
if matches.opt_present("help") {
println!("{program} {version} - estimate file space usage
Usage
{program} [OPTION]... [FILE]...
{program} [OPTION]... --files0-from=F
{usage}
Display values are in units of the first available SIZE from
--block-size, and the DU_BLOCK_SIZE, BLOCK_SIZE and BLOCKSIZE environ‐
ment variables. Otherwise, units default to 1024 bytes (or 512 if
POSIXLY_CORRECT is set).
SIZE is an integer and optional unit (example: 10M is 10*1024*1024).
Units are K, M, G, T, P, E, Z, Y (powers of 1024) or KB, MB,... (pow‐
ers of 1000).",
program = program,
version = VERSION,
usage = getopts::usage("Summarize disk usage of each FILE, recursively for directories.", &opts));
return 0;
} else if matches.opt_present("version") {
println!("{} version: {}", program, VERSION);
return 0;
}
let summarize = matches.opt_present("summarize");
let max_depth_str = matches.opt_str("max-depth");
let max_depth = max_depth_str.as_ref().and_then(|s| s.parse::<usize>().ok());
match (max_depth_str, max_depth) {
(Some(ref s), _) if summarize => {
show_error!("summarizing conflicts with --max-depth={}", *s);
return 1;
}
(Some(ref s), None) => {
show_error!("invalid maximum depth '{}'", *s);
return 1;
}
(Some(_), Some(_)) | (None, _) => { /* valid */ }
}
let options = Options {
all: matches.opt_present("all"),
program_name: program.to_string(),
max_depth: max_depth,
total: matches.opt_present("total"),
separate_dirs: matches.opt_present("S"),
};
let strs = if matches.free.is_empty() {vec!("./".to_string())} else {matches.free.clone()};
let options_arc = Arc::new(options);
let MB = match matches.opt_present("si") {
true => 1000 * 1000,
false => 1024 * 1024,
};
let KB = match matches.opt_present("si") {
true => 1000,
false => 1024,
};
let block_size = match matches.opt_str("block-size") {
Some(s) => {
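// Split the argument into a numeric part and a unit suffix, e.g. "10M" becomes 10 * 1024 * 1024.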
let mut found_number = false;
let mut found_letter = false;
let mut numbers = String::new();
let mut letters = String::new();
for c in s.as_slice().chars() {
if found_letter && c.is_digit(10) || !found_number && !c.is_digit(10) {
show_error!("invalid --block-size argument '{}'", s);
return 1;
} else if c.is_digit(10) {
found_number = true;
numbers.push(c);
} else if c.is_alphabetic() {
found_letter = true;
letters.push(c);
}
}
let number = numbers.parse::<usize>().unwrap();
let multiple = match letters.as_slice() {
"K" => 1024, "M" => 1024 * 1024, "G" => 1024 * 1024 * 1024,
"T" => 1024 * 1024 * 1024 * 1024, "P" => 1024 * 1024 * 1024 * 1024 * 1024,
"E" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Z" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Y" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"KB" => 1000, "MB" => 1000 * 1000, "GB" => 1000 * 1000 * 1000,
"TB" => 1000 * 1000 * 1000 * 1000, "PB" => 1000 * 1000 * 1000 * 1000 * 1000,
"EB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"ZB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"YB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
_ => {
show_error!("invalid --block-size argument '{}'", s);
return 1;
}
};
number * multiple
},
None => 1024
};
let convert_size = |size: u64| -> String {
if matches.opt_present("human-readable") || matches.opt_present("si") {
if size >= MB {
format!("{:.1}M", (size as f64) / (MB as f64))
} else if size >= KB {
format!("{:.1}K", (size as f64) / (KB as f64))
} else {
format!("{}B", size)
}
} else if matches.opt_present("k") {
format!("{}", ((size as f64) / (KB as f64)).ceil())
} else if matches.opt_present("m") {
format!("{}", ((size as f64) / (MB as f64)).ceil())
} else {
format!("{}", ((size as f64) / (block_size as f64)).ceil())
}
};
let time_format_str = match matches.opt_str("time-style") {
Some(s) => {
match s.as_slice() {
"full-iso" => "%Y-%m-%d %H:%M:%S.%f %z",
"long-iso" => "%Y-%m-%d %H:%M",
"iso" => "%Y-%m-%d",
_ => {
show_error!("invalid argument '{}' for 'time style'
Valid arguments are:
- 'full-iso'
- 'long-iso'
- 'iso'
Try '{} --help' for more information.", s, program);
return 1;
}
}
},
None => "%Y-%m-%d %H:%M"
};
let line_separator = match matches.opt_present("0") {
true => "\0",
false => "\n",
};
let mut grand_total = 0;
for path_str in strs.into_iter() {
let path = Path::new(path_str);
let stat = safe_unwrap!(fs::lstat(&path));
let iter = du(&path, Stat{path: path.clone(), fstat: stat}, options_arc.clone(), 0).into_iter();
let (_, len) = iter.size_hint();
let len = len.unwrap();
for (index, stat) in iter.enumerate() {
let size = match matches.opt_present("apparent-size") {
true => stat.fstat.unstable.nlink * stat.fstat.size,
// C's stat is such that each block is assumed to be 512 bytes
// See: http://linux.die.net/man/2/stat
false => stat.fstat.unstable.blocks * 512,
};
if matches.opt_present("time") {
let tm = {
let (secs, nsecs) = {
let time = match matches.opt_str("time") {
Some(s) => match s.as_slice() {
"accessed" => stat.fstat.accessed,
"created" => stat.fstat.created,
"modified" => stat.fstat.modified,
_ => {
show_error!("invalid argument'modified' for '--time'
Valid arguments are:
- 'accessed', 'created','modified'
Try '{} --help' for more information.", program);
return 1;
}
},
None => stat.fstat.modified
};
((time / 1000) as i64, (time % 1000 * 1000000) as i32)
};
time::at(Timespec::new(secs, nsecs))
};
if !summarize || (summarize && index == len - 1) {
let time_str = tm.strftime(time_format_str).unwrap();
print!("{}\t{}\t{}{}", convert_size(size), time_str, stat.path.display(), line_separator);
}
} else {
if !summarize || (summarize && index == len - 1) {
print!("{}\t{}{}", convert_size(size), stat.path.display(), line_separator);
}
}
if options_arc.total && index == (len - 1) {
// The last element will be the total size of the path under
// path_str. We add it to the grand total.
grand_total += size;
}
}
}
if options_arc.total {
print!("{}\ttotal", convert_size(grand_total));
print!("{}", line_separator);
}
0
}
| {
let read = match fs::readdir(path) {
Ok(read) => read,
Err(e) => {
safe_writeln!(&mut stderr(), "{}: cannot read directory ‘{}‘: {}",
options.program_name, path.display(), e);
return vec!(Arc::new(my_stat))
}
};
for f in read.into_iter() {
let this_stat = Stat{path: f.clone(), fstat: safe_unwrap!(fs::lstat(&f))};
if this_stat.fstat.kind == FileType::Directory {
let oa_clone = options.clone();
futures.push(Future::spawn(move || { du(&f, this_stat, oa_clone, depth + 1) }))
} else {
my_stat.fstat.size += this_stat.fstat.size;
my_stat.fstat.unstable.blocks += this_stat.fstat.unstable.blocks;
if options.all {
stats.push(Arc::new(this_stat)) | conditional_block |
du.rs | #![crate_name = "du"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc, unicode)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Derek Chiang <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#![allow(non_snake_case)]
extern crate getopts;
extern crate libc;
extern crate time;
use std::old_io::{stderr, fs, FileStat, FileType};
use std::num::Float;
use std::option::Option;
use std::old_path::Path;
use std::sync::{Arc, Future};
use time::Timespec;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "du";
static VERSION: &'static str = "1.0.0";
struct Options {
all: bool,
program_name: String,
max_depth: Option<usize>,
total: bool,
separate_dirs: bool,
}
struct Stat {
path: Path,
fstat: FileStat,
}
// this takes `my_stat` to avoid having to stat files multiple times.
fn | (path: &Path, mut my_stat: Stat,
options: Arc<Options>, depth: usize) -> Vec<Arc<Stat>> {
let mut stats = vec!();
let mut futures = vec!();
if my_stat.fstat.kind == FileType::Directory {
let read = match fs::readdir(path) {
Ok(read) => read,
Err(e) => {
safe_writeln!(&mut stderr(), "{}: cannot read directory ‘{}‘: {}",
options.program_name, path.display(), e);
return vec!(Arc::new(my_stat))
}
};
for f in read.into_iter() {
let this_stat = Stat{path: f.clone(), fstat: safe_unwrap!(fs::lstat(&f))};
if this_stat.fstat.kind == FileType::Directory {
let oa_clone = options.clone();
futures.push(Future::spawn(move || { du(&f, this_stat, oa_clone, depth + 1) }))
} else {
my_stat.fstat.size += this_stat.fstat.size;
my_stat.fstat.unstable.blocks += this_stat.fstat.unstable.blocks;
if options.all {
stats.push(Arc::new(this_stat))
}
}
}
}
for future in futures.iter_mut() {
for stat in future.get().into_iter().rev() {
if !options.separate_dirs && stat.path.dir_path() == my_stat.path {
my_stat.fstat.size += stat.fstat.size;
my_stat.fstat.unstable.blocks += stat.fstat.unstable.blocks;
}
if options.max_depth == None || depth < options.max_depth.unwrap() {
stats.push(stat.clone());
}
}
}
stats.push(Arc::new(my_stat));
stats
}
pub fn uumain(args: Vec<String>) -> i32 {
let program = args[0].as_slice();
let opts = [
// In task
getopts::optflag("a", "all", " write counts for all files, not just directories"),
// In main
getopts::optflag("", "apparent-size", "print apparent sizes, rather than disk usage;
although the apparent size is usually smaller, it may be larger due to holes
in ('sparse') files, internal fragmentation, indirect blocks, and the like"),
// In main
getopts::optopt("B", "block-size", "scale sizes by SIZE before printing them.
E.g., '-BM' prints sizes in units of 1,048,576 bytes. See SIZE format below.",
"SIZE"),
// In main
getopts::optflag("b", "bytes", "equivalent to '--apparent-size --block-size=1'"),
// In main
getopts::optflag("c", "total", "produce a grand total"),
// In task
// getopts::optflag("D", "dereference-args", "dereference only symlinks that are listed
// on the command line"),
// In main
// getopts::optopt("", "files0-from", "summarize disk usage of the NUL-terminated file
// names specified in file F;
// If F is - then read names from standard input", "F"),
// // In task
// getopts::optflag("H", "", "equivalent to --dereference-args (-D)"),
// In main
getopts::optflag("h", "human-readable", "print sizes in human readable format (e.g., 1K 234M 2G)"),
// In main
getopts::optflag("", "si", "like -h, but use powers of 1000 not 1024"),
// In main
getopts::optflag("k", "", "like --block-size=1K"),
// In task
getopts::optflag("l", "count-links", "count sizes many times if hard linked"),
// // In main
getopts::optflag("m", "", "like --block-size=1M"),
// // In task
// getopts::optflag("L", "dereference", "dereference all symbolic links"),
// // In task
// getopts::optflag("P", "no-dereference", "don't follow any symbolic links (this is the default)"),
// // In main
getopts::optflag("0", "null", "end each output line with 0 byte rather than newline"),
// In main
getopts::optflag("S", "separate-dirs", "do not include size of subdirectories"),
// In main
getopts::optflag("s", "summarize", "display only a total for each argument"),
// // In task
// getopts::optflag("x", "one-file-system", "skip directories on different file systems"),
// // In task
// getopts::optopt("X", "exclude-from", "exclude files that match any pattern in FILE", "FILE"),
// // In task
// getopts::optopt("", "exclude", "exclude files that match PATTERN", "PATTERN"),
// In main
getopts::optopt("d", "max-depth", "print the total for a directory (or file, with --all)
only if it is N or fewer levels below the command
line argument; --max-depth=0 is the same as --summarize", "N"),
// In main
getopts::optflagopt("", "time", "show time of the last modification of any file in the
directory, or any of its subdirectories. If WORD is given, show time as WORD instead of modification time:
atime, access, use, ctime or status", "WORD"),
// In main
getopts::optopt("", "time-style", "show times using style STYLE:
full-iso, long-iso, iso, +FORMAT FORMAT is interpreted like 'date'", "STYLE"),
getopts::optflag("", "help", "display this help and exit"),
getopts::optflag("V", "version", "output version information and exit"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => m,
Err(f) => {
show_error!("Invalid options\n{}", f);
return 1;
}
};
if matches.opt_present("help") {
println!("{program} {version} - estimate file space usage
Usage
{program} [OPTION]... [FILE]...
{program} [OPTION]... --files0-from=F
{usage}
Display values are in units of the first available SIZE from
--block-size, and the DU_BLOCK_SIZE, BLOCK_SIZE and BLOCKSIZE environ‐
ment variables. Otherwise, units default to 1024 bytes (or 512 if
POSIXLY_CORRECT is set).
SIZE is an integer and optional unit (example: 10M is 10*1024*1024).
Units are K, M, G, T, P, E, Z, Y (powers of 1024) or KB, MB,... (pow‐
ers of 1000).",
program = program,
version = VERSION,
usage = getopts::usage("Summarize disk usage of each FILE, recursively for directories.", &opts));
return 0;
} else if matches.opt_present("version") {
println!("{} version: {}", program, VERSION);
return 0;
}
let summarize = matches.opt_present("summarize");
let max_depth_str = matches.opt_str("max-depth");
let max_depth = max_depth_str.as_ref().and_then(|s| s.parse::<usize>().ok());
match (max_depth_str, max_depth) {
(Some(ref s), _) if summarize => {
show_error!("summarizing conflicts with --max-depth={}", *s);
return 1;
}
(Some(ref s), None) => {
show_error!("invalid maximum depth '{}'", *s);
return 1;
}
(Some(_), Some(_)) | (None, _) => { /* valid */ }
}
let options = Options {
all: matches.opt_present("all"),
program_name: program.to_string(),
max_depth: max_depth,
total: matches.opt_present("total"),
separate_dirs: matches.opt_present("S"),
};
let strs = if matches.free.is_empty() {vec!("./".to_string())} else {matches.free.clone()};
let options_arc = Arc::new(options);
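// Options are wrapped in an Arc so they can be shared with the futures spawned inside du().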
let MB = match matches.opt_present("si") {
true => 1000 * 1000,
false => 1024 * 1024,
};
let KB = match matches.opt_present("si") {
true => 1000,
false => 1024,
};
let block_size = match matches.opt_str("block-size") {
Some(s) => {
let mut found_number = false;
let mut found_letter = false;
let mut numbers = String::new();
let mut letters = String::new();
for c in s.as_slice().chars() {
if found_letter && c.is_digit(10) || !found_number && !c.is_digit(10) {
show_error!("invalid --block-size argument '{}'", s);
return 1;
} else if c.is_digit(10) {
found_number = true;
numbers.push(c);
} else if c.is_alphabetic() {
found_letter = true;
letters.push(c);
}
}
let number = numbers.parse::<usize>().unwrap();
let multiple = match letters.as_slice() {
"K" => 1024, "M" => 1024 * 1024, "G" => 1024 * 1024 * 1024,
"T" => 1024 * 1024 * 1024 * 1024, "P" => 1024 * 1024 * 1024 * 1024 * 1024,
"E" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Z" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"Y" => 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
"KB" => 1000, "MB" => 1000 * 1000, "GB" => 1000 * 1000 * 1000,
"TB" => 1000 * 1000 * 1000 * 1000, "PB" => 1000 * 1000 * 1000 * 1000 * 1000,
"EB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"ZB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
"YB" => 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000 * 1000,
_ => {
show_error!("invalid --block-size argument '{}'", s);
return 1;
}
};
number * multiple
},
None => 1024
};
let convert_size = |size: u64| -> String {
if matches.opt_present("human-readable") || matches.opt_present("si") {
if size >= MB {
format!("{:.1}M", (size as f64) / (MB as f64))
} else if size >= KB {
format!("{:.1}K", (size as f64) / (KB as f64))
} else {
format!("{}B", size)
}
} else if matches.opt_present("k") {
format!("{}", ((size as f64) / (KB as f64)).ceil())
} else if matches.opt_present("m") {
format!("{}", ((size as f64) / (MB as f64)).ceil())
} else {
format!("{}", ((size as f64) / (block_size as f64)).ceil())
}
};
let time_format_str = match matches.opt_str("time-style") {
Some(s) => {
match s.as_slice() {
"full-iso" => "%Y-%m-%d %H:%M:%S.%f %z",
"long-iso" => "%Y-%m-%d %H:%M",
"iso" => "%Y-%m-%d",
_ => {
show_error!("invalid argument '{}' for 'time style'
Valid arguments are:
- 'full-iso'
- 'long-iso'
- 'iso'
Try '{} --help' for more information.", s, program);
return 1;
}
}
},
None => "%Y-%m-%d %H:%M"
};
let line_separator = match matches.opt_present("0") {
true => "\0",
false => "\n",
};
let mut grand_total = 0;
for path_str in strs.into_iter() {
let path = Path::new(path_str);
let stat = safe_unwrap!(fs::lstat(&path));
let iter = du(&path, Stat{path: path.clone(), fstat: stat}, options_arc.clone(), 0).into_iter();
let (_, len) = iter.size_hint();
let len = len.unwrap();
for (index, stat) in iter.enumerate() {
let size = match matches.opt_present("apparent-size") {
true => stat.fstat.unstable.nlink * stat.fstat.size,
// C's stat is such that each block is assumed to be 512 bytes
// See: http://linux.die.net/man/2/stat
false => stat.fstat.unstable.blocks * 512,
};
if matches.opt_present("time") {
let tm = {
let (secs, nsecs) = {
let time = match matches.opt_str("time") {
Some(s) => match s.as_slice() {
"accessed" => stat.fstat.accessed,
"created" => stat.fstat.created,
"modified" => stat.fstat.modified,
_ => {
show_error!("invalid argument'modified' for '--time'
Valid arguments are:
- 'accessed', 'created','modified'
Try '{} --help' for more information.", program);
return 1;
}
},
None => stat.fstat.modified
};
((time / 1000) as i64, (time % 1000 * 1000000) as i32)
};
time::at(Timespec::new(secs, nsecs))
};
if !summarize || (summarize && index == len - 1) {
let time_str = tm.strftime(time_format_str).unwrap();
print!("{}\t{}\t{}{}", convert_size(size), time_str, stat.path.display(), line_separator);
}
} else {
if !summarize || (summarize && index == len - 1) {
print!("{}\t{}{}", convert_size(size), stat.path.display(), line_separator);
}
}
if options_arc.total && index == (len - 1) {
// The last element will be the total size of the path under
// path_str. We add it to the grand total.
grand_total += size;
}
}
}
if options_arc.total {
print!("{}\ttotal", convert_size(grand_total));
print!("{}", line_separator);
}
0
}
| du | identifier_name |
input.rs | use std::f64;
use std::collections::HashMap;
use std::ops::{Index, IndexMut};
use glutin;
use glutin::{ElementState, EventsLoop, VirtualKeyCode};
use glutin::WindowEvent::*;
pub const INPUT_UP: usize = 0;
pub const INPUT_DOWN: usize = 1;
pub const INPUT_LEFT: usize = 2;
pub const INPUT_RIGHT: usize = 3;
enum MouseState {
Released,
JustPressed,
Held{ prev_x: f64, prev_y: f64 },
}
pub struct Input {
pub move_forward: bool,
pub move_backward: bool,
pub strafe_left: bool,
pub strafe_right: bool,
mouse: MouseState,
pub delta_x: f64,
pub delta_y: f64,
bindings: HashMap<VirtualKeyCode, usize>,
}
impl Input {
pub fn new() -> Self {
Input {
move_forward: false,
move_backward: false,
strafe_left: false,
strafe_right: false,
mouse: MouseState::Released,
delta_x: 0.0,
delta_y: 0.0,
bindings: HashMap::new(),
}
}
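// Turn absolute mouse positions into drag deltas: the first press only records the starting position, and deltas accumulate while the button stays held.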
fn set_mouse(&mut self, x: f64, y: f64) {
match self.mouse {
MouseState::Released => {
self.delta_x = 0.0;
self.delta_y = 0.0;
},
MouseState::JustPressed => {
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
MouseState::Held{ prev_x, prev_y } => {
self.delta_x += x - prev_x;
self.delta_y += y - prev_y;
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
}
}
pub fn bind_key(&mut self, key: VirtualKeyCode, action: usize) |
fn get_binding(&self, key: &VirtualKeyCode) -> Option<usize> {
if let Some(action) = self.bindings.get(&key) {
Some(*action)
} else {
None
}
}
pub fn gather(&mut self, events_loop: &mut EventsLoop) -> bool {
self.delta_x = 0.0;
self.delta_y = 0.0;
let mut continue_game = true;
events_loop.poll_events(|event|{
if let glutin::Event::WindowEvent { event, .. } = event {
match event {
Closed |
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(VirtualKeyCode::Escape), .. }, .. } =>
continue_game = false,
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(key), state, .. }, .. } => {
if let Some(action) = self.get_binding(&key) {
self[action] = ElementState::Pressed == state;
}
},
MouseInput { state: glutin::ElementState::Pressed, button: glutin::MouseButton::Left, .. } => {
if let MouseState::Released = self.mouse {
self.mouse = MouseState::JustPressed;
}
},
MouseInput { state: glutin::ElementState::Released, button: glutin::MouseButton::Left, .. } => {
self.mouse = MouseState::Released;
},
MouseMoved { position: (x, y), .. } => self.set_mouse(x, y),
_ => {},
}
}
});
continue_game
}
}
impl Index<usize> for Input {
type Output = bool;
fn index<'a>(&'a self, index: usize) -> &'a bool {
match index {
INPUT_UP => &self.move_forward,
INPUT_DOWN => &self.move_backward,
INPUT_LEFT => &self.strafe_left,
INPUT_RIGHT => &self.strafe_right,
_ => unimplemented!(),
}
}
}
impl IndexMut<usize> for Input {
fn index_mut<'a>(&'a mut self, index: usize) -> &'a mut bool {
match index {
INPUT_UP => &mut self.move_forward,
INPUT_DOWN => &mut self.move_backward,
INPUT_LEFT => &mut self.strafe_left,
INPUT_RIGHT => &mut self.strafe_right,
_ => unimplemented!(),
}
}
}
| {
self.bindings.insert(key, action);
} | identifier_body |
input.rs | use std::f64;
use std::collections::HashMap;
use std::ops::{Index, IndexMut};
use glutin;
use glutin::{ElementState, EventsLoop, VirtualKeyCode};
use glutin::WindowEvent::*;
pub const INPUT_UP: usize = 0;
pub const INPUT_DOWN: usize = 1;
pub const INPUT_LEFT: usize = 2;
pub const INPUT_RIGHT: usize = 3;
enum MouseState {
Released,
JustPressed,
Held{ prev_x: f64, prev_y: f64 },
}
pub struct Input {
pub move_forward: bool,
pub move_backward: bool,
pub strafe_left: bool,
pub strafe_right: bool,
mouse: MouseState,
pub delta_x: f64,
pub delta_y: f64,
bindings: HashMap<VirtualKeyCode, usize>,
}
impl Input {
pub fn new() -> Self {
Input {
move_forward: false,
move_backward: false,
strafe_left: false,
strafe_right: false,
mouse: MouseState::Released,
delta_x: 0.0,
delta_y: 0.0,
bindings: HashMap::new(),
}
}
fn set_mouse(&mut self, x: f64, y: f64) {
match self.mouse {
MouseState::Released => {
self.delta_x = 0.0;
self.delta_y = 0.0;
},
MouseState::JustPressed => {
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
MouseState::Held{ prev_x, prev_y } => {
self.delta_x += x - prev_x;
self.delta_y += y - prev_y;
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
}
}
pub fn bind_key(&mut self, key: VirtualKeyCode, action: usize) {
self.bindings.insert(key, action);
}
fn | (&self, key: &VirtualKeyCode) -> Option<usize> {
if let Some(action) = self.bindings.get(&key) {
Some(*action)
} else {
None
}
}
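// Drain pending window events, updating bound actions and mouse-drag state; returns false once the window is closed or Escape is pressed.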
pub fn gather(&mut self, events_loop: &mut EventsLoop) -> bool {
self.delta_x = 0.0;
self.delta_y = 0.0;
let mut continue_game = true;
events_loop.poll_events(|event|{
if let glutin::Event::WindowEvent { event, .. } = event {
match event {
Closed |
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(VirtualKeyCode::Escape), .. }, .. } =>
continue_game = false,
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(key), state, .. }, .. } => {
if let Some(action) = self.get_binding(&key) {
self[action] = ElementState::Pressed == state;
}
},
MouseInput { state: glutin::ElementState::Pressed, button: glutin::MouseButton::Left, .. } => {
if let MouseState::Released = self.mouse {
self.mouse = MouseState::JustPressed;
}
},
MouseInput { state: glutin::ElementState::Released, button: glutin::MouseButton::Left, .. } => {
self.mouse = MouseState::Released;
},
MouseMoved { position: (x, y), .. } => self.set_mouse(x, y),
_ => {},
}
}
});
continue_game
}
}
impl Index<usize> for Input {
type Output = bool;
fn index<'a>(&'a self, index: usize) -> &'a bool {
match index {
INPUT_UP => &self.move_forward,
INPUT_DOWN => &self.move_backward,
INPUT_LEFT => &self.strafe_left,
INPUT_RIGHT => &self.strafe_right,
_ => unimplemented!(),
}
}
}
impl IndexMut<usize> for Input {
fn index_mut<'a>(&'a mut self, index: usize) -> &'a mut bool {
match index {
INPUT_UP => &mut self.move_forward,
INPUT_DOWN => &mut self.move_backward,
INPUT_LEFT => &mut self.strafe_left,
INPUT_RIGHT => &mut self.strafe_right,
_ => unimplemented!(),
}
}
}
| get_binding | identifier_name |
input.rs | use std::f64;
use std::collections::HashMap;
use std::ops::{Index, IndexMut};
use glutin;
use glutin::{ElementState, EventsLoop, VirtualKeyCode};
use glutin::WindowEvent::*;
pub const INPUT_UP: usize = 0;
pub const INPUT_DOWN: usize = 1;
pub const INPUT_LEFT: usize = 2;
pub const INPUT_RIGHT: usize = 3;
enum MouseState {
Released,
JustPressed,
Held{ prev_x: f64, prev_y: f64 },
}
pub struct Input {
pub move_forward: bool,
pub move_backward: bool,
pub strafe_left: bool,
pub strafe_right: bool,
mouse: MouseState,
pub delta_x: f64,
pub delta_y: f64,
bindings: HashMap<VirtualKeyCode, usize>,
}
impl Input {
pub fn new() -> Self {
Input {
move_forward: false,
move_backward: false,
strafe_left: false,
strafe_right: false,
mouse: MouseState::Released,
delta_x: 0.0,
delta_y: 0.0,
bindings: HashMap::new(),
}
}
fn set_mouse(&mut self, x: f64, y: f64) {
match self.mouse {
MouseState::Released => {
self.delta_x = 0.0;
self.delta_y = 0.0;
},
MouseState::JustPressed => {
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
MouseState::Held{ prev_x, prev_y } => {
self.delta_x += x - prev_x;
self.delta_y += y - prev_y;
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
}
}
pub fn bind_key(&mut self, key: VirtualKeyCode, action: usize) {
self.bindings.insert(key, action);
}
fn get_binding(&self, key: &VirtualKeyCode) -> Option<usize> {
if let Some(action) = self.bindings.get(&key) {
Some(*action)
} else {
None
}
}
pub fn gather(&mut self, events_loop: &mut EventsLoop) -> bool {
self.delta_x = 0.0;
self.delta_y = 0.0;
let mut continue_game = true;
events_loop.poll_events(|event|{
if let glutin::Event::WindowEvent { event, .. } = event {
match event {
Closed |
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(VirtualKeyCode::Escape), .. }, .. } =>
continue_game = false,
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(key), state, .. }, .. } => {
if let Some(action) = self.get_binding(&key) {
self[action] = ElementState::Pressed == state;
}
},
MouseInput { state: glutin::ElementState::Pressed, button: glutin::MouseButton::Left, .. } => {
if let MouseState::Released = self.mouse {
self.mouse = MouseState::JustPressed;
}
},
MouseInput { state: glutin::ElementState::Released, button: glutin::MouseButton::Left, .. } => {
self.mouse = MouseState::Released;
},
MouseMoved { position: (x, y), .. } => self.set_mouse(x, y),
_ => {},
}
}
});
continue_game
}
}
impl Index<usize> for Input {
type Output = bool;
fn index<'a>(&'a self, index: usize) -> &'a bool {
match index {
INPUT_UP => &self.move_forward,
INPUT_DOWN => &self.move_backward,
INPUT_LEFT => &self.strafe_left,
INPUT_RIGHT => &self.strafe_right,
_ => unimplemented!(),
}
}
}
impl IndexMut<usize> for Input { | INPUT_RIGHT => &mut self.strafe_right,
_ => unimplemented!(),
}
}
} | fn index_mut<'a>(&'a mut self, index: usize) -> &'a mut bool {
match index {
INPUT_UP => &mut self.move_forward,
INPUT_DOWN => &mut self.move_backward,
INPUT_LEFT => &mut self.strafe_left, | random_line_split |
input.rs | use std::f64;
use std::collections::HashMap;
use std::ops::{Index, IndexMut};
use glutin;
use glutin::{ElementState, EventsLoop, VirtualKeyCode};
use glutin::WindowEvent::*;
pub const INPUT_UP: usize = 0;
pub const INPUT_DOWN: usize = 1;
pub const INPUT_LEFT: usize = 2;
pub const INPUT_RIGHT: usize = 3;
enum MouseState {
Released,
JustPressed,
Held{ prev_x: f64, prev_y: f64 },
}
pub struct Input {
pub move_forward: bool,
pub move_backward: bool,
pub strafe_left: bool,
pub strafe_right: bool,
mouse: MouseState,
pub delta_x: f64,
pub delta_y: f64,
bindings: HashMap<VirtualKeyCode, usize>,
}
impl Input {
pub fn new() -> Self {
Input {
move_forward: false,
move_backward: false,
strafe_left: false,
strafe_right: false,
mouse: MouseState::Released,
delta_x: 0.0,
delta_y: 0.0,
bindings: HashMap::new(),
}
}
fn set_mouse(&mut self, x: f64, y: f64) {
match self.mouse {
MouseState::Released => {
self.delta_x = 0.0;
self.delta_y = 0.0;
},
MouseState::JustPressed => {
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
MouseState::Held{ prev_x, prev_y } => {
self.delta_x += x - prev_x;
self.delta_y += y - prev_y;
self.mouse = MouseState::Held {
prev_x: x,
prev_y: y
};
},
}
}
pub fn bind_key(&mut self, key: VirtualKeyCode, action: usize) {
self.bindings.insert(key, action);
}
fn get_binding(&self, key: &VirtualKeyCode) -> Option<usize> {
if let Some(action) = self.bindings.get(&key) {
Some(*action)
} else {
None
}
}
pub fn gather(&mut self, events_loop: &mut EventsLoop) -> bool {
self.delta_x = 0.0;
self.delta_y = 0.0;
let mut continue_game = true;
events_loop.poll_events(|event|{
if let glutin::Event::WindowEvent { event, .. } = event {
match event {
Closed |
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(VirtualKeyCode::Escape), .. }, .. } =>
continue_game = false,
KeyboardInput { input: glutin::KeyboardInput { virtual_keycode: Some(key), state, .. }, .. } => {
if let Some(action) = self.get_binding(&key) {
self[action] = ElementState::Pressed == state;
}
},
MouseInput { state: glutin::ElementState::Pressed, button: glutin::MouseButton::Left, .. } => {
if let MouseState::Released = self.mouse {
self.mouse = MouseState::JustPressed;
}
},
MouseInput{ state: glutin::ElementState::Released, button: glutin::MouseButton::Left,.. } => | ,
MouseMoved { position: (x, y), .. } => self.set_mouse(x, y),
_ => {},
}
}
});
continue_game
}
}
impl Index<usize> for Input {
type Output = bool;
fn index<'a>(&'a self, index: usize) -> &'a bool {
match index {
INPUT_UP => &self.move_forward,
INPUT_DOWN => &self.move_backward,
INPUT_LEFT => &self.strafe_left,
INPUT_RIGHT => &self.strafe_right,
_ => unimplemented!(),
}
}
}
impl IndexMut<usize> for Input {
fn index_mut<'a>(&'a mut self, index: usize) -> &'a mut bool {
match index {
INPUT_UP => &mut self.move_forward,
INPUT_DOWN => &mut self.move_backward,
INPUT_LEFT => &mut self.strafe_left,
INPUT_RIGHT => &mut self.strafe_right,
_ => unimplemented!(),
}
}
}
| {
self.mouse = MouseState::Released;
} | conditional_block |
checkboxes.rs | /*
* Copyright (c) 2017-2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
Window,
WindowType,
prelude::ButtonExt,
prelude::ContainerExt,
prelude::ToggleButtonExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::{
connect,
Component,
ContainerWidget,
Relm,
Update,
Widget,
WidgetTest,
};
use relm_derive::Msg;
use self::CheckMsg::*;
use self::Msg::*;
struct CheckModel {
check: bool,
label: &'static str,
}
#[derive(Msg)]
enum CheckMsg {
Check,
Toggle,
Uncheck,
}
struct CheckButton {
button: gtk::CheckButton,
model: CheckModel,
relm: Relm<CheckButton>,
}
impl Update for CheckButton {
type Model = CheckModel;
type ModelParam = &'static str;
type Msg = CheckMsg;
fn model(_: &Relm<Self>, label: &'static str) -> CheckModel {
CheckModel {
check: false,
label,
}
}
fn update(&mut self, event: CheckMsg) {
match event {
Check => {
self.model.check = true;
// Lock the stream so that the call to set_active does not emit a Toggle message
// because that would cause an infinite recursion
// The Toggle message is emitted because the button connect signal is handled.
let _lock = self.relm.stream().lock();
self.button.set_active(true);
},
Toggle => {
self.model.check = !self.model.check;
self.button.set_active(self.model.check);
},
Uncheck => {
self.model.check = false;
let _lock = self.relm.stream().lock();
self.button.set_active(false);
},
}
}
}
impl Widget for CheckButton {
type Root = gtk::CheckButton;
fn root(&self) -> Self::Root {
self.button.clone()
}
fn view(relm: &Relm<Self>, model: Self::Model) -> Self {
let button = gtk::CheckButton::with_label(model.label);
connect!(relm, button, connect_clicked(_), Toggle);
CheckButton {
button,
model,
relm: relm.clone(),
}
}
}
#[derive(Msg)]
enum Msg {
MinusToggle,
PlusToggle,
Quit,
}
struct Components {
minus_button: Component<CheckButton>,
plus_button: Component<CheckButton>,
}
#[derive(Clone)]
struct Widgets {
minus_button: gtk::CheckButton,
plus_button: gtk::CheckButton,
window: Window,
}
struct Win {
components: Components,
widgets: Widgets,
}
impl Update for Win {
type Model = ();
type ModelParam = ();
type Msg = Msg;
fn model(_: &Relm<Self>, _: ()) -> () {
}
fn update(&mut self, event: Msg) {
match event {
Quit => gtk::main_quit(),
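// Keep the two checkboxes mutually exclusive: whenever one changes, the other is forced into the opposite state.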
MinusToggle => {
if self.widgets.minus_button.is_active() {
self.components.plus_button.emit(Uncheck);
}
else |
},
PlusToggle => {
if self.widgets.plus_button.is_active() {
self.components.minus_button.emit(Uncheck);
}
else {
self.components.minus_button.emit(Check);
}
},
}
}
}
impl Widget for Win {
type Root = Window;
fn root(&self) -> Self::Root {
self.widgets.window.clone()
}
fn view(relm: &Relm<Self>, _model: Self::Model) -> Self {
let vbox = gtk::Box::new(Vertical, 0);
let plus_button = vbox.add_widget::<CheckButton>("+");
let minus_button = vbox.add_widget::<CheckButton>("-");
let window = Window::new(WindowType::Toplevel);
window.add(&vbox);
window.show_all();
connect!(plus_button@Toggle, relm, PlusToggle);
connect!(minus_button@Toggle, relm, MinusToggle);
connect!(relm, window, connect_delete_event(_, _), return (Some(Quit), Inhibit(false)));
Win {
widgets: Widgets {
minus_button: minus_button.widget().clone(),
plus_button: plus_button.widget().clone(),
window: window,
},
components: Components {
minus_button,
plus_button,
},
}
}
}
impl WidgetTest for Win {
type Streams = ();
fn get_streams(&self) -> Self::Streams {
}
type Widgets = Widgets;
fn get_widgets(&self) -> Self::Widgets {
self.widgets.clone()
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
#[cfg(test)]
mod tests {
use gtk::prelude::ToggleButtonExt;
use relm_test::click;
use crate::Win;
#[test]
fn check_uncheck() {
let (_component, _, widgets) = relm::init_test::<Win>(()).expect("init_test failed");
let plus_button = &widgets.plus_button;
let minus_button = &widgets.minus_button;
assert!(!plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
click(minus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active());
click(minus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
}
}
| {
self.components.plus_button.emit(Check);
} | conditional_block |
checkboxes.rs | /*
* Copyright (c) 2017-2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
Window,
WindowType,
prelude::ButtonExt,
prelude::ContainerExt,
prelude::ToggleButtonExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::{
connect,
Component,
ContainerWidget,
Relm,
Update,
Widget,
WidgetTest,
};
use relm_derive::Msg;
use self::CheckMsg::*;
use self::Msg::*;
struct CheckModel {
check: bool,
label: &'static str,
}
#[derive(Msg)]
enum CheckMsg {
Check,
Toggle,
Uncheck,
}
struct CheckButton {
button: gtk::CheckButton,
model: CheckModel,
relm: Relm<CheckButton>,
}
impl Update for CheckButton {
type Model = CheckModel;
type ModelParam = &'static str;
type Msg = CheckMsg;
fn model(_: &Relm<Self>, label: &'static str) -> CheckModel {
CheckModel {
check: false,
label,
}
}
fn update(&mut self, event: CheckMsg) {
match event {
Check => {
self.model.check = true;
// Lock the stream so that the call to set_active does not emit a Toggle message
// because that would cause an infinite recursion
// The Toggle message is emitted because the button connect signal is handled.
let _lock = self.relm.stream().lock();
self.button.set_active(true);
},
Toggle => {
self.model.check = !self.model.check;
self.button.set_active(self.model.check);
},
Uncheck => {
self.model.check = false;
let _lock = self.relm.stream().lock();
self.button.set_active(false);
},
}
}
}
impl Widget for CheckButton {
type Root = gtk::CheckButton;
fn root(&self) -> Self::Root {
self.button.clone()
}
fn view(relm: &Relm<Self>, model: Self::Model) -> Self {
let button = gtk::CheckButton::with_label(model.label);
connect!(relm, button, connect_clicked(_), Toggle);
CheckButton {
button,
model,
relm: relm.clone(),
}
}
}
#[derive(Msg)]
enum Msg {
MinusToggle,
PlusToggle,
Quit,
}
struct Components {
minus_button: Component<CheckButton>,
plus_button: Component<CheckButton>,
}
#[derive(Clone)]
struct Widgets {
minus_button: gtk::CheckButton,
plus_button: gtk::CheckButton,
window: Window,
}
struct Win {
components: Components,
widgets: Widgets,
}
impl Update for Win {
type Model = ();
type ModelParam = ();
type Msg = Msg;
fn model(_: &Relm<Self>, _: ()) -> () {
}
fn update(&mut self, event: Msg) {
match event {
Quit => gtk::main_quit(),
MinusToggle => {
if self.widgets.minus_button.is_active() {
self.components.plus_button.emit(Uncheck);
}
else {
self.components.plus_button.emit(Check);
}
},
PlusToggle => {
if self.widgets.plus_button.is_active() {
self.components.minus_button.emit(Uncheck);
}
else {
self.components.minus_button.emit(Check);
}
},
}
}
}
impl Widget for Win {
type Root = Window;
fn root(&self) -> Self::Root {
self.widgets.window.clone()
}
fn view(relm: &Relm<Self>, _model: Self::Model) -> Self {
let vbox = gtk::Box::new(Vertical, 0);
let plus_button = vbox.add_widget::<CheckButton>("+");
let minus_button = vbox.add_widget::<CheckButton>("-");
let window = Window::new(WindowType::Toplevel);
window.add(&vbox);
window.show_all();
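// Forward the child buttons' Toggle messages to this window as PlusToggle/MinusToggle, and quit when the window is closed.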
connect!(plus_button@Toggle, relm, PlusToggle);
connect!(minus_button@Toggle, relm, MinusToggle);
connect!(relm, window, connect_delete_event(_, _), return (Some(Quit), Inhibit(false)));
Win {
widgets: Widgets {
minus_button: minus_button.widget().clone(),
plus_button: plus_button.widget().clone(),
window: window,
},
components: Components {
minus_button,
plus_button,
},
}
}
}
impl WidgetTest for Win {
type Streams = ();
fn get_streams(&self) -> Self::Streams {
}
type Widgets = Widgets;
fn get_widgets(&self) -> Self::Widgets {
self.widgets.clone()
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
#[cfg(test)]
mod tests {
use gtk::prelude::ToggleButtonExt;
use relm_test::click;
use crate::Win;
#[test]
fn | () {
let (_component, _, widgets) = relm::init_test::<Win>(()).expect("init_test failed");
let plus_button = &widgets.plus_button;
let minus_button = &widgets.minus_button;
assert!(!plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
click(minus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active());
click(minus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
}
}
| check_uncheck | identifier_name |
checkboxes.rs | /*
* Copyright (c) 2017-2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
Window,
WindowType,
prelude::ButtonExt,
prelude::ContainerExt,
prelude::ToggleButtonExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::{
connect,
Component,
ContainerWidget,
Relm,
Update,
Widget,
WidgetTest,
};
use relm_derive::Msg;
use self::CheckMsg::*;
use self::Msg::*;
struct CheckModel {
check: bool,
label: &'static str,
}
#[derive(Msg)]
enum CheckMsg {
Check,
Toggle,
Uncheck,
}
struct CheckButton {
button: gtk::CheckButton,
model: CheckModel,
relm: Relm<CheckButton>,
}
impl Update for CheckButton {
type Model = CheckModel;
type ModelParam = &'static str;
type Msg = CheckMsg;
fn model(_: &Relm<Self>, label: &'static str) -> CheckModel {
CheckModel {
check: false,
label,
}
}
fn update(&mut self, event: CheckMsg) {
match event {
Check => {
self.model.check = true;
// Lock the stream so that the call to set_active does not emit a Toggle message
// because that would cause an infinite recursion
// The Toggle message is emitted because the button connect signal is handled.
let _lock = self.relm.stream().lock();
self.button.set_active(true);
},
Toggle => {
self.model.check = !self.model.check;
self.button.set_active(self.model.check);
},
Uncheck => {
self.model.check = false;
let _lock = self.relm.stream().lock();
self.button.set_active(false);
},
}
}
}
impl Widget for CheckButton {
type Root = gtk::CheckButton;
fn root(&self) -> Self::Root {
self.button.clone()
}
fn view(relm: &Relm<Self>, model: Self::Model) -> Self {
let button = gtk::CheckButton::with_label(model.label);
connect!(relm, button, connect_clicked(_), Toggle);
CheckButton {
button,
model,
relm: relm.clone(),
}
}
}
#[derive(Msg)]
enum Msg {
MinusToggle,
PlusToggle,
Quit,
}
struct Components {
minus_button: Component<CheckButton>,
plus_button: Component<CheckButton>,
}
#[derive(Clone)]
struct Widgets {
minus_button: gtk::CheckButton,
plus_button: gtk::CheckButton,
window: Window,
}
struct Win {
components: Components,
widgets: Widgets,
}
impl Update for Win {
type Model = ();
type ModelParam = ();
type Msg = Msg;
fn model(_: &Relm<Self>, _: ()) -> () {
}
fn update(&mut self, event: Msg) {
match event {
Quit => gtk::main_quit(),
MinusToggle => {
if self.widgets.minus_button.is_active() {
self.components.plus_button.emit(Uncheck);
}
else {
self.components.plus_button.emit(Check);
}
},
PlusToggle => {
if self.widgets.plus_button.is_active() {
self.components.minus_button.emit(Uncheck);
}
else {
self.components.minus_button.emit(Check);
}
},
}
}
}
impl Widget for Win {
type Root = Window;
fn root(&self) -> Self::Root {
self.widgets.window.clone()
}
fn view(relm: &Relm<Self>, _model: Self::Model) -> Self {
let vbox = gtk::Box::new(Vertical, 0);
let plus_button = vbox.add_widget::<CheckButton>("+");
let minus_button = vbox.add_widget::<CheckButton>("-");
let window = Window::new(WindowType::Toplevel);
window.add(&vbox);
window.show_all();
connect!(plus_button@Toggle, relm, PlusToggle);
connect!(minus_button@Toggle, relm, MinusToggle);
connect!(relm, window, connect_delete_event(_, _), return (Some(Quit), Inhibit(false)));
Win {
widgets: Widgets {
minus_button: minus_button.widget().clone(),
plus_button: plus_button.widget().clone(),
window: window,
},
components: Components {
minus_button,
plus_button,
},
}
}
}
impl WidgetTest for Win {
type Streams = ();
fn get_streams(&self) -> Self::Streams {
}
type Widgets = Widgets;
fn get_widgets(&self) -> Self::Widgets {
self.widgets.clone()
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
#[cfg(test)]
mod tests {
use gtk::prelude::ToggleButtonExt;
use relm_test::click;
use crate::Win;
#[test]
fn check_uncheck() {
let (_component, _, widgets) = relm::init_test::<Win>(()).expect("init_test failed");
let plus_button = &widgets.plus_button;
let minus_button = &widgets.minus_button;
assert!(!plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
click(minus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active()); | } |
click(minus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
} | random_line_split |
checkboxes.rs | /*
* Copyright (c) 2017-2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
Window,
WindowType,
prelude::ButtonExt,
prelude::ContainerExt,
prelude::ToggleButtonExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::{
connect,
Component,
ContainerWidget,
Relm,
Update,
Widget,
WidgetTest,
};
use relm_derive::Msg;
use self::CheckMsg::*;
use self::Msg::*;
struct CheckModel {
check: bool,
label: &'static str,
}
#[derive(Msg)]
enum CheckMsg {
Check,
Toggle,
Uncheck,
}
struct CheckButton {
button: gtk::CheckButton,
model: CheckModel,
relm: Relm<CheckButton>,
}
impl Update for CheckButton {
type Model = CheckModel;
type ModelParam = &'static str;
type Msg = CheckMsg;
fn model(_: &Relm<Self>, label: &'static str) -> CheckModel {
CheckModel {
check: false,
label,
}
}
fn update(&mut self, event: CheckMsg) {
match event {
Check => {
self.model.check = true;
// Lock the stream so that the call to set_active does not emit a Toggle message
// because that would cause an infinite recursion
// The Toggle message is emitted because the button connect signal is handled.
let _lock = self.relm.stream().lock();
self.button.set_active(true);
},
Toggle => {
self.model.check = !self.model.check;
self.button.set_active(self.model.check);
},
Uncheck => {
self.model.check = false;
let _lock = self.relm.stream().lock();
self.button.set_active(false);
},
}
}
}
impl Widget for CheckButton {
type Root = gtk::CheckButton;
fn root(&self) -> Self::Root {
self.button.clone()
}
fn view(relm: &Relm<Self>, model: Self::Model) -> Self {
let button = gtk::CheckButton::with_label(model.label);
connect!(relm, button, connect_clicked(_), Toggle);
CheckButton {
button,
model,
relm: relm.clone(),
}
}
}
#[derive(Msg)]
enum Msg {
MinusToggle,
PlusToggle,
Quit,
}
struct Components {
minus_button: Component<CheckButton>,
plus_button: Component<CheckButton>,
}
#[derive(Clone)]
struct Widgets {
minus_button: gtk::CheckButton,
plus_button: gtk::CheckButton,
window: Window,
}
struct Win {
components: Components,
widgets: Widgets,
}
impl Update for Win {
type Model = ();
type ModelParam = ();
type Msg = Msg;
fn model(_: &Relm<Self>, _: ()) -> () {
}
fn update(&mut self, event: Msg) | }
}
impl Widget for Win {
type Root = Window;
fn root(&self) -> Self::Root {
self.widgets.window.clone()
}
fn view(relm: &Relm<Self>, _model: Self::Model) -> Self {
let vbox = gtk::Box::new(Vertical, 0);
let plus_button = vbox.add_widget::<CheckButton>("+");
let minus_button = vbox.add_widget::<CheckButton>("-");
let window = Window::new(WindowType::Toplevel);
window.add(&vbox);
window.show_all();
connect!(plus_button@Toggle, relm, PlusToggle);
connect!(minus_button@Toggle, relm, MinusToggle);
connect!(relm, window, connect_delete_event(_, _), return (Some(Quit), Inhibit(false)));
Win {
widgets: Widgets {
minus_button: minus_button.widget().clone(),
plus_button: plus_button.widget().clone(),
window: window,
},
components: Components {
minus_button,
plus_button,
},
}
}
}
impl WidgetTest for Win {
type Streams = ();
fn get_streams(&self) -> Self::Streams {
}
type Widgets = Widgets;
fn get_widgets(&self) -> Self::Widgets {
self.widgets.clone()
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
#[cfg(test)]
mod tests {
use gtk::prelude::ToggleButtonExt;
use relm_test::click;
use crate::Win;
#[test]
fn check_uncheck() {
let (_component, _, widgets) = relm::init_test::<Win>(()).expect("init_test failed");
let plus_button = &widgets.plus_button;
let minus_button = &widgets.minus_button;
assert!(!plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active());
click(plus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
click(minus_button);
assert!(plus_button.is_active());
assert!(!minus_button.is_active());
click(minus_button);
assert!(!plus_button.is_active());
assert!(minus_button.is_active());
}
}
| {
match event {
Quit => gtk::main_quit(),
MinusToggle => {
if self.widgets.minus_button.is_active() {
self.components.plus_button.emit(Uncheck);
}
else {
self.components.plus_button.emit(Check);
}
},
PlusToggle => {
if self.widgets.plus_button.is_active() {
self.components.minus_button.emit(Uncheck);
}
else {
self.components.minus_button.emit(Check);
}
},
} | identifier_body |
cell.rs | // Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ansi::{NamedColor, Color};
use grid;
use index::Column;
bitflags! {
#[derive(Serialize, Deserialize)]
pub flags Flags: u32 {
const INVERSE = 0b00000001,
const BOLD = 0b00000010,
const ITALIC = 0b00000100,
const UNDERLINE = 0b00001000,
const WRAPLINE = 0b00010000,
}
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct Cell {
pub c: char,
pub fg: Color,
pub bg: Color,
pub flags: Flags,
}
impl Default for Cell {
fn default() -> Cell {
Cell::new(
' ',
Color::Named(NamedColor::Foreground),
Color::Named(NamedColor::Background)
)
}
}
/// Get the length of occupied cells in a line
pub trait LineLength {
/// Calculate the occupied line length
fn line_length(&self) -> Column;
}
impl LineLength for grid::Row<Cell> {
fn line_length(&self) -> Column {
let mut length = Column(0);
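// A row whose last cell carries WRAPLINE spills into the next line, so it counts as fully occupied.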
if self[Column(self.len() - 1)].flags.contains(WRAPLINE) {
return Column(self.len());
}
for (index, cell) in self[..].iter().rev().enumerate() {
if cell.c != ' ' {
length = Column(self.len() - index);
break;
}
}
length
}
}
impl Cell {
pub fn bold(&self) -> bool {
self.flags.contains(BOLD)
}
pub fn | (c: char, fg: Color, bg: Color) -> Cell {
Cell {
c: c.into(),
bg: bg,
fg: fg,
flags: Flags::empty(),
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.c == ' ' &&
self.bg == Color::Named(NamedColor::Background) &&
!self.flags.contains(INVERSE)
}
#[inline]
pub fn reset(&mut self, template: &Cell) {
// memcpy template to self
*self = *template;
}
}
#[cfg(test)]
mod tests {
use super::{Cell, LineLength};
use grid::Row;
use index::Column;
#[test]
fn line_length_works() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(5)].c = 'a';
assert_eq!(row.line_length(), Column(6));
}
#[test]
fn line_length_works_with_wrapline() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(9)].flags.insert(super::WRAPLINE);
assert_eq!(row.line_length(), Column(10));
}
}
#[cfg(test)]
mod benches {
extern crate test;
use super::Cell;
#[bench]
fn cell_reset(b: &mut test::Bencher) {
b.iter(|| {
let mut cell = Cell::default();
for _ in 0..100 {
cell.reset(test::black_box(&Cell::default()));
}
test::black_box(cell);
});
}
}
| new | identifier_name |
cell.rs | // Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ansi::{NamedColor, Color};
use grid;
use index::Column;
bitflags! {
#[derive(Serialize, Deserialize)]
pub flags Flags: u32 {
const INVERSE = 0b00000001,
const BOLD = 0b00000010,
const ITALIC = 0b00000100,
const UNDERLINE = 0b00001000,
const WRAPLINE = 0b00010000,
}
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct Cell {
pub c: char,
pub fg: Color,
pub bg: Color,
pub flags: Flags,
}
impl Default for Cell {
fn default() -> Cell {
Cell::new(
' ',
Color::Named(NamedColor::Foreground),
Color::Named(NamedColor::Background)
)
}
}
/// Get the length of occupied cells in a line
pub trait LineLength {
/// Calculate the occupied line length
fn line_length(&self) -> Column;
}
impl LineLength for grid::Row<Cell> {
fn line_length(&self) -> Column |
}
impl Cell {
pub fn bold(&self) -> bool {
self.flags.contains(BOLD)
}
pub fn new(c: char, fg: Color, bg: Color) -> Cell {
Cell {
c: c.into(),
bg: bg,
fg: fg,
flags: Flags::empty(),
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.c == ' ' &&
self.bg == Color::Named(NamedColor::Background) &&
!self.flags.contains(INVERSE)
}
#[inline]
pub fn reset(&mut self, template: &Cell) {
// memcpy template to self
*self = *template;
}
}
#[cfg(test)]
mod tests {
use super::{Cell, LineLength};
use grid::Row;
use index::Column;
#[test]
fn line_length_works() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(5)].c = 'a';
assert_eq!(row.line_length(), Column(6));
}
#[test]
fn line_length_works_with_wrapline() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(9)].flags.insert(super::WRAPLINE);
assert_eq!(row.line_length(), Column(10));
}
}
#[cfg(test)]
mod benches {
extern crate test;
use super::Cell;
#[bench]
fn cell_reset(b: &mut test::Bencher) {
b.iter(|| {
let mut cell = Cell::default();
for _ in 0..100 {
cell.reset(test::black_box(&Cell::default()));
}
test::black_box(cell);
});
}
}
| {
let mut length = Column(0);
if self[Column(self.len() - 1)].flags.contains(WRAPLINE) {
return Column(self.len());
}
for (index, cell) in self[..].iter().rev().enumerate() {
if cell.c != ' ' {
length = Column(self.len() - index);
break;
}
}
length
} | identifier_body |
cell.rs | // Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ansi::{NamedColor, Color};
use grid;
use index::Column;
bitflags! {
#[derive(Serialize, Deserialize)]
pub flags Flags: u32 {
const INVERSE = 0b00000001,
const BOLD = 0b00000010,
const ITALIC = 0b00000100,
const UNDERLINE = 0b00001000,
const WRAPLINE = 0b00010000,
}
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct Cell {
pub c: char,
pub fg: Color,
pub bg: Color,
pub flags: Flags,
}
impl Default for Cell {
fn default() -> Cell {
Cell::new(
' ',
Color::Named(NamedColor::Foreground),
Color::Named(NamedColor::Background)
)
}
}
/// Get the length of occupied cells in a line
pub trait LineLength {
/// Calculate the occupied line length
fn line_length(&self) -> Column;
}
impl LineLength for grid::Row<Cell> {
fn line_length(&self) -> Column {
let mut length = Column(0);
if self[Column(self.len() - 1)].flags.contains(WRAPLINE) {
return Column(self.len());
}
for (index, cell) in self[..].iter().rev().enumerate() {
if cell.c!='' |
}
length
}
}
impl Cell {
pub fn bold(&self) -> bool {
self.flags.contains(BOLD)
}
pub fn new(c: char, fg: Color, bg: Color) -> Cell {
Cell {
c: c.into(),
bg: bg,
fg: fg,
flags: Flags::empty(),
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.c == ' ' &&
self.bg == Color::Named(NamedColor::Background) &&
!self.flags.contains(INVERSE)
}
#[inline]
pub fn reset(&mut self, template: &Cell) {
// memcpy template to self
*self = *template;
}
}
#[cfg(test)]
mod tests {
use super::{Cell, LineLength};
use grid::Row;
use index::Column;
#[test]
fn line_length_works() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(5)].c = 'a';
assert_eq!(row.line_length(), Column(6));
}
#[test]
fn line_length_works_with_wrapline() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(9)].flags.insert(super::WRAPLINE);
assert_eq!(row.line_length(), Column(10));
}
}
#[cfg(test)]
mod benches {
extern crate test;
use super::Cell;
#[bench]
fn cell_reset(b: &mut test::Bencher) {
b.iter(|| {
let mut cell = Cell::default();
for _ in 0..100 {
cell.reset(test::black_box(&Cell::default()));
}
test::black_box(cell);
});
}
}
| {
length = Column(self.len() - index);
break;
} | conditional_block |
cell.rs | // Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ansi::{NamedColor, Color};
use grid;
use index::Column;
bitflags! {
#[derive(Serialize, Deserialize)]
pub flags Flags: u32 {
const INVERSE = 0b00000001,
const BOLD = 0b00000010,
const ITALIC = 0b00000100,
const UNDERLINE = 0b00001000,
const WRAPLINE = 0b00010000,
}
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct Cell {
pub c: char,
pub fg: Color,
pub bg: Color,
pub flags: Flags,
}
impl Default for Cell {
fn default() -> Cell {
Cell::new(
' ',
Color::Named(NamedColor::Foreground),
Color::Named(NamedColor::Background)
)
}
}
/// Get the length of occupied cells in a line
pub trait LineLength {
/// Calculate the occupied line length
fn line_length(&self) -> Column;
}
impl LineLength for grid::Row<Cell> {
fn line_length(&self) -> Column {
let mut length = Column(0);
if self[Column(self.len() - 1)].flags.contains(WRAPLINE) {
return Column(self.len());
}
for (index, cell) in self[..].iter().rev().enumerate() {
if cell.c != ' ' {
length = Column(self.len() - index);
break;
}
}
length
}
}
impl Cell {
pub fn bold(&self) -> bool {
self.flags.contains(BOLD)
}
pub fn new(c: char, fg: Color, bg: Color) -> Cell { | Cell {
c: c.into(),
bg: bg,
fg: fg,
flags: Flags::empty(),
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.c == ' ' &&
self.bg == Color::Named(NamedColor::Background) &&
!self.flags.contains(INVERSE)
}
#[inline]
pub fn reset(&mut self, template: &Cell) {
// memcpy template to self
*self = *template;
}
}
#[cfg(test)]
mod tests {
use super::{Cell, LineLength};
use grid::Row;
use index::Column;
#[test]
fn line_length_works() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(5)].c = 'a';
assert_eq!(row.line_length(), Column(6));
}
#[test]
fn line_length_works_with_wrapline() {
let template = Cell::default();
let mut row = Row::new(Column(10), &template);
row[Column(9)].flags.insert(super::WRAPLINE);
assert_eq!(row.line_length(), Column(10));
}
}
#[cfg(test)]
mod benches {
extern crate test;
use super::Cell;
#[bench]
fn cell_reset(b: &mut test::Bencher) {
b.iter(|| {
let mut cell = Cell::default();
for _ in 0..100 {
cell.reset(test::black_box(&Cell::default()));
}
test::black_box(cell);
});
}
} | random_line_split |
|
variance-regions-direct.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we correctly infer variance for region parameters in
// various self-contained types.
// Regions that just appear in normal spots are contravariant:
#[rustc_variance]
struct Test2<'a, 'b, 'c> { //~ ERROR regions=[[-, -, -];[];[];[]]
x: &'a int,
y: &'b [int],
c: &'c str
}
// Those same annotations in function arguments become covariant:
#[rustc_variance]
struct Test3<'a, 'b, 'c> { //~ ERROR regions=[[+, +, +];[];[];[]]
x: extern "Rust" fn(&'a int),
y: extern "Rust" fn(&'b [int]),
c: extern "Rust" fn(&'c str),
}
// Mutability induces invariance:
#[rustc_variance]
struct Test4<'a, 'b:'a> { //~ ERROR regions=[[-, o];[];[];[]]
x: &'a mut &'b int,
}
// Mutability induces invariance, even when in a
// contravariant context:
#[rustc_variance]
struct Test5<'a, 'b> { //~ ERROR regions=[[+, o];[];[];[]]
x: extern "Rust" fn(&'a mut &'b int),
}
// Invariance is a trap from which NO ONE CAN ESCAPE.
// In other words, even though the `&'b int` occurs in
// an argument list (which is contravariant), that
// argument list occurs in an invariant context.
#[rustc_variance]
struct Test6<'a, 'b> { //~ ERROR regions=[[-, o];[];[];[]]
x: &'a mut extern "Rust" fn(&'b int),
}
// No uses at all is bivariant:
#[rustc_variance]
struct Test7<'a> { //~ ERROR regions=[[*];[];[];[]]
x: int
}
// Try enums too.
#[rustc_variance]
enum Test8<'a, 'b, 'c:'b> { //~ ERROR regions=[[+, -, o];[];[];[]]
Test8A(extern "Rust" fn(&'a int)),
Test8B(&'b [int]),
Test8C(&'b mut &'c str),
}
fn | () {}
| main | identifier_name |
variance-regions-direct.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we correctly infer variance for region parameters in
// various self-contained types.
// Regions that just appear in normal spots are contravariant:
#[rustc_variance]
struct Test2<'a, 'b, 'c> { //~ ERROR regions=[[-, -, -];[];[];[]]
x: &'a int,
y: &'b [int],
c: &'c str
}
| #[rustc_variance]
struct Test3<'a, 'b, 'c> { //~ ERROR regions=[[+, +, +];[];[];[]]
x: extern "Rust" fn(&'a int),
y: extern "Rust" fn(&'b [int]),
c: extern "Rust" fn(&'c str),
}
// Mutability induces invariance:
#[rustc_variance]
struct Test4<'a, 'b:'a> { //~ ERROR regions=[[-, o];[];[];[]]
x: &'a mut &'b int,
}
// Mutability induces invariance, even when in a
// contravariant context:
#[rustc_variance]
struct Test5<'a, 'b> { //~ ERROR regions=[[+, o];[];[];[]]
x: extern "Rust" fn(&'a mut &'b int),
}
// Invariance is a trap from which NO ONE CAN ESCAPE.
// In other words, even though the `&'b int` occurs in
// an argument list (which is contravariant), that
// argument list occurs in an invariant context.
#[rustc_variance]
struct Test6<'a, 'b> { //~ ERROR regions=[[-, o];[];[];[]]
x: &'a mut extern "Rust" fn(&'b int),
}
// No uses at all is bivariant:
#[rustc_variance]
struct Test7<'a> { //~ ERROR regions=[[*];[];[];[]]
x: int
}
// Try enums too.
#[rustc_variance]
enum Test8<'a, 'b, 'c:'b> { //~ ERROR regions=[[+, -, o];[];[];[]]
Test8A(extern "Rust" fn(&'a int)),
Test8B(&'b [int]),
Test8C(&'b mut &'c str),
}
fn main() {} | // Those same annotations in function arguments become covariant:
| random_line_split |
network_listener.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use bluetooth_traits::{BluetoothResponseListener, BluetoothResponseResult};
use net_traits::{Action, FetchResponseListener, FetchResponseMsg};
use script_thread::{Runnable, RunnableWrapper};
use std::sync::{Arc, Mutex};
use task_source::TaskSource;
use task_source::networking::NetworkingTaskSource;
/// An off-thread sink for async network event runnables. All such events are forwarded to
/// a target thread, where they are invoked on the provided context object.
pub struct NetworkListener<Listener: PreInvoke + Send + 'static> {
pub context: Arc<Mutex<Listener>>,
pub task_source: NetworkingTaskSource,
pub wrapper: Option<RunnableWrapper>, |
impl<Listener: PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify<A: Action<Listener> + Send + 'static>(&self, action: A) {
let runnable = box ListenerRunnable {
context: self.context.clone(),
action: action,
};
let result = if let Some(ref wrapper) = self.wrapper {
self.task_source.queue_with_wrapper(runnable, wrapper)
} else {
self.task_source.queue_wrapperless(runnable)
};
if let Err(err) = result {
warn!("failed to deliver network data: {:?}", err);
}
}
}
// helps type inference
impl<Listener: FetchResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_fetch(&self, action: FetchResponseMsg) {
self.notify(action);
}
}
// helps type inference
impl<Listener: BluetoothResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_response(&self, action: BluetoothResponseResult) {
self.notify(action);
}
}
/// A gating mechanism that runs before invoking the runnable on the target thread.
/// If the `should_invoke` method returns false, the runnable is discarded without
/// being invoked.
pub trait PreInvoke {
fn should_invoke(&self) -> bool {
true
}
}
/// A runnable for moving the async network events between threads.
struct ListenerRunnable<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> {
context: Arc<Mutex<Listener>>,
action: A,
}
impl<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> Runnable for ListenerRunnable<A, Listener> {
fn handler(self: Box<ListenerRunnable<A, Listener>>) {
let this = *self;
let mut context = this.context.lock().unwrap();
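// PreInvoke gates delivery: if the context no longer wants callbacks, the event is dropped below.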
if context.should_invoke() {
this.action.process(&mut *context);
}
}
} | } | random_line_split |
network_listener.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use bluetooth_traits::{BluetoothResponseListener, BluetoothResponseResult};
use net_traits::{Action, FetchResponseListener, FetchResponseMsg};
use script_thread::{Runnable, RunnableWrapper};
use std::sync::{Arc, Mutex};
use task_source::TaskSource;
use task_source::networking::NetworkingTaskSource;
/// An off-thread sink for async network event runnables. All such events are forwarded to
/// a target thread, where they are invoked on the provided context object.
pub struct NetworkListener<Listener: PreInvoke + Send + 'static> {
pub context: Arc<Mutex<Listener>>,
pub task_source: NetworkingTaskSource,
pub wrapper: Option<RunnableWrapper>,
}
impl<Listener: PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify<A: Action<Listener> + Send + 'static>(&self, action: A) {
let runnable = box ListenerRunnable {
context: self.context.clone(),
action: action,
};
let result = if let Some(ref wrapper) = self.wrapper {
self.task_source.queue_with_wrapper(runnable, wrapper)
} else {
self.task_source.queue_wrapperless(runnable)
};
if let Err(err) = result {
warn!("failed to deliver network data: {:?}", err);
}
}
}
// helps type inference
impl<Listener: FetchResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_fetch(&self, action: FetchResponseMsg) {
self.notify(action);
}
}
// helps type inference
impl<Listener: BluetoothResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_response(&self, action: BluetoothResponseResult) {
self.notify(action);
}
}
/// A gating mechanism that runs before invoking the runnable on the target thread.
/// If the `should_invoke` method returns false, the runnable is discarded without
/// being invoked.
pub trait PreInvoke {
fn | (&self) -> bool {
true
}
}
/// A runnable for moving the async network events between threads.
struct ListenerRunnable<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> {
context: Arc<Mutex<Listener>>,
action: A,
}
impl<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> Runnable for ListenerRunnable<A, Listener> {
fn handler(self: Box<ListenerRunnable<A, Listener>>) {
let this = *self;
let mut context = this.context.lock().unwrap();
if context.should_invoke() {
this.action.process(&mut *context);
}
}
}
| should_invoke | identifier_name |
network_listener.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use bluetooth_traits::{BluetoothResponseListener, BluetoothResponseResult};
use net_traits::{Action, FetchResponseListener, FetchResponseMsg};
use script_thread::{Runnable, RunnableWrapper};
use std::sync::{Arc, Mutex};
use task_source::TaskSource;
use task_source::networking::NetworkingTaskSource;
/// An off-thread sink for async network event runnables. All such events are forwarded to
/// a target thread, where they are invoked on the provided context object.
pub struct NetworkListener<Listener: PreInvoke + Send + 'static> {
pub context: Arc<Mutex<Listener>>,
pub task_source: NetworkingTaskSource,
pub wrapper: Option<RunnableWrapper>,
}
impl<Listener: PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify<A: Action<Listener> + Send + 'static>(&self, action: A) {
let runnable = box ListenerRunnable {
context: self.context.clone(),
action: action,
};
let result = if let Some(ref wrapper) = self.wrapper {
self.task_source.queue_with_wrapper(runnable, wrapper)
} else {
self.task_source.queue_wrapperless(runnable)
};
if let Err(err) = result {
warn!("failed to deliver network data: {:?}", err);
}
}
}
// helps type inference
impl<Listener: FetchResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_fetch(&self, action: FetchResponseMsg) {
self.notify(action);
}
}
// helps type inference
impl<Listener: BluetoothResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_response(&self, action: BluetoothResponseResult) |
}
/// A gating mechanism that runs before invoking the runnable on the target thread.
/// If the `should_invoke` method returns false, the runnable is discarded without
/// being invoked.
pub trait PreInvoke {
fn should_invoke(&self) -> bool {
true
}
}
/// A runnable for moving the async network events between threads.
struct ListenerRunnable<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> {
context: Arc<Mutex<Listener>>,
action: A,
}
impl<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> Runnable for ListenerRunnable<A, Listener> {
fn handler(self: Box<ListenerRunnable<A, Listener>>) {
let this = *self;
let mut context = this.context.lock().unwrap();
if context.should_invoke() {
this.action.process(&mut *context);
}
}
}
| {
self.notify(action);
} | identifier_body |
network_listener.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use bluetooth_traits::{BluetoothResponseListener, BluetoothResponseResult};
use net_traits::{Action, FetchResponseListener, FetchResponseMsg};
use script_thread::{Runnable, RunnableWrapper};
use std::sync::{Arc, Mutex};
use task_source::TaskSource;
use task_source::networking::NetworkingTaskSource;
/// An off-thread sink for async network event runnables. All such events are forwarded to
/// a target thread, where they are invoked on the provided context object.
pub struct NetworkListener<Listener: PreInvoke + Send + 'static> {
pub context: Arc<Mutex<Listener>>,
pub task_source: NetworkingTaskSource,
pub wrapper: Option<RunnableWrapper>,
}
impl<Listener: PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify<A: Action<Listener> + Send + 'static>(&self, action: A) {
let runnable = box ListenerRunnable {
context: self.context.clone(),
action: action,
};
let result = if let Some(ref wrapper) = self.wrapper {
self.task_source.queue_with_wrapper(runnable, wrapper)
} else | ;
if let Err(err) = result {
warn!("failed to deliver network data: {:?}", err);
}
}
}
// helps type inference
impl<Listener: FetchResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_fetch(&self, action: FetchResponseMsg) {
self.notify(action);
}
}
// helps type inference
impl<Listener: BluetoothResponseListener + PreInvoke + Send + 'static> NetworkListener<Listener> {
pub fn notify_response(&self, action: BluetoothResponseResult) {
self.notify(action);
}
}
/// A gating mechanism that runs before invoking the runnable on the target thread.
/// If the `should_invoke` method returns false, the runnable is discarded without
/// being invoked.
pub trait PreInvoke {
fn should_invoke(&self) -> bool {
true
}
}
/// A runnable for moving the async network events between threads.
struct ListenerRunnable<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> {
context: Arc<Mutex<Listener>>,
action: A,
}
impl<A: Action<Listener> + Send + 'static, Listener: PreInvoke + Send> Runnable for ListenerRunnable<A, Listener> {
fn handler(self: Box<ListenerRunnable<A, Listener>>) {
let this = *self;
let mut context = this.context.lock().unwrap();
if context.should_invoke() {
this.action.process(&mut *context);
}
}
}
| {
self.task_source.queue_wrapperless(runnable)
} | conditional_block |
tree_gravity.rs | //! Simple integration tests oriented towards gravity computations
extern crate acacia;
extern crate approx;
extern crate nalgebra;
extern crate quickcheck;
use acacia::partition::Ncube;
use acacia::{AssociatedData, DataQuery, Node, Positioned, Tree};
use approx::Relative;
use nalgebra::{distance, zero, Point2, Point3, Vector2, Vector3};
use quickcheck::{quickcheck, TestResult};
use std::fmt::Debug;
#[test]
fn tree_center_of_mass() {
fn tree_center_of_mass(data: Vec<(f64, (f64, f64))>) -> TestResult {
// Only test non-empty lists with positive masses
if data.is_empty() || data.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// No two points should be in the same place
for &(_, pi) in &data {
for &(_, pj) in &data {
if pi == pj {
return TestResult::discard();
}
}
}
// Compute center of mass in the traditional way
let (mps, ms) = data
.iter()
.map(|&(m, (x, y))| (Vector2::new(x, y) * m, m))
.fold((zero::<Vector2<f64>>(), 0.0f64), |(mps, ms), (mp, m)| {
(mps + mp, ms + m)
});
let com = mps / ms;
// Now use the tree
let tree = Tree::new(
data.iter().map(|&(m, (x, y))| Positioned {
object: m,
position: Point2::new(x, y),
}),
Ncube::new(Point2::origin(), 200.0f64),
(zero(), 0.0),
&|obj| ((obj.position - Point2::origin()) * obj.object, obj.object),
&|&(mps, ms), &(mp, m)| (mps + mp, ms + m),
)
.expect("Couldn't construct tree");
let (tree_mps, tree_ms) = *tree.data();
// …and compare
TestResult::from_bool(Relative::default().eq(&(tree_mps / tree_ms), &com))
}
quickcheck(tree_center_of_mass as fn(Vec<(f64, (f64, f64))>) -> TestResult);
}
#[test]
fn tree_gravity_approx() {
fn tree_gravity_approx(
starfield: Vec<(f64, (f64, f64, f64))>,
test_point: (f64, f64, f64),
) -> TestResult {
// We want to have at least one star
if starfield.is_empty() {
return TestResult::discard();
}
// Only test positive masses
if starfield.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// The test point should not be in the same place as any star
if starfield.iter().any(|&(_, p)| p == test_point) {
return TestResult::discard();
}
// No two stars should be in the same place
for &(_, pi) in &starfield {
for &(_, pj) in &starfield {
if pi == pj {
return TestResult::discard();
}
}
}
// (T, T, T) -> Point3<T>
fn pnt<T: Debug + PartialEq + Copy + 'static>(p: (T, T, T)) -> Point3<T> {
let (x, y, z) = p;
Point3::new(x, y, z)
}
let test_point = pnt(test_point);
// Newton's law of gravity for two point masses (with G = 1)
let newton = |(m, p1): (f64, Point3<f64>), p2| {
let diff: Vector3<f64> = p1 - p2;
let r = diff.norm();
diff * (m / r.powi(3))
};
// Calculate gravity exactly
let simple_gravity = starfield
.iter()
.map(|&(m, p)| newton((m, pnt(p)), test_point))
.fold(zero(), |a: Vector3<_>, b| a + b);
// Calculate gravity using a tree
let orig: Point3<f64> = Point3::origin();
let data_width = (test_point - orig).norm() * 2.0;
let width = if data_width < 200.0 {
200.0
} else {
data_width
};
let tree = Tree::new(
starfield.iter().map(|&(m, (x, y, z))| Positioned {
object: m,
position: Point3::new(x, y, z),
}),
Ncube::new(orig, width),
(orig, zero()),
&|obj| (obj.position, obj.object),
&|&(com1, m1), &(com2, m2)| {
if m1 + m2 > zero() {
(com1 + (com2 - com1) * (m2 / (m1 + m2)), m1 + m2)
} else {
(orig, zero())
}
},
)
.expect("Couldn't construct tree");
let theta = 0.5; // A bit arbitrary but this appears to work
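// Barnes-Hut-style opening criterion (a reading of the geometry below): keep descending
// while the test point lies within width / theta of the node (plus a correction for the
// offset between the cell's center and its center of mass); otherwise the node's
// aggregated (center of mass, mass) is used as a single pseudo-particle.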
let tree_gravity = tree
.query_data(|node| { | })
.map(|&(com, m)| newton((m, com), test_point))
.fold(zero::<Vector3<f64>>(), |a, b| a + b);
// Now the tree gravity should approximate the exact one, within 10 %
TestResult::from_bool(
Relative::default()
.epsilon(0.1 * simple_gravity.norm())
.eq(&simple_gravity, &tree_gravity),
)
}
quickcheck(
tree_gravity_approx as fn(Vec<(f64, (f64, f64, f64))>, (f64, f64, f64)) -> TestResult,
)
} | let &(ref center_of_mass, _) = node.data();
let d = distance(&test_point, center_of_mass);
let delta = distance(&node.partition().center(), center_of_mass);
d < node.partition().width() / theta + delta | random_line_split |
tree_gravity.rs | //! Simple integration tests oriented towards gravity computations
extern crate acacia;
extern crate approx;
extern crate nalgebra;
extern crate quickcheck;
use acacia::partition::Ncube;
use acacia::{AssociatedData, DataQuery, Node, Positioned, Tree};
use approx::Relative;
use nalgebra::{distance, zero, Point2, Point3, Vector2, Vector3};
use quickcheck::{quickcheck, TestResult};
use std::fmt::Debug;
#[test]
fn tree_center_of_mass() {
fn tree_center_of_mass(data: Vec<(f64, (f64, f64))>) -> TestResult {
// Only test non-empty lists with positive masses
if data.is_empty() || data.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// No two points should be in the same place
for &(_, pi) in &data {
for &(_, pj) in &data {
if pi == pj {
return TestResult::discard();
}
}
}
// Compute center of mass in the traditional way
let (mps, ms) = data
.iter()
.map(|&(m, (x, y))| (Vector2::new(x, y) * m, m))
.fold((zero::<Vector2<f64>>(), 0.0f64), |(mps, ms), (mp, m)| {
(mps + mp, ms + m)
});
let com = mps / ms;
// Now use the tree
let tree = Tree::new(
data.iter().map(|&(m, (x, y))| Positioned {
object: m,
position: Point2::new(x, y),
}),
Ncube::new(Point2::origin(), 200.0f64),
(zero(), 0.0),
&|obj| ((obj.position - Point2::origin()) * obj.object, obj.object),
&|&(mps, ms), &(mp, m)| (mps + mp, ms + m),
)
.expect("Couldn't construct tree");
let (tree_mps, tree_ms) = *tree.data();
// …and compare
TestResult::from_bool(Relative::default().eq(&(tree_mps / tree_ms), &com))
}
quickcheck(tree_center_of_mass as fn(Vec<(f64, (f64, f64))>) -> TestResult);
}
#[test]
fn tree_gravity_approx() {
| return TestResult::discard();
}
}
}
// (T, T, T) -> Point3<T>
fn pnt<T: Debug + PartialEq + Copy + 'static>(p: (T, T, T)) -> Point3<T> {
let (x, y, z) = p;
Point3::new(x, y, z)
}
let test_point = pnt(test_point);
// Newton's law of gravity for two point masses (with G = 1)
let newton = |(m, p1): (f64, Point3<f64>), p2| {
let diff: Vector3<f64> = p1 - p2;
let r = diff.norm();
diff * (m / r.powi(3))
};
// Calculate gravity exactly
let simple_gravity = starfield
.iter()
.map(|&(m, p)| newton((m, pnt(p)), test_point))
.fold(zero(), |a: Vector3<_>, b| a + b);
// Calculate gravity using a tree
let orig: Point3<f64> = Point3::origin();
let data_width = (test_point - orig).norm() * 2.0;
let width = if data_width < 200.0 {
200.0
} else {
data_width
};
let tree = Tree::new(
starfield.iter().map(|&(m, (x, y, z))| Positioned {
object: m,
position: Point3::new(x, y, z),
}),
Ncube::new(orig, width),
(orig, zero()),
&|obj| (obj.position, obj.object),
&|&(com1, m1), &(com2, m2)| {
if m1 + m2 > zero() {
(com1 + (com2 - com1) * (m2 / (m1 + m2)), m1 + m2)
} else {
(orig, zero())
}
},
)
.expect("Couldn't construct tree");
let theta = 0.5; // A bit arbitrary but this appears to work
let tree_gravity = tree
.query_data(|node| {
let &(ref center_of_mass, _) = node.data();
let d = distance(&test_point, center_of_mass);
let delta = distance(&node.partition().center(), center_of_mass);
d < node.partition().width() / theta + delta
})
.map(|&(com, m)| newton((m, com), test_point))
.fold(zero::<Vector3<f64>>(), |a, b| a + b);
// Now the tree gravity should approximate the exact one, within 10 %
TestResult::from_bool(
Relative::default()
.epsilon(0.1 * simple_gravity.norm())
.eq(&simple_gravity, &tree_gravity),
)
}
quickcheck(
tree_gravity_approx as fn(Vec<(f64, (f64, f64, f64))>, (f64, f64, f64)) -> TestResult,
)
}
| fn tree_gravity_approx(
starfield: Vec<(f64, (f64, f64, f64))>,
test_point: (f64, f64, f64),
) -> TestResult {
// We want to have at least one star
if starfield.is_empty() {
return TestResult::discard();
}
// Only test positive masses
if starfield.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// The test point should not be in the same place as any star
if starfield.iter().any(|&(_, p)| p == test_point) {
return TestResult::discard();
}
// No two stars should be in the same place
for &(_, pi) in &starfield {
for &(_, pj) in &starfield {
if pi == pj { | identifier_body |
tree_gravity.rs | //! Simple integration tests oriented towards gravity computations
extern crate acacia;
extern crate approx;
extern crate nalgebra;
extern crate quickcheck;
use acacia::partition::Ncube;
use acacia::{AssociatedData, DataQuery, Node, Positioned, Tree};
use approx::Relative;
use nalgebra::{distance, zero, Point2, Point3, Vector2, Vector3};
use quickcheck::{quickcheck, TestResult};
use std::fmt::Debug;
#[test]
fn tree_center_of_mass() {
fn tree_center_of_mass(data: Vec<(f64, (f64, f64))>) -> TestResult {
// Only test non-empty lists with positive masses
if data.is_empty() || data.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// No two points should be in the same place
for &(_, pi) in &data {
for &(_, pj) in &data {
if pi == pj {
return TestResult::discard();
}
}
}
// Compute center of mass in the traditional way
let (mps, ms) = data
.iter()
.map(|&(m, (x, y))| (Vector2::new(x, y) * m, m))
.fold((zero::<Vector2<f64>>(), 0.0f64), |(mps, ms), (mp, m)| {
(mps + mp, ms + m)
});
let com = mps / ms;
// Now use the tree
let tree = Tree::new(
data.iter().map(|&(m, (x, y))| Positioned {
object: m,
position: Point2::new(x, y),
}),
Ncube::new(Point2::origin(), 200.0f64),
(zero(), 0.0),
&|obj| ((obj.position - Point2::origin()) * obj.object, obj.object),
&|&(mps, ms), &(mp, m)| (mps + mp, ms + m),
)
.expect("Couldn't construct tree");
let (tree_mps, tree_ms) = *tree.data();
// …and compare
TestResult::from_bool(Relative::default().eq(&(tree_mps / tree_ms), &com))
}
quickcheck(tree_center_of_mass as fn(Vec<(f64, (f64, f64))>) -> TestResult);
}
#[test]
fn tr | {
fn tree_gravity_approx(
starfield: Vec<(f64, (f64, f64, f64))>,
test_point: (f64, f64, f64),
) -> TestResult {
// We want to have at least one star
if starfield.is_empty() {
return TestResult::discard();
}
// Only test positive masses
if starfield.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// The test point should not be in the same place as any star
if starfield.iter().any(|&(_, p)| p == test_point) {
return TestResult::discard();
}
// No two stars should be in the same place
for &(_, pi) in &starfield {
for &(_, pj) in &starfield {
if pi == pj {
return TestResult::discard();
}
}
}
// (T, T, T) -> Point3<T>
fn pnt<T: Debug + PartialEq + Copy + 'static>(p: (T, T, T)) -> Point3<T> {
let (x, y, z) = p;
Point3::new(x, y, z)
}
let test_point = pnt(test_point);
// Newton's law of gravity for two point masses (with G = 1)
let newton = |(m, p1): (f64, Point3<f64>), p2| {
let diff: Vector3<f64> = p1 - p2;
let r = diff.norm();
diff * (m / r.powi(3))
};
// Calculate gravity exactly
let simple_gravity = starfield
.iter()
.map(|&(m, p)| newton((m, pnt(p)), test_point))
.fold(zero(), |a: Vector3<_>, b| a + b);
// Calculate gravity using a tree
let orig: Point3<f64> = Point3::origin();
let data_width = (test_point - orig).norm() * 2.0;
let width = if data_width < 200.0 {
200.0
} else {
data_width
};
let tree = Tree::new(
starfield.iter().map(|&(m, (x, y, z))| Positioned {
object: m,
position: Point3::new(x, y, z),
}),
Ncube::new(orig, width),
(orig, zero()),
&|obj| (obj.position, obj.object),
&|&(com1, m1), &(com2, m2)| {
if m1 + m2 > zero() {
(com1 + (com2 - com1) * (m2 / (m1 + m2)), m1 + m2)
} else {
(orig, zero())
}
},
)
.expect("Couldn't construct tree");
let theta = 0.5; // A bit arbitrary but this appears to work
let tree_gravity = tree
.query_data(|node| {
let &(ref center_of_mass, _) = node.data();
let d = distance(&test_point, center_of_mass);
let delta = distance(&node.partition().center(), center_of_mass);
d < node.partition().width() / theta + delta
})
.map(|&(com, m)| newton((m, com), test_point))
.fold(zero::<Vector3<f64>>(), |a, b| a + b);
// Now the tree gravity should approximate the exact one, within 10 %
TestResult::from_bool(
Relative::default()
.epsilon(0.1 * simple_gravity.norm())
.eq(&simple_gravity, &tree_gravity),
)
}
quickcheck(
tree_gravity_approx as fn(Vec<(f64, (f64, f64, f64))>, (f64, f64, f64)) -> TestResult,
)
}
| ee_gravity_approx() | identifier_name |
tree_gravity.rs | //! Simple integration tests oriented towards gravity computations
extern crate acacia;
extern crate approx;
extern crate nalgebra;
extern crate quickcheck;
use acacia::partition::Ncube;
use acacia::{AssociatedData, DataQuery, Node, Positioned, Tree};
use approx::Relative;
use nalgebra::{distance, zero, Point2, Point3, Vector2, Vector3};
use quickcheck::{quickcheck, TestResult};
use std::fmt::Debug;
#[test]
fn tree_center_of_mass() {
fn tree_center_of_mass(data: Vec<(f64, (f64, f64))>) -> TestResult {
// Only test non-empty lists with positive masses
if data.is_empty() || data.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// No two points should be in the same place
for &(_, pi) in &data {
for &(_, pj) in &data {
if pi == pj {
return TestResult::discard();
}
}
}
// Compute center of mass in the traditional way
let (mps, ms) = data
.iter()
.map(|&(m, (x, y))| (Vector2::new(x, y) * m, m))
.fold((zero::<Vector2<f64>>(), 0.0f64), |(mps, ms), (mp, m)| {
(mps + mp, ms + m)
});
let com = mps / ms;
// Now use the tree
let tree = Tree::new(
data.iter().map(|&(m, (x, y))| Positioned {
object: m,
position: Point2::new(x, y),
}),
Ncube::new(Point2::origin(), 200.0f64),
(zero(), 0.0),
&|obj| ((obj.position - Point2::origin()) * obj.object, obj.object),
&|&(mps, ms), &(mp, m)| (mps + mp, ms + m),
)
.expect("Couldn't construct tree");
let (tree_mps, tree_ms) = *tree.data();
// …and compare
TestResult::from_bool(Relative::default().eq(&(tree_mps / tree_ms), &com))
}
quickcheck(tree_center_of_mass as fn(Vec<(f64, (f64, f64))>) -> TestResult);
}
#[test]
fn tree_gravity_approx() {
fn tree_gravity_approx(
starfield: Vec<(f64, (f64, f64, f64))>,
test_point: (f64, f64, f64),
) -> TestResult {
// We want to have at least one star
if starfield.is_empty() {
return TestResult::discard();
}
// Only test positive masses
if starfield.iter().any(|&(m, _)| m <= 0.0) {
return TestResult::discard();
}
// The test point should not be in the same place as any star
if starfield.iter().any(|&(_, p)| p == test_point) {
return TestResult::discard();
}
// No two stars should be in the same place
for &(_, pi) in &starfield {
for &(_, pj) in &starfield {
if pi == pj {
return TestResult::discard();
}
}
}
// (T, T, T) -> Point3<T>
fn pnt<T: Debug + PartialEq + Copy + 'static>(p: (T, T, T)) -> Point3<T> {
let (x, y, z) = p;
Point3::new(x, y, z)
}
let test_point = pnt(test_point);
// Newton's law of gravity for two point masses (with G = 1)
let newton = |(m, p1): (f64, Point3<f64>), p2| {
let diff: Vector3<f64> = p1 - p2;
let r = diff.norm();
diff * (m / r.powi(3))
};
// Calculate gravity exactly
let simple_gravity = starfield
.iter()
.map(|&(m, p)| newton((m, pnt(p)), test_point))
.fold(zero(), |a: Vector3<_>, b| a + b);
// Calculate gravity using a tree
let orig: Point3<f64> = Point3::origin();
let data_width = (test_point - orig).norm() * 2.0;
let width = if data_width < 200.0 {
200.0
} else {
data_width
};
let tree = Tree::new(
starfield.iter().map(|&(m, (x, y, z))| Positioned {
object: m,
position: Point3::new(x, y, z),
}),
Ncube::new(orig, width),
(orig, zero()),
&|obj| (obj.position, obj.object),
&|&(com1, m1), &(com2, m2)| {
if m1 + m2 > zero() {
| lse {
(orig, zero())
}
},
)
.expect("Couldn't construct tree");
let theta = 0.5; // A bit arbitrary but this appears to work
let tree_gravity = tree
.query_data(|node| {
let &(ref center_of_mass, _) = node.data();
let d = distance(&test_point, center_of_mass);
let delta = distance(&node.partition().center(), center_of_mass);
d < node.partition().width() / theta + delta
})
.map(|&(com, m)| newton((m, com), test_point))
.fold(zero::<Vector3<f64>>(), |a, b| a + b);
// Now the tree gravity should approximate the exact one, within 10 %
TestResult::from_bool(
Relative::default()
.epsilon(0.1 * simple_gravity.norm())
.eq(&simple_gravity, &tree_gravity),
)
}
quickcheck(
tree_gravity_approx as fn(Vec<(f64, (f64, f64, f64))>, (f64, f64, f64)) -> TestResult,
)
}
| (com1 + (com2 - com1) * (m2 / (m1 + m2)), m1 + m2)
} e | conditional_block |
derive_object.rs | use crate::{
result::{GraphQLScope, UnsupportedAttribute},
util::{self, span_container::SpanContainer, RenameRule},
};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{self, ext::IdentExt, spanned::Spanned, Data, Fields};
pub fn build_derive_object(ast: syn::DeriveInput, error: GraphQLScope) -> syn::Result<TokenStream> {
let ast_span = ast.span();
let struct_fields = match ast.data {
Data::Struct(data) => match data.fields {
Fields::Named(fields) => fields.named,
_ => return Err(error.custom_error(ast_span, "only named fields are allowed")),
},
_ => return Err(error.custom_error(ast_span, "can only be applied to structs")),
};
// Parse attributes.
let attrs = util::ObjectAttributes::from_attrs(&ast.attrs)?;
let ident = &ast.ident;
let name = attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| ident.unraw().to_string());
let fields = struct_fields
.into_iter()
.filter_map(|field| {
let span = field.span();
let field_attrs = match util::FieldAttributes::from_attrs(
&field.attrs,
util::FieldAttributeParseMode::Object,
) {
Ok(attrs) => attrs,
Err(e) => {
proc_macro_error::emit_error!(e);
return None;
}
};
if field_attrs.skip.is_some() {
return None;
}
let field_name = &field.ident.unwrap();
let name = field_attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| {
attrs
.rename
.unwrap_or(RenameRule::CamelCase)
.apply(&field_name.unraw().to_string())
});
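// Double-underscore names are reserved for GraphQL introspection, hence the check below.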
if name.starts_with("__") {
error.no_double_underscore(if let Some(name) = field_attrs.name {
name.span_ident()
} else {
field_name.span()
});
}
if let Some(default) = field_attrs.default {
error.unsupported_attribute_within(
default.span_ident(),
UnsupportedAttribute::Default,
);
}
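// The derived resolver simply borrows the corresponding struct field.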
let resolver_code = quote!(
&self. #field_name
);
Some(util::GraphQLTypeDefinitionField {
name,
_type: field.ty,
args: Vec::new(),
description: field_attrs.description.map(SpanContainer::into_inner),
deprecation: field_attrs.deprecation.map(SpanContainer::into_inner),
resolver_code,
default: None,
is_type_inferred: true,
is_async: false,
span,
})
})
.collect::<Vec<_>>();
// Early abort after checking all fields
proc_macro_error::abort_if_dirty(); | }
if !attrs.is_internal && name.starts_with("__") {
error.no_double_underscore(if let Some(name) = attrs.name {
name.span_ident()
} else {
ident.span()
});
}
if fields.is_empty() {
error.not_empty(ast_span);
}
// Early abort after GraphQL properties
proc_macro_error::abort_if_dirty();
let definition = util::GraphQLTypeDefiniton {
name,
_type: syn::parse_str(&ast.ident.to_string()).unwrap(),
context: attrs.context.map(SpanContainer::into_inner),
scalar: attrs.scalar.map(SpanContainer::into_inner),
description: attrs.description.map(SpanContainer::into_inner),
fields,
generics: ast.generics,
interfaces: attrs
.interfaces
.into_iter()
.map(SpanContainer::into_inner)
.collect(),
include_type_generics: true,
generic_scalar: true,
no_async: attrs.no_async.is_some(),
};
Ok(definition.into_tokens())
} |
if let Some(duplicates) =
crate::util::duplicate::Duplicate::find_by_key(&fields, |field| field.name.as_str())
{
error.duplicate(duplicates.iter()); | random_line_split |
derive_object.rs | use crate::{
result::{GraphQLScope, UnsupportedAttribute},
util::{self, span_container::SpanContainer, RenameRule},
};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{self, ext::IdentExt, spanned::Spanned, Data, Fields};
pub fn build_derive_object(ast: syn::DeriveInput, error: GraphQLScope) -> syn::Result<TokenStream> {
let ast_span = ast.span();
let struct_fields = match ast.data {
Data::Struct(data) => match data.fields {
Fields::Named(fields) => fields.named,
_ => return Err(error.custom_error(ast_span, "only named fields are allowed")),
},
_ => return Err(error.custom_error(ast_span, "can only be applied to structs")),
};
// Parse attributes.
let attrs = util::ObjectAttributes::from_attrs(&ast.attrs)?;
let ident = &ast.ident;
let name = attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| ident.unraw().to_string());
let fields = struct_fields
.into_iter()
.filter_map(|field| {
let span = field.span();
let field_attrs = match util::FieldAttributes::from_attrs(
&field.attrs,
util::FieldAttributeParseMode::Object,
) {
Ok(attrs) => attrs,
Err(e) => {
proc_macro_error::emit_error!(e);
return None;
}
};
if field_attrs.skip.is_some() |
let field_name = &field.ident.unwrap();
let name = field_attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| {
attrs
.rename
.unwrap_or(RenameRule::CamelCase)
.apply(&field_name.unraw().to_string())
});
if name.starts_with("__") {
error.no_double_underscore(if let Some(name) = field_attrs.name {
name.span_ident()
} else {
field_name.span()
});
}
if let Some(default) = field_attrs.default {
error.unsupported_attribute_within(
default.span_ident(),
UnsupportedAttribute::Default,
);
}
let resolver_code = quote!(
&self. #field_name
);
Some(util::GraphQLTypeDefinitionField {
name,
_type: field.ty,
args: Vec::new(),
description: field_attrs.description.map(SpanContainer::into_inner),
deprecation: field_attrs.deprecation.map(SpanContainer::into_inner),
resolver_code,
default: None,
is_type_inferred: true,
is_async: false,
span,
})
})
.collect::<Vec<_>>();
// Early abort after checking all fields
proc_macro_error::abort_if_dirty();
if let Some(duplicates) =
crate::util::duplicate::Duplicate::find_by_key(&fields, |field| field.name.as_str())
{
error.duplicate(duplicates.iter());
}
if !attrs.is_internal && name.starts_with("__") {
error.no_double_underscore(if let Some(name) = attrs.name {
name.span_ident()
} else {
ident.span()
});
}
if fields.is_empty() {
error.not_empty(ast_span);
}
// Early abort after GraphQL properties
proc_macro_error::abort_if_dirty();
let definition = util::GraphQLTypeDefiniton {
name,
_type: syn::parse_str(&ast.ident.to_string()).unwrap(),
context: attrs.context.map(SpanContainer::into_inner),
scalar: attrs.scalar.map(SpanContainer::into_inner),
description: attrs.description.map(SpanContainer::into_inner),
fields,
generics: ast.generics,
interfaces: attrs
.interfaces
.into_iter()
.map(SpanContainer::into_inner)
.collect(),
include_type_generics: true,
generic_scalar: true,
no_async: attrs.no_async.is_some(),
};
Ok(definition.into_tokens())
}
| {
return None;
} | conditional_block |
derive_object.rs | use crate::{
result::{GraphQLScope, UnsupportedAttribute},
util::{self, span_container::SpanContainer, RenameRule},
};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{self, ext::IdentExt, spanned::Spanned, Data, Fields};
pub fn | (ast: syn::DeriveInput, error: GraphQLScope) -> syn::Result<TokenStream> {
let ast_span = ast.span();
let struct_fields = match ast.data {
Data::Struct(data) => match data.fields {
Fields::Named(fields) => fields.named,
_ => return Err(error.custom_error(ast_span, "only named fields are allowed")),
},
_ => return Err(error.custom_error(ast_span, "can only be applied to structs")),
};
// Parse attributes.
let attrs = util::ObjectAttributes::from_attrs(&ast.attrs)?;
let ident = &ast.ident;
let name = attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| ident.unraw().to_string());
let fields = struct_fields
.into_iter()
.filter_map(|field| {
let span = field.span();
let field_attrs = match util::FieldAttributes::from_attrs(
&field.attrs,
util::FieldAttributeParseMode::Object,
) {
Ok(attrs) => attrs,
Err(e) => {
proc_macro_error::emit_error!(e);
return None;
}
};
if field_attrs.skip.is_some() {
return None;
}
let field_name = &field.ident.unwrap();
let name = field_attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| {
attrs
.rename
.unwrap_or(RenameRule::CamelCase)
.apply(&field_name.unraw().to_string())
});
if name.starts_with("__") {
error.no_double_underscore(if let Some(name) = field_attrs.name {
name.span_ident()
} else {
field_name.span()
});
}
if let Some(default) = field_attrs.default {
error.unsupported_attribute_within(
default.span_ident(),
UnsupportedAttribute::Default,
);
}
let resolver_code = quote!(
&self. #field_name
);
Some(util::GraphQLTypeDefinitionField {
name,
_type: field.ty,
args: Vec::new(),
description: field_attrs.description.map(SpanContainer::into_inner),
deprecation: field_attrs.deprecation.map(SpanContainer::into_inner),
resolver_code,
default: None,
is_type_inferred: true,
is_async: false,
span,
})
})
.collect::<Vec<_>>();
// Early abort after checking all fields
proc_macro_error::abort_if_dirty();
if let Some(duplicates) =
crate::util::duplicate::Duplicate::find_by_key(&fields, |field| field.name.as_str())
{
error.duplicate(duplicates.iter());
}
if !attrs.is_internal && name.starts_with("__") {
error.no_double_underscore(if let Some(name) = attrs.name {
name.span_ident()
} else {
ident.span()
});
}
if fields.is_empty() {
error.not_empty(ast_span);
}
// Early abort after GraphQL properties
proc_macro_error::abort_if_dirty();
let definition = util::GraphQLTypeDefiniton {
name,
_type: syn::parse_str(&ast.ident.to_string()).unwrap(),
context: attrs.context.map(SpanContainer::into_inner),
scalar: attrs.scalar.map(SpanContainer::into_inner),
description: attrs.description.map(SpanContainer::into_inner),
fields,
generics: ast.generics,
interfaces: attrs
.interfaces
.into_iter()
.map(SpanContainer::into_inner)
.collect(),
include_type_generics: true,
generic_scalar: true,
no_async: attrs.no_async.is_some(),
};
Ok(definition.into_tokens())
}
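// NOTE (editor addition, not part of the original juniper source): a minimal,
// self-contained sketch of the default camelCase renaming applied to field
// names above. `to_camel_case` is a hypothetical stand-in for
// `RenameRule::CamelCase.apply`, whose real implementation is not shown here.
#[cfg(test)]
mod camel_case_rename_sketch {
    fn to_camel_case(s: &str) -> String {
        let mut out = String::new();
        let mut upper_next = false;
        for c in s.chars() {
            if c == '_' {
                upper_next = true;
            } else if upper_next {
                out.extend(c.to_uppercase());
                upper_next = false;
            } else {
                out.push(c);
            }
        }
        out
    }

    #[test]
    fn renames_snake_case_fields() {
        // A field named `home_planet` would be exposed as `homePlanet`.
        assert_eq!(to_camel_case("home_planet"), "homePlanet");
        assert_eq!(to_camel_case("id"), "id");
    }
}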
| build_derive_object | identifier_name |
derive_object.rs | use crate::{
result::{GraphQLScope, UnsupportedAttribute},
util::{self, span_container::SpanContainer, RenameRule},
};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{self, ext::IdentExt, spanned::Spanned, Data, Fields};
pub fn build_derive_object(ast: syn::DeriveInput, error: GraphQLScope) -> syn::Result<TokenStream> | let fields = struct_fields
.into_iter()
.filter_map(|field| {
let span = field.span();
let field_attrs = match util::FieldAttributes::from_attrs(
&field.attrs,
util::FieldAttributeParseMode::Object,
) {
Ok(attrs) => attrs,
Err(e) => {
proc_macro_error::emit_error!(e);
return None;
}
};
if field_attrs.skip.is_some() {
return None;
}
let field_name = &field.ident.unwrap();
let name = field_attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| {
attrs
.rename
.unwrap_or(RenameRule::CamelCase)
.apply(&field_name.unraw().to_string())
});
if name.starts_with("__") {
error.no_double_underscore(if let Some(name) = field_attrs.name {
name.span_ident()
} else {
field_name.span()
});
}
if let Some(default) = field_attrs.default {
error.unsupported_attribute_within(
default.span_ident(),
UnsupportedAttribute::Default,
);
}
let resolver_code = quote!(
&self. #field_name
);
Some(util::GraphQLTypeDefinitionField {
name,
_type: field.ty,
args: Vec::new(),
description: field_attrs.description.map(SpanContainer::into_inner),
deprecation: field_attrs.deprecation.map(SpanContainer::into_inner),
resolver_code,
default: None,
is_type_inferred: true,
is_async: false,
span,
})
})
.collect::<Vec<_>>();
// Early abort after checking all fields
proc_macro_error::abort_if_dirty();
if let Some(duplicates) =
crate::util::duplicate::Duplicate::find_by_key(&fields, |field| field.name.as_str())
{
error.duplicate(duplicates.iter());
}
if !attrs.is_internal && name.starts_with("__") {
error.no_double_underscore(if let Some(name) = attrs.name {
name.span_ident()
} else {
ident.span()
});
}
if fields.is_empty() {
error.not_empty(ast_span);
}
// Early abort after GraphQL properties
proc_macro_error::abort_if_dirty();
let definition = util::GraphQLTypeDefiniton {
name,
_type: syn::parse_str(&ast.ident.to_string()).unwrap(),
context: attrs.context.map(SpanContainer::into_inner),
scalar: attrs.scalar.map(SpanContainer::into_inner),
description: attrs.description.map(SpanContainer::into_inner),
fields,
generics: ast.generics,
interfaces: attrs
.interfaces
.into_iter()
.map(SpanContainer::into_inner)
.collect(),
include_type_generics: true,
generic_scalar: true,
no_async: attrs.no_async.is_some(),
};
Ok(definition.into_tokens())
}
| {
let ast_span = ast.span();
let struct_fields = match ast.data {
Data::Struct(data) => match data.fields {
Fields::Named(fields) => fields.named,
_ => return Err(error.custom_error(ast_span, "only named fields are allowed")),
},
_ => return Err(error.custom_error(ast_span, "can only be applied to structs")),
};
// Parse attributes.
let attrs = util::ObjectAttributes::from_attrs(&ast.attrs)?;
let ident = &ast.ident;
let name = attrs
.name
.clone()
.map(SpanContainer::into_inner)
.unwrap_or_else(|| ident.unraw().to_string());
| identifier_body |
rscope.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty;
use std::vec;
use syntax::ast;
use syntax::codemap::Span;
use syntax::opt_vec::OptVec;
/// Defines strategies for handling regions that are omitted. For
/// example, if one writes the type `&Foo`, then the lifetime of
/// this borrowed pointer has been omitted. When converting this
/// type, the generic functions in astconv will invoke `anon_regions`
/// on the provided region-scope to decide how to translate this
/// omitted region.
///
/// It is not always legal to omit regions, therefore `anon_regions`
/// can return `Err(())` to indicate that this is not a scope in which
/// regions can legally be omitted.
pub trait RegionScope {
fn anon_regions(&self,
span: Span,
count: uint)
-> Result<~[ty::Region], ()>;
}
// A scope in which all regions must be explicitly named
pub struct ExplicitRscope;
impl RegionScope for ExplicitRscope {
fn anon_regions(&self,
_span: Span,
_count: uint)
-> Result<~[ty::Region], ()> {
Err(())
}
}
/// A scope in which we generate anonymous, late-bound regions for
/// omitted regions. This occurs in function signatures.
pub struct BindingRscope {
binder_id: ast::NodeId,
anon_bindings: @mut uint
}
impl BindingRscope {
pub fn new(binder_id: ast::NodeId) -> BindingRscope {
BindingRscope {
binder_id: binder_id,
anon_bindings: @mut 0
}
} | _: Span,
count: uint)
-> Result<~[ty::Region], ()> {
let idx = *self.anon_bindings;
*self.anon_bindings += count;
Ok(vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
ty::BrAnon(idx + i))))
}
}
pub fn bound_type_regions(defs: &[ty::RegionParameterDef])
-> OptVec<ty::Region> {
assert!(defs.iter().all(|def| def.def_id.crate == ast::LOCAL_CRATE));
defs.iter().enumerate().map(
|(i, def)| ty::ReEarlyBound(def.def_id.node, i, def.ident)).collect()
} | }
impl RegionScope for BindingRscope {
fn anon_regions(&self, | random_line_split |
rscope.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty;
use std::vec;
use syntax::ast;
use syntax::codemap::Span;
use syntax::opt_vec::OptVec;
/// Defines strategies for handling regions that are omitted. For
/// example, if one writes the type `&Foo`, then the lifetime of
/// this borrowed pointer has been omitted. When converting this
/// type, the generic functions in astconv will invoke `anon_regions`
/// on the provided region-scope to decide how to translate this
/// omitted region.
///
/// It is not always legal to omit regions, therefore `anon_regions`
/// can return `Err(())` to indicate that this is not a scope in which
/// regions can legally be omitted.
pub trait RegionScope {
fn anon_regions(&self,
span: Span,
count: uint)
-> Result<~[ty::Region], ()>;
}
// A scope in which all regions must be explicitly named
pub struct ExplicitRscope;
impl RegionScope for ExplicitRscope {
fn anon_regions(&self,
_span: Span,
_count: uint)
-> Result<~[ty::Region], ()> {
Err(())
}
}
/// A scope in which we generate anonymous, late-bound regions for
/// omitted regions. This occurs in function signatures.
pub struct BindingRscope {
binder_id: ast::NodeId,
anon_bindings: @mut uint
}
impl BindingRscope {
pub fn new(binder_id: ast::NodeId) -> BindingRscope {
BindingRscope {
binder_id: binder_id,
anon_bindings: @mut 0
}
}
}
impl RegionScope for BindingRscope {
fn anon_regions(&self,
_: Span,
count: uint)
-> Result<~[ty::Region], ()> {
let idx = *self.anon_bindings;
*self.anon_bindings += count;
Ok(vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
ty::BrAnon(idx + i))))
}
}
pub fn bound_type_regions(defs: &[ty::RegionParameterDef])
-> OptVec<ty::Region> | {
assert!(defs.iter().all(|def| def.def_id.crate == ast::LOCAL_CRATE));
defs.iter().enumerate().map(
|(i, def)| ty::ReEarlyBound(def.def_id.node, i, def.ident)).collect()
} | identifier_body |
|
rscope.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty;
use std::vec;
use syntax::ast;
use syntax::codemap::Span;
use syntax::opt_vec::OptVec;
/// Defines strategies for handling regions that are omitted. For
/// example, if one writes the type `&Foo`, then the lifetime of
/// this borrowed pointer has been omitted. When converting this
/// type, the generic functions in astconv will invoke `anon_regions`
/// on the provided region-scope to decide how to translate this
/// omitted region.
///
/// It is not always legal to omit regions, therefore `anon_regions`
/// can return `Err(())` to indicate that this is not a scope in which
/// regions can legally be omitted.
pub trait RegionScope {
fn anon_regions(&self,
span: Span,
count: uint)
-> Result<~[ty::Region], ()>;
}
// A scope in which all regions must be explicitly named
pub struct | ;
impl RegionScope for ExplicitRscope {
fn anon_regions(&self,
_span: Span,
_count: uint)
-> Result<~[ty::Region], ()> {
Err(())
}
}
/// A scope in which we generate anonymous, late-bound regions for
/// omitted regions. This occurs in function signatures.
pub struct BindingRscope {
binder_id: ast::NodeId,
anon_bindings: @mut uint
}
impl BindingRscope {
pub fn new(binder_id: ast::NodeId) -> BindingRscope {
BindingRscope {
binder_id: binder_id,
anon_bindings: @mut 0
}
}
}
impl RegionScope for BindingRscope {
fn anon_regions(&self,
_: Span,
count: uint)
-> Result<~[ty::Region], ()> {
let idx = *self.anon_bindings;
*self.anon_bindings += count;
Ok(vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
ty::BrAnon(idx + i))))
}
}
pub fn bound_type_regions(defs: &[ty::RegionParameterDef])
-> OptVec<ty::Region> {
assert!(defs.iter().all(|def| def.def_id.crate == ast::LOCAL_CRATE));
defs.iter().enumerate().map(
|(i, def)| ty::ReEarlyBound(def.def_id.node, i, def.ident)).collect()
}
| ExplicitRscope | identifier_name |
lib.rs | //! EPUB library
//! lib to read and navigate through an epub file's contents
//!
//! # Examples
//!
//! ## Opening
//!
//! ```
//! use epub::doc::EpubDoc;
//! let doc = EpubDoc::new("test.epub");
//! assert!(doc.is_ok());
//! let doc = doc.unwrap();
//!
//! ```
//!
//! ## Getting doc metadata
//!
//! Metadata is a HashMap storing all metadata defined in the epub
//!
//! ```
//! # use epub::doc::EpubDoc;
//! # let doc = EpubDoc::new("test.epub");
//! # let doc = doc.unwrap();
//! let title = doc.mdata("title");
//! assert_eq!(title.unwrap(), "Todo es mío");
//! ```
//!
//! ## Accessing resources
//!
//! The `resources` field stores every resource defined in the epub,
//! indexed by its id, together with its full internal path and mimetype.
//! It is a HashMap<a: String, (b: String, c: String)> where 'a' is the
//! resource id, 'b' is the resource full path and 'c' is the resource
//! mimetype.
//!
//! ```
//! # use epub::doc::EpubDoc;
//! # use std::path::Path;
//! # let doc = EpubDoc::new("test.epub");
//! # let doc = doc.unwrap();
//! assert_eq!(23, doc.resources.len());
//! let tpage = doc.resources.get("titlepage.xhtml");
//! assert_eq!(tpage.unwrap().0, Path::new("OEBPS/Text/titlepage.xhtml"));
//! assert_eq!(tpage.unwrap().1, "application/xhtml+xml");
//! ```
//!
//! ## Navigating using the spine
//!
//! Spine is a Vec<String> storing the epub spine as resources ids
//!
//! ```
//! # use epub::doc::EpubDoc;
//! # let doc = EpubDoc::new("test.epub");
//! # let doc = doc.unwrap();
//! assert_eq!(17, doc.spine.len());
//! assert_eq!("titlepage.xhtml", doc.spine[0]);
//! ```
//!
//! ## Navigation using the doc internal state
//!
//! ```
//! use epub::doc::EpubDoc;
//! let doc = EpubDoc::new("test.epub");
//! let mut doc = doc.unwrap();
//! assert_eq!(0, doc.get_current_page());
//! assert_eq!("application/xhtml+xml", doc.get_current_mime().unwrap()); | //! doc.go_next();
//! assert_eq!("001.xhtml", doc.get_current_id().unwrap());
//! doc.go_prev();
//! assert_eq!("000.xhtml", doc.get_current_id().unwrap());
//!
//! doc.set_current_page(2);
//! assert_eq!("001.xhtml", doc.get_current_id().unwrap());
//! assert_eq!(2, doc.get_current_page());
//! assert!(doc.set_current_page(50).is_err());
//!
//! // doc.get_current() will return a Vec<u8> with the current page content
//! // doc.get_current_str() will return a String with the current page content
//! ```
//!
//! ## Getting the cover
//!
//! ```ignore
//! use std::fs;
//! use std::io::Write;
//! use epub::doc::EpubDoc;
//!
//! let doc = EpubDoc::new("test.epub");
//! assert!(doc.is_ok());
//! let mut doc = doc.unwrap();
//!
//! let cover_data = doc.get_cover().unwrap();
//!
//! let f = fs::File::create("/tmp/cover.png");
//! assert!(f.is_ok());
//! let mut f = f.unwrap();
//! let resp = f.write_all(&cover_data);
//! ```
mod xmlutils;
pub mod archive;
pub mod doc; | //!
//! doc.go_next();
//! assert_eq!("000.xhtml", doc.get_current_id().unwrap()); | random_line_split |
replace_fallback.rs | use std::io;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use ogg::{OggTrackBuf};
use super::super::{RequestType, Request};
use ::proto::{self, Deserialize, Serialize};
/// Skips to the end of the currently playing track
#[derive(Clone)]
pub struct ReplaceFallbackRequest {
pub track: OggTrackBuf,
pub metadata: Option<Vec<(String, String)>>,
}
impl Deserialize for ReplaceFallbackRequest {
fn read(buf: &mut io::Cursor<Vec<u8>>) -> io::Result<Self> | let track = match track {
Some(track) => track,
None => return Err(io::Error::new(io::ErrorKind::Other, "missing field: track")),
};
let track = try!(OggTrackBuf::new(track)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid ogg")));
Ok(ReplaceFallbackRequest {
track: track,
metadata: metadata,
})
}
}
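// Wire layout used by this Serialize impl (and mirrored by the Deserialize impl):
// a u16 TYPE_STRUCT tag, a u32 field count (1, or 2 when metadata is present),
// then each field as its name string followed by its value -- the raw Ogg
// track bytes for "track" and the (key, value) pairs for "metadata".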
impl Serialize for ReplaceFallbackRequest {
fn write(&self, buf: &mut io::Cursor<Vec<u8>>) -> io::Result<()> {
try!(buf.write_u16::<BigEndian>(proto::TYPE_STRUCT));
let length = if self.metadata.is_some() { 2 } else { 1 };
try!(buf.write_u32::<BigEndian>(length));
try!(Serialize::write("track", buf));
try!(Serialize::write(self.track.as_u8_slice(), buf));
if let Some(ref metadata) = self.metadata {
try!(Serialize::write("metadata", buf));
try!(Serialize::write(&metadata[..], buf));
}
Ok(())
}
}
impl Request for ReplaceFallbackRequest {
type Value = ();
type Error = ReplaceFallbackError;
fn req_type(&self) -> RequestType {
RequestType::ReplaceFallback
}
}
pub type ReplaceFallbackResult = Result<(), ReplaceFallbackError>;
#[derive(Debug, Clone)]
pub enum ReplaceFallbackError {
InvalidTrack = 1,
BadSampleRate = 2,
Full = 3,
}
impl ReplaceFallbackError {
pub fn to_u32(&self) -> u32 {
self.clone() as u32
}
pub fn from_u32(val: u32) -> Option<ReplaceFallbackError> {
match val {
1 => Some(ReplaceFallbackError::InvalidTrack),
2 => Some(ReplaceFallbackError::BadSampleRate),
3 => Some(ReplaceFallbackError::Full),
_ => None
}
}
}
impl Deserialize for ReplaceFallbackError {
fn read(buf: &mut io::Cursor<Vec<u8>>) -> io::Result<Self> {
let num: u32 = try!(Deserialize::read(buf));
ReplaceFallbackError::from_u32(num)
.ok_or_else(|| {
io::Error::new(io::ErrorKind::Other, "unexpected ReplaceFallbackError value")
})
}
}
impl Serialize for ReplaceFallbackError {
fn write(&self, buf: &mut io::Cursor<Vec<u8>>) -> io::Result<()> {
try!(Serialize::write(&self.to_u32(), buf));
Ok(())
}
}
| {
try!(proto::expect_type(buf, proto::TYPE_STRUCT));
let field_count = try!(buf.read_u32::<BigEndian>());
let mut track: Option<Vec<u8>> = None;
let mut metadata: Option<Vec<(String, String)>> = None;
for _ in 0..field_count {
let field_name: String = try!(Deserialize::read(buf));
match &field_name[..] {
"track" => {
track = Some(try!(Deserialize::read(buf)));
},
"metadata" => {
metadata = Some(try!(Deserialize::read(buf)));
}
_ => try!(proto::skip_entity(buf)),
}
}
| identifier_body |
replace_fallback.rs | use std::io;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use ogg::{OggTrackBuf};
use super::super::{RequestType, Request};
use ::proto::{self, Deserialize, Serialize};
/// Skips to the end of the currently playing track
#[derive(Clone)]
pub struct ReplaceFallbackRequest {
pub track: OggTrackBuf,
pub metadata: Option<Vec<(String, String)>>,
}
impl Deserialize for ReplaceFallbackRequest {
fn read(buf: &mut io::Cursor<Vec<u8>>) -> io::Result<Self> {
try!(proto::expect_type(buf, proto::TYPE_STRUCT));
let field_count = try!(buf.read_u32::<BigEndian>());
let mut track: Option<Vec<u8>> = None;
let mut metadata: Option<Vec<(String, String)>> = None;
for _ in 0..field_count {
let field_name: String = try!(Deserialize::read(buf));
match &field_name[..] {
"track" => {
track = Some(try!(Deserialize::read(buf)));
},
"metadata" => {
metadata = Some(try!(Deserialize::read(buf)));
}
_ => try!(proto::skip_entity(buf)),
}
}
let track = match track {
Some(track) => track,
None => return Err(io::Error::new(io::ErrorKind::Other, "missing field: track")),
};
let track = try!(OggTrackBuf::new(track)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid ogg")));
Ok(ReplaceFallbackRequest {
track: track,
metadata: metadata,
})
}
}
impl Serialize for ReplaceFallbackRequest {
fn write(&self, buf: &mut io::Cursor<Vec<u8>>) -> io::Result<()> {
try!(buf.write_u16::<BigEndian>(proto::TYPE_STRUCT));
let length = if self.metadata.is_some() { 2 } else { 1 };
try!(buf.write_u32::<BigEndian>(length));
try!(Serialize::write("track", buf));
try!(Serialize::write(self.track.as_u8_slice(), buf));
if let Some(ref metadata) = self.metadata {
try!(Serialize::write("metadata", buf));
try!(Serialize::write(&metadata[..], buf));
}
Ok(()) | type Value = ();
type Error = ReplaceFallbackError;
fn req_type(&self) -> RequestType {
RequestType::ReplaceFallback
}
}
pub type ReplaceFallbackResult = Result<(), ReplaceFallbackError>;
#[derive(Debug, Clone)]
pub enum ReplaceFallbackError {
InvalidTrack = 1,
BadSampleRate = 2,
Full = 3,
}
impl ReplaceFallbackError {
pub fn to_u32(&self) -> u32 {
self.clone() as u32
}
pub fn from_u32(val: u32) -> Option<ReplaceFallbackError> {
match val {
1 => Some(ReplaceFallbackError::InvalidTrack),
2 => Some(ReplaceFallbackError::BadSampleRate),
3 => Some(ReplaceFallbackError::Full),
_ => None
}
}
}
impl Deserialize for ReplaceFallbackError {
fn read(buf: &mut io::Cursor<Vec<u8>>) -> io::Result<Self> {
let num: u32 = try!(Deserialize::read(buf));
ReplaceFallbackError::from_u32(num)
.ok_or_else(|| {
io::Error::new(io::ErrorKind::Other, "unexpected ReplaceFallbackError value")
})
}
}
impl Serialize for ReplaceFallbackError {
fn write(&self, buf: &mut io::Cursor<Vec<u8>>) -> io::Result<()> {
try!(Serialize::write(&self.to_u32(), buf));
Ok(())
}
} | }
}
impl Request for ReplaceFallbackRequest { | random_line_split |
replace_fallback.rs | use std::io;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use ogg::{OggTrackBuf};
use super::super::{RequestType, Request};
use ::proto::{self, Deserialize, Serialize};
/// Skips to the end of the currently playing track
#[derive(Clone)]
pub struct ReplaceFallbackRequest {
pub track: OggTrackBuf,
pub metadata: Option<Vec<(String, String)>>,
}
impl Deserialize for ReplaceFallbackRequest {
fn read(buf: &mut io::Cursor<Vec<u8>>) -> io::Result<Self> {
try!(proto::expect_type(buf, proto::TYPE_STRUCT));
let field_count = try!(buf.read_u32::<BigEndian>());
let mut track: Option<Vec<u8>> = None;
let mut metadata: Option<Vec<(String, String)>> = None;
for _ in 0..field_count {
let field_name: String = try!(Deserialize::read(buf));
match &field_name[..] {
"track" => {
track = Some(try!(Deserialize::read(buf)));
},
"metadata" => {
metadata = Some(try!(Deserialize::read(buf)));
}
_ => try!(proto::skip_entity(buf)),
}
}
let track = match track {
Some(track) => track,
None => return Err(io::Error::new(io::ErrorKind::Other, "missing field: track")),
};
let track = try!(OggTrackBuf::new(track)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid ogg")));
Ok(ReplaceFallbackRequest {
track: track,
metadata: metadata,
})
}
}
impl Serialize for ReplaceFallbackRequest {
fn | (&self, buf: &mut io::Cursor<Vec<u8>>) -> io::Result<()> {
try!(buf.write_u16::<BigEndian>(proto::TYPE_STRUCT));
let length = if self.metadata.is_some() { 2 } else { 1 };
try!(buf.write_u32::<BigEndian>(length));
try!(Serialize::write("track", buf));
try!(Serialize::write(self.track.as_u8_slice(), buf));
if let Some(ref metadata) = self.metadata {
try!(Serialize::write("metadata", buf));
try!(Serialize::write(&metadata[..], buf));
}
Ok(())
}
}
impl Request for ReplaceFallbackRequest {
type Value = ();
type Error = ReplaceFallbackError;
fn req_type(&self) -> RequestType {
RequestType::ReplaceFallback
}
}
pub type ReplaceFallbackResult = Result<(), ReplaceFallbackError>;
#[derive(Debug, Clone)]
pub enum ReplaceFallbackError {
InvalidTrack = 1,
BadSampleRate = 2,
Full = 3,
}
impl ReplaceFallbackError {
pub fn to_u32(&self) -> u32 {
self.clone() as u32
}
pub fn from_u32(val: u32) -> Option<ReplaceFallbackError> {
match val {
1 => Some(ReplaceFallbackError::InvalidTrack),
2 => Some(ReplaceFallbackError::BadSampleRate),
3 => Some(ReplaceFallbackError::Full),
_ => None
}
}
}
impl Deserialize for ReplaceFallbackError {
fn read(buf: &mut io::Cursor<Vec<u8>>) -> io::Result<Self> {
let num: u32 = try!(Deserialize::read(buf));
ReplaceFallbackError::from_u32(num)
.ok_or_else(|| {
io::Error::new(io::ErrorKind::Other, "unexpected ReplaceFallbackError value")
})
}
}
impl Serialize for ReplaceFallbackError {
fn write(&self, buf: &mut io::Cursor<Vec<u8>>) -> io::Result<()> {
try!(Serialize::write(&self.to_u32(), buf));
Ok(())
}
}
| write | identifier_name |
angle.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Computed angles.
use crate::values::distance::{ComputeSquaredDistance, SquaredDistance};
use crate::values::CSSFloat;
use num_traits::Zero;
use std::f64::consts::PI;
use std::fmt::{self, Write};
use std::ops::Add;
use std::{f32, f64};
use style_traits::{CssWriter, ToCss};
/// A computed angle in degrees.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd, ToAnimatedZero)]
pub struct Angle(CSSFloat);
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.degrees().to_css(dest)?;
dest.write_str("deg")
}
}
const RAD_PER_DEG: f64 = PI / 180.0;
impl Angle {
/// Creates a computed `Angle` value from a radian amount.
pub fn from_radians(radians: CSSFloat) -> Self {
Angle(radians / RAD_PER_DEG as f32)
}
/// Creates a computed `Angle` value from a degrees amount.
#[inline]
pub fn from_degrees(degrees: CSSFloat) -> Self {
Angle(degrees) | #[inline]
pub fn radians(&self) -> CSSFloat {
self.radians64().min(f32::MAX as f64).max(f32::MIN as f64) as f32
}
/// Returns the amount of radians this angle represents as a `f64`.
///
/// Gecko stores angles as singles, but does this computation using doubles.
///
/// This is significant enough to mess up rounding to the nearest
/// quarter-turn for 225 degrees, for example.
#[inline]
pub fn radians64(&self) -> f64 {
self.0 as f64 * RAD_PER_DEG
}
/// Return the value in degrees.
#[inline]
pub fn degrees(&self) -> CSSFloat {
self.0
}
}
impl Add for Angle {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self {
Angle(self.0 + rhs.0)
}
}
impl Zero for Angle {
#[inline]
fn zero() -> Self {
Angle(0.0)
}
#[inline]
fn is_zero(&self) -> bool {
self.0 == 0.
}
}
impl ComputeSquaredDistance for Angle {
#[inline]
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
// Use the formula for calculating the distance between angles defined in SVG:
// https://www.w3.org/TR/SVG/animate.html#complexDistances
self.radians64()
.compute_squared_distance(&other.radians64())
}
} | }
/// Returns the amount of radians this angle represents. | random_line_split |
angle.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Computed angles.
use crate::values::distance::{ComputeSquaredDistance, SquaredDistance};
use crate::values::CSSFloat;
use num_traits::Zero;
use std::f64::consts::PI;
use std::fmt::{self, Write};
use std::ops::Add;
use std::{f32, f64};
use style_traits::{CssWriter, ToCss};
/// A computed angle in degrees.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd, ToAnimatedZero)]
pub struct Angle(CSSFloat);
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.degrees().to_css(dest)?;
dest.write_str("deg")
}
}
const RAD_PER_DEG: f64 = PI / 180.0;
impl Angle {
/// Creates a computed `Angle` value from a radian amount.
pub fn from_radians(radians: CSSFloat) -> Self {
Angle(radians / RAD_PER_DEG as f32)
}
/// Creates a computed `Angle` value from a degrees amount.
#[inline]
pub fn from_degrees(degrees: CSSFloat) -> Self {
Angle(degrees)
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(&self) -> CSSFloat {
self.radians64().min(f32::MAX as f64).max(f32::MIN as f64) as f32
}
/// Returns the amount of radians this angle represents as a `f64`.
///
/// Gecko stores angles as singles, but does this computation using doubles.
///
/// This is significant enough to mess up rounding to the nearest
/// quarter-turn for 225 degrees, for example.
#[inline]
pub fn radians64(&self) -> f64 {
self.0 as f64 * RAD_PER_DEG
}
/// Return the value in degrees.
#[inline]
pub fn degrees(&self) -> CSSFloat {
self.0
}
}
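// NOTE (editor addition, not part of the original file): a minimal sketch
// exercising the conversion helpers above; it relies only on the `Angle` API
// defined in this module.
#[cfg(test)]
mod angle_conversion_sketch {
    use super::Angle;
    use std::f64::consts::PI;

    #[test]
    fn degrees_round_trip_and_radians64() {
        let half_turn = Angle::from_degrees(180.0);
        // 180deg * (PI / 180) lands within floating-point noise of PI.
        assert!((half_turn.radians64() - PI).abs() < 1e-12);
        // `degrees` returns the stored value unchanged.
        assert_eq!(half_turn.degrees(), 180.0);
    }
}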
impl Add for Angle {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self {
Angle(self.0 + rhs.0)
}
}
impl Zero for Angle {
#[inline]
fn zero() -> Self {
Angle(0.0)
}
#[inline]
fn is_zero(&self) -> bool {
self.0 == 0.
}
}
impl ComputeSquaredDistance for Angle {
#[inline]
fn | (&self, other: &Self) -> Result<SquaredDistance, ()> {
// Use the formula for calculating the distance between angles defined in SVG:
// https://www.w3.org/TR/SVG/animate.html#complexDistances
self.radians64()
.compute_squared_distance(&other.radians64())
}
}
| compute_squared_distance | identifier_name |
angle.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Computed angles.
use crate::values::distance::{ComputeSquaredDistance, SquaredDistance};
use crate::values::CSSFloat;
use num_traits::Zero;
use std::f64::consts::PI;
use std::fmt::{self, Write};
use std::ops::Add;
use std::{f32, f64};
use style_traits::{CssWriter, ToCss};
/// A computed angle in degrees.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, PartialOrd, ToAnimatedZero)]
pub struct Angle(CSSFloat);
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
|
}
const RAD_PER_DEG: f64 = PI / 180.0;
impl Angle {
/// Creates a computed `Angle` value from a radian amount.
pub fn from_radians(radians: CSSFloat) -> Self {
Angle(radians / RAD_PER_DEG as f32)
}
/// Creates a computed `Angle` value from a degrees amount.
#[inline]
pub fn from_degrees(degrees: CSSFloat) -> Self {
Angle(degrees)
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(&self) -> CSSFloat {
self.radians64().min(f32::MAX as f64).max(f32::MIN as f64) as f32
}
/// Returns the amount of radians this angle represents as a `f64`.
///
/// Gecko stores angles as singles, but does this computation using doubles.
///
/// This is significant enough to mess up rounding to the nearest
/// quarter-turn for 225 degrees, for example.
#[inline]
pub fn radians64(&self) -> f64 {
self.0 as f64 * RAD_PER_DEG
}
/// Return the value in degrees.
#[inline]
pub fn degrees(&self) -> CSSFloat {
self.0
}
}
impl Add for Angle {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self {
Angle(self.0 + rhs.0)
}
}
impl Zero for Angle {
#[inline]
fn zero() -> Self {
Angle(0.0)
}
#[inline]
fn is_zero(&self) -> bool {
self.0 == 0.
}
}
impl ComputeSquaredDistance for Angle {
#[inline]
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
// Use the formula for calculating the distance between angles defined in SVG:
// https://www.w3.org/TR/SVG/animate.html#complexDistances
self.radians64()
.compute_squared_distance(&other.radians64())
}
}
| {
self.degrees().to_css(dest)?;
dest.write_str("deg")
} | identifier_body |
multilevel-path-1.rs | // edition:2021
#![feature(rustc_attrs)]
#![allow(unused)]
struct Point {
x: i32,
y: i32,
}
struct Wrapper {
p: Point,
}
fn main() {
let mut w = Wrapper { p: Point { x: 10, y: 10 } };
// Only paths that appear within the closure that directly start off
// a variable defined outside the closure are captured.
//
// Therefore `w.p` is captured
// Note that `wp.x` doesn't start off a variable defined outside the closure.
let c = #[rustc_capture_analysis]
//~^ ERROR: attributes on expressions are experimental
//~| NOTE: see issue #15701 <https://github.com/rust-lang/rust/issues/15701>
|| {
//~^ ERROR: First Pass analysis includes:
//~| ERROR: Min Capture analysis includes:
let wp = &w.p;
//~^ NOTE: Capturing w[(0, 0)] -> ImmBorrow
//~| NOTE: Min Capture w[(0, 0)] -> ImmBorrow
println!("{}", wp.x);
};
// Since `c` captures `w.p` by an ImmBorrow, `w.p.y` can't be mutated.
let py = &mut w.p.y;
c(); |
*py = 20
} | random_line_split |
|
multilevel-path-1.rs | // edition:2021
#![feature(rustc_attrs)]
#![allow(unused)]
struct Point {
x: i32,
y: i32,
}
struct Wrapper {
p: Point,
}
fn | () {
let mut w = Wrapper { p: Point { x: 10, y: 10 } };
// Only paths that appear within the closure that directly start off
// a variable defined outside the closure are captured.
//
// Therefore `w.p` is captured
// Note that `wp.x` doesn't start off a variable defined outside the closure.
let c = #[rustc_capture_analysis]
//~^ ERROR: attributes on expressions are experimental
//~| NOTE: see issue #15701 <https://github.com/rust-lang/rust/issues/15701>
|| {
//~^ ERROR: First Pass analysis includes:
//~| ERROR: Min Capture analysis includes:
let wp = &w.p;
//~^ NOTE: Capturing w[(0, 0)] -> ImmBorrow
//~| NOTE: Min Capture w[(0, 0)] -> ImmBorrow
println!("{}", wp.x);
};
// Since `c` captures `w.p` by an ImmBorrow, `w.p.y` can't be mutated.
let py = &mut w.p.y;
c();
*py = 20
}
| main | identifier_name |
set2.rs | extern crate openssl;
extern crate serialize;
use serialize::base64::{FromBase64};
use std::rand::{task_rng, Rng};
use std::io::BufferedReader;
use std::io::File;
use openssl::crypto::symm::{encrypt, decrypt, AES_128_ECB, AES_128_CBC};
fn xor(v1 : &[u8], v2 : &[u8]) -> Vec<u8> {
v1.iter().zip(v2.iter()).map(|(&b1, &b2)| b1 ^ b2).collect::<Vec<u8>>()
}
fn bytes_to_string(bytes : Vec<u8>) -> String {
match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => String::new()
}
}
fn pkcs7_pad(data : &mut Vec<u8>, block_size : uint) {
let n = block_size - (data.len() % block_size);
data.grow(n, &(n as u8));
}
fn aes_decrypt(k : &[u8], data : &[u8]) -> Vec<u8> {
let mut padded_data = Vec::from_slice(data);
pkcs7_pad(&mut padded_data, 16);
decrypt(AES_128_ECB, k, Vec::new(), padded_data.as_slice())
}
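// CBC decryption built on the raw ECB primitive above: each plaintext block is
//     m_i = D_k(c_i) XOR c_{i-1},   with c_0 = IV.
// In the closure below, `m1` holds the previous ciphertext block (initially the IV).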
fn decrypt_aes_cbc(k : &[u8], c : &[u8], iv : &[u8]) -> Vec<u8> {
let n = c.len()/16;
let blocks = range(0, n).map(|i| c.slice(i*16, (i+1)*16) );
let mut m1 = Vec::from_slice(iv);
blocks.flat_map(|b| {
let m = aes_decrypt(k, b);
let xord = xor(m.as_slice(), m1.as_slice());
m1 = Vec::from_slice(b);
xord.move_iter()
}).collect::<Vec<u8>>()
}
fn gen_key() -> Vec<u8> {
let mut key = [0u8,..16];
task_rng().fill_bytes(key);
Vec::from_slice(key)
}
fn gen_random_size_vec(min : uint, max : uint) -> Vec<u8> {
let size = task_rng().gen_range(min, max);
let mut vec = Vec::new();
vec.grow(size, &(0));
task_rng().fill_bytes(vec.as_mut_slice());
vec
}
fn crypto_service(input : &[u8]) -> Vec<u8> |
fn ch9() {
println!("------- 9 ---------");
let mut data = Vec::from_slice("YELLOW SUBMARINE".as_bytes());
pkcs7_pad(&mut data, 20);
println!("Padded data: {}", data);
println!("Padded message: {}", bytes_to_string(data.clone()));
assert!("YELLOW SUBMARINE\x04\x04\x04\x04".as_bytes() == data.as_slice());
}
fn ch10() {
println!("------- 10 ---------");
let path = Path::new("./10.txt");
let mut file = BufferedReader::new(File::open(&path));
let c = file.read_to_string().unwrap().as_slice().from_base64().unwrap();
let iv : [u8,..16] = [0,..16];
let k = "YELLOW SUBMARINE".as_bytes();
let m = decrypt_aes_cbc(k, c.as_slice(), iv);
println!("Message: {}", bytes_to_string(m));
}
fn ch11() {
for _ in range(0u32, 10u32) {
let input = ['a' as u8,..64];
let data = crypto_service(input);
if data.slice(16, 32) == data.slice(32, 48) {
println!("Guess ECB");
} else {
println!("Guess CBC");
}
}
}
fn main() {
ch9();
ch10();
ch11();
}
| {
let key = gen_key();
let iv = gen_key();
let prepend_bytes = gen_random_size_vec(5, 10);
let append_bytes = gen_random_size_vec(5, 10);
let data = prepend_bytes + input + append_bytes;
let choices = [AES_128_ECB, AES_128_CBC];
let t = task_rng().choose(choices).unwrap();
println!("Used {}", (match *t { AES_128_ECB => "ECB", AES_128_CBC => "CBC", _ => "Unknown" }));
encrypt(*t, key.as_slice(), iv, data.as_slice())
} | identifier_body |
set2.rs | extern crate openssl;
extern crate serialize;
use serialize::base64::{FromBase64};
use std::rand::{task_rng, Rng};
use std::io::BufferedReader;
use std::io::File;
use openssl::crypto::symm::{encrypt, decrypt, AES_128_ECB, AES_128_CBC};
fn xor(v1 : &[u8], v2 : &[u8]) -> Vec<u8> {
v1.iter().zip(v2.iter()).map(|(&b1, &b2)| b1 ^ b2).collect::<Vec<u8>>()
}
fn bytes_to_string(bytes : Vec<u8>) -> String {
match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => String::new()
}
}
fn pkcs7_pad(data : &mut Vec<u8>, block_size : uint) {
let n = block_size - (data.len() % block_size);
data.grow(n, &(n as u8));
}
fn aes_decrypt(k : &[u8], data : &[u8]) -> Vec<u8> {
let mut padded_data = Vec::from_slice(data);
pkcs7_pad(&mut padded_data, 16);
decrypt(AES_128_ECB, k, Vec::new(), padded_data.as_slice())
}
fn decrypt_aes_cbc(k : &[u8], c : &[u8], iv : &[u8]) -> Vec<u8> {
let n = c.len()/16;
let blocks = range(0, n).map(|i| c.slice(i*16, (i+1)*16) );
let mut m1 = Vec::from_slice(iv);
blocks.flat_map(|b| {
let m = aes_decrypt(k, b);
let xord = xor(m.as_slice(), m1.as_slice());
m1 = Vec::from_slice(b);
xord.move_iter()
}).collect::<Vec<u8>>()
}
fn gen_key() -> Vec<u8> {
let mut key = [0u8,..16];
task_rng().fill_bytes(key);
Vec::from_slice(key)
}
fn gen_random_size_vec(min : uint, max : uint) -> Vec<u8> {
let size = task_rng().gen_range(min, max);
let mut vec = Vec::new();
vec.grow(size, &(0));
task_rng().fill_bytes(vec.as_mut_slice());
vec
}
fn crypto_service(input : &[u8]) -> Vec<u8> {
let key = gen_key();
let iv = gen_key();
let prepend_bytes = gen_random_size_vec(5, 10);
let append_bytes = gen_random_size_vec(5, 10);
let data = prepend_bytes + input + append_bytes;
let choices = [AES_128_ECB, AES_128_CBC];
let t = task_rng().choose(choices).unwrap(); | fn ch9() {
println!("------- 9 ---------");
let mut data = Vec::from_slice("YELLOW SUBMARINE".as_bytes());
pkcs7_pad(&mut data, 20);
println!("Padded data: {}", data);
println!("Padded message: {}", bytes_to_string(data.clone()));
assert!("YELLOW SUBMARINE\x04\x04\x04\x04".as_bytes() == data.as_slice());
}
fn ch10() {
println!("------- 10 ---------");
let path = Path::new("./10.txt");
let mut file = BufferedReader::new(File::open(&path));
let c = file.read_to_string().unwrap().as_slice().from_base64().unwrap();
let iv : [u8,..16] = [0,..16];
let k = "YELLOW SUBMARINE".as_bytes();
let m = decrypt_aes_cbc(k, c.as_slice(), iv);
println!("Message: {}", bytes_to_string(m));
}
fn ch11() {
for _ in range(0u32, 10u32) {
let input = ['a' as u8,..64];
let data = crypto_service(input);
if data.slice(16, 32) == data.slice(32, 48) {
println!("Guess ECB");
} else {
println!("Guess CBC");
}
}
}
fn main() {
ch9();
ch10();
ch11();
} | println!("Used {}", (match *t { AES_128_ECB => "ECB", AES_128_CBC => "CBC", _ => "Unknown" }));
encrypt(*t, key.as_slice(), iv, data.as_slice())
}
| random_line_split |
set2.rs | extern crate openssl;
extern crate serialize;
use serialize::base64::{FromBase64};
use std::rand::{task_rng, Rng};
use std::io::BufferedReader;
use std::io::File;
use openssl::crypto::symm::{encrypt, decrypt, AES_128_ECB, AES_128_CBC};
fn xor(v1 : &[u8], v2 : &[u8]) -> Vec<u8> {
v1.iter().zip(v2.iter()).map(|(&b1, &b2)| b1 ^ b2).collect::<Vec<u8>>()
}
fn bytes_to_string(bytes : Vec<u8>) -> String {
match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => String::new()
}
}
fn pkcs7_pad(data : &mut Vec<u8>, block_size : uint) {
let n = block_size - (data.len() % block_size);
data.grow(n, &(n as u8));
}
fn aes_decrypt(k : &[u8], data : &[u8]) -> Vec<u8> {
let mut padded_data = Vec::from_slice(data);
pkcs7_pad(&mut padded_data, 16);
decrypt(AES_128_ECB, k, Vec::new(), padded_data.as_slice())
}
fn decrypt_aes_cbc(k : &[u8], c : &[u8], iv : &[u8]) -> Vec<u8> {
let n = c.len()/16;
let blocks = range(0, n).map(|i| c.slice(i*16, (i+1)*16) );
let mut m1 = Vec::from_slice(iv);
blocks.flat_map(|b| {
let m = aes_decrypt(k, b);
let xord = xor(m.as_slice(), m1.as_slice());
m1 = Vec::from_slice(b);
xord.move_iter()
}).collect::<Vec<u8>>()
}
fn gen_key() -> Vec<u8> {
let mut key = [0u8,..16];
task_rng().fill_bytes(key);
Vec::from_slice(key)
}
fn gen_random_size_vec(min : uint, max : uint) -> Vec<u8> {
let size = task_rng().gen_range(min, max);
let mut vec = Vec::new();
vec.grow(size, &(0));
task_rng().fill_bytes(vec.as_mut_slice());
vec
}
fn crypto_service(input : &[u8]) -> Vec<u8> {
let key = gen_key();
let iv = gen_key();
let prepend_bytes = gen_random_size_vec(5, 10);
let append_bytes = gen_random_size_vec(5, 10);
let data = prepend_bytes + input + append_bytes;
let choices = [AES_128_ECB, AES_128_CBC];
let t = task_rng().choose(choices).unwrap();
println!("Used {}", (match *t { AES_128_ECB => "ECB", AES_128_CBC => "CBC", _ => "Unknown" }));
encrypt(*t, key.as_slice(), iv, data.as_slice())
}
fn | () {
println!("------- 9 ---------");
let mut data = Vec::from_slice("YELLOW SUBMARINE".as_bytes());
pkcs7_pad(&mut data, 20);
println!("Padded data: {}", data);
println!("Padded message: {}", bytes_to_string(data.clone()));
assert!("YELLOW SUBMARINE\x04\x04\x04\x04".as_bytes() == data.as_slice());
}
fn ch10() {
println!("------- 10 ---------");
let path = Path::new("./10.txt");
let mut file = BufferedReader::new(File::open(&path));
let c = file.read_to_string().unwrap().as_slice().from_base64().unwrap();
let iv : [u8,..16] = [0,..16];
let k = "YELLOW SUBMARINE".as_bytes();
let m = decrypt_aes_cbc(k, c.as_slice(), iv);
println!("Message: {}", bytes_to_string(m));
}
fn ch11() {
for _ in range(0u32, 10u32) {
let input = ['a' as u8,..64];
let data = crypto_service(input);
if data.slice(16, 32) == data.slice(32, 48) {
println!("Guess ECB");
} else {
println!("Guess CBC");
}
}
}
fn main() {
ch9();
ch10();
ch11();
}
| ch9 | identifier_name |
lexical-scope-with-macro.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// compile-flags:-g
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print a
// check:$1 = 10
// debugger:print b
// check:$2 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$3 = 890242
// debugger:print b
// check:$4 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$5 = 10
// debugger:print b
// check:$6 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$7 = 102
// debugger:print b
// check:$8 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$9 = 110
// debugger:print b
// check:$10 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$11 = 10
// debugger:print b
// check:$12 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$13 = 10
// debugger:print b
// check:$14 = 34
// debugger:print c
// check:$15 = 400
// debugger:continue
#[feature(macro_rules)];
macro_rules! trivial(
($e1:expr) => ($e1)
)
macro_rules! no_new_scope(
($e1:expr) => (($e1 + 2) - 1)
)
macro_rules! new_scope(
() => ({
let a = 890242;
zzz();
sentinel();
})
)
macro_rules! shadow_within_macro(
($e1:expr) => ({
let a = $e1 + 2;
zzz();
sentinel();
let a = $e1 + 10;
zzz();
sentinel();
})
)
macro_rules! dup_expr(
($e1:expr) => (($e1) + ($e1))
)
fn | () {
let a = trivial!(10);
let b = no_new_scope!(33);
zzz();
sentinel();
new_scope!();
zzz();
sentinel();
shadow_within_macro!(100);
zzz();
sentinel();
let c = dup_expr!(10 * 20);
zzz();
sentinel();
}
fn zzz() {()}
fn sentinel() {()}
| main | identifier_name |
lexical-scope-with-macro.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// compile-flags:-g
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print a
// check:$1 = 10
// debugger:print b
// check:$2 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$3 = 890242
// debugger:print b
// check:$4 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$5 = 10
// debugger:print b
// check:$6 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$7 = 102
// debugger:print b
// check:$8 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$9 = 110
// debugger:print b
// check:$10 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$11 = 10
// debugger:print b
// check:$12 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$13 = 10
// debugger:print b
// check:$14 = 34
// debugger:print c
// check:$15 = 400
// debugger:continue
#[feature(macro_rules)];
macro_rules! trivial(
($e1:expr) => ($e1)
)
macro_rules! no_new_scope(
($e1:expr) => (($e1 + 2) - 1)
)
macro_rules! new_scope(
() => ({
let a = 890242;
zzz();
sentinel();
})
)
macro_rules! shadow_within_macro(
($e1:expr) => ({
let a = $e1 + 2;
zzz();
sentinel();
let a = $e1 + 10;
zzz();
sentinel();
})
)
macro_rules! dup_expr(
($e1:expr) => (($e1) + ($e1))
)
fn main() {
let a = trivial!(10);
let b = no_new_scope!(33);
zzz();
sentinel();
new_scope!();
zzz();
sentinel();
shadow_within_macro!(100);
zzz();
sentinel();
let c = dup_expr!(10 * 20);
zzz();
sentinel();
}
fn zzz() |
fn sentinel() {()}
| {()} | identifier_body |
lexical-scope-with-macro.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// compile-flags:-g
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print a
// check:$1 = 10
// debugger:print b
// check:$2 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$3 = 890242
// debugger:print b
// check:$4 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$5 = 10
// debugger:print b
// check:$6 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$7 = 102
// debugger:print b
// check:$8 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$9 = 110
// debugger:print b
// check:$10 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$11 = 10
// debugger:print b
// check:$12 = 34
// debugger:continue
// debugger:finish
// debugger:print a
// check:$13 = 10
// debugger:print b
// check:$14 = 34
// debugger:print c
// check:$15 = 400
// debugger:continue
#[feature(macro_rules)];
macro_rules! trivial(
($e1:expr) => ($e1)
)
macro_rules! no_new_scope(
($e1:expr) => (($e1 + 2) - 1)
)
macro_rules! new_scope(
() => ({
let a = 890242;
zzz();
sentinel();
})
)
macro_rules! shadow_within_macro(
($e1:expr) => ({
let a = $e1 + 2;
zzz();
sentinel();
let a = $e1 + 10;
zzz();
sentinel();
})
)
macro_rules! dup_expr(
($e1:expr) => (($e1) + ($e1))
)
fn main() {
let a = trivial!(10);
let b = no_new_scope!(33);
| zzz();
sentinel();
new_scope!();
zzz();
sentinel();
shadow_within_macro!(100);
zzz();
sentinel();
let c = dup_expr!(10 * 20);
zzz();
sentinel();
}
fn zzz() {()}
fn sentinel() {()} | random_line_split |
|
std_dirs.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use crate::access_container::{self, AUTHENTICATOR_ENTRY};
use crate::client::AuthClient;
use crate::config::KEY_APPS;
use crate::{AuthError, AuthFuture};
use bincode::serialize;
use futures::{future, Future};
use safe_core::ipc::access_container_enc_key;
use safe_core::mdata_info;
use safe_core::nfs::create_dir;
use safe_core::utils::symmetric_encrypt;
use safe_core::{Client, CoreError, FutureExt, MDataInfo, DIR_TAG};
use safe_nd::{Error as SndError, MDataKind, MDataSeqValue};
use std::collections::HashMap;
/// Default directories to be created at registration.
pub static DEFAULT_PRIVATE_DIRS: [&str; 6] = [
"_documents",
"_downloads",
"_music",
"_pictures",
"_videos",
"_publicNames",
];
/// Publicly accessible default directories to be created upon registration.
pub static DEFAULT_PUBLIC_DIRS: [&str; 1] = ["_public"];
/// Create the root directories and the standard directories for the access container.
pub fn create(client: &AuthClient) -> Box<AuthFuture<()>> {
let c2 = client.clone();
let c3 = client.clone();
let c4 = client.clone();
// Initialise standard directories
let access_container = client.access_container();
let config_dir = client.config_root_dir();
// Try to get default dirs from the access container
let access_cont_fut = access_container::fetch_authenticator_entry(&c2)
.then(move |res| {
match res {
Ok((_, default_containers)) => {
// Make sure that all default dirs have been created
create_std_dirs(&c3, &default_containers)
}
Err(AuthError::CoreError(CoreError::DataError(SndError::NoSuchData))) => {
// Access container hasn't been created yet
let access_cont_value = fry!(random_std_dirs())
.into_iter()
.map(|(name, md_info)| (String::from(name), md_info))
.collect();
let std_dirs_fut = create_std_dirs(&c3, &access_cont_value);
let access_cont_fut =
create_access_container(&c3, &access_container, &access_cont_value);
future::join_all(vec![std_dirs_fut, access_cont_fut])
.map(|_| ())
.into_box()
}
Err(e) => err!(e),
}
})
.into_box();
future::join_all(vec![access_cont_fut, create_config_dir(&c2, &config_dir)])
.map_err(From::from)
.and_then(move |_| {
// Update account packet - root directories have been created successfully
// (so we don't have to recover them after login).
c4.set_std_dirs_created(true);
c4.update_account_packet().map_err(From::from).into_box()
})
.into_box()
}
fn create_config_dir(client: &AuthClient, config_dir: &MDataInfo) -> Box<AuthFuture<()>> {
let config_dir_entries =
btree_map![KEY_APPS.to_vec() => MDataSeqValue { data: Vec::new(), version: 0 }];
let config_dir_entries = fry!(mdata_info::encrypt_entries(config_dir, &config_dir_entries));
create_dir(client, config_dir, config_dir_entries, btree_map![])
.map_err(From::from)
.into_box()
}
fn create_access_container(
client: &AuthClient,
access_container: &MDataInfo,
default_entries: &HashMap<String, MDataInfo>,
) -> Box<AuthFuture<()>> {
let enc_key = client.secret_symmetric_key();
// Create access container
let authenticator_key = fry!(access_container_enc_key(
AUTHENTICATOR_ENTRY,
&enc_key,
fry!(access_container.nonce().ok_or_else(|| AuthError::from(
"Expected to have nonce on access container MDataInfo"
))),
)
.map_err(AuthError::from));
let access_cont_value = fry!(symmetric_encrypt(
&fry!(serialize(default_entries)),
&enc_key, | create_dir(
client,
access_container,
btree_map![
authenticator_key => MDataSeqValue { version: 0, data: access_cont_value }
],
btree_map![],
)
.map_err(From::from)
.into_box()
}
/// Generates a list of `MDataInfo` for standard dirs.
/// Returns a collection of standard dirs along with respective `MDataInfo`s.
/// Doesn't actually put data onto the network.
pub fn random_std_dirs() -> Result<Vec<(&'static str, MDataInfo)>, CoreError> {
let pub_dirs = DEFAULT_PUBLIC_DIRS
.iter()
.map(|name| MDataInfo::random_public(MDataKind::Seq, DIR_TAG).map(|dir| (*name, dir)));
let priv_dirs = DEFAULT_PRIVATE_DIRS
.iter()
.map(|name| MDataInfo::random_private(MDataKind::Seq, DIR_TAG).map(|dir| (*name, dir)));
priv_dirs.chain(pub_dirs).collect()
}
/// A registration helper function to create the set of default dirs in the users root directory.
pub fn create_std_dirs(
client: &AuthClient,
md_infos: &HashMap<String, MDataInfo>,
) -> Box<AuthFuture<()>> {
let client = client.clone();
let creations: Vec<_> = md_infos
.iter()
.map(|(_, md_info)| {
create_dir(&client, md_info, btree_map![], btree_map![]).map_err(AuthError::from)
})
.collect();
future::join_all(creations).map(|_| ()).into_box()
}
#[cfg(test)]
mod tests {
use super::*;
use crate::run;
use crate::test_utils::create_account_and_login;
use futures::Future;
// Test creation of default dirs.
#[test]
fn creates_default_dirs() {
let auth = create_account_and_login();
unwrap!(run(&auth, |client| {
let client = client.clone();
create_std_dirs(
&client,
&unwrap!(random_std_dirs())
.into_iter()
.map(|(k, v)| (k.to_owned(), v))
.collect(),
)
.then(move |res| {
assert!(res.is_ok());
access_container::fetch_authenticator_entry(&client)
})
.then(move |res| {
let (_, mdata_entries) = unwrap!(res);
assert_eq!(
mdata_entries.len(),
DEFAULT_PUBLIC_DIRS.len() + DEFAULT_PRIVATE_DIRS.len()
);
for key in DEFAULT_PUBLIC_DIRS
.iter()
.chain(DEFAULT_PRIVATE_DIRS.iter())
{
// let's check whether all our entries have been created properly
assert!(mdata_entries.contains_key(*key));
}
Ok(())
})
}));
}
} | None,
));
| random_line_split |
std_dirs.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use crate::access_container::{self, AUTHENTICATOR_ENTRY};
use crate::client::AuthClient;
use crate::config::KEY_APPS;
use crate::{AuthError, AuthFuture};
use bincode::serialize;
use futures::{future, Future};
use safe_core::ipc::access_container_enc_key;
use safe_core::mdata_info;
use safe_core::nfs::create_dir;
use safe_core::utils::symmetric_encrypt;
use safe_core::{Client, CoreError, FutureExt, MDataInfo, DIR_TAG};
use safe_nd::{Error as SndError, MDataKind, MDataSeqValue};
use std::collections::HashMap;
/// Default directories to be created at registration.
pub static DEFAULT_PRIVATE_DIRS: [&str; 6] = [
"_documents",
"_downloads",
"_music",
"_pictures",
"_videos",
"_publicNames",
];
/// Publicly accessible default directories to be created upon registration.
pub static DEFAULT_PUBLIC_DIRS: [&str; 1] = ["_public"];
/// Create the root directories and the standard directories for the access container.
pub fn create(client: &AuthClient) -> Box<AuthFuture<()>> | .into_iter()
.map(|(name, md_info)| (String::from(name), md_info))
.collect();
let std_dirs_fut = create_std_dirs(&c3, &access_cont_value);
let access_cont_fut =
create_access_container(&c3, &access_container, &access_cont_value);
future::join_all(vec![std_dirs_fut, access_cont_fut])
.map(|_| ())
.into_box()
}
Err(e) => err!(e),
}
})
.into_box();
future::join_all(vec![access_cont_fut, create_config_dir(&c2, &config_dir)])
.map_err(From::from)
.and_then(move |_| {
// Update account packet - root directories have been created successfully
// (so we don't have to recover them after login).
c4.set_std_dirs_created(true);
c4.update_account_packet().map_err(From::from).into_box()
})
.into_box()
}
fn create_config_dir(client: &AuthClient, config_dir: &MDataInfo) -> Box<AuthFuture<()>> {
let config_dir_entries =
btree_map![KEY_APPS.to_vec() => MDataSeqValue { data: Vec::new(), version: 0 }];
let config_dir_entries = fry!(mdata_info::encrypt_entries(config_dir, &config_dir_entries));
create_dir(client, config_dir, config_dir_entries, btree_map![])
.map_err(From::from)
.into_box()
}
fn create_access_container(
client: &AuthClient,
access_container: &MDataInfo,
default_entries: &HashMap<String, MDataInfo>,
) -> Box<AuthFuture<()>> {
let enc_key = client.secret_symmetric_key();
// Create access container
let authenticator_key = fry!(access_container_enc_key(
AUTHENTICATOR_ENTRY,
&enc_key,
fry!(access_container.nonce().ok_or_else(|| AuthError::from(
"Expected to have nonce on access container MDataInfo"
))),
)
.map_err(AuthError::from));
let access_cont_value = fry!(symmetric_encrypt(
&fry!(serialize(default_entries)),
&enc_key,
None,
));
create_dir(
client,
access_container,
btree_map![
authenticator_key => MDataSeqValue { version: 0, data: access_cont_value }
],
btree_map![],
)
.map_err(From::from)
.into_box()
}
/// Generates a list of `MDataInfo` for standard dirs.
/// Returns a collection of standard dirs along with respective `MDataInfo`s.
/// Doesn't actually put data onto the network.
pub fn random_std_dirs() -> Result<Vec<(&'static str, MDataInfo)>, CoreError> {
let pub_dirs = DEFAULT_PUBLIC_DIRS
.iter()
.map(|name| MDataInfo::random_public(MDataKind::Seq, DIR_TAG).map(|dir| (*name, dir)));
let priv_dirs = DEFAULT_PRIVATE_DIRS
.iter()
.map(|name| MDataInfo::random_private(MDataKind::Seq, DIR_TAG).map(|dir| (*name, dir)));
priv_dirs.chain(pub_dirs).collect()
}
/// A registration helper function to create the set of default dirs in the user's root directory.
pub fn create_std_dirs(
client: &AuthClient,
md_infos: &HashMap<String, MDataInfo>,
) -> Box<AuthFuture<()>> {
let client = client.clone();
let creations: Vec<_> = md_infos
.iter()
.map(|(_, md_info)| {
create_dir(&client, md_info, btree_map![], btree_map![]).map_err(AuthError::from)
})
.collect();
future::join_all(creations).map(|_| ()).into_box()
}
#[cfg(test)]
mod tests {
use super::*;
use crate::run;
use crate::test_utils::create_account_and_login;
use futures::Future;
// Test creation of default dirs.
#[test]
fn creates_default_dirs() {
let auth = create_account_and_login();
unwrap!(run(&auth, |client| {
let client = client.clone();
create_std_dirs(
&client,
&unwrap!(random_std_dirs())
.into_iter()
.map(|(k, v)| (k.to_owned(), v))
.collect(),
)
.then(move |res| {
assert!(res.is_ok());
access_container::fetch_authenticator_entry(&client)
})
.then(move |res| {
let (_, mdata_entries) = unwrap!(res);
assert_eq!(
mdata_entries.len(),
DEFAULT_PUBLIC_DIRS.len() + DEFAULT_PRIVATE_DIRS.len()
);
for key in DEFAULT_PUBLIC_DIRS
.iter()
.chain(DEFAULT_PRIVATE_DIRS.iter())
{
// let's check whether all our entries have been created properly
assert!(mdata_entries.contains_key(*key));
}
Ok(())
})
}));
}
}
| {
let c2 = client.clone();
let c3 = client.clone();
let c4 = client.clone();
// Initialise standard directories
let access_container = client.access_container();
let config_dir = client.config_root_dir();
// Try to get default dirs from the access container
let access_cont_fut = access_container::fetch_authenticator_entry(&c2)
.then(move |res| {
match res {
Ok((_, default_containers)) => {
// Make sure that all default dirs have been created
create_std_dirs(&c3, &default_containers)
}
Err(AuthError::CoreError(CoreError::DataError(SndError::NoSuchData))) => {
// Access container hasn't been created yet
let access_cont_value = fry!(random_std_dirs()) | identifier_body |
std_dirs.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use crate::access_container::{self, AUTHENTICATOR_ENTRY};
use crate::client::AuthClient;
use crate::config::KEY_APPS;
use crate::{AuthError, AuthFuture};
use bincode::serialize;
use futures::{future, Future};
use safe_core::ipc::access_container_enc_key;
use safe_core::mdata_info;
use safe_core::nfs::create_dir;
use safe_core::utils::symmetric_encrypt;
use safe_core::{Client, CoreError, FutureExt, MDataInfo, DIR_TAG};
use safe_nd::{Error as SndError, MDataKind, MDataSeqValue};
use std::collections::HashMap;
/// Default directories to be created at registration.
pub static DEFAULT_PRIVATE_DIRS: [&str; 6] = [
"_documents",
"_downloads",
"_music",
"_pictures",
"_videos",
"_publicNames",
];
/// Publicly accessible default directories to be created upon registration.
pub static DEFAULT_PUBLIC_DIRS: [&str; 1] = ["_public"];
/// Create the root directories and the standard directories for the access container.
pub fn | (client: &AuthClient) -> Box<AuthFuture<()>> {
let c2 = client.clone();
let c3 = client.clone();
let c4 = client.clone();
// Initialise standard directories
let access_container = client.access_container();
let config_dir = client.config_root_dir();
// Try to get default dirs from the access container
let access_cont_fut = access_container::fetch_authenticator_entry(&c2)
.then(move |res| {
match res {
Ok((_, default_containers)) => {
// Make sure that all default dirs have been created
create_std_dirs(&c3, &default_containers)
}
Err(AuthError::CoreError(CoreError::DataError(SndError::NoSuchData))) => {
// Access container hasn't been created yet
let access_cont_value = fry!(random_std_dirs())
.into_iter()
.map(|(name, md_info)| (String::from(name), md_info))
.collect();
let std_dirs_fut = create_std_dirs(&c3, &access_cont_value);
let access_cont_fut =
create_access_container(&c3, &access_container, &access_cont_value);
future::join_all(vec![std_dirs_fut, access_cont_fut])
.map(|_| ())
.into_box()
}
Err(e) => err!(e),
}
})
.into_box();
future::join_all(vec![access_cont_fut, create_config_dir(&c2, &config_dir)])
.map_err(From::from)
.and_then(move |_| {
// Update account packet - root directories have been created successfully
// (so we don't have to recover them after login).
c4.set_std_dirs_created(true);
c4.update_account_packet().map_err(From::from).into_box()
})
.into_box()
}
fn create_config_dir(client: &AuthClient, config_dir: &MDataInfo) -> Box<AuthFuture<()>> {
let config_dir_entries =
btree_map![KEY_APPS.to_vec() => MDataSeqValue { data: Vec::new(), version: 0 }];
let config_dir_entries = fry!(mdata_info::encrypt_entries(config_dir, &config_dir_entries));
create_dir(client, config_dir, config_dir_entries, btree_map![])
.map_err(From::from)
.into_box()
}
fn create_access_container(
client: &AuthClient,
access_container: &MDataInfo,
default_entries: &HashMap<String, MDataInfo>,
) -> Box<AuthFuture<()>> {
let enc_key = client.secret_symmetric_key();
// Create access container
let authenticator_key = fry!(access_container_enc_key(
AUTHENTICATOR_ENTRY,
&enc_key,
fry!(access_container.nonce().ok_or_else(|| AuthError::from(
"Expected to have nonce on access container MDataInfo"
))),
)
.map_err(AuthError::from));
let access_cont_value = fry!(symmetric_encrypt(
&fry!(serialize(default_entries)),
&enc_key,
None,
));
create_dir(
client,
access_container,
btree_map![
authenticator_key => MDataSeqValue { version: 0, data: access_cont_value }
],
btree_map![],
)
.map_err(From::from)
.into_box()
}
/// Generates a list of `MDataInfo` for standard dirs.
/// Returns a collection of standard dirs along with respective `MDataInfo`s.
/// Doesn't actually put data onto the network.
pub fn random_std_dirs() -> Result<Vec<(&'static str, MDataInfo)>, CoreError> {
let pub_dirs = DEFAULT_PUBLIC_DIRS
.iter()
.map(|name| MDataInfo::random_public(MDataKind::Seq, DIR_TAG).map(|dir| (*name, dir)));
let priv_dirs = DEFAULT_PRIVATE_DIRS
.iter()
.map(|name| MDataInfo::random_private(MDataKind::Seq, DIR_TAG).map(|dir| (*name, dir)));
priv_dirs.chain(pub_dirs).collect()
}
/// A registration helper function to create the set of default dirs in the user's root directory.
pub fn create_std_dirs(
client: &AuthClient,
md_infos: &HashMap<String, MDataInfo>,
) -> Box<AuthFuture<()>> {
let client = client.clone();
let creations: Vec<_> = md_infos
.iter()
.map(|(_, md_info)| {
create_dir(&client, md_info, btree_map![], btree_map![]).map_err(AuthError::from)
})
.collect();
future::join_all(creations).map(|_| ()).into_box()
}
#[cfg(test)]
mod tests {
use super::*;
use crate::run;
use crate::test_utils::create_account_and_login;
use futures::Future;
// Test creation of default dirs.
#[test]
fn creates_default_dirs() {
let auth = create_account_and_login();
unwrap!(run(&auth, |client| {
let client = client.clone();
create_std_dirs(
&client,
&unwrap!(random_std_dirs())
.into_iter()
.map(|(k, v)| (k.to_owned(), v))
.collect(),
)
.then(move |res| {
assert!(res.is_ok());
access_container::fetch_authenticator_entry(&client)
})
.then(move |res| {
let (_, mdata_entries) = unwrap!(res);
assert_eq!(
mdata_entries.len(),
DEFAULT_PUBLIC_DIRS.len() + DEFAULT_PRIVATE_DIRS.len()
);
for key in DEFAULT_PUBLIC_DIRS
.iter()
.chain(DEFAULT_PRIVATE_DIRS.iter())
{
// let's check whether all our entries have been created properly
assert!(mdata_entries.contains_key(*key));
}
Ok(())
})
}));
}
}
| create | identifier_name |
shared_cache.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use lru_cache::LruCache;
use util::{H256, Mutex};
use util::sha3::*; | use bit_set::BitSet;
use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
/// Global cache for EVM interpreter
pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>,
max_size: usize,
cur_size: Mutex<usize>,
}
impl SharedCache {
/// Create a jump destinations cache with a maximum size in bytes
/// to cache.
pub fn new(max_size: usize) -> Self {
SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
}
}
/// Get jump destinations bitmap for a contract.
pub fn jump_destinations(&self, code_hash: &H256, code: &[u8]) -> Arc<BitSet> {
if code_hash == &SHA3_EMPTY {
return Self::find_jump_destinations(code);
}
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone();
}
let d = Self::find_jump_destinations(code);
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on the number of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
d
}
fn find_jump_destinations(code: &[u8]) -> Arc<BitSet> {
let mut jump_dests = BitSet::with_capacity(code.len());
let mut position = 0;
while position < code.len() {
let instruction = code[position];
if instruction == instructions::JUMPDEST {
jump_dests.insert(position);
} else if instructions::is_push(instruction) {
position += instructions::get_push_bytes(instruction);
}
position += 1;
}
jump_dests.shrink_to_fit();
Arc::new(jump_dests)
}
}
impl Default for SharedCache {
fn default() -> Self {
SharedCache::new(DEFAULT_CACHE_SIZE)
}
}
#[test]
fn test_find_jump_destinations() {
use util::FromHex;
// given
let code = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff5b01600055".from_hex().unwrap();
// when
let valid_jump_destinations = SharedCache::find_jump_destinations(&code);
// then
assert!(valid_jump_destinations.contains(66));
} | random_line_split |
|
shared_cache.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use lru_cache::LruCache;
use util::{H256, Mutex};
use util::sha3::*;
use bit_set::BitSet;
use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
/// Global cache for EVM interpreter
pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>,
max_size: usize,
cur_size: Mutex<usize>,
}
impl SharedCache {
/// Create a jump destinations cache with a maximum size in bytes
/// to cache.
pub fn | (max_size: usize) -> Self {
SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
}
}
/// Get jump destinations bitmap for a contract.
pub fn jump_destinations(&self, code_hash: &H256, code: &[u8]) -> Arc<BitSet> {
if code_hash == &SHA3_EMPTY {
return Self::find_jump_destinations(code);
}
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone();
}
let d = Self::find_jump_destinations(code);
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on the number of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
d
}
fn find_jump_destinations(code: &[u8]) -> Arc<BitSet> {
let mut jump_dests = BitSet::with_capacity(code.len());
let mut position = 0;
while position < code.len() {
let instruction = code[position];
if instruction == instructions::JUMPDEST {
jump_dests.insert(position);
} else if instructions::is_push(instruction) {
position += instructions::get_push_bytes(instruction);
}
position += 1;
}
jump_dests.shrink_to_fit();
Arc::new(jump_dests)
}
}
impl Default for SharedCache {
fn default() -> Self {
SharedCache::new(DEFAULT_CACHE_SIZE)
}
}
#[test]
fn test_find_jump_destinations() {
use util::FromHex;
// given
let code = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff5b01600055".from_hex().unwrap();
// when
let valid_jump_destinations = SharedCache::find_jump_destinations(&code);
// then
assert!(valid_jump_destinations.contains(66));
}
| new | identifier_name |
shared_cache.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use lru_cache::LruCache;
use util::{H256, Mutex};
use util::sha3::*;
use bit_set::BitSet;
use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
/// Global cache for EVM interpreter
pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>,
max_size: usize,
cur_size: Mutex<usize>,
}
impl SharedCache {
/// Create a jump destinations cache with a maximum size in bytes
/// to cache.
pub fn new(max_size: usize) -> Self |
/// Get jump destinations bitmap for a contract.
pub fn jump_destinations(&self, code_hash: &H256, code: &[u8]) -> Arc<BitSet> {
if code_hash == &SHA3_EMPTY {
return Self::find_jump_destinations(code);
}
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone();
}
let d = Self::find_jump_destinations(code);
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on the number of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
d
}
fn find_jump_destinations(code: &[u8]) -> Arc<BitSet> {
let mut jump_dests = BitSet::with_capacity(code.len());
let mut position = 0;
while position < code.len() {
let instruction = code[position];
if instruction == instructions::JUMPDEST {
jump_dests.insert(position);
} else if instructions::is_push(instruction) {
position += instructions::get_push_bytes(instruction);
}
position += 1;
}
jump_dests.shrink_to_fit();
Arc::new(jump_dests)
}
}
impl Default for SharedCache {
fn default() -> Self {
SharedCache::new(DEFAULT_CACHE_SIZE)
}
}
#[test]
fn test_find_jump_destinations() {
use util::FromHex;
// given
let code = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff5b01600055".from_hex().unwrap();
// when
let valid_jump_destinations = SharedCache::find_jump_destinations(&code);
// then
assert!(valid_jump_destinations.contains(66));
}
| {
SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
}
} | identifier_body |
shared_cache.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use lru_cache::LruCache;
use util::{H256, Mutex};
use util::sha3::*;
use bit_set::BitSet;
use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
/// Global cache for EVM interpreter
pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>,
max_size: usize,
cur_size: Mutex<usize>,
}
impl SharedCache {
/// Create a jump destinations cache with a maximum size in bytes
/// to cache.
pub fn new(max_size: usize) -> Self {
SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
}
}
/// Get jump destinations bitmap for a contract.
pub fn jump_destinations(&self, code_hash: &H256, code: &[u8]) -> Arc<BitSet> {
if code_hash == &SHA3_EMPTY |
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone();
}
let d = Self::find_jump_destinations(code);
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on the number of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
d
}
fn find_jump_destinations(code: &[u8]) -> Arc<BitSet> {
let mut jump_dests = BitSet::with_capacity(code.len());
let mut position = 0;
while position < code.len() {
let instruction = code[position];
if instruction == instructions::JUMPDEST {
jump_dests.insert(position);
} else if instructions::is_push(instruction) {
position += instructions::get_push_bytes(instruction);
}
position += 1;
}
jump_dests.shrink_to_fit();
Arc::new(jump_dests)
}
}
impl Default for SharedCache {
fn default() -> Self {
SharedCache::new(DEFAULT_CACHE_SIZE)
}
}
#[test]
fn test_find_jump_destinations() {
use util::FromHex;
// given
let code = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff5b01600055".from_hex().unwrap();
// when
let valid_jump_destinations = SharedCache::find_jump_destinations(&code);
// then
assert!(valid_jump_destinations.contains(66));
}
| {
return Self::find_jump_destinations(code);
} | conditional_block |
mod.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::SourceLocationKey;
use dependency_analyzer::{get_reachable_ast, ReachableAst};
use fixture_tests::Fixture; | pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> {
let parts: Vec<&str> = fixture.content.split("%definitions%").collect();
let source_location = SourceLocationKey::standalone(fixture.file_name);
let definitions = parse_executable(parts[0], source_location).unwrap();
let base_definitions = parts
.iter()
.skip(1)
.flat_map(|part| parse_executable(part, source_location).unwrap().definitions)
.collect();
let ReachableAst {
definitions: result,
base_fragment_names,
} = get_reachable_ast(definitions.definitions, base_definitions);
let mut texts = result
.into_iter()
.map(|def| def.name().unwrap().to_string())
.collect::<Vec<_>>();
texts.sort_unstable();
texts.push("========== Base definitions ==========".to_string());
let mut defs = base_fragment_names
.iter()
.map(|key| key.lookup())
.collect::<Vec<_>>();
defs.sort_unstable();
texts.push(defs.join(", "));
Ok(texts.join("\n"))
} | use graphql_syntax::*;
| random_line_split |
mod.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::SourceLocationKey;
use dependency_analyzer::{get_reachable_ast, ReachableAst};
use fixture_tests::Fixture;
use graphql_syntax::*;
pub fn | (fixture: &Fixture<'_>) -> Result<String, String> {
let parts: Vec<&str> = fixture.content.split("%definitions%").collect();
let source_location = SourceLocationKey::standalone(fixture.file_name);
let definitions = parse_executable(parts[0], source_location).unwrap();
let base_definitions = parts
.iter()
.skip(1)
.flat_map(|part| parse_executable(part, source_location).unwrap().definitions)
.collect();
let ReachableAst {
definitions: result,
base_fragment_names,
} = get_reachable_ast(definitions.definitions, base_definitions);
let mut texts = result
.into_iter()
.map(|def| def.name().unwrap().to_string())
.collect::<Vec<_>>();
texts.sort_unstable();
texts.push("========== Base definitions ==========".to_string());
let mut defs = base_fragment_names
.iter()
.map(|key| key.lookup())
.collect::<Vec<_>>();
defs.sort_unstable();
texts.push(defs.join(", "));
Ok(texts.join("\n"))
}
| transform_fixture | identifier_name |
mod.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::SourceLocationKey;
use dependency_analyzer::{get_reachable_ast, ReachableAst};
use fixture_tests::Fixture;
use graphql_syntax::*;
pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> | texts.push("========== Base definitions ==========".to_string());
let mut defs = base_fragment_names
.iter()
.map(|key| key.lookup())
.collect::<Vec<_>>();
defs.sort_unstable();
texts.push(defs.join(", "));
Ok(texts.join("\n"))
}
| {
let parts: Vec<&str> = fixture.content.split("%definitions%").collect();
let source_location = SourceLocationKey::standalone(fixture.file_name);
let definitions = parse_executable(parts[0], source_location).unwrap();
let base_definitions = parts
.iter()
.skip(1)
.flat_map(|part| parse_executable(part, source_location).unwrap().definitions)
.collect();
let ReachableAst {
definitions: result,
base_fragment_names,
} = get_reachable_ast(definitions.definitions, base_definitions);
let mut texts = result
.into_iter()
.map(|def| def.name().unwrap().to_string())
.collect::<Vec<_>>();
texts.sort_unstable(); | identifier_body |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![deny(missing_docs)]
extern crate app_units;
extern crate arrayvec;
extern crate atomic_refcell;
#[macro_use]
extern crate bitflags;
#[allow(unused_extern_crates)]
extern crate byteorder;
#[cfg(feature = "servo")]
extern crate crossbeam_channel;
#[macro_use]
extern crate cssparser;
#[macro_use]
extern crate debug_unreachable;
#[macro_use]
extern crate derive_more;
extern crate euclid;
extern crate fallible;
extern crate fxhash;
#[cfg(feature = "gecko")]
#[macro_use]
pub mod gecko_string_cache;
extern crate hashglobe;
#[cfg(feature = "servo")]
#[macro_use]
extern crate html5ever;
extern crate indexmap;
extern crate itertools;
extern crate itoa;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
#[cfg(feature = "gecko")]
pub extern crate nsstring;
#[cfg(feature = "gecko")]
extern crate num_cpus;
#[macro_use]
extern crate num_derive;
extern crate num_integer;
extern crate num_traits;
extern crate ordered_float;
extern crate owning_ref;
extern crate parking_lot;
extern crate precomputed_hash;
extern crate rayon;
extern crate selectors;
#[macro_use]
extern crate serde;
pub extern crate servo_arc;
#[cfg(feature = "servo")]
#[macro_use]
extern crate servo_atoms;
#[cfg(feature = "servo")]
extern crate servo_config;
#[cfg(feature = "servo")]
extern crate servo_url;
extern crate smallbitvec;
extern crate smallvec;
#[cfg(feature = "gecko")]
extern crate static_prefs;
#[cfg(feature = "servo")]
extern crate string_cache;
#[macro_use]
extern crate style_derive;
extern crate style_traits;
#[cfg(feature = "gecko")]
extern crate thin_slice;
extern crate time;
extern crate to_shmem;
#[macro_use]
extern crate to_shmem_derive;
extern crate uluru;
extern crate unicode_bidi;
#[allow(unused_extern_crates)]
extern crate unicode_segmentation;
extern crate void;
#[macro_use]
mod macros;
pub mod animation;
pub mod applicable_declarations;
#[allow(missing_docs)] // TODO.
#[cfg(feature = "servo")]
pub mod attr;
pub mod author_styles;
pub mod bezier;
pub mod bloom;
pub mod context;
pub mod counter_style;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod dom_apis;
pub mod driver;
pub mod element_state;
#[cfg(feature = "servo")]
mod encoding_support;
pub mod error_reporting;
pub mod font_face;
pub mod font_metrics;
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko_bindings;
pub mod global_style_data;
pub mod hash;
pub mod invalidation;
#[allow(missing_docs)] // TODO.
pub mod logical_geometry;
pub mod matching;
#[macro_use]
pub mod media_queries;
pub mod parallel;
pub mod parser;
pub mod rule_cache;
pub mod rule_collector;
pub mod rule_tree;
pub mod scoped_tls;
pub mod selector_map;
pub mod selector_parser;
pub mod shared_lock;
pub mod sharing;
pub mod str;
pub mod style_adjuster;
pub mod style_resolver;
pub mod stylesheet_set;
pub mod stylesheets;
pub mod stylist;
pub mod thread_state;
pub mod timer;
pub mod traversal;
pub mod traversal_flags;
pub mod use_counters;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache as string_cache;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom as Prefix;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom as LocalName;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Namespace;
#[cfg(feature = "servo")]
pub use html5ever::LocalName;
#[cfg(feature = "servo")]
pub use html5ever::Namespace;
#[cfg(feature = "servo")]
pub use html5ever::Prefix;
#[cfg(feature = "servo")]
pub use servo_atoms::Atom;
pub use style_traits::arc_slice::ArcSlice;
pub use style_traits::owned_slice::OwnedSlice;
pub use style_traits::owned_str::OwnedStr;
/// The CSS properties supported by the style system.
/// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
#[deny(missing_docs)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko;
// uses a macro from properties
#[cfg(feature = "servo")]
#[allow(unsafe_code)]
pub mod servo;
#[cfg(feature = "gecko")]
#[allow(unsafe_code, missing_docs)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( { $name: ident, $boxed: expr } )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use crate::properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use crate::properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
#[cfg(feature = "gecko")]
use crate::gecko_string_cache::WeakAtom;
#[cfg(feature = "servo")]
use servo_atoms::Atom as WeakAtom;
/// Extension methods for selectors::attr::CaseSensitivity
pub trait CaseSensitivityExt {
/// Return whether two atoms compare equal according to this case sensitivity.
fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool;
}
impl CaseSensitivityExt for selectors::attr::CaseSensitivity {
fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool {
match self {
selectors::attr::CaseSensitivity::CaseSensitive => a == b,
selectors::attr::CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b),
}
}
}
/// A trait pretty much similar to num_traits::Zero, but without the need of
/// implementing `Add`.
pub trait Zero {
/// Returns the zero value.
fn zero() -> Self;
/// Returns whether this value is zero.
fn is_zero(&self) -> bool;
}
impl<T> Zero for T
where
T: num_traits::Zero,
{
fn zero() -> Self {
<Self as num_traits::Zero>::zero()
}
fn | (&self) -> bool {
<Self as num_traits::Zero>::is_zero(self)
}
}
| is_zero | identifier_name |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![deny(missing_docs)]
extern crate app_units;
extern crate arrayvec;
extern crate atomic_refcell;
#[macro_use]
extern crate bitflags;
#[allow(unused_extern_crates)]
extern crate byteorder;
#[cfg(feature = "servo")]
extern crate crossbeam_channel;
#[macro_use]
extern crate cssparser;
#[macro_use]
extern crate debug_unreachable;
#[macro_use]
extern crate derive_more;
extern crate euclid;
extern crate fallible;
extern crate fxhash;
#[cfg(feature = "gecko")]
#[macro_use]
pub mod gecko_string_cache;
extern crate hashglobe;
#[cfg(feature = "servo")]
#[macro_use]
extern crate html5ever;
extern crate indexmap;
extern crate itertools;
extern crate itoa;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
#[cfg(feature = "gecko")]
pub extern crate nsstring;
#[cfg(feature = "gecko")]
extern crate num_cpus;
#[macro_use]
extern crate num_derive;
extern crate num_integer;
extern crate num_traits;
extern crate ordered_float;
extern crate owning_ref;
extern crate parking_lot;
extern crate precomputed_hash;
extern crate rayon;
extern crate selectors;
#[macro_use]
extern crate serde;
pub extern crate servo_arc;
#[cfg(feature = "servo")]
#[macro_use]
extern crate servo_atoms;
#[cfg(feature = "servo")]
extern crate servo_config;
#[cfg(feature = "servo")]
extern crate servo_url;
extern crate smallbitvec;
extern crate smallvec;
#[cfg(feature = "gecko")]
extern crate static_prefs;
#[cfg(feature = "servo")]
extern crate string_cache;
#[macro_use]
extern crate style_derive;
extern crate style_traits;
#[cfg(feature = "gecko")]
extern crate thin_slice;
extern crate time;
extern crate to_shmem;
#[macro_use]
extern crate to_shmem_derive;
extern crate uluru;
extern crate unicode_bidi;
#[allow(unused_extern_crates)]
extern crate unicode_segmentation;
extern crate void;
#[macro_use]
mod macros;
pub mod animation;
pub mod applicable_declarations;
#[allow(missing_docs)] // TODO.
#[cfg(feature = "servo")]
pub mod attr;
pub mod author_styles;
pub mod bezier;
pub mod bloom;
pub mod context;
pub mod counter_style;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod dom_apis;
pub mod driver;
pub mod element_state;
#[cfg(feature = "servo")]
mod encoding_support;
pub mod error_reporting;
pub mod font_face;
pub mod font_metrics;
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko_bindings;
pub mod global_style_data;
pub mod hash;
pub mod invalidation;
#[allow(missing_docs)] // TODO.
pub mod logical_geometry;
pub mod matching;
#[macro_use]
pub mod media_queries;
pub mod parallel;
pub mod parser;
pub mod rule_cache;
pub mod rule_collector;
pub mod rule_tree;
pub mod scoped_tls;
pub mod selector_map;
pub mod selector_parser;
pub mod shared_lock;
pub mod sharing;
pub mod str;
pub mod style_adjuster;
pub mod style_resolver;
pub mod stylesheet_set;
pub mod stylesheets;
pub mod stylist;
pub mod thread_state;
pub mod timer;
pub mod traversal;
pub mod traversal_flags;
pub mod use_counters;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache as string_cache;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom as Prefix;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom as LocalName;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Namespace;
#[cfg(feature = "servo")]
pub use html5ever::LocalName;
#[cfg(feature = "servo")]
pub use html5ever::Namespace;
#[cfg(feature = "servo")]
pub use html5ever::Prefix;
#[cfg(feature = "servo")]
pub use servo_atoms::Atom;
pub use style_traits::arc_slice::ArcSlice;
pub use style_traits::owned_slice::OwnedSlice;
pub use style_traits::owned_str::OwnedStr;
/// The CSS properties supported by the style system.
/// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
#[deny(missing_docs)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko;
// uses a macro from properties
#[cfg(feature = "servo")]
#[allow(unsafe_code)]
pub mod servo;
#[cfg(feature = "gecko")]
#[allow(unsafe_code, missing_docs)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( { $name: ident, $boxed: expr } )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use crate::properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use crate::properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
#[cfg(feature = "gecko")]
use crate::gecko_string_cache::WeakAtom;
#[cfg(feature = "servo")]
use servo_atoms::Atom as WeakAtom;
/// Extension methods for selectors::attr::CaseSensitivity
pub trait CaseSensitivityExt {
/// Return whether two atoms compare equal according to this case sensitivity.
fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool;
}
impl CaseSensitivityExt for selectors::attr::CaseSensitivity {
fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool {
match self {
selectors::attr::CaseSensitivity::CaseSensitive => a == b,
selectors::attr::CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b),
}
}
}
/// A trait pretty much similar to num_traits::Zero, but without the need of | /// Returns whether this value is zero.
fn is_zero(&self) -> bool;
}
impl<T> Zero for T
where
T: num_traits::Zero,
{
fn zero() -> Self {
<Self as num_traits::Zero>::zero()
}
fn is_zero(&self) -> bool {
<Self as num_traits::Zero>::is_zero(self)
}
} | /// implementing `Add`.
pub trait Zero {
/// Returns the zero value.
fn zero() -> Self;
| random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![deny(missing_docs)]
extern crate app_units;
extern crate arrayvec;
extern crate atomic_refcell;
#[macro_use]
extern crate bitflags;
#[allow(unused_extern_crates)]
extern crate byteorder;
#[cfg(feature = "servo")]
extern crate crossbeam_channel;
#[macro_use]
extern crate cssparser;
#[macro_use]
extern crate debug_unreachable;
#[macro_use]
extern crate derive_more;
extern crate euclid;
extern crate fallible;
extern crate fxhash;
#[cfg(feature = "gecko")]
#[macro_use]
pub mod gecko_string_cache;
extern crate hashglobe;
#[cfg(feature = "servo")]
#[macro_use]
extern crate html5ever;
extern crate indexmap;
extern crate itertools;
extern crate itoa;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
#[cfg(feature = "gecko")]
pub extern crate nsstring;
#[cfg(feature = "gecko")]
extern crate num_cpus;
#[macro_use]
extern crate num_derive;
extern crate num_integer;
extern crate num_traits;
extern crate ordered_float;
extern crate owning_ref;
extern crate parking_lot;
extern crate precomputed_hash;
extern crate rayon;
extern crate selectors;
#[macro_use]
extern crate serde;
pub extern crate servo_arc;
#[cfg(feature = "servo")]
#[macro_use]
extern crate servo_atoms;
#[cfg(feature = "servo")]
extern crate servo_config;
#[cfg(feature = "servo")]
extern crate servo_url;
extern crate smallbitvec;
extern crate smallvec;
#[cfg(feature = "gecko")]
extern crate static_prefs;
#[cfg(feature = "servo")]
extern crate string_cache;
#[macro_use]
extern crate style_derive;
extern crate style_traits;
#[cfg(feature = "gecko")]
extern crate thin_slice;
extern crate time;
extern crate to_shmem;
#[macro_use]
extern crate to_shmem_derive;
extern crate uluru;
extern crate unicode_bidi;
#[allow(unused_extern_crates)]
extern crate unicode_segmentation;
extern crate void;
#[macro_use]
mod macros;
pub mod animation;
pub mod applicable_declarations;
#[allow(missing_docs)] // TODO.
#[cfg(feature = "servo")]
pub mod attr;
pub mod author_styles;
pub mod bezier;
pub mod bloom;
pub mod context;
pub mod counter_style;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod dom_apis;
pub mod driver;
pub mod element_state;
#[cfg(feature = "servo")]
mod encoding_support;
pub mod error_reporting;
pub mod font_face;
pub mod font_metrics;
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko_bindings;
pub mod global_style_data;
pub mod hash;
pub mod invalidation;
#[allow(missing_docs)] // TODO.
pub mod logical_geometry;
pub mod matching;
#[macro_use]
pub mod media_queries;
pub mod parallel;
pub mod parser;
pub mod rule_cache;
pub mod rule_collector;
pub mod rule_tree;
pub mod scoped_tls;
pub mod selector_map;
pub mod selector_parser;
pub mod shared_lock;
pub mod sharing;
pub mod str;
pub mod style_adjuster;
pub mod style_resolver;
pub mod stylesheet_set;
pub mod stylesheets;
pub mod stylist;
pub mod thread_state;
pub mod timer;
pub mod traversal;
pub mod traversal_flags;
pub mod use_counters;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache as string_cache;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom as Prefix;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Atom as LocalName;
#[cfg(feature = "gecko")]
pub use crate::gecko_string_cache::Namespace;
#[cfg(feature = "servo")]
pub use html5ever::LocalName;
#[cfg(feature = "servo")]
pub use html5ever::Namespace;
#[cfg(feature = "servo")]
pub use html5ever::Prefix;
#[cfg(feature = "servo")]
pub use servo_atoms::Atom;
pub use style_traits::arc_slice::ArcSlice;
pub use style_traits::owned_slice::OwnedSlice;
pub use style_traits::owned_str::OwnedStr;
/// The CSS properties supported by the style system.
/// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
#[deny(missing_docs)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko;
// uses a macro from properties
#[cfg(feature = "servo")]
#[allow(unsafe_code)]
pub mod servo;
#[cfg(feature = "gecko")]
#[allow(unsafe_code, missing_docs)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( { $name: ident, $boxed: expr } )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use crate::properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use crate::properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
#[cfg(feature = "gecko")]
use crate::gecko_string_cache::WeakAtom;
#[cfg(feature = "servo")]
use servo_atoms::Atom as WeakAtom;
/// Extension methods for selectors::attr::CaseSensitivity
pub trait CaseSensitivityExt {
/// Return whether two atoms compare equal according to this case sensitivity.
fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool;
}
impl CaseSensitivityExt for selectors::attr::CaseSensitivity {
fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool |
}
/// A trait pretty much similar to num_traits::Zero, but without the need of
/// implementing `Add`.
pub trait Zero {
/// Returns the zero value.
fn zero() -> Self;
/// Returns whether this value is zero.
fn is_zero(&self) -> bool;
}
impl<T> Zero for T
where
T: num_traits::Zero,
{
fn zero() -> Self {
<Self as num_traits::Zero>::zero()
}
fn is_zero(&self) -> bool {
<Self as num_traits::Zero>::is_zero(self)
}
}
| {
match self {
selectors::attr::CaseSensitivity::CaseSensitive => a == b,
selectors::attr::CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b),
}
} | identifier_body |
gated-quote.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that `quote`-related macros are gated by the `quote` feature gate.
// (To sanity-check the code, uncomment this.)
// #![feature(quote)]
// FIXME the error message that is currently emitted seems pretty bad.
#![feature(rustc_private)]
#![allow(dead_code, unused_imports, unused_variables)]
#[macro_use]
extern crate syntax;
use syntax::ast;
use syntax::codemap::Span;
use syntax::parse;
struct ParseSess;
impl ParseSess {
fn cfg(&self) -> ast::CrateConfig { loop { } }
fn parse_sess<'a>(&'a self) -> &'a parse::ParseSess { loop { } }
fn call_site(&self) -> Span { loop { } }
fn ident_of(&self, st: &str) -> ast::Ident { loop { } }
fn name_of(&self, st: &str) -> ast::Name { loop { } }
}
pub fn main() | {
let ecx = &ParseSess;
let x = quote_tokens!(ecx, 3); //~ ERROR macro undefined: 'quote_tokens!'
let x = quote_expr!(ecx, 3); //~ ERROR macro undefined: 'quote_expr!'
let x = quote_ty!(ecx, 3); //~ ERROR macro undefined: 'quote_ty!'
let x = quote_method!(ecx, 3); //~ ERROR macro undefined: 'quote_method!'
let x = quote_item!(ecx, 3); //~ ERROR macro undefined: 'quote_item!'
let x = quote_pat!(ecx, 3); //~ ERROR macro undefined: 'quote_pat!'
let x = quote_arm!(ecx, 3); //~ ERROR macro undefined: 'quote_arm!'
let x = quote_stmt!(ecx, 3); //~ ERROR macro undefined: 'quote_stmt!'
let x = quote_matcher!(ecx, 3); //~ ERROR macro undefined: 'quote_matcher!'
let x = quote_attr!(ecx, 3); //~ ERROR macro undefined: 'quote_attr!'
} | identifier_body |
|
gated-quote.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that `quote`-related macros are gated by the `quote` feature gate.
// (To sanity-check the code, uncomment this.)
// #![feature(quote)]
// FIXME the error message that is currently emitted seems pretty bad.
#![feature(rustc_private)]
#![allow(dead_code, unused_imports, unused_variables)]
#[macro_use]
extern crate syntax;
use syntax::ast;
use syntax::codemap::Span;
use syntax::parse;
struct ParseSess;
impl ParseSess {
fn cfg(&self) -> ast::CrateConfig { loop { } }
fn | <'a>(&'a self) -> &'a parse::ParseSess { loop { } }
fn call_site(&self) -> Span { loop { } }
fn ident_of(&self, st: &str) -> ast::Ident { loop { } }
fn name_of(&self, st: &str) -> ast::Name { loop { } }
}
pub fn main() {
let ecx = &ParseSess;
let x = quote_tokens!(ecx, 3); //~ ERROR macro undefined: 'quote_tokens!'
let x = quote_expr!(ecx, 3); //~ ERROR macro undefined: 'quote_expr!'
let x = quote_ty!(ecx, 3); //~ ERROR macro undefined: 'quote_ty!'
let x = quote_method!(ecx, 3); //~ ERROR macro undefined: 'quote_method!'
let x = quote_item!(ecx, 3); //~ ERROR macro undefined: 'quote_item!'
let x = quote_pat!(ecx, 3); //~ ERROR macro undefined: 'quote_pat!'
let x = quote_arm!(ecx, 3); //~ ERROR macro undefined: 'quote_arm!'
let x = quote_stmt!(ecx, 3); //~ ERROR macro undefined: 'quote_stmt!'
let x = quote_matcher!(ecx, 3); //~ ERROR macro undefined: 'quote_matcher!'
let x = quote_attr!(ecx, 3); //~ ERROR macro undefined: 'quote_attr!'
}
| parse_sess | identifier_name |
gated-quote.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that `quote`-related macros are gated by the `quote` feature gate.
// (To sanity-check the code, uncomment this.)
// #![feature(quote)]
// FIXME the error message that is currently emitted seems pretty bad.
#![feature(rustc_private)] |
use syntax::ast;
use syntax::codemap::Span;
use syntax::parse;
struct ParseSess;
impl ParseSess {
fn cfg(&self) -> ast::CrateConfig { loop { } }
fn parse_sess<'a>(&'a self) -> &'a parse::ParseSess { loop { } }
fn call_site(&self) -> Span { loop { } }
fn ident_of(&self, st: &str) -> ast::Ident { loop { } }
fn name_of(&self, st: &str) -> ast::Name { loop { } }
}
pub fn main() {
let ecx = &ParseSess;
let x = quote_tokens!(ecx, 3); //~ ERROR macro undefined: 'quote_tokens!'
let x = quote_expr!(ecx, 3); //~ ERROR macro undefined: 'quote_expr!'
let x = quote_ty!(ecx, 3); //~ ERROR macro undefined: 'quote_ty!'
let x = quote_method!(ecx, 3); //~ ERROR macro undefined: 'quote_method!'
let x = quote_item!(ecx, 3); //~ ERROR macro undefined: 'quote_item!'
let x = quote_pat!(ecx, 3); //~ ERROR macro undefined: 'quote_pat!'
let x = quote_arm!(ecx, 3); //~ ERROR macro undefined: 'quote_arm!'
let x = quote_stmt!(ecx, 3); //~ ERROR macro undefined: 'quote_stmt!'
let x = quote_matcher!(ecx, 3); //~ ERROR macro undefined: 'quote_matcher!'
let x = quote_attr!(ecx, 3); //~ ERROR macro undefined: 'quote_attr!'
} | #![allow(dead_code, unused_imports, unused_variables)]
#[macro_use]
extern crate syntax; | random_line_split |
lib.rs | // Copyright 2017 James Duley
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![feature(proc_macro)]
extern crate proc_macro;
extern crate syn;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use std::str::FromStr;
use syn::{parse_token_trees, Token, TokenTree, Lit, DelimToken, StrStyle};
#[proc_macro]
pub fn gcc_asm(token_stream: TokenStream) -> TokenStream {
let tokens = token_stream.to_string();
let token_trees = parse_token_trees(&tokens).unwrap();
let mut parts = split_on_token(token_trees.as_slice(), &Token::Colon);
let template = parts
.next()
.expect("error: template missing")
.iter()
.map(get_string_literal)
// support C-style string literal concatenation
.fold(String::new(), |acc, ref x| acc + &*x);
let output_operands = parts.next().unwrap_or(&[]);
let input_operands = parts.next().unwrap_or(&[]);
let clobbers = parts.next().unwrap_or(&[]);
assert!(
clobbers.len() == 0usize,
"error: clobbers not supported yet"
);
assert!(parts.next().is_none(), "error: extra tokens after clobbers");
let mut symbolic_names = Vec::new();
let (new_output_operands, tied_input_operands) = split_on_token(output_operands, &Token::Comma)
.map(|tts| extract_symbolic_name(&mut symbolic_names, tts))
.enumerate()
.map(extract_tied_operands)
.fold((Vec::new(), Vec::new()), |mut acc, x| {
acc.0.push(x.0);
if let Some(t) = x.1 {
acc.1.push(t)
};
acc
});
let new_input_operands = split_on_token(input_operands, &Token::Comma)
.map(|tts| extract_symbolic_name(&mut symbolic_names, tts))
.collect::<Vec<_>>();
let new_template = replace_template(template, symbolic_names.as_slice());
let all_new_input_operands = new_input_operands.iter().chain(tied_input_operands.iter());
let is_volatile = new_output_operands.len() == 0;
let mut options = Vec::new();
if is_volatile {
options.push("volatile");
}
// println!(
// "out: {:?}\nin: {:?}\ntied: {:?}",
// new_output_operands,
// new_input_operands,
// tied_input_operands
// );
let new_tokens =
quote! {
asm!(
#new_template :
#(#(#new_output_operands)*),* :
#(#(#all_new_input_operands)*),* :
:
#(#options),*
)
};
// println!("{}", new_tokens);
TokenStream::from_str(new_tokens.as_str()).unwrap()
}
fn split_on_token<'a>(
token_trees: &'a [TokenTree],
separator: &'a Token,
) -> Box<Iterator<Item = &'a [TokenTree]> + 'a> {
if token_trees.is_empty() {
Box::new(std::iter::empty())
} else {
Box::new(token_trees.split(move |tt| match *tt {
TokenTree::Token(ref token) => token == separator,
_ => false,
}))
}
}
fn replace_template(template: String, symbolic_names: &[Option<String>]) -> String {
let with_dollars = template.replace("$", "\u{80}").replace("%", "$").replace(
"$$",
"%",
);
symbolic_names
.iter()
.enumerate()
.fold(with_dollars, |acc, ref number_and_name| {
if let Some(ref x) = *number_and_name.1 {
acc.replace(
&("$[".to_string() + &x + "]"),
&("$".to_string() + &number_and_name.0.to_string()),
)
} else {
acc
}
})
.replace("$=", "${:uid}")
.replace("\u{80}", "$$")
}
fn extract_symbolic_name(
ordered_list: &mut Vec<Option<String>>,
tts: &[TokenTree],
) -> Vec<TokenTree> | without_name
}
fn extract_tied_operands(
pos_and_tts: (usize, Vec<TokenTree>),
) -> (Vec<TokenTree>, Option<Vec<TokenTree>>) {
{
let constraint = get_string_literal(pos_and_tts.1.first().expect("error: empty operand"));
if constraint.starts_with("+") {
let lvalue = pos_and_tts.1.split_at(1).1;
let new_input_constraint = pos_and_tts.0.to_string();
let mut input = Vec::new();
input.push(make_string_literal(new_input_constraint));
input.extend_from_slice(lvalue);
let new_output_constraint = constraint.replace("+", "=");
let mut output = Vec::new();
output.push(make_string_literal(new_output_constraint));
output.extend_from_slice(lvalue);
return (output, Some(input));
}
}
(pos_and_tts.1, None)
}
fn get_string_literal(tt: &TokenTree) -> &String {
match *tt {
TokenTree::Token(Token::Literal(Lit::Str(ref string, _))) => string,
_ => {
panic!(format!(
"error: expected a string literal but found {:?}",
tt
))
}
}
}
fn make_string_literal(string: String) -> TokenTree {
TokenTree::Token(Token::Literal(Lit::Str(string, StrStyle::Cooked)))
}
| {
let name_and_remaining = match *tts.first().expect("error: empty operand") {
TokenTree::Delimited(ref d) => {
assert!(d.delim == DelimToken::Bracket, "error: bad operand");
let name = if d.tts.len() == 1usize {
match d.tts[0] {
TokenTree::Token(Token::Ident(ref name)) => Some(name.to_string()),
_ => None,
}
} else {
None
};
assert!(name.is_some(), "error: bad symbolic name");
(name, tts.split_at(1).1)
}
_ => (None, tts),
};
ordered_list.push(name_and_remaining.0);
let mut without_name = Vec::new();
without_name.extend_from_slice(name_and_remaining.1); | identifier_body |
lib.rs | // Copyright 2017 James Duley
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![feature(proc_macro)]
extern crate proc_macro;
extern crate syn; | use syn::{parse_token_trees, Token, TokenTree, Lit, DelimToken, StrStyle};
#[proc_macro]
pub fn gcc_asm(token_stream: TokenStream) -> TokenStream {
let tokens = token_stream.to_string();
let token_trees = parse_token_trees(&tokens).unwrap();
let mut parts = split_on_token(token_trees.as_slice(), &Token::Colon);
let template = parts
.next()
.expect("error: template missing")
.iter()
.map(get_string_literal)
// support C-style string literal concatenation
.fold(String::new(), |acc, ref x| acc + &*x);
let output_operands = parts.next().unwrap_or(&[]);
let input_operands = parts.next().unwrap_or(&[]);
let clobbers = parts.next().unwrap_or(&[]);
assert!(
clobbers.len() == 0usize,
"error: clobbers not supported yet"
);
assert!(parts.next().is_none(), "error: extra tokens after clobbers");
let mut symbolic_names = Vec::new();
let (new_output_operands, tied_input_operands) = split_on_token(output_operands, &Token::Comma)
.map(|tts| extract_symbolic_name(&mut symbolic_names, tts))
.enumerate()
.map(extract_tied_operands)
.fold((Vec::new(), Vec::new()), |mut acc, x| {
acc.0.push(x.0);
if let Some(t) = x.1 {
acc.1.push(t)
};
acc
});
let new_input_operands = split_on_token(input_operands, &Token::Comma)
.map(|tts| extract_symbolic_name(&mut symbolic_names, tts))
.collect::<Vec<_>>();
let new_template = replace_template(template, symbolic_names.as_slice());
let all_new_input_operands = new_input_operands.iter().chain(tied_input_operands.iter());
let is_volatile = new_output_operands.len() == 0;
let mut options = Vec::new();
if is_volatile {
options.push("volatile");
}
// println!(
// "out: {:?}\nin: {:?}\ntied: {:?}",
// new_output_operands,
// new_input_operands,
// tied_input_operands
// );
let new_tokens =
quote! {
asm!(
#new_template :
#(#(#new_output_operands)*),* :
#(#(#all_new_input_operands)*),* :
:
#(#options),*
)
};
// println!("{}", new_tokens);
TokenStream::from_str(new_tokens.as_str()).unwrap()
}
fn split_on_token<'a>(
token_trees: &'a [TokenTree],
separator: &'a Token,
) -> Box<Iterator<Item = &'a [TokenTree]> + 'a> {
if token_trees.is_empty() {
Box::new(std::iter::empty())
} else {
Box::new(token_trees.split(move |tt| match *tt {
TokenTree::Token(ref token) => token == separator,
_ => false,
}))
}
}
fn replace_template(template: String, symbolic_names: &[Option<String>]) -> String {
let with_dollars = template.replace("$", "\u{80}").replace("%", "$").replace(
"$$",
"%",
);
symbolic_names
.iter()
.enumerate()
.fold(with_dollars, |acc, ref number_and_name| {
if let Some(ref x) = *number_and_name.1 {
acc.replace(
&("$[".to_string() + &x + "]"),
&("$".to_string() + &number_and_name.0.to_string()),
)
} else {
acc
}
})
.replace("$=", "${:uid}")
.replace("\u{80}", "$$")
}
fn extract_symbolic_name(
ordered_list: &mut Vec<Option<String>>,
tts: &[TokenTree],
) -> Vec<TokenTree> {
let name_and_remaining = match *tts.first().expect("error: empty operand") {
TokenTree::Delimited(ref d) => {
assert!(d.delim == DelimToken::Bracket, "error: bad operand");
let name = if d.tts.len() == 1usize {
match d.tts[0] {
TokenTree::Token(Token::Ident(ref name)) => Some(name.to_string()),
_ => None,
}
} else {
None
};
assert!(name.is_some(), "error: bad symbolic name");
(name, tts.split_at(1).1)
}
_ => (None, tts),
};
ordered_list.push(name_and_remaining.0);
let mut without_name = Vec::new();
without_name.extend_from_slice(name_and_remaining.1);
without_name
}
fn extract_tied_operands(
pos_and_tts: (usize, Vec<TokenTree>),
) -> (Vec<TokenTree>, Option<Vec<TokenTree>>) {
{
let constraint = get_string_literal(pos_and_tts.1.first().expect("error: empty operand"));
if constraint.starts_with("+") {
let lvalue = pos_and_tts.1.split_at(1).1;
let new_input_constraint = pos_and_tts.0.to_string();
let mut input = Vec::new();
input.push(make_string_literal(new_input_constraint));
input.extend_from_slice(lvalue);
let new_output_constraint = constraint.replace("+", "=");
let mut output = Vec::new();
output.push(make_string_literal(new_output_constraint));
output.extend_from_slice(lvalue);
return (output, Some(input));
}
}
(pos_and_tts.1, None)
}
fn get_string_literal(tt: &TokenTree) -> &String {
match *tt {
TokenTree::Token(Token::Literal(Lit::Str(ref string, _))) => string,
_ => {
panic!(format!(
"error: expected a string literal but found {:?}",
tt
))
}
}
}
fn make_string_literal(string: String) -> TokenTree {
TokenTree::Token(Token::Literal(Lit::Str(string, StrStyle::Cooked)))
} | #[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use std::str::FromStr; | random_line_split |
lib.rs | // Copyright 2017 James Duley
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![feature(proc_macro)]
extern crate proc_macro;
extern crate syn;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use std::str::FromStr;
use syn::{parse_token_trees, Token, TokenTree, Lit, DelimToken, StrStyle};
#[proc_macro]
pub fn | (token_stream: TokenStream) -> TokenStream {
let tokens = token_stream.to_string();
let token_trees = parse_token_trees(&tokens).unwrap();
let mut parts = split_on_token(token_trees.as_slice(), &Token::Colon);
let template = parts
.next()
.expect("error: template missing")
.iter()
.map(get_string_literal)
// support C-style string literal concatenation
.fold(String::new(), |acc, ref x| acc + &*x);
let output_operands = parts.next().unwrap_or(&[]);
let input_operands = parts.next().unwrap_or(&[]);
let clobbers = parts.next().unwrap_or(&[]);
assert!(
clobbers.len() == 0usize,
"error: clobbers not supported yet"
);
assert!(parts.next().is_none(), "error: extra tokens after clobbers");
let mut symbolic_names = Vec::new();
let (new_output_operands, tied_input_operands) = split_on_token(output_operands, &Token::Comma)
.map(|tts| extract_symbolic_name(&mut symbolic_names, tts))
.enumerate()
.map(extract_tied_operands)
.fold((Vec::new(), Vec::new()), |mut acc, x| {
acc.0.push(x.0);
if let Some(t) = x.1 {
acc.1.push(t)
};
acc
});
let new_input_operands = split_on_token(input_operands, &Token::Comma)
.map(|tts| extract_symbolic_name(&mut symbolic_names, tts))
.collect::<Vec<_>>();
let new_template = replace_template(template, symbolic_names.as_slice());
let all_new_input_operands = new_input_operands.iter().chain(tied_input_operands.iter());
let is_volatile = new_output_operands.len() == 0;
let mut options = Vec::new();
if is_volatile {
options.push("volatile");
}
// println!(
// "out: {:?}\nin: {:?}\ntied: {:?}",
// new_output_operands,
// new_input_operands,
// tied_input_operands
// );
let new_tokens =
quote! {
asm!(
#new_template :
#(#(#new_output_operands)*),* :
#(#(#all_new_input_operands)*),* :
:
#(#options),*
)
};
// println!("{}", new_tokens);
TokenStream::from_str(new_tokens.as_str()).unwrap()
}
fn split_on_token<'a>(
token_trees: &'a [TokenTree],
separator: &'a Token,
) -> Box<Iterator<Item = &'a [TokenTree]> + 'a> {
if token_trees.is_empty() {
Box::new(std::iter::empty())
} else {
Box::new(token_trees.split(move |tt| match *tt {
TokenTree::Token(ref token) => token == separator,
_ => false,
}))
}
}
fn replace_template(template: String, symbolic_names: &[Option<String>]) -> String {
let with_dollars = template.replace("$", "\u{80}").replace("%", "$").replace(
"$$",
"%",
);
symbolic_names
.iter()
.enumerate()
.fold(with_dollars, |acc, ref number_and_name| {
if let Some(ref x) = *number_and_name.1 {
acc.replace(
&("$[".to_string() + &x + "]"),
&("$".to_string() + &number_and_name.0.to_string()),
)
} else {
acc
}
})
.replace("$=", "${:uid}")
.replace("\u{80}", "$$")
}
fn extract_symbolic_name(
ordered_list: &mut Vec<Option<String>>,
tts: &[TokenTree],
) -> Vec<TokenTree> {
let name_and_remaining = match *tts.first().expect("error: empty operand") {
TokenTree::Delimited(ref d) => {
assert!(d.delim == DelimToken::Bracket, "error: bad operand");
let name = if d.tts.len() == 1usize {
match d.tts[0] {
TokenTree::Token(Token::Ident(ref name)) => Some(name.to_string()),
_ => None,
}
} else {
None
};
assert!(name.is_some(), "error: bad symbolic name");
(name, tts.split_at(1).1)
}
_ => (None, tts),
};
ordered_list.push(name_and_remaining.0);
let mut without_name = Vec::new();
without_name.extend_from_slice(name_and_remaining.1);
without_name
}
fn extract_tied_operands(
pos_and_tts: (usize, Vec<TokenTree>),
) -> (Vec<TokenTree>, Option<Vec<TokenTree>>) {
{
let constraint = get_string_literal(pos_and_tts.1.first().expect("error: empty operand"));
if constraint.starts_with("+") {
let lvalue = pos_and_tts.1.split_at(1).1;
let new_input_constraint = pos_and_tts.0.to_string();
let mut input = Vec::new();
input.push(make_string_literal(new_input_constraint));
input.extend_from_slice(lvalue);
let new_output_constraint = constraint.replace("+", "=");
let mut output = Vec::new();
output.push(make_string_literal(new_output_constraint));
output.extend_from_slice(lvalue);
return (output, Some(input));
}
}
(pos_and_tts.1, None)
}
fn get_string_literal(tt: &TokenTree) -> &String {
match *tt {
TokenTree::Token(Token::Literal(Lit::Str(ref string, _))) => string,
_ => {
panic!(format!(
"error: expected a string literal but found {:?}",
tt
))
}
}
}
fn make_string_literal(string: String) -> TokenTree {
TokenTree::Token(Token::Literal(Lit::Str(string, StrStyle::Cooked)))
}
| gcc_asm | identifier_name |
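The `replace_template` helper above is the heart of the conversion: `%[name]` references are rewritten to positional `$N` placeholders, `%%` collapses to a literal `%`, `%=` becomes `${:uid}`, and any pre-existing `$` is escaped to `$$` so it survives in the `asm!` template. A unit test along these lines, dropped into the lib.rs shown above, illustrates those rules; it is only a sketch, and the module name, sample template, and operand name are illustrative (the private `replace_template` is reachable from a child test module via `super`).

#[cfg(test)]
mod template_rewrite_tests {
    use super::replace_template;

    #[test]
    fn rewrites_gcc_style_operand_references() {
        // Operand 0 carries the symbolic name "res"; operand 1 is purely positional.
        let names = vec![Some("res".to_string()), None];
        let rewritten = replace_template("add %[res], %1, %%sp".to_string(), &names);
        // %[res] -> $0, %1 -> $1, %% -> a single literal % in the asm! template.
        assert_eq!(rewritten, "add $0, $1, %sp");
    }
}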
scan.rs | #![feature(core, unboxed_closures)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::Scan;
struct A<T> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
// fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
// where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,
// {
// Scan{iter: self, f: f, state: initial_state}
// }
}
}
}
type T = i32;
Iterator_impl!(T);
struct F;
type B = T;
type St = T;
type Item = T;
type Args<'a> = (&'a mut St, Item);
impl<'a> FnOnce<Args<'a>> for F {
type Output = Option<B>;
extern "rust-call" fn call_once(self, (st, item): Args) -> Self::Output {
*st += item;
Some::<B>(*st)
}
}
impl<'a> FnMut<Args<'a>> for F {
extern "rust-call" fn call_mut(&mut self, (st, item): Args) -> Self::Output {
*st += item;
Some::<B>(*st)
}
}
#[test]
fn scan_test1() {
let a: A<T> = A { begin: 0, end: 10 };
let st: St = 0;
let f: F = F;
let mut scan: Scan<A<T>, B, F> = a.scan::<St, B, F>(st, f);
for n in 0..10 {
let x: Option<B> = scan.next();
match x {
Some(v) => |
None => { assert!(false); }
}
}
assert_eq!(scan.next(), None::<B>);
}
}
| { assert_eq!(v, n * (n + 1) / 2); } | conditional_block |
scan.rs | #![feature(core, unboxed_closures)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::Scan;
struct A<T> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
// fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
// where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,
// {
// Scan{iter: self, f: f, state: initial_state}
// }
}
}
}
type T = i32;
Iterator_impl!(T);
struct F;
type B = T;
type St = T;
type Item = T;
type Args<'a> = (&'a mut St, Item);
impl<'a> FnOnce<Args<'a>> for F {
type Output = Option<B>;
extern "rust-call" fn call_once(self, (st, item): Args) -> Self::Output {
*st += item;
Some::<B>(*st)
}
}
impl<'a> FnMut<Args<'a>> for F {
extern "rust-call" fn | (&mut self, (st, item): Args) -> Self::Output {
*st += item;
Some::<B>(*st)
}
}
#[test]
fn scan_test1() {
let a: A<T> = A { begin: 0, end: 10 };
let st: St = 0;
let f: F = F;
let mut scan: Scan<A<T>, B, F> = a.scan::<St, B, F>(st, f);
for n in 0..10 {
let x: Option<B> = scan.next();
match x {
Some(v) => { assert_eq!(v, n * (n + 1) / 2); }
None => { assert!(false); }
}
}
assert_eq!(scan.next(), None::<B>);
}
}
| call_mut | identifier_name |
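The explicit `F` struct above exists only to hand-implement `FnOnce`/`FnMut` as the scan state function; with a plain closure the same running sum can be written against the stable `Iterator::scan` adapter. The sketch below is independent of the unboxed-closures machinery and only mirrors the arithmetic checked in `scan_test1`.

fn running_sum_with_closure() {
    let sums: Vec<i32> = (0..10)
        .scan(0, |st, item| {
            *st += item;
            Some(*st)
        })
        .collect();
    // Triangular numbers n * (n + 1) / 2, matching the assertion in scan_test1.
    assert_eq!(sums, vec![0, 1, 3, 6, 10, 15, 21, 28, 36, 45]);
}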
scan.rs | #![feature(core, unboxed_closures)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::Scan;
struct A<T> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
// fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
// where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,
// {
// Scan{iter: self, f: f, state: initial_state}
// }
}
}
}
type T = i32;
Iterator_impl!(T);
struct F;
type B = T;
type St = T;
type Item = T;
type Args<'a> = (&'a mut St, Item);
impl<'a> FnOnce<Args<'a>> for F {
type Output = Option<B>;
extern "rust-call" fn call_once(self, (st, item): Args) -> Self::Output {
*st += item;
Some::<B>(*st)
}
}
impl<'a> FnMut<Args<'a>> for F {
extern "rust-call" fn call_mut(&mut self, (st, item): Args) -> Self::Output {
*st += item; | Some::<B>(*st)
}
}
#[test]
fn scan_test1() {
let a: A<T> = A { begin: 0, end: 10 };
let st: St = 0;
let f: F = F;
let mut scan: Scan<A<T>, B, F> = a.scan::<St, B, F>(st, f);
for n in 0..10 {
let x: Option<B> = scan.next();
match x {
Some(v) => { assert_eq!(v, n * (n + 1) / 2); }
None => { assert!(false); }
}
}
assert_eq!(scan.next(), None::<B>);
}
} | random_line_split |
|
acquire_pessimistic_lock.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::kvrpcpb::{ExtraOp, LockInfo};
use txn_types::{Key, OldValues, TimeStamp, TxnExtra};
use crate::storage::kv::WriteData;
use crate::storage::lock_manager::{LockManager, WaitTimeout};
use crate::storage::mvcc::{
Error as MvccError, ErrorInner as MvccErrorInner, MvccTxn, SnapshotReader,
};
use crate::storage::txn::commands::{
Command, CommandExt, ResponsePolicy, TypedCommand, WriteCommand, WriteContext, WriteResult,
WriteResultLockInfo,
};
use crate::storage::txn::{acquire_pessimistic_lock, Error, ErrorInner, Result};
use crate::storage::{
Error as StorageError, ErrorInner as StorageErrorInner, PessimisticLockRes, ProcessResult,
Result as StorageResult, Snapshot,
};
command! {
/// Acquire a Pessimistic lock on the keys.
///
/// This can be rolled back with a [`PessimisticRollback`](Command::PessimisticRollback) command.
AcquirePessimisticLock:
cmd_ty => StorageResult<PessimisticLockRes>,
display => "kv::command::acquirepessimisticlock keys({}) @ {} {} | {:?}", (keys.len, start_ts, for_update_ts, ctx),
content => {
/// The set of keys to lock.
keys: Vec<(Key, bool)>,
/// The primary lock. Secondary locks (from `keys`) will refer to the primary lock.
primary: Vec<u8>,
/// The transaction timestamp.
start_ts: TimeStamp,
lock_ttl: u64,
is_first_lock: bool,
for_update_ts: TimeStamp,
/// Time to wait for lock released in milliseconds when encountering locks.
wait_timeout: Option<WaitTimeout>,
/// If it is true, TiKV will return values of the keys if no error, so TiDB can cache the values for
/// later read in the same transaction.
return_values: bool,
min_commit_ts: TimeStamp,
old_values: OldValues,
}
}
impl CommandExt for AcquirePessimisticLock {
ctx!();
tag!(acquire_pessimistic_lock);
ts!(start_ts);
command_method!(can_be_pipelined, bool, true);
fn write_bytes(&self) -> usize {
self.keys
.iter()
.map(|(key, _)| key.as_encoded().len())
.sum()
}
gen_lock!(keys: multiple(|x| &x.0));
}
fn extract_lock_info_from_result<T>(res: &StorageResult<T>) -> &LockInfo {
match res {
Err(StorageError(box StorageErrorInner::Txn(Error(box ErrorInner::Mvcc(MvccError(
box MvccErrorInner::KeyIsLocked(info),
)))))) => info,
_ => panic!("unexpected mvcc error"),
}
}
impl<S: Snapshot, L: LockManager> WriteCommand<S, L> for AcquirePessimisticLock {
fn process_write(mut self, snapshot: S, context: WriteContext<'_, L>) -> Result<WriteResult> {
let (start_ts, ctx, keys) = (self.start_ts, self.ctx, self.keys);
let mut txn = MvccTxn::new(start_ts, context.concurrency_manager);
let mut reader = SnapshotReader::new(start_ts, snapshot, !ctx.get_not_fill_cache());
let rows = keys.len();
let mut res = if self.return_values {
Ok(PessimisticLockRes::Values(vec![]))
} else {
Ok(PessimisticLockRes::Empty)
};
let need_old_value = context.extra_op == ExtraOp::ReadOldValue;
for (k, should_not_exist) in keys {
match acquire_pessimistic_lock(
&mut txn,
&mut reader,
k.clone(),
&self.primary,
should_not_exist,
self.lock_ttl,
self.for_update_ts,
self.return_values,
self.min_commit_ts,
need_old_value,
) {
Ok((val, old_value)) => {
if self.return_values {
res.as_mut().unwrap().push(val);
}
if old_value.valid() {
let key = k.append_ts(txn.start_ts);
// MutationType is unknown in AcquirePessimisticLock stage.
let mutation_type = None;
self.old_values.insert(key, (old_value, mutation_type));
}
}
Err(e @ MvccError(box MvccErrorInner::KeyIsLocked { .. })) => {
res = Err(e).map_err(Error::from).map_err(StorageError::from);
break;
}
Err(e) => return Err(Error::from(e)),
}
}
// Some values are read, update max_ts
if let Ok(PessimisticLockRes::Values(values)) = &res {
if !values.is_empty() |
}
context.statistics.add(&reader.take_statistics());
// no conflict
let (pr, to_be_write, rows, ctx, lock_info) = if res.is_ok() {
let pr = ProcessResult::PessimisticLockRes { res };
let extra = TxnExtra {
old_values: self.old_values,
// One pc status is unknown in AcquirePessimisticLock stage.
one_pc: false,
};
let write_data = WriteData::new(txn.into_modifies(), extra);
(pr, write_data, rows, ctx, None)
} else {
let lock_info_pb = extract_lock_info_from_result(&res);
let lock_info = WriteResultLockInfo::from_lock_info_pb(
lock_info_pb,
self.is_first_lock,
self.wait_timeout,
);
let pr = ProcessResult::PessimisticLockRes { res };
// Wait for lock released
(pr, WriteData::default(), 0, ctx, Some(lock_info))
};
Ok(WriteResult {
ctx,
to_be_write,
rows,
pr,
lock_info,
lock_guards: vec![],
response_policy: ResponsePolicy::OnProposed,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_gen_lock_info_from_result() {
let raw_key = b"key".to_vec();
let key = Key::from_raw(&raw_key);
let ts = 100;
let is_first_lock = true;
let wait_timeout = WaitTimeout::from_encoded(200);
let mut info = LockInfo::default();
info.set_key(raw_key.clone());
info.set_lock_version(ts);
info.set_lock_ttl(100);
let case = StorageError::from(StorageErrorInner::Txn(Error::from(ErrorInner::Mvcc(
MvccError::from(MvccErrorInner::KeyIsLocked(info)),
))));
let lock_info = WriteResultLockInfo::from_lock_info_pb(
extract_lock_info_from_result::<()>(&Err(case)),
is_first_lock,
wait_timeout,
);
assert_eq!(lock_info.lock.ts, ts.into());
assert_eq!(lock_info.lock.hash, key.gen_hash());
assert_eq!(lock_info.key, raw_key);
assert_eq!(lock_info.is_first_lock, is_first_lock);
assert_eq!(lock_info.wait_timeout, wait_timeout);
}
}
| {
txn.concurrency_manager.update_max_ts(self.for_update_ts);
} | conditional_block |
acquire_pessimistic_lock.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::kvrpcpb::{ExtraOp, LockInfo};
use txn_types::{Key, OldValues, TimeStamp, TxnExtra};
use crate::storage::kv::WriteData;
use crate::storage::lock_manager::{LockManager, WaitTimeout};
use crate::storage::mvcc::{
Error as MvccError, ErrorInner as MvccErrorInner, MvccTxn, SnapshotReader,
};
use crate::storage::txn::commands::{
Command, CommandExt, ResponsePolicy, TypedCommand, WriteCommand, WriteContext, WriteResult,
WriteResultLockInfo,
};
use crate::storage::txn::{acquire_pessimistic_lock, Error, ErrorInner, Result};
use crate::storage::{
Error as StorageError, ErrorInner as StorageErrorInner, PessimisticLockRes, ProcessResult,
Result as StorageResult, Snapshot,
};
command! {
/// Acquire a Pessimistic lock on the keys.
///
/// This can be rolled back with a [`PessimisticRollback`](Command::PessimisticRollback) command.
AcquirePessimisticLock:
cmd_ty => StorageResult<PessimisticLockRes>,
display => "kv::command::acquirepessimisticlock keys({}) @ {} {} | {:?}", (keys.len, start_ts, for_update_ts, ctx),
content => {
/// The set of keys to lock.
keys: Vec<(Key, bool)>,
/// The primary lock. Secondary locks (from `keys`) will refer to the primary lock.
primary: Vec<u8>,
/// The transaction timestamp.
start_ts: TimeStamp,
lock_ttl: u64,
is_first_lock: bool,
for_update_ts: TimeStamp,
/// Time to wait for lock released in milliseconds when encountering locks.
wait_timeout: Option<WaitTimeout>,
/// If it is true, TiKV will return values of the keys if no error, so TiDB can cache the values for
/// later read in the same transaction.
return_values: bool,
min_commit_ts: TimeStamp,
old_values: OldValues,
}
}
impl CommandExt for AcquirePessimisticLock {
ctx!();
tag!(acquire_pessimistic_lock);
ts!(start_ts); |
fn write_bytes(&self) -> usize {
self.keys
.iter()
.map(|(key, _)| key.as_encoded().len())
.sum()
}
gen_lock!(keys: multiple(|x| &x.0));
}
fn extract_lock_info_from_result<T>(res: &StorageResult<T>) -> &LockInfo {
match res {
Err(StorageError(box StorageErrorInner::Txn(Error(box ErrorInner::Mvcc(MvccError(
box MvccErrorInner::KeyIsLocked(info),
)))))) => info,
_ => panic!("unexpected mvcc error"),
}
}
impl<S: Snapshot, L: LockManager> WriteCommand<S, L> for AcquirePessimisticLock {
fn process_write(mut self, snapshot: S, context: WriteContext<'_, L>) -> Result<WriteResult> {
let (start_ts, ctx, keys) = (self.start_ts, self.ctx, self.keys);
let mut txn = MvccTxn::new(start_ts, context.concurrency_manager);
let mut reader = SnapshotReader::new(start_ts, snapshot, !ctx.get_not_fill_cache());
let rows = keys.len();
let mut res = if self.return_values {
Ok(PessimisticLockRes::Values(vec![]))
} else {
Ok(PessimisticLockRes::Empty)
};
let need_old_value = context.extra_op == ExtraOp::ReadOldValue;
for (k, should_not_exist) in keys {
match acquire_pessimistic_lock(
&mut txn,
&mut reader,
k.clone(),
&self.primary,
should_not_exist,
self.lock_ttl,
self.for_update_ts,
self.return_values,
self.min_commit_ts,
need_old_value,
) {
Ok((val, old_value)) => {
if self.return_values {
res.as_mut().unwrap().push(val);
}
if old_value.valid() {
let key = k.append_ts(txn.start_ts);
// MutationType is unknown in AcquirePessimisticLock stage.
let mutation_type = None;
self.old_values.insert(key, (old_value, mutation_type));
}
}
Err(e @ MvccError(box MvccErrorInner::KeyIsLocked { .. })) => {
res = Err(e).map_err(Error::from).map_err(StorageError::from);
break;
}
Err(e) => return Err(Error::from(e)),
}
}
// Some values are read, update max_ts
if let Ok(PessimisticLockRes::Values(values)) = &res {
if !values.is_empty() {
txn.concurrency_manager.update_max_ts(self.for_update_ts);
}
}
context.statistics.add(&reader.take_statistics());
// no conflict
let (pr, to_be_write, rows, ctx, lock_info) = if res.is_ok() {
let pr = ProcessResult::PessimisticLockRes { res };
let extra = TxnExtra {
old_values: self.old_values,
// One pc status is unknown in AcquirePessimisticLock stage.
one_pc: false,
};
let write_data = WriteData::new(txn.into_modifies(), extra);
(pr, write_data, rows, ctx, None)
} else {
let lock_info_pb = extract_lock_info_from_result(&res);
let lock_info = WriteResultLockInfo::from_lock_info_pb(
lock_info_pb,
self.is_first_lock,
self.wait_timeout,
);
let pr = ProcessResult::PessimisticLockRes { res };
// Wait for lock released
(pr, WriteData::default(), 0, ctx, Some(lock_info))
};
Ok(WriteResult {
ctx,
to_be_write,
rows,
pr,
lock_info,
lock_guards: vec![],
response_policy: ResponsePolicy::OnProposed,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_gen_lock_info_from_result() {
let raw_key = b"key".to_vec();
let key = Key::from_raw(&raw_key);
let ts = 100;
let is_first_lock = true;
let wait_timeout = WaitTimeout::from_encoded(200);
let mut info = LockInfo::default();
info.set_key(raw_key.clone());
info.set_lock_version(ts);
info.set_lock_ttl(100);
let case = StorageError::from(StorageErrorInner::Txn(Error::from(ErrorInner::Mvcc(
MvccError::from(MvccErrorInner::KeyIsLocked(info)),
))));
let lock_info = WriteResultLockInfo::from_lock_info_pb(
extract_lock_info_from_result::<()>(&Err(case)),
is_first_lock,
wait_timeout,
);
assert_eq!(lock_info.lock.ts, ts.into());
assert_eq!(lock_info.lock.hash, key.gen_hash());
assert_eq!(lock_info.key, raw_key);
assert_eq!(lock_info.is_first_lock, is_first_lock);
assert_eq!(lock_info.wait_timeout, wait_timeout);
}
} | command_method!(can_be_pipelined, bool, true); | random_line_split |
acquire_pessimistic_lock.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::kvrpcpb::{ExtraOp, LockInfo};
use txn_types::{Key, OldValues, TimeStamp, TxnExtra};
use crate::storage::kv::WriteData;
use crate::storage::lock_manager::{LockManager, WaitTimeout};
use crate::storage::mvcc::{
Error as MvccError, ErrorInner as MvccErrorInner, MvccTxn, SnapshotReader,
};
use crate::storage::txn::commands::{
Command, CommandExt, ResponsePolicy, TypedCommand, WriteCommand, WriteContext, WriteResult,
WriteResultLockInfo,
};
use crate::storage::txn::{acquire_pessimistic_lock, Error, ErrorInner, Result};
use crate::storage::{
Error as StorageError, ErrorInner as StorageErrorInner, PessimisticLockRes, ProcessResult,
Result as StorageResult, Snapshot,
};
command! {
/// Acquire a Pessimistic lock on the keys.
///
/// This can be rolled back with a [`PessimisticRollback`](Command::PessimisticRollback) command.
AcquirePessimisticLock:
cmd_ty => StorageResult<PessimisticLockRes>,
display => "kv::command::acquirepessimisticlock keys({}) @ {} {} | {:?}", (keys.len, start_ts, for_update_ts, ctx),
content => {
/// The set of keys to lock.
keys: Vec<(Key, bool)>,
/// The primary lock. Secondary locks (from `keys`) will refer to the primary lock.
primary: Vec<u8>,
/// The transaction timestamp.
start_ts: TimeStamp,
lock_ttl: u64,
is_first_lock: bool,
for_update_ts: TimeStamp,
/// Time to wait for lock released in milliseconds when encountering locks.
wait_timeout: Option<WaitTimeout>,
/// If it is true, TiKV will return values of the keys if no error, so TiDB can cache the values for
/// later read in the same transaction.
return_values: bool,
min_commit_ts: TimeStamp,
old_values: OldValues,
}
}
impl CommandExt for AcquirePessimisticLock {
ctx!();
tag!(acquire_pessimistic_lock);
ts!(start_ts);
command_method!(can_be_pipelined, bool, true);
fn | (&self) -> usize {
self.keys
.iter()
.map(|(key, _)| key.as_encoded().len())
.sum()
}
gen_lock!(keys: multiple(|x| &x.0));
}
fn extract_lock_info_from_result<T>(res: &StorageResult<T>) -> &LockInfo {
match res {
Err(StorageError(box StorageErrorInner::Txn(Error(box ErrorInner::Mvcc(MvccError(
box MvccErrorInner::KeyIsLocked(info),
)))))) => info,
_ => panic!("unexpected mvcc error"),
}
}
impl<S: Snapshot, L: LockManager> WriteCommand<S, L> for AcquirePessimisticLock {
fn process_write(mut self, snapshot: S, context: WriteContext<'_, L>) -> Result<WriteResult> {
let (start_ts, ctx, keys) = (self.start_ts, self.ctx, self.keys);
let mut txn = MvccTxn::new(start_ts, context.concurrency_manager);
let mut reader = SnapshotReader::new(start_ts, snapshot, !ctx.get_not_fill_cache());
let rows = keys.len();
let mut res = if self.return_values {
Ok(PessimisticLockRes::Values(vec![]))
} else {
Ok(PessimisticLockRes::Empty)
};
let need_old_value = context.extra_op == ExtraOp::ReadOldValue;
for (k, should_not_exist) in keys {
match acquire_pessimistic_lock(
&mut txn,
&mut reader,
k.clone(),
&self.primary,
should_not_exist,
self.lock_ttl,
self.for_update_ts,
self.return_values,
self.min_commit_ts,
need_old_value,
) {
Ok((val, old_value)) => {
if self.return_values {
res.as_mut().unwrap().push(val);
}
if old_value.valid() {
let key = k.append_ts(txn.start_ts);
// MutationType is unknown in AcquirePessimisticLock stage.
let mutation_type = None;
self.old_values.insert(key, (old_value, mutation_type));
}
}
Err(e @ MvccError(box MvccErrorInner::KeyIsLocked { .. })) => {
res = Err(e).map_err(Error::from).map_err(StorageError::from);
break;
}
Err(e) => return Err(Error::from(e)),
}
}
// Some values are read, update max_ts
if let Ok(PessimisticLockRes::Values(values)) = &res {
if !values.is_empty() {
txn.concurrency_manager.update_max_ts(self.for_update_ts);
}
}
context.statistics.add(&reader.take_statistics());
// no conflict
let (pr, to_be_write, rows, ctx, lock_info) = if res.is_ok() {
let pr = ProcessResult::PessimisticLockRes { res };
let extra = TxnExtra {
old_values: self.old_values,
// One pc status is unknown in AcquirePessimisticLock stage.
one_pc: false,
};
let write_data = WriteData::new(txn.into_modifies(), extra);
(pr, write_data, rows, ctx, None)
} else {
let lock_info_pb = extract_lock_info_from_result(&res);
let lock_info = WriteResultLockInfo::from_lock_info_pb(
lock_info_pb,
self.is_first_lock,
self.wait_timeout,
);
let pr = ProcessResult::PessimisticLockRes { res };
// Wait for lock released
(pr, WriteData::default(), 0, ctx, Some(lock_info))
};
Ok(WriteResult {
ctx,
to_be_write,
rows,
pr,
lock_info,
lock_guards: vec![],
response_policy: ResponsePolicy::OnProposed,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_gen_lock_info_from_result() {
let raw_key = b"key".to_vec();
let key = Key::from_raw(&raw_key);
let ts = 100;
let is_first_lock = true;
let wait_timeout = WaitTimeout::from_encoded(200);
let mut info = LockInfo::default();
info.set_key(raw_key.clone());
info.set_lock_version(ts);
info.set_lock_ttl(100);
let case = StorageError::from(StorageErrorInner::Txn(Error::from(ErrorInner::Mvcc(
MvccError::from(MvccErrorInner::KeyIsLocked(info)),
))));
let lock_info = WriteResultLockInfo::from_lock_info_pb(
extract_lock_info_from_result::<()>(&Err(case)),
is_first_lock,
wait_timeout,
);
assert_eq!(lock_info.lock.ts, ts.into());
assert_eq!(lock_info.lock.hash, key.gen_hash());
assert_eq!(lock_info.key, raw_key);
assert_eq!(lock_info.is_first_lock, is_first_lock);
assert_eq!(lock_info.wait_timeout, wait_timeout);
}
}
| write_bytes | identifier_name |
acquire_pessimistic_lock.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::kvrpcpb::{ExtraOp, LockInfo};
use txn_types::{Key, OldValues, TimeStamp, TxnExtra};
use crate::storage::kv::WriteData;
use crate::storage::lock_manager::{LockManager, WaitTimeout};
use crate::storage::mvcc::{
Error as MvccError, ErrorInner as MvccErrorInner, MvccTxn, SnapshotReader,
};
use crate::storage::txn::commands::{
Command, CommandExt, ResponsePolicy, TypedCommand, WriteCommand, WriteContext, WriteResult,
WriteResultLockInfo,
};
use crate::storage::txn::{acquire_pessimistic_lock, Error, ErrorInner, Result};
use crate::storage::{
Error as StorageError, ErrorInner as StorageErrorInner, PessimisticLockRes, ProcessResult,
Result as StorageResult, Snapshot,
};
command! {
/// Acquire a Pessimistic lock on the keys.
///
/// This can be rolled back with a [`PessimisticRollback`](Command::PessimisticRollback) command.
AcquirePessimisticLock:
cmd_ty => StorageResult<PessimisticLockRes>,
display => "kv::command::acquirepessimisticlock keys({}) @ {} {} | {:?}", (keys.len, start_ts, for_update_ts, ctx),
content => {
/// The set of keys to lock.
keys: Vec<(Key, bool)>,
/// The primary lock. Secondary locks (from `keys`) will refer to the primary lock.
primary: Vec<u8>,
/// The transaction timestamp.
start_ts: TimeStamp,
lock_ttl: u64,
is_first_lock: bool,
for_update_ts: TimeStamp,
/// Time to wait for lock released in milliseconds when encountering locks.
wait_timeout: Option<WaitTimeout>,
/// If it is true, TiKV will return values of the keys if no error, so TiDB can cache the values for
/// later read in the same transaction.
return_values: bool,
min_commit_ts: TimeStamp,
old_values: OldValues,
}
}
impl CommandExt for AcquirePessimisticLock {
ctx!();
tag!(acquire_pessimistic_lock);
ts!(start_ts);
command_method!(can_be_pipelined, bool, true);
fn write_bytes(&self) -> usize |
gen_lock!(keys: multiple(|x| &x.0));
}
fn extract_lock_info_from_result<T>(res: &StorageResult<T>) -> &LockInfo {
match res {
Err(StorageError(box StorageErrorInner::Txn(Error(box ErrorInner::Mvcc(MvccError(
box MvccErrorInner::KeyIsLocked(info),
)))))) => info,
_ => panic!("unexpected mvcc error"),
}
}
impl<S: Snapshot, L: LockManager> WriteCommand<S, L> for AcquirePessimisticLock {
fn process_write(mut self, snapshot: S, context: WriteContext<'_, L>) -> Result<WriteResult> {
let (start_ts, ctx, keys) = (self.start_ts, self.ctx, self.keys);
let mut txn = MvccTxn::new(start_ts, context.concurrency_manager);
let mut reader = SnapshotReader::new(start_ts, snapshot, !ctx.get_not_fill_cache());
let rows = keys.len();
let mut res = if self.return_values {
Ok(PessimisticLockRes::Values(vec![]))
} else {
Ok(PessimisticLockRes::Empty)
};
let need_old_value = context.extra_op == ExtraOp::ReadOldValue;
for (k, should_not_exist) in keys {
match acquire_pessimistic_lock(
&mut txn,
&mut reader,
k.clone(),
&self.primary,
should_not_exist,
self.lock_ttl,
self.for_update_ts,
self.return_values,
self.min_commit_ts,
need_old_value,
) {
Ok((val, old_value)) => {
if self.return_values {
res.as_mut().unwrap().push(val);
}
if old_value.valid() {
let key = k.append_ts(txn.start_ts);
// MutationType is unknown in AcquirePessimisticLock stage.
let mutation_type = None;
self.old_values.insert(key, (old_value, mutation_type));
}
}
Err(e @ MvccError(box MvccErrorInner::KeyIsLocked { .. })) => {
res = Err(e).map_err(Error::from).map_err(StorageError::from);
break;
}
Err(e) => return Err(Error::from(e)),
}
}
// Some values are read, update max_ts
if let Ok(PessimisticLockRes::Values(values)) = &res {
if !values.is_empty() {
txn.concurrency_manager.update_max_ts(self.for_update_ts);
}
}
context.statistics.add(&reader.take_statistics());
// no conflict
let (pr, to_be_write, rows, ctx, lock_info) = if res.is_ok() {
let pr = ProcessResult::PessimisticLockRes { res };
let extra = TxnExtra {
old_values: self.old_values,
// One pc status is unknown in AcquirePessimisticLock stage.
one_pc: false,
};
let write_data = WriteData::new(txn.into_modifies(), extra);
(pr, write_data, rows, ctx, None)
} else {
let lock_info_pb = extract_lock_info_from_result(&res);
let lock_info = WriteResultLockInfo::from_lock_info_pb(
lock_info_pb,
self.is_first_lock,
self.wait_timeout,
);
let pr = ProcessResult::PessimisticLockRes { res };
// Wait for lock released
(pr, WriteData::default(), 0, ctx, Some(lock_info))
};
Ok(WriteResult {
ctx,
to_be_write,
rows,
pr,
lock_info,
lock_guards: vec![],
response_policy: ResponsePolicy::OnProposed,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_gen_lock_info_from_result() {
let raw_key = b"key".to_vec();
let key = Key::from_raw(&raw_key);
let ts = 100;
let is_first_lock = true;
let wait_timeout = WaitTimeout::from_encoded(200);
let mut info = LockInfo::default();
info.set_key(raw_key.clone());
info.set_lock_version(ts);
info.set_lock_ttl(100);
let case = StorageError::from(StorageErrorInner::Txn(Error::from(ErrorInner::Mvcc(
MvccError::from(MvccErrorInner::KeyIsLocked(info)),
))));
let lock_info = WriteResultLockInfo::from_lock_info_pb(
extract_lock_info_from_result::<()>(&Err(case)),
is_first_lock,
wait_timeout,
);
assert_eq!(lock_info.lock.ts, ts.into());
assert_eq!(lock_info.lock.hash, key.gen_hash());
assert_eq!(lock_info.key, raw_key);
assert_eq!(lock_info.is_first_lock, is_first_lock);
assert_eq!(lock_info.wait_timeout, wait_timeout);
}
}
| {
self.keys
.iter()
.map(|(key, _)| key.as_encoded().len())
.sum()
} | identifier_body |
lib.rs | //! pokereval-rs currently contains a single way of evaluating poker hands (5,6 or 7 cards)
//! to a HandRank, which is a number from 0 to 7461 inclusive, the higher the better the hand.
//! Inside the modules, there are more efficient methods that don't need to convert cards
//! to internal representations first.
extern crate cards;
extern crate holdem;
mod lookups;
pub mod original;
//mod perfect;
pub mod utils;
use cards::card::{Card};
use holdem::{HandRank};
use utils::{card_to_deck_number};
/// Evaluate a hand consisting of 5 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_5cards(cards: &[&Card; 5]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5];
original::eval_5cards_kev_array(&converted_cards)
}
/// Evaluate a hand consisting of 6 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn | (cards: &[&Card; 6]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let c6 = card_to_deck_number(cards[5]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5, &c6];
original::eval_6cards_kev_array(&converted_cards)
}
/// Evaluate a hand consisting of 7 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_7cards(cards: &[&Card; 7]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let c6 = card_to_deck_number(cards[5]);
let c7 = card_to_deck_number(cards[6]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5, &c6, &c7];
original::eval_7cards_kev_array(&converted_cards)
}
//TODO: this will be relevant, once the "perfect hash" method works
//use cards::deck::{Deck};
//use pokereval::{original, perfect}; // two evaluation methods
/*
//TODO: as soon as both methods are expected to agree
// this guy does not always pass
#[test]
fn both_evaluation_methods_agree() {
let mut deck = Deck::new();
// try on 10 hands
for _ in 0..10 {
let c1 = deck.draw();
let c2 = deck.draw();
let c3 = deck.draw();
let c4 = deck.draw();
let c5 = deck.draw();
let rank_original = original::eval_5cards([&c1, &c2, &c3, &c4, &c5]);
let rank_perfect = perfect::eval_5cards([&c1, &c2, &c3, &c4, &c5]);
assert_eq!(rank_original, rank_perfect);
}
}
*/
| eval_6cards | identifier_name |
lib.rs | //! pokereval-rs currently contains a single way of evaluating poker hands (5,6 or 7 cards)
//! to a HandRank, which is a number from 0 to 7461 inclusive, the higher the better the hand.
//! Inside the modules, there are more efficient methods that don't need to convert cards
//! to internal representations first.
extern crate cards;
extern crate holdem;
mod lookups; | pub mod original;
//mod perfect;
pub mod utils;
use cards::card::{Card};
use holdem::{HandRank};
use utils::{card_to_deck_number};
/// Evaluate a hand consisting of 5 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_5cards(cards: &[&Card; 5]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5];
original::eval_5cards_kev_array(&converted_cards)
}
/// Evaluate a hand consisting of 6 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_6cards(cards: &[&Card; 6]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let c6 = card_to_deck_number(cards[5]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5, &c6];
original::eval_6cards_kev_array(&converted_cards)
}
/// Evaluate a hand consisting of 7 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_7cards(cards: &[&Card; 7]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let c6 = card_to_deck_number(cards[5]);
let c7 = card_to_deck_number(cards[6]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5, &c6, &c7];
original::eval_7cards_kev_array(&converted_cards)
}
//TODO: this will be relevant, once the "perfect hash" method works
//use cards::deck::{Deck};
//use pokereval::{original, perfect}; // two evaluation methods
/*
//TODO: as soon as both methods are expected to agree
// this guy does not always pass
#[test]
fn both_evaluation_methods_agree() {
let mut deck = Deck::new();
// try on 10 hands
for _ in 0..10 {
let c1 = deck.draw();
let c2 = deck.draw();
let c3 = deck.draw();
let c4 = deck.draw();
let c5 = deck.draw();
let rank_original = original::eval_5cards([&c1, &c2, &c3, &c4, &c5]);
let rank_perfect = perfect::eval_5cards([&c1, &c2, &c3, &c4, &c5]);
assert_eq!(rank_original, rank_perfect);
}
}
*/ | random_line_split |
|
lib.rs | //! pokereval-rs currently contains a single way of evaluating poker hands (5,6 or 7 cards)
//! to a HandRank, which is a number from 0 to 7461 inclusive, the higher the better the hand.
//! Inside the modules, there are more efficient methods that don't need to convert cards
//! to internal representations first.
extern crate cards;
extern crate holdem;
mod lookups;
pub mod original;
//mod perfect;
pub mod utils;
use cards::card::{Card};
use holdem::{HandRank};
use utils::{card_to_deck_number};
/// Evaluate a hand consisting of 5 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_5cards(cards: &[&Card; 5]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5];
original::eval_5cards_kev_array(&converted_cards)
}
/// Evaluate a hand consisting of 6 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_6cards(cards: &[&Card; 6]) -> HandRank |
/// Evaluate a hand consisting of 7 cards. The cards are grouped in an array.
/// This is quite inefficient, due to the arrays that need to be created. But convenient.
pub fn eval_7cards(cards: &[&Card; 7]) -> HandRank {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let c6 = card_to_deck_number(cards[5]);
let c7 = card_to_deck_number(cards[6]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5, &c6, &c7];
original::eval_7cards_kev_array(&converted_cards)
}
//TODO: this will be relevant, once the "perfect hash" method works
//use cards::deck::{Deck};
//use pokereval::{original, perfect}; // two evaluation methods
/*
//TODO: as soon as both methods are expected to agree
// this guy does not always pass
#[test]
fn both_evaluation_methods_agree() {
let mut deck = Deck::new();
// try on 10 hands
for _ in 0..10 {
let c1 = deck.draw();
let c2 = deck.draw();
let c3 = deck.draw();
let c4 = deck.draw();
let c5 = deck.draw();
let rank_original = original::eval_5cards([&c1, &c2, &c3, &c4, &c5]);
let rank_perfect = perfect::eval_5cards([&c1, &c2, &c3, &c4, &c5]);
assert_eq!(rank_original, rank_perfect);
}
}
*/
| {
let c1 = card_to_deck_number(cards[0]);
let c2 = card_to_deck_number(cards[1]);
let c3 = card_to_deck_number(cards[2]);
let c4 = card_to_deck_number(cards[3]);
let c5 = card_to_deck_number(cards[4]);
let c6 = card_to_deck_number(cards[5]);
let converted_cards = [&c1, &c2, &c3, &c4, &c5, &c6];
original::eval_6cards_kev_array(&converted_cards)
} | identifier_body |
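A downstream caller pairs this crate with the companion `cards` crate, draws a hand, and passes card references into one of the wrappers above. The sketch below assumes the crate is consumed as `pokereval` and that `cards::deck::Deck` provides `new()` and `draw()` returning owned `Card`s, as in the commented-out test; those names are assumptions, not guaranteed by this file.

extern crate cards;
extern crate pokereval;

use cards::deck::Deck;
use pokereval::{eval_5cards, eval_7cards};

fn main() {
    // Deck::new()/draw() usage follows the commented-out test above (assumed API).
    let mut deck = Deck::new();
    let c1 = deck.draw();
    let c2 = deck.draw();
    let c3 = deck.draw();
    let c4 = deck.draw();
    let c5 = deck.draw();
    let c6 = deck.draw();
    let c7 = deck.draw();

    // A larger HandRank (0 to 7461 inclusive) means a stronger hand.
    let five_card_rank = eval_5cards(&[&c1, &c2, &c3, &c4, &c5]);
    let seven_card_rank = eval_7cards(&[&c1, &c2, &c3, &c4, &c5, &c6, &c7]);
    let _ = (five_card_rank, seven_card_rank);
}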