file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
borrowck-imm-ref-to-mut-rec-field-issue-3162-c.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// revisions: ast mir
//[mir]compile-flags: -Z borrowck=mir
fn main()
|
{
let mut _a = 3;
let b = &mut _a;
{
let c = &*b;
_a = 4; //[ast]~ ERROR cannot assign to `_a`
//[mir]~^ ERROR cannot assign to `_a` because it is borrowed
drop(c);
}
drop(b);
}
|
identifier_body
|
|
borrowck-imm-ref-to-mut-rec-field-issue-3162-c.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// revisions: ast mir
//[mir]compile-flags: -Z borrowck=mir
fn
|
() {
let mut _a = 3;
let b = &mut _a;
{
let c = &*b;
_a = 4; //[ast]~ ERROR cannot assign to `_a`
//[mir]~^ ERROR cannot assign to `_a` because it is borrowed
drop(c);
}
drop(b);
}
|
main
|
identifier_name
|
borrowck-imm-ref-to-mut-rec-field-issue-3162-c.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// revisions: ast mir
//[mir]compile-flags: -Z borrowck=mir
fn main() {
let mut _a = 3;
let b = &mut _a;
{
|
_a = 4; //[ast]~ ERROR cannot assign to `_a`
//[mir]~^ ERROR cannot assign to `_a` because it is borrowed
drop(c);
}
drop(b);
}
|
let c = &*b;
|
random_line_split
|
uhci.rs
|
use alloc::boxed::Box;
use collections::vec::Vec;
use core::intrinsics::volatile_load;
use core::mem;
use scheduler::context::context_switch;
use common::debug;
use common::memory::Memory;
use drivers::pci::config::PciConfig;
use drivers::pio::*;
use schemes::KScheme;
use super::{Hci, Packet, Pipe, Setup};
pub struct
|
{
pub base: usize,
pub irq: u8,
pub frame_list: Memory<u32>,
}
impl KScheme for Uhci {
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
// d("UHCI IRQ\n");
}
}
fn on_poll(&mut self) {
}
}
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Td {
link_ptr: u32,
ctrl_sts: u32,
token: u32,
buffer: u32,
}
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Qh {
head_ptr: u32,
element_ptr: u32,
}
impl Uhci {
pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {
pci.flag(4, 4, true); // Bus mastering
let mut module = box Uhci {
base: pci.read(0x20) as usize & 0xFFFFFFF0,
irq: pci.read(0x3C) as u8 & 0xF,
frame_list: Memory::new(1024).unwrap(),
};
module.init();
return module;
}
pub unsafe fn init(&mut self) {
debugln!("UHCI on: {:X}, IRQ: {:X}", self.base, self.irq);
let base = self.base as u16;
let usbcmd = base;
let usbsts = base + 02;
let usbintr = base + 0x4;
let frnum = base + 0x6;
let flbaseadd = base + 0x8;
let portsc1 = base + 0x10;
let portsc2 = base + 0x12;
debug::d(" CMD ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 1 << 2 | 1 << 1);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 0);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
debug::d(" STS ");
debug::dh(inw(usbsts) as usize);
debug::d(" INTR ");
debug::dh(inw(usbintr) as usize);
debug::d(" FRNUM ");
debug::dh(inw(frnum) as usize);
outw(frnum, 0);
debug::d(" to ");
debug::dh(inw(frnum) as usize);
debug::d(" FLBASEADD ");
debug::dh(ind(flbaseadd) as usize);
for i in 0..1024 {
self.frame_list.write(i, 1);
}
outd(flbaseadd, self.frame_list.address() as u32);
debug::d(" to ");
debug::dh(ind(flbaseadd) as usize);
debug::d(" CMD ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 1);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
debug::dl();
{
debug::d(" PORTSC1 ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 1 << 9);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 0);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
debug::dl();
if inw(portsc1) & 1 == 1 {
debug::d(" Device Found ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 4);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
debug::dl();
self.device(1);
}
}
{
debug::d(" PORTSC2 ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 1 << 9);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 0);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
debug::dl();
if inw(portsc2) & 1 == 1 {
debug::d(" Device Found ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 4);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
debug::dl();
self.device(2);
}
}
}
}
impl Hci for Uhci {
fn msg(&mut self, address: u8, endpoint: u8, pipe: Pipe, msgs: &[Packet]) -> usize {
let ctrl_sts = match pipe {
Pipe::Isochronous => 1 << 25 | 1 << 23,
_ => 1 << 23
};
let mut tds = Vec::new();
for msg in msgs.iter().rev() {
let link_ptr = match tds.last() {
Some(td) => (td as *const Td) as u32 | 4,
None => 1
};
match *msg {
Packet::Setup(setup) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: (mem::size_of::<Setup>() as u32 - 1) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0x2D,
buffer: (&*setup as *const Setup) as u32,
}),
Packet::In(ref data) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: ((data.len() as u32 - 1) & 0x7FF) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0x69,
buffer: data.as_ptr() as u32,
}),
Packet::Out(ref data) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: ((data.len() as u32 - 1) & 0x7FF) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0xE1,
buffer: data.as_ptr() as u32,
})
}
}
let mut count = 0;
if! tds.is_empty() {
let queue_head = box Qh {
head_ptr: 1,
element_ptr: (tds.last().unwrap() as *const Td) as u32,
};
let frame_ptr = if tds.len() == 1 {
(&tds[0] as *const Td) as u32
} else {
(&*queue_head as *const Qh) as u32 | 2
};
let frnum = Pio16::new(self.base as u16 + 6);
let frame = (unsafe { frnum.read() } + 1) & 0x3FF;
unsafe { self.frame_list.write(frame as usize, frame_ptr) };
for td in tds.iter().rev() {
while unsafe { volatile_load(td as *const Td).ctrl_sts } & 1 << 23 == 1 << 23 {
unsafe { context_switch(false) };
}
count += (unsafe { volatile_load(td as *const Td).ctrl_sts } & 0x7FF) as usize;
}
unsafe { self.frame_list.write(frame as usize, 1) };
}
count
}
}
|
Uhci
|
identifier_name
|
uhci.rs
|
use alloc::boxed::Box;
use collections::vec::Vec;
use core::intrinsics::volatile_load;
use core::mem;
use scheduler::context::context_switch;
use common::debug;
use common::memory::Memory;
use drivers::pci::config::PciConfig;
use drivers::pio::*;
use schemes::KScheme;
use super::{Hci, Packet, Pipe, Setup};
pub struct Uhci {
pub base: usize,
pub irq: u8,
pub frame_list: Memory<u32>,
}
impl KScheme for Uhci {
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
// d("UHCI IRQ\n");
}
}
fn on_poll(&mut self) {
}
}
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Td {
link_ptr: u32,
ctrl_sts: u32,
token: u32,
buffer: u32,
}
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Qh {
head_ptr: u32,
element_ptr: u32,
}
impl Uhci {
pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {
pci.flag(4, 4, true); // Bus mastering
let mut module = box Uhci {
base: pci.read(0x20) as usize & 0xFFFFFFF0,
irq: pci.read(0x3C) as u8 & 0xF,
frame_list: Memory::new(1024).unwrap(),
};
module.init();
return module;
}
pub unsafe fn init(&mut self) {
debugln!("UHCI on: {:X}, IRQ: {:X}", self.base, self.irq);
let base = self.base as u16;
let usbcmd = base;
let usbsts = base + 02;
let usbintr = base + 0x4;
let frnum = base + 0x6;
let flbaseadd = base + 0x8;
let portsc1 = base + 0x10;
let portsc2 = base + 0x12;
debug::d(" CMD ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 1 << 2 | 1 << 1);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 0);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
debug::d(" STS ");
debug::dh(inw(usbsts) as usize);
|
debug::d(" FRNUM ");
debug::dh(inw(frnum) as usize);
outw(frnum, 0);
debug::d(" to ");
debug::dh(inw(frnum) as usize);
debug::d(" FLBASEADD ");
debug::dh(ind(flbaseadd) as usize);
for i in 0..1024 {
self.frame_list.write(i, 1);
}
outd(flbaseadd, self.frame_list.address() as u32);
debug::d(" to ");
debug::dh(ind(flbaseadd) as usize);
debug::d(" CMD ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 1);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
debug::dl();
{
debug::d(" PORTSC1 ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 1 << 9);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 0);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
debug::dl();
if inw(portsc1) & 1 == 1 {
debug::d(" Device Found ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 4);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
debug::dl();
self.device(1);
}
}
{
debug::d(" PORTSC2 ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 1 << 9);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 0);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
debug::dl();
if inw(portsc2) & 1 == 1 {
debug::d(" Device Found ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 4);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
debug::dl();
self.device(2);
}
}
}
}
impl Hci for Uhci {
fn msg(&mut self, address: u8, endpoint: u8, pipe: Pipe, msgs: &[Packet]) -> usize {
let ctrl_sts = match pipe {
Pipe::Isochronous => 1 << 25 | 1 << 23,
_ => 1 << 23
};
let mut tds = Vec::new();
for msg in msgs.iter().rev() {
let link_ptr = match tds.last() {
Some(td) => (td as *const Td) as u32 | 4,
None => 1
};
match *msg {
Packet::Setup(setup) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: (mem::size_of::<Setup>() as u32 - 1) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0x2D,
buffer: (&*setup as *const Setup) as u32,
}),
Packet::In(ref data) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: ((data.len() as u32 - 1) & 0x7FF) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0x69,
buffer: data.as_ptr() as u32,
}),
Packet::Out(ref data) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: ((data.len() as u32 - 1) & 0x7FF) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0xE1,
buffer: data.as_ptr() as u32,
})
}
}
let mut count = 0;
if! tds.is_empty() {
let queue_head = box Qh {
head_ptr: 1,
element_ptr: (tds.last().unwrap() as *const Td) as u32,
};
let frame_ptr = if tds.len() == 1 {
(&tds[0] as *const Td) as u32
} else {
(&*queue_head as *const Qh) as u32 | 2
};
let frnum = Pio16::new(self.base as u16 + 6);
let frame = (unsafe { frnum.read() } + 1) & 0x3FF;
unsafe { self.frame_list.write(frame as usize, frame_ptr) };
for td in tds.iter().rev() {
while unsafe { volatile_load(td as *const Td).ctrl_sts } & 1 << 23 == 1 << 23 {
unsafe { context_switch(false) };
}
count += (unsafe { volatile_load(td as *const Td).ctrl_sts } & 0x7FF) as usize;
}
unsafe { self.frame_list.write(frame as usize, 1) };
}
count
}
}
|
debug::d(" INTR ");
debug::dh(inw(usbintr) as usize);
|
random_line_split
|
uhci.rs
|
use alloc::boxed::Box;
use collections::vec::Vec;
use core::intrinsics::volatile_load;
use core::mem;
use scheduler::context::context_switch;
use common::debug;
use common::memory::Memory;
use drivers::pci::config::PciConfig;
use drivers::pio::*;
use schemes::KScheme;
use super::{Hci, Packet, Pipe, Setup};
pub struct Uhci {
pub base: usize,
pub irq: u8,
pub frame_list: Memory<u32>,
}
impl KScheme for Uhci {
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
// d("UHCI IRQ\n");
}
}
fn on_poll(&mut self)
|
}
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Td {
link_ptr: u32,
ctrl_sts: u32,
token: u32,
buffer: u32,
}
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Qh {
head_ptr: u32,
element_ptr: u32,
}
impl Uhci {
pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {
pci.flag(4, 4, true); // Bus mastering
let mut module = box Uhci {
base: pci.read(0x20) as usize & 0xFFFFFFF0,
irq: pci.read(0x3C) as u8 & 0xF,
frame_list: Memory::new(1024).unwrap(),
};
module.init();
return module;
}
pub unsafe fn init(&mut self) {
debugln!("UHCI on: {:X}, IRQ: {:X}", self.base, self.irq);
let base = self.base as u16;
let usbcmd = base;
let usbsts = base + 02;
let usbintr = base + 0x4;
let frnum = base + 0x6;
let flbaseadd = base + 0x8;
let portsc1 = base + 0x10;
let portsc2 = base + 0x12;
debug::d(" CMD ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 1 << 2 | 1 << 1);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 0);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
debug::d(" STS ");
debug::dh(inw(usbsts) as usize);
debug::d(" INTR ");
debug::dh(inw(usbintr) as usize);
debug::d(" FRNUM ");
debug::dh(inw(frnum) as usize);
outw(frnum, 0);
debug::d(" to ");
debug::dh(inw(frnum) as usize);
debug::d(" FLBASEADD ");
debug::dh(ind(flbaseadd) as usize);
for i in 0..1024 {
self.frame_list.write(i, 1);
}
outd(flbaseadd, self.frame_list.address() as u32);
debug::d(" to ");
debug::dh(ind(flbaseadd) as usize);
debug::d(" CMD ");
debug::dh(inw(usbcmd) as usize);
outw(usbcmd, 1);
debug::d(" to ");
debug::dh(inw(usbcmd) as usize);
debug::dl();
{
debug::d(" PORTSC1 ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 1 << 9);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 0);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
debug::dl();
if inw(portsc1) & 1 == 1 {
debug::d(" Device Found ");
debug::dh(inw(portsc1) as usize);
outw(portsc1, 4);
debug::d(" to ");
debug::dh(inw(portsc1) as usize);
debug::dl();
self.device(1);
}
}
{
debug::d(" PORTSC2 ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 1 << 9);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 0);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
debug::dl();
if inw(portsc2) & 1 == 1 {
debug::d(" Device Found ");
debug::dh(inw(portsc2) as usize);
outw(portsc2, 4);
debug::d(" to ");
debug::dh(inw(portsc2) as usize);
debug::dl();
self.device(2);
}
}
}
}
impl Hci for Uhci {
fn msg(&mut self, address: u8, endpoint: u8, pipe: Pipe, msgs: &[Packet]) -> usize {
let ctrl_sts = match pipe {
Pipe::Isochronous => 1 << 25 | 1 << 23,
_ => 1 << 23
};
let mut tds = Vec::new();
for msg in msgs.iter().rev() {
let link_ptr = match tds.last() {
Some(td) => (td as *const Td) as u32 | 4,
None => 1
};
match *msg {
Packet::Setup(setup) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: (mem::size_of::<Setup>() as u32 - 1) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0x2D,
buffer: (&*setup as *const Setup) as u32,
}),
Packet::In(ref data) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: ((data.len() as u32 - 1) & 0x7FF) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0x69,
buffer: data.as_ptr() as u32,
}),
Packet::Out(ref data) => tds.push(Td {
link_ptr: link_ptr,
ctrl_sts: ctrl_sts,
token: ((data.len() as u32 - 1) & 0x7FF) << 21 | (endpoint as u32) << 15 | (address as u32) << 8 | 0xE1,
buffer: data.as_ptr() as u32,
})
}
}
let mut count = 0;
if! tds.is_empty() {
let queue_head = box Qh {
head_ptr: 1,
element_ptr: (tds.last().unwrap() as *const Td) as u32,
};
let frame_ptr = if tds.len() == 1 {
(&tds[0] as *const Td) as u32
} else {
(&*queue_head as *const Qh) as u32 | 2
};
let frnum = Pio16::new(self.base as u16 + 6);
let frame = (unsafe { frnum.read() } + 1) & 0x3FF;
unsafe { self.frame_list.write(frame as usize, frame_ptr) };
for td in tds.iter().rev() {
while unsafe { volatile_load(td as *const Td).ctrl_sts } & 1 << 23 == 1 << 23 {
unsafe { context_switch(false) };
}
count += (unsafe { volatile_load(td as *const Td).ctrl_sts } & 0x7FF) as usize;
}
unsafe { self.frame_list.write(frame as usize, 1) };
}
count
}
}
|
{
}
|
identifier_body
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains traits in script used generically in the rest of Servo.
//! The traits are here instead of in script so that these modules won't have
//! to depend on script.
#![deny(unsafe_code)]
extern crate app_units;
extern crate atomic_refcell;
extern crate canvas_traits;
extern crate cssparser;
extern crate euclid;
extern crate gfx_traits;
#[macro_use] extern crate html5ever;
extern crate ipc_channel;
extern crate libc;
#[macro_use]
extern crate log;
extern crate malloc_size_of;
#[macro_use] extern crate malloc_size_of_derive;
extern crate metrics;
extern crate msg;
extern crate net_traits;
extern crate nonzero;
extern crate profile_traits;
extern crate range;
extern crate script_traits;
extern crate selectors;
extern crate servo_arc;
extern crate servo_atoms;
extern crate servo_url;
extern crate style;
extern crate webrender_api;
pub mod message;
pub mod reporter;
pub mod rpc;
pub mod wrapper_traits;
use atomic_refcell::AtomicRefCell;
use canvas_traits::canvas::CanvasMsg;
use ipc_channel::ipc::IpcSender;
use libc::c_void;
use net_traits::image_cache::PendingImageId;
use nonzero::NonZero;
use script_traits::UntrustedNodeAddress;
use servo_url::ServoUrl;
use std::sync::atomic::AtomicIsize;
use style::data::ElementData;
#[repr(C)]
pub struct StyleData {
/// Data that the style system associates with a node. When the
/// style system is being used standalone, this is all that hangs
/// off the node. This must be first to permit the various
/// transmutations between ElementData and PersistentLayoutData.
pub element_data: AtomicRefCell<ElementData>,
/// Information needed during parallel traversals.
pub parallel: DomParallelInfo,
}
impl StyleData {
pub fn new() -> Self {
Self {
element_data: AtomicRefCell::new(ElementData::default()),
parallel: DomParallelInfo::new(),
}
}
}
#[derive(Clone, Copy, MallocSizeOf)]
pub struct OpaqueStyleAndLayoutData {
// NB: We really store a `StyleAndLayoutData` here, so be careful!
#[ignore_malloc_size_of = "TODO(#6910) Box value that should be counted but \
the type lives in layout"]
pub ptr: NonZero<*mut StyleData>,
}
#[allow(unsafe_code)]
unsafe impl Send for OpaqueStyleAndLayoutData {}
/// Information that we need stored in each DOM node.
#[derive(MallocSizeOf)]
pub struct DomParallelInfo {
/// The number of children remaining to process during bottom-up traversal.
pub children_to_process: AtomicIsize,
}
|
impl DomParallelInfo {
pub fn new() -> DomParallelInfo {
DomParallelInfo {
children_to_process: AtomicIsize::new(0),
}
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum LayoutNodeType {
Element(LayoutElementType),
Text,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum LayoutElementType {
Element,
HTMLCanvasElement,
HTMLIFrameElement,
HTMLImageElement,
HTMLInputElement,
HTMLObjectElement,
HTMLTableCellElement,
HTMLTableColElement,
HTMLTableElement,
HTMLTableRowElement,
HTMLTableSectionElement,
HTMLTextAreaElement,
SVGSVGElement,
}
pub enum HTMLCanvasDataSource {
WebGL(webrender_api::ImageKey),
Image(Option<IpcSender<CanvasMsg>>)
}
pub struct HTMLCanvasData {
pub source: HTMLCanvasDataSource,
pub width: u32,
pub height: u32,
}
pub struct SVGSVGData {
pub width: u32,
pub height: u32,
}
/// The address of a node known to be valid. These are sent from script to layout.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct TrustedNodeAddress(pub *const c_void);
#[allow(unsafe_code)]
unsafe impl Send for TrustedNodeAddress {}
pub fn is_image_data(uri: &str) -> bool {
static TYPES: &'static [&'static str] = &["data:image/png", "data:image/gif", "data:image/jpeg"];
TYPES.iter().any(|&type_| uri.starts_with(type_))
}
/// Whether the pending image needs to be fetched or is waiting on an existing fetch.
pub enum PendingImageState {
Unrequested(ServoUrl),
PendingResponse,
}
/// The data associated with an image that is not yet present in the image cache.
/// Used by the script thread to hold on to DOM elements that need to be repainted
/// when an image fetch is complete.
pub struct PendingImage {
pub state: PendingImageState,
pub node: UntrustedNodeAddress,
pub id: PendingImageId,
}
|
random_line_split
|
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains traits in script used generically in the rest of Servo.
//! The traits are here instead of in script so that these modules won't have
//! to depend on script.
#![deny(unsafe_code)]
extern crate app_units;
extern crate atomic_refcell;
extern crate canvas_traits;
extern crate cssparser;
extern crate euclid;
extern crate gfx_traits;
#[macro_use] extern crate html5ever;
extern crate ipc_channel;
extern crate libc;
#[macro_use]
extern crate log;
extern crate malloc_size_of;
#[macro_use] extern crate malloc_size_of_derive;
extern crate metrics;
extern crate msg;
extern crate net_traits;
extern crate nonzero;
extern crate profile_traits;
extern crate range;
extern crate script_traits;
extern crate selectors;
extern crate servo_arc;
extern crate servo_atoms;
extern crate servo_url;
extern crate style;
extern crate webrender_api;
pub mod message;
pub mod reporter;
pub mod rpc;
pub mod wrapper_traits;
use atomic_refcell::AtomicRefCell;
use canvas_traits::canvas::CanvasMsg;
use ipc_channel::ipc::IpcSender;
use libc::c_void;
use net_traits::image_cache::PendingImageId;
use nonzero::NonZero;
use script_traits::UntrustedNodeAddress;
use servo_url::ServoUrl;
use std::sync::atomic::AtomicIsize;
use style::data::ElementData;
#[repr(C)]
pub struct StyleData {
/// Data that the style system associates with a node. When the
/// style system is being used standalone, this is all that hangs
/// off the node. This must be first to permit the various
/// transmutations between ElementData and PersistentLayoutData.
pub element_data: AtomicRefCell<ElementData>,
/// Information needed during parallel traversals.
pub parallel: DomParallelInfo,
}
impl StyleData {
pub fn new() -> Self {
Self {
element_data: AtomicRefCell::new(ElementData::default()),
parallel: DomParallelInfo::new(),
}
}
}
#[derive(Clone, Copy, MallocSizeOf)]
pub struct OpaqueStyleAndLayoutData {
// NB: We really store a `StyleAndLayoutData` here, so be careful!
#[ignore_malloc_size_of = "TODO(#6910) Box value that should be counted but \
the type lives in layout"]
pub ptr: NonZero<*mut StyleData>,
}
#[allow(unsafe_code)]
unsafe impl Send for OpaqueStyleAndLayoutData {}
/// Information that we need stored in each DOM node.
#[derive(MallocSizeOf)]
pub struct DomParallelInfo {
/// The number of children remaining to process during bottom-up traversal.
pub children_to_process: AtomicIsize,
}
impl DomParallelInfo {
pub fn new() -> DomParallelInfo {
DomParallelInfo {
children_to_process: AtomicIsize::new(0),
}
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum
|
{
Element(LayoutElementType),
Text,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum LayoutElementType {
Element,
HTMLCanvasElement,
HTMLIFrameElement,
HTMLImageElement,
HTMLInputElement,
HTMLObjectElement,
HTMLTableCellElement,
HTMLTableColElement,
HTMLTableElement,
HTMLTableRowElement,
HTMLTableSectionElement,
HTMLTextAreaElement,
SVGSVGElement,
}
pub enum HTMLCanvasDataSource {
WebGL(webrender_api::ImageKey),
Image(Option<IpcSender<CanvasMsg>>)
}
pub struct HTMLCanvasData {
pub source: HTMLCanvasDataSource,
pub width: u32,
pub height: u32,
}
pub struct SVGSVGData {
pub width: u32,
pub height: u32,
}
/// The address of a node known to be valid. These are sent from script to layout.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct TrustedNodeAddress(pub *const c_void);
#[allow(unsafe_code)]
unsafe impl Send for TrustedNodeAddress {}
pub fn is_image_data(uri: &str) -> bool {
static TYPES: &'static [&'static str] = &["data:image/png", "data:image/gif", "data:image/jpeg"];
TYPES.iter().any(|&type_| uri.starts_with(type_))
}
/// Whether the pending image needs to be fetched or is waiting on an existing fetch.
pub enum PendingImageState {
Unrequested(ServoUrl),
PendingResponse,
}
/// The data associated with an image that is not yet present in the image cache.
/// Used by the script thread to hold on to DOM elements that need to be repainted
/// when an image fetch is complete.
pub struct PendingImage {
pub state: PendingImageState,
pub node: UntrustedNodeAddress,
pub id: PendingImageId,
}
|
LayoutNodeType
|
identifier_name
|
htmlhrelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHRElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHRElementDerived;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLHRElement {
htmlelement: HTMLElement,
}
impl HTMLHRElementDerived for EventTarget {
fn is_htmlhrelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHRElement)))
}
}
impl HTMLHRElement {
fn
|
(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLHRElement {
HTMLHRElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLHRElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLHRElement> {
let element = HTMLHRElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLHRElementBinding::Wrap)
}
}
|
new_inherited
|
identifier_name
|
htmlhrelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHRElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHRElementDerived;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLHRElement {
htmlelement: HTMLElement,
}
impl HTMLHRElementDerived for EventTarget {
fn is_htmlhrelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHRElement)))
}
}
impl HTMLHRElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLHRElement
|
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLHRElement> {
let element = HTMLHRElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLHRElementBinding::Wrap)
}
}
|
{
HTMLHRElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLHRElement, localName, prefix, document)
}
}
|
identifier_body
|
htmlhrelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHRElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHRElementDerived;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLHRElement {
htmlelement: HTMLElement,
}
impl HTMLHRElementDerived for EventTarget {
fn is_htmlhrelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHRElement)))
}
}
impl HTMLHRElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLHRElement {
HTMLHRElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLHRElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
|
document: &Document) -> Root<HTMLHRElement> {
let element = HTMLHRElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLHRElementBinding::Wrap)
}
}
|
random_line_split
|
|
any.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Traits for dynamic typing of any `'static` type (through runtime reflection)
//!
//! This module implements the `Any` trait, which enables dynamic typing
//! of any `'static` type through runtime reflection.
//!
//! `Any` itself can be used to get a `TypeId`, and has more features when used
//! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and
//! `as_ref` methods, to test if the contained value is of a given type, and to
//! get a reference to the inner value as a type. As`&mut Any`, there is also
//! the `as_mut` method, for getting a mutable reference to the inner value.
//! `Box<Any>` adds the `move` method, which will unwrap a `Box<T>` from the
//! object. See the extension traits (`*Ext`) for the full details.
//!
//! Note that &Any is limited to testing whether a value is of a specified
//! concrete type, and cannot be used to test whether a type implements a trait.
//!
//! # Examples
//!
//! Consider a situation where we want to log out a value passed to a function.
//! We know the value we're working on implements Show, but we don't know its
//! concrete type. We want to give special treatment to certain types: in this
//! case printing out the length of String values prior to their value.
//! We don't know the concrete type of our value at compile time, so we need to
//! use runtime reflection instead.
//!
//! ```rust
//! use std::fmt::Show;
//! use std::any::{Any, AnyRefExt};
//!
//! // Logger function for any type that implements Show.
//! fn log<T: Any+Show>(value: &T) {
//! let value_any = value as &Any;
//!
//! // try to convert our value to a String. If successful, we want to
//! // output the String's length as well as its value. If not, it's a
//! // different type: just print it out unadorned.
//! match value_any.as_ref::<String>() {
//! Some(as_string) => {
//! println!("String ({}): {}", as_string.len(), as_string);
//! }
//! None => {
//! println!("{}", value);
//! }
//! }
//! }
//!
//! // This function wants to log its parameter out prior to doing work with it.
//! fn do_work<T: Show+'static>(value: &T) {
//! log(value);
//! //...do some other work
//! }
//!
//! fn main() {
//! let my_string = "Hello World".to_string();
//! do_work(&my_string);
//!
//! let my_i8: i8 = 100;
//! do_work(&my_i8);
//! }
//! ```
use mem::{transmute, transmute_copy};
use option::{Option, Some, None};
use raw::TraitObject;
use intrinsics::TypeId;
/// A type with no inhabitants
pub enum Void { }
///////////////////////////////////////////////////////////////////////////////
// Any trait
///////////////////////////////////////////////////////////////////////////////
/// The `Any` trait is implemented by all `'static` types, and can be used for dynamic typing
///
/// Every type with no non-`'static` references implements `Any`, so `Any` can be used as a trait
/// object to emulate the effects dynamic typing.
pub trait Any {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId;
}
impl<T:'static> Any for T {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId {
TypeId::of::<T>()
}
}
///////////////////////////////////////////////////////////////////////////////
// Extension methods for Any trait objects.
// Implemented as three extension traits so that the methods can be generic.
///////////////////////////////////////////////////////////////////////////////
/// Extension methods for a referenced `Any` trait object
pub trait AnyRefExt<'a> {
/// Returns true if the boxed type is the same as `T`
fn is<T:'static>(self) -> bool;
/// Returns some reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_ref<T:'static>(self) -> Option<&'a T>;
}
impl<'a> AnyRefExt<'a> for &'a Any {
#[inline]
fn is<T:'static>(self) -> bool {
// Get TypeId of the type this function is instantiated with
let t = TypeId::of::<T>();
// Get TypeId of the type in the trait object
let boxed = self.get_type_id();
// Compare both TypeIds on equality
t == boxed
}
#[inline]
fn as_ref<T:'static>(self) -> Option<&'a T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
/// Extension methods for a mutable referenced `Any` trait object
pub trait AnyMutRefExt<'a> {
/// Returns some mutable reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_mut<T:'static>(self) -> Option<&'a mut T>;
}
impl<'a> AnyMutRefExt<'a> for &'a mut Any {
#[inline]
fn as_mut<T:'static>(self) -> Option<&'a mut T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
|
random_line_split
|
|
any.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Traits for dynamic typing of any `'static` type (through runtime reflection)
//!
//! This module implements the `Any` trait, which enables dynamic typing
//! of any `'static` type through runtime reflection.
//!
//! `Any` itself can be used to get a `TypeId`, and has more features when used
//! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and
//! `as_ref` methods, to test if the contained value is of a given type, and to
//! get a reference to the inner value as a type. As`&mut Any`, there is also
//! the `as_mut` method, for getting a mutable reference to the inner value.
//! `Box<Any>` adds the `move` method, which will unwrap a `Box<T>` from the
//! object. See the extension traits (`*Ext`) for the full details.
//!
//! Note that &Any is limited to testing whether a value is of a specified
//! concrete type, and cannot be used to test whether a type implements a trait.
//!
//! # Examples
//!
//! Consider a situation where we want to log out a value passed to a function.
//! We know the value we're working on implements Show, but we don't know its
//! concrete type. We want to give special treatment to certain types: in this
//! case printing out the length of String values prior to their value.
//! We don't know the concrete type of our value at compile time, so we need to
//! use runtime reflection instead.
//!
//! ```rust
//! use std::fmt::Show;
//! use std::any::{Any, AnyRefExt};
//!
//! // Logger function for any type that implements Show.
//! fn log<T: Any+Show>(value: &T) {
//! let value_any = value as &Any;
//!
//! // try to convert our value to a String. If successful, we want to
//! // output the String's length as well as its value. If not, it's a
//! // different type: just print it out unadorned.
//! match value_any.as_ref::<String>() {
//! Some(as_string) => {
//! println!("String ({}): {}", as_string.len(), as_string);
//! }
//! None => {
//! println!("{}", value);
//! }
//! }
//! }
//!
//! // This function wants to log its parameter out prior to doing work with it.
//! fn do_work<T: Show+'static>(value: &T) {
//! log(value);
//! //...do some other work
//! }
//!
//! fn main() {
//! let my_string = "Hello World".to_string();
//! do_work(&my_string);
//!
//! let my_i8: i8 = 100;
//! do_work(&my_i8);
//! }
//! ```
use mem::{transmute, transmute_copy};
use option::{Option, Some, None};
use raw::TraitObject;
use intrinsics::TypeId;
/// A type with no inhabitants
pub enum Void { }
///////////////////////////////////////////////////////////////////////////////
// Any trait
///////////////////////////////////////////////////////////////////////////////
/// The `Any` trait is implemented by all `'static` types, and can be used for dynamic typing
///
/// Every type with no non-`'static` references implements `Any`, so `Any` can be used as a trait
/// object to emulate the effects dynamic typing.
pub trait Any {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId;
}
impl<T:'static> Any for T {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId {
TypeId::of::<T>()
}
}
///////////////////////////////////////////////////////////////////////////////
// Extension methods for Any trait objects.
// Implemented as three extension traits so that the methods can be generic.
///////////////////////////////////////////////////////////////////////////////
/// Extension methods for a referenced `Any` trait object
pub trait AnyRefExt<'a> {
/// Returns true if the boxed type is the same as `T`
fn is<T:'static>(self) -> bool;
/// Returns some reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_ref<T:'static>(self) -> Option<&'a T>;
}
impl<'a> AnyRefExt<'a> for &'a Any {
#[inline]
fn
|
<T:'static>(self) -> bool {
// Get TypeId of the type this function is instantiated with
let t = TypeId::of::<T>();
// Get TypeId of the type in the trait object
let boxed = self.get_type_id();
// Compare both TypeIds on equality
t == boxed
}
#[inline]
fn as_ref<T:'static>(self) -> Option<&'a T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
/// Extension methods for a mutable referenced `Any` trait object
pub trait AnyMutRefExt<'a> {
/// Returns some mutable reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_mut<T:'static>(self) -> Option<&'a mut T>;
}
impl<'a> AnyMutRefExt<'a> for &'a mut Any {
#[inline]
fn as_mut<T:'static>(self) -> Option<&'a mut T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
|
is
|
identifier_name
|
any.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Traits for dynamic typing of any `'static` type (through runtime reflection)
//!
//! This module implements the `Any` trait, which enables dynamic typing
//! of any `'static` type through runtime reflection.
//!
//! `Any` itself can be used to get a `TypeId`, and has more features when used
//! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and
//! `as_ref` methods, to test if the contained value is of a given type, and to
//! get a reference to the inner value as a type. As`&mut Any`, there is also
//! the `as_mut` method, for getting a mutable reference to the inner value.
//! `Box<Any>` adds the `move` method, which will unwrap a `Box<T>` from the
//! object. See the extension traits (`*Ext`) for the full details.
//!
//! Note that &Any is limited to testing whether a value is of a specified
//! concrete type, and cannot be used to test whether a type implements a trait.
//!
//! # Examples
//!
//! Consider a situation where we want to log out a value passed to a function.
//! We know the value we're working on implements Show, but we don't know its
//! concrete type. We want to give special treatment to certain types: in this
//! case printing out the length of String values prior to their value.
//! We don't know the concrete type of our value at compile time, so we need to
//! use runtime reflection instead.
//!
//! ```rust
//! use std::fmt::Show;
//! use std::any::{Any, AnyRefExt};
//!
//! // Logger function for any type that implements Show.
//! fn log<T: Any+Show>(value: &T) {
//! let value_any = value as &Any;
//!
//! // try to convert our value to a String. If successful, we want to
//! // output the String's length as well as its value. If not, it's a
//! // different type: just print it out unadorned.
//! match value_any.as_ref::<String>() {
//! Some(as_string) => {
//! println!("String ({}): {}", as_string.len(), as_string);
//! }
//! None => {
//! println!("{}", value);
//! }
//! }
//! }
//!
//! // This function wants to log its parameter out prior to doing work with it.
//! fn do_work<T: Show+'static>(value: &T) {
//! log(value);
//! //...do some other work
//! }
//!
//! fn main() {
//! let my_string = "Hello World".to_string();
//! do_work(&my_string);
//!
//! let my_i8: i8 = 100;
//! do_work(&my_i8);
//! }
//! ```
use mem::{transmute, transmute_copy};
use option::{Option, Some, None};
use raw::TraitObject;
use intrinsics::TypeId;
/// A type with no inhabitants
pub enum Void { }
///////////////////////////////////////////////////////////////////////////////
// Any trait
///////////////////////////////////////////////////////////////////////////////
/// The `Any` trait is implemented by all `'static` types, and can be used for dynamic typing
///
/// Every type with no non-`'static` references implements `Any`, so `Any` can be used as a trait
/// object to emulate the effects dynamic typing.
pub trait Any {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId;
}
impl<T:'static> Any for T {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId {
TypeId::of::<T>()
}
}
///////////////////////////////////////////////////////////////////////////////
// Extension methods for Any trait objects.
// Implemented as three extension traits so that the methods can be generic.
///////////////////////////////////////////////////////////////////////////////
/// Extension methods for a referenced `Any` trait object
pub trait AnyRefExt<'a> {
/// Returns true if the boxed type is the same as `T`
fn is<T:'static>(self) -> bool;
/// Returns some reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_ref<T:'static>(self) -> Option<&'a T>;
}
impl<'a> AnyRefExt<'a> for &'a Any {
#[inline]
fn is<T:'static>(self) -> bool {
// Get TypeId of the type this function is instantiated with
let t = TypeId::of::<T>();
// Get TypeId of the type in the trait object
let boxed = self.get_type_id();
// Compare both TypeIds on equality
t == boxed
}
#[inline]
fn as_ref<T:'static>(self) -> Option<&'a T>
|
}
/// Extension methods for a mutable referenced `Any` trait object
pub trait AnyMutRefExt<'a> {
/// Returns some mutable reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_mut<T:'static>(self) -> Option<&'a mut T>;
}
impl<'a> AnyMutRefExt<'a> for &'a mut Any {
#[inline]
fn as_mut<T:'static>(self) -> Option<&'a mut T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
|
{
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
|
identifier_body
|
any.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Traits for dynamic typing of any `'static` type (through runtime reflection)
//!
//! This module implements the `Any` trait, which enables dynamic typing
//! of any `'static` type through runtime reflection.
//!
//! `Any` itself can be used to get a `TypeId`, and has more features when used
//! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and
//! `as_ref` methods, to test if the contained value is of a given type, and to
//! get a reference to the inner value as a type. As`&mut Any`, there is also
//! the `as_mut` method, for getting a mutable reference to the inner value.
//! `Box<Any>` adds the `move` method, which will unwrap a `Box<T>` from the
//! object. See the extension traits (`*Ext`) for the full details.
//!
//! Note that &Any is limited to testing whether a value is of a specified
//! concrete type, and cannot be used to test whether a type implements a trait.
//!
//! # Examples
//!
//! Consider a situation where we want to log out a value passed to a function.
//! We know the value we're working on implements Show, but we don't know its
//! concrete type. We want to give special treatment to certain types: in this
//! case printing out the length of String values prior to their value.
//! We don't know the concrete type of our value at compile time, so we need to
//! use runtime reflection instead.
//!
//! ```rust
//! use std::fmt::Show;
//! use std::any::{Any, AnyRefExt};
//!
//! // Logger function for any type that implements Show.
//! fn log<T: Any+Show>(value: &T) {
//! let value_any = value as &Any;
//!
//! // try to convert our value to a String. If successful, we want to
//! // output the String's length as well as its value. If not, it's a
//! // different type: just print it out unadorned.
//! match value_any.as_ref::<String>() {
//! Some(as_string) => {
//! println!("String ({}): {}", as_string.len(), as_string);
//! }
//! None => {
//! println!("{}", value);
//! }
//! }
//! }
//!
//! // This function wants to log its parameter out prior to doing work with it.
//! fn do_work<T: Show+'static>(value: &T) {
//! log(value);
//! //...do some other work
//! }
//!
//! fn main() {
//! let my_string = "Hello World".to_string();
//! do_work(&my_string);
//!
//! let my_i8: i8 = 100;
//! do_work(&my_i8);
//! }
//! ```
use mem::{transmute, transmute_copy};
use option::{Option, Some, None};
use raw::TraitObject;
use intrinsics::TypeId;
/// A type with no inhabitants
pub enum Void { }
///////////////////////////////////////////////////////////////////////////////
// Any trait
///////////////////////////////////////////////////////////////////////////////
/// The `Any` trait is implemented by all `'static` types, and can be used for dynamic typing
///
/// Every type with no non-`'static` references implements `Any`, so `Any` can be used as a trait
/// object to emulate the effects dynamic typing.
pub trait Any {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId;
}
impl<T:'static> Any for T {
/// Get the `TypeId` of `self`
fn get_type_id(&self) -> TypeId {
TypeId::of::<T>()
}
}
///////////////////////////////////////////////////////////////////////////////
// Extension methods for Any trait objects.
// Implemented as three extension traits so that the methods can be generic.
///////////////////////////////////////////////////////////////////////////////
/// Extension methods for a referenced `Any` trait object
pub trait AnyRefExt<'a> {
/// Returns true if the boxed type is the same as `T`
fn is<T:'static>(self) -> bool;
/// Returns some reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_ref<T:'static>(self) -> Option<&'a T>;
}
impl<'a> AnyRefExt<'a> for &'a Any {
#[inline]
fn is<T:'static>(self) -> bool {
// Get TypeId of the type this function is instantiated with
let t = TypeId::of::<T>();
// Get TypeId of the type in the trait object
let boxed = self.get_type_id();
// Compare both TypeIds on equality
t == boxed
}
#[inline]
fn as_ref<T:'static>(self) -> Option<&'a T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else
|
}
}
/// Extension methods for a mutable referenced `Any` trait object
pub trait AnyMutRefExt<'a> {
/// Returns some mutable reference to the boxed value if it is of type `T`, or
/// `None` if it isn't.
fn as_mut<T:'static>(self) -> Option<&'a mut T>;
}
impl<'a> AnyMutRefExt<'a> for &'a mut Any {
#[inline]
fn as_mut<T:'static>(self) -> Option<&'a mut T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
|
{
None
}
|
conditional_block
|
reorder.rs
|
//! Reorder items.
//!
//! `mod`, `extern crate` and `use` declarations are reordered in alphabetical
//! order. Trait items are reordered in pre-determined order (associated types
//! and constants comes before methods).
// FIXME(#2455): Reorder trait items.
use std::cmp::{Ord, Ordering};
use rustc_ast::ast;
use rustc_span::{symbol::sym, Span};
use crate::config::{Config, GroupImportsTactic, ImportGranularity};
use crate::imports::{flatten_use_trees, merge_use_trees, SharedPrefix, UseSegment, UseTree};
use crate::items::{is_mod_decl, rewrite_extern_crate, rewrite_mod};
use crate::lists::{itemize_list, write_list, ListFormatting, ListItem};
use crate::rewrite::RewriteContext;
use crate::shape::Shape;
use crate::source_map::LineRangeUtils;
use crate::spanned::Spanned;
use crate::utils::{contains_skip, mk_sp};
use crate::visitor::FmtVisitor;
/// Choose the ordering between the given two items.
fn compare_items(a: &ast::Item, b: &ast::Item) -> Ordering {
match (&a.kind, &b.kind) {
(&ast::ItemKind::Mod(..), &ast::ItemKind::Mod(..)) => {
a.ident.as_str().cmp(&b.ident.as_str())
}
(&ast::ItemKind::ExternCrate(ref a_name), &ast::ItemKind::ExternCrate(ref b_name)) => {
|
let result = a_orig_name.cmp(&b_orig_name);
if result!= Ordering::Equal {
return result;
}
// `extern crate foo as bar;`
// ^^^ Comparing this.
match (a_name, b_name) {
(Some(..), None) => Ordering::Greater,
(None, Some(..)) => Ordering::Less,
(None, None) => Ordering::Equal,
(Some(..), Some(..)) => a.ident.as_str().cmp(&b.ident.as_str()),
}
}
_ => unreachable!(),
}
}
fn wrap_reorderable_items(
context: &RewriteContext<'_>,
list_items: &[ListItem],
shape: Shape,
) -> Option<String> {
let fmt = ListFormatting::new(shape, context.config)
.separator("")
.align_comments(false);
write_list(list_items, &fmt)
}
fn rewrite_reorderable_item(
context: &RewriteContext<'_>,
item: &ast::Item,
shape: Shape,
) -> Option<String> {
match item.kind {
ast::ItemKind::ExternCrate(..) => rewrite_extern_crate(context, item, shape),
ast::ItemKind::Mod(..) => rewrite_mod(context, item, shape),
_ => None,
}
}
/// Rewrite a list of items with reordering and/or regrouping. Every item
/// in `items` must have the same `ast::ItemKind`. Whether reordering, regrouping,
/// or both are done is determined from the `context`.
fn rewrite_reorderable_or_regroupable_items(
context: &RewriteContext<'_>,
reorderable_items: &[&ast::Item],
shape: Shape,
span: Span,
) -> Option<String> {
match reorderable_items[0].kind {
// FIXME: Remove duplicated code.
ast::ItemKind::Use(..) => {
let mut normalized_items: Vec<_> = reorderable_items
.iter()
.filter_map(|item| UseTree::from_ast_with_normalization(context, item))
.collect();
let cloned = normalized_items.clone();
// Add comments before merging.
let list_items = itemize_list(
context.snippet_provider,
cloned.iter(),
"",
";",
|item| item.span().lo(),
|item| item.span().hi(),
|_item| Some("".to_owned()),
span.lo(),
span.hi(),
false,
);
for (item, list_item) in normalized_items.iter_mut().zip(list_items) {
item.list_item = Some(list_item.clone());
}
normalized_items = match context.config.imports_granularity() {
ImportGranularity::Crate => merge_use_trees(normalized_items, SharedPrefix::Crate),
ImportGranularity::Module => {
merge_use_trees(normalized_items, SharedPrefix::Module)
}
ImportGranularity::Item => flatten_use_trees(normalized_items),
ImportGranularity::Preserve => normalized_items,
};
let mut regrouped_items = match context.config.group_imports() {
GroupImportsTactic::Preserve => vec![normalized_items],
GroupImportsTactic::StdExternalCrate => group_imports(normalized_items),
};
if context.config.reorder_imports() {
regrouped_items.iter_mut().for_each(|items| items.sort())
}
// 4 = "use ", 1 = ";"
let nested_shape = shape.offset_left(4)?.sub_width(1)?;
let item_vec: Vec<_> = regrouped_items
.into_iter()
.filter(|use_group|!use_group.is_empty())
.map(|use_group| {
let item_vec: Vec<_> = use_group
.into_iter()
.map(|use_tree| ListItem {
item: use_tree.rewrite_top_level(context, nested_shape),
..use_tree.list_item.unwrap_or_else(ListItem::empty)
})
.collect();
wrap_reorderable_items(context, &item_vec, nested_shape)
})
.collect::<Option<Vec<_>>>()?;
let join_string = format!("\n\n{}", shape.indent.to_string(context.config));
Some(item_vec.join(&join_string))
}
_ => {
let list_items = itemize_list(
context.snippet_provider,
reorderable_items.iter(),
"",
";",
|item| item.span().lo(),
|item| item.span().hi(),
|item| rewrite_reorderable_item(context, item, shape),
span.lo(),
span.hi(),
false,
);
let mut item_pair_vec: Vec<_> = list_items.zip(reorderable_items.iter()).collect();
item_pair_vec.sort_by(|a, b| compare_items(a.1, b.1));
let item_vec: Vec<_> = item_pair_vec.into_iter().map(|pair| pair.0).collect();
wrap_reorderable_items(context, &item_vec, shape)
}
}
}
fn contains_macro_use_attr(item: &ast::Item) -> bool {
crate::attr::contains_name(&item.attrs, sym::macro_use)
}
/// Divides imports into three groups, corresponding to standard, external
/// and local imports. Sorts each subgroup.
fn group_imports(uts: Vec<UseTree>) -> Vec<Vec<UseTree>> {
let mut std_imports = Vec::new();
let mut external_imports = Vec::new();
let mut local_imports = Vec::new();
for ut in uts.into_iter() {
if ut.path.is_empty() {
external_imports.push(ut);
continue;
}
match &ut.path[0] {
UseSegment::Ident(id, _) => match id.as_ref() {
"std" | "alloc" | "core" => std_imports.push(ut),
_ => external_imports.push(ut),
},
UseSegment::Slf(_) | UseSegment::Super(_) | UseSegment::Crate(_) => {
local_imports.push(ut)
}
// These are probably illegal here
UseSegment::Glob | UseSegment::List(_) => external_imports.push(ut),
}
}
vec![std_imports, external_imports, local_imports]
}
/// A simplified version of `ast::ItemKind`.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum ReorderableItemKind {
ExternCrate,
Mod,
Use,
/// An item that cannot be reordered. Either has an unreorderable item kind
/// or an `macro_use` attribute.
Other,
}
impl ReorderableItemKind {
fn from(item: &ast::Item) -> Self {
match item.kind {
_ if contains_macro_use_attr(item) | contains_skip(&item.attrs) => {
ReorderableItemKind::Other
}
ast::ItemKind::ExternCrate(..) => ReorderableItemKind::ExternCrate,
ast::ItemKind::Mod(..) if is_mod_decl(item) => ReorderableItemKind::Mod,
ast::ItemKind::Use(..) => ReorderableItemKind::Use,
_ => ReorderableItemKind::Other,
}
}
fn is_same_item_kind(self, item: &ast::Item) -> bool {
ReorderableItemKind::from(item) == self
}
fn is_reorderable(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate => config.reorder_imports(),
ReorderableItemKind::Mod => config.reorder_modules(),
ReorderableItemKind::Use => config.reorder_imports(),
ReorderableItemKind::Other => false,
}
}
fn is_regroupable(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate
| ReorderableItemKind::Mod
| ReorderableItemKind::Other => false,
ReorderableItemKind::Use => config.group_imports()!= GroupImportsTactic::Preserve,
}
}
fn in_group(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate | ReorderableItemKind::Mod => true,
ReorderableItemKind::Use => config.group_imports() == GroupImportsTactic::Preserve,
ReorderableItemKind::Other => false,
}
}
}
impl<'b, 'a: 'b> FmtVisitor<'a> {
/// Format items with the same item kind and reorder them, regroup them, or
/// both. If `in_group` is `true`, then the items separated by an empty line
/// will not be reordered together.
fn walk_reorderable_or_regroupable_items(
&mut self,
items: &[&ast::Item],
item_kind: ReorderableItemKind,
in_group: bool,
) -> usize {
let mut last = self.parse_sess.lookup_line_range(items[0].span());
let item_length = items
.iter()
.take_while(|ppi| {
item_kind.is_same_item_kind(&***ppi)
&& (!in_group || {
let current = self.parse_sess.lookup_line_range(ppi.span());
let in_same_group = current.lo < last.hi + 2;
last = current;
in_same_group
})
})
.count();
let items = &items[..item_length];
let at_least_one_in_file_lines = items
.iter()
.any(|item|!out_of_file_lines_range!(self, item.span));
if at_least_one_in_file_lines &&!items.is_empty() {
let lo = items.first().unwrap().span().lo();
let hi = items.last().unwrap().span().hi();
let span = mk_sp(lo, hi);
let rw = rewrite_reorderable_or_regroupable_items(
&self.get_context(),
items,
self.shape(),
span,
);
self.push_rewrite(span, rw);
} else {
for item in items {
self.push_rewrite(item.span, None);
}
}
item_length
}
/// Visits and format the given items. Items are reordered If they are
/// consecutive and reorderable.
pub(crate) fn visit_items_with_reordering(&mut self, mut items: &[&ast::Item]) {
while!items.is_empty() {
// If the next item is a `use`, `extern crate` or `mod`, then extract it and any
// subsequent items that have the same item kind to be reordered within
// `walk_reorderable_items`. Otherwise, just format the next item for output.
let item_kind = ReorderableItemKind::from(items[0]);
if item_kind.is_reorderable(self.config) || item_kind.is_regroupable(self.config) {
let visited_items_num = self.walk_reorderable_or_regroupable_items(
items,
item_kind,
item_kind.in_group(self.config),
);
let (_, rest) = items.split_at(visited_items_num);
items = rest;
} else {
// Reaching here means items were not reordered. There must be at least
// one item left in `items`, so calling `unwrap()` here is safe.
let (item, rest) = items.split_first().unwrap();
self.visit_item(item);
items = rest;
}
}
}
}
|
// `extern crate foo as bar;`
// ^^^ Comparing this.
let a_orig_name = a_name.map_or_else(|| a.ident.as_str(), rustc_span::Symbol::as_str);
let b_orig_name = b_name.map_or_else(|| b.ident.as_str(), rustc_span::Symbol::as_str);
|
random_line_split
|
reorder.rs
|
//! Reorder items.
//!
//! `mod`, `extern crate` and `use` declarations are reordered in alphabetical
//! order. Trait items are reordered in pre-determined order (associated types
//! and constants comes before methods).
// FIXME(#2455): Reorder trait items.
use std::cmp::{Ord, Ordering};
use rustc_ast::ast;
use rustc_span::{symbol::sym, Span};
use crate::config::{Config, GroupImportsTactic, ImportGranularity};
use crate::imports::{flatten_use_trees, merge_use_trees, SharedPrefix, UseSegment, UseTree};
use crate::items::{is_mod_decl, rewrite_extern_crate, rewrite_mod};
use crate::lists::{itemize_list, write_list, ListFormatting, ListItem};
use crate::rewrite::RewriteContext;
use crate::shape::Shape;
use crate::source_map::LineRangeUtils;
use crate::spanned::Spanned;
use crate::utils::{contains_skip, mk_sp};
use crate::visitor::FmtVisitor;
/// Choose the ordering between the given two items.
fn compare_items(a: &ast::Item, b: &ast::Item) -> Ordering {
match (&a.kind, &b.kind) {
(&ast::ItemKind::Mod(..), &ast::ItemKind::Mod(..)) => {
a.ident.as_str().cmp(&b.ident.as_str())
}
(&ast::ItemKind::ExternCrate(ref a_name), &ast::ItemKind::ExternCrate(ref b_name)) => {
// `extern crate foo as bar;`
// ^^^ Comparing this.
let a_orig_name = a_name.map_or_else(|| a.ident.as_str(), rustc_span::Symbol::as_str);
let b_orig_name = b_name.map_or_else(|| b.ident.as_str(), rustc_span::Symbol::as_str);
let result = a_orig_name.cmp(&b_orig_name);
if result!= Ordering::Equal {
return result;
}
// `extern crate foo as bar;`
// ^^^ Comparing this.
match (a_name, b_name) {
(Some(..), None) => Ordering::Greater,
(None, Some(..)) => Ordering::Less,
(None, None) => Ordering::Equal,
(Some(..), Some(..)) => a.ident.as_str().cmp(&b.ident.as_str()),
}
}
_ => unreachable!(),
}
}
fn wrap_reorderable_items(
context: &RewriteContext<'_>,
list_items: &[ListItem],
shape: Shape,
) -> Option<String> {
let fmt = ListFormatting::new(shape, context.config)
.separator("")
.align_comments(false);
write_list(list_items, &fmt)
}
fn rewrite_reorderable_item(
context: &RewriteContext<'_>,
item: &ast::Item,
shape: Shape,
) -> Option<String> {
match item.kind {
ast::ItemKind::ExternCrate(..) => rewrite_extern_crate(context, item, shape),
ast::ItemKind::Mod(..) => rewrite_mod(context, item, shape),
_ => None,
}
}
/// Rewrite a list of items with reordering and/or regrouping. Every item
/// in `items` must have the same `ast::ItemKind`. Whether reordering, regrouping,
/// or both are done is determined from the `context`.
fn rewrite_reorderable_or_regroupable_items(
context: &RewriteContext<'_>,
reorderable_items: &[&ast::Item],
shape: Shape,
span: Span,
) -> Option<String> {
match reorderable_items[0].kind {
// FIXME: Remove duplicated code.
ast::ItemKind::Use(..) => {
let mut normalized_items: Vec<_> = reorderable_items
.iter()
.filter_map(|item| UseTree::from_ast_with_normalization(context, item))
.collect();
let cloned = normalized_items.clone();
// Add comments before merging.
let list_items = itemize_list(
context.snippet_provider,
cloned.iter(),
"",
";",
|item| item.span().lo(),
|item| item.span().hi(),
|_item| Some("".to_owned()),
span.lo(),
span.hi(),
false,
);
for (item, list_item) in normalized_items.iter_mut().zip(list_items) {
item.list_item = Some(list_item.clone());
}
normalized_items = match context.config.imports_granularity() {
ImportGranularity::Crate => merge_use_trees(normalized_items, SharedPrefix::Crate),
ImportGranularity::Module => {
merge_use_trees(normalized_items, SharedPrefix::Module)
}
ImportGranularity::Item => flatten_use_trees(normalized_items),
ImportGranularity::Preserve => normalized_items,
};
let mut regrouped_items = match context.config.group_imports() {
GroupImportsTactic::Preserve => vec![normalized_items],
GroupImportsTactic::StdExternalCrate => group_imports(normalized_items),
};
if context.config.reorder_imports() {
regrouped_items.iter_mut().for_each(|items| items.sort())
}
// 4 = "use ", 1 = ";"
let nested_shape = shape.offset_left(4)?.sub_width(1)?;
let item_vec: Vec<_> = regrouped_items
.into_iter()
.filter(|use_group|!use_group.is_empty())
.map(|use_group| {
let item_vec: Vec<_> = use_group
.into_iter()
.map(|use_tree| ListItem {
item: use_tree.rewrite_top_level(context, nested_shape),
..use_tree.list_item.unwrap_or_else(ListItem::empty)
})
.collect();
wrap_reorderable_items(context, &item_vec, nested_shape)
})
.collect::<Option<Vec<_>>>()?;
let join_string = format!("\n\n{}", shape.indent.to_string(context.config));
Some(item_vec.join(&join_string))
}
_ => {
let list_items = itemize_list(
context.snippet_provider,
reorderable_items.iter(),
"",
";",
|item| item.span().lo(),
|item| item.span().hi(),
|item| rewrite_reorderable_item(context, item, shape),
span.lo(),
span.hi(),
false,
);
let mut item_pair_vec: Vec<_> = list_items.zip(reorderable_items.iter()).collect();
item_pair_vec.sort_by(|a, b| compare_items(a.1, b.1));
let item_vec: Vec<_> = item_pair_vec.into_iter().map(|pair| pair.0).collect();
wrap_reorderable_items(context, &item_vec, shape)
}
}
}
fn contains_macro_use_attr(item: &ast::Item) -> bool {
crate::attr::contains_name(&item.attrs, sym::macro_use)
}
/// Divides imports into three groups, corresponding to standard, external
/// and local imports. Sorts each subgroup.
fn group_imports(uts: Vec<UseTree>) -> Vec<Vec<UseTree>> {
let mut std_imports = Vec::new();
let mut external_imports = Vec::new();
let mut local_imports = Vec::new();
for ut in uts.into_iter() {
if ut.path.is_empty() {
external_imports.push(ut);
continue;
}
match &ut.path[0] {
UseSegment::Ident(id, _) => match id.as_ref() {
"std" | "alloc" | "core" => std_imports.push(ut),
_ => external_imports.push(ut),
},
UseSegment::Slf(_) | UseSegment::Super(_) | UseSegment::Crate(_) => {
local_imports.push(ut)
}
// These are probably illegal here
UseSegment::Glob | UseSegment::List(_) => external_imports.push(ut),
}
}
vec![std_imports, external_imports, local_imports]
}
/// A simplified version of `ast::ItemKind`.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum ReorderableItemKind {
ExternCrate,
Mod,
Use,
/// An item that cannot be reordered. Either has an unreorderable item kind
/// or an `macro_use` attribute.
Other,
}
impl ReorderableItemKind {
fn from(item: &ast::Item) -> Self {
match item.kind {
_ if contains_macro_use_attr(item) | contains_skip(&item.attrs) => {
ReorderableItemKind::Other
}
ast::ItemKind::ExternCrate(..) => ReorderableItemKind::ExternCrate,
ast::ItemKind::Mod(..) if is_mod_decl(item) => ReorderableItemKind::Mod,
ast::ItemKind::Use(..) => ReorderableItemKind::Use,
_ => ReorderableItemKind::Other,
}
}
fn is_same_item_kind(self, item: &ast::Item) -> bool {
ReorderableItemKind::from(item) == self
}
fn is_reorderable(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate => config.reorder_imports(),
ReorderableItemKind::Mod => config.reorder_modules(),
ReorderableItemKind::Use => config.reorder_imports(),
ReorderableItemKind::Other => false,
}
}
fn is_regroupable(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate
| ReorderableItemKind::Mod
| ReorderableItemKind::Other => false,
ReorderableItemKind::Use => config.group_imports()!= GroupImportsTactic::Preserve,
}
}
fn in_group(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate | ReorderableItemKind::Mod => true,
ReorderableItemKind::Use => config.group_imports() == GroupImportsTactic::Preserve,
ReorderableItemKind::Other => false,
}
}
}
impl<'b, 'a: 'b> FmtVisitor<'a> {
/// Format items with the same item kind and reorder them, regroup them, or
/// both. If `in_group` is `true`, then the items separated by an empty line
/// will not be reordered together.
fn walk_reorderable_or_regroupable_items(
&mut self,
items: &[&ast::Item],
item_kind: ReorderableItemKind,
in_group: bool,
) -> usize {
let mut last = self.parse_sess.lookup_line_range(items[0].span());
let item_length = items
.iter()
.take_while(|ppi| {
item_kind.is_same_item_kind(&***ppi)
&& (!in_group || {
let current = self.parse_sess.lookup_line_range(ppi.span());
let in_same_group = current.lo < last.hi + 2;
last = current;
in_same_group
})
})
.count();
let items = &items[..item_length];
let at_least_one_in_file_lines = items
.iter()
.any(|item|!out_of_file_lines_range!(self, item.span));
if at_least_one_in_file_lines &&!items.is_empty() {
let lo = items.first().unwrap().span().lo();
let hi = items.last().unwrap().span().hi();
let span = mk_sp(lo, hi);
let rw = rewrite_reorderable_or_regroupable_items(
&self.get_context(),
items,
self.shape(),
span,
);
self.push_rewrite(span, rw);
} else {
for item in items {
self.push_rewrite(item.span, None);
}
}
item_length
}
/// Visits and format the given items. Items are reordered If they are
/// consecutive and reorderable.
pub(crate) fn
|
(&mut self, mut items: &[&ast::Item]) {
while!items.is_empty() {
// If the next item is a `use`, `extern crate` or `mod`, then extract it and any
// subsequent items that have the same item kind to be reordered within
// `walk_reorderable_items`. Otherwise, just format the next item for output.
let item_kind = ReorderableItemKind::from(items[0]);
if item_kind.is_reorderable(self.config) || item_kind.is_regroupable(self.config) {
let visited_items_num = self.walk_reorderable_or_regroupable_items(
items,
item_kind,
item_kind.in_group(self.config),
);
let (_, rest) = items.split_at(visited_items_num);
items = rest;
} else {
// Reaching here means items were not reordered. There must be at least
// one item left in `items`, so calling `unwrap()` here is safe.
let (item, rest) = items.split_first().unwrap();
self.visit_item(item);
items = rest;
}
}
}
}
|
visit_items_with_reordering
|
identifier_name
|
reorder.rs
|
//! Reorder items.
//!
//! `mod`, `extern crate` and `use` declarations are reordered in alphabetical
//! order. Trait items are reordered in pre-determined order (associated types
//! and constants comes before methods).
// FIXME(#2455): Reorder trait items.
use std::cmp::{Ord, Ordering};
use rustc_ast::ast;
use rustc_span::{symbol::sym, Span};
use crate::config::{Config, GroupImportsTactic, ImportGranularity};
use crate::imports::{flatten_use_trees, merge_use_trees, SharedPrefix, UseSegment, UseTree};
use crate::items::{is_mod_decl, rewrite_extern_crate, rewrite_mod};
use crate::lists::{itemize_list, write_list, ListFormatting, ListItem};
use crate::rewrite::RewriteContext;
use crate::shape::Shape;
use crate::source_map::LineRangeUtils;
use crate::spanned::Spanned;
use crate::utils::{contains_skip, mk_sp};
use crate::visitor::FmtVisitor;
/// Choose the ordering between the given two items.
fn compare_items(a: &ast::Item, b: &ast::Item) -> Ordering {
match (&a.kind, &b.kind) {
(&ast::ItemKind::Mod(..), &ast::ItemKind::Mod(..)) => {
a.ident.as_str().cmp(&b.ident.as_str())
}
(&ast::ItemKind::ExternCrate(ref a_name), &ast::ItemKind::ExternCrate(ref b_name)) => {
// `extern crate foo as bar;`
// ^^^ Comparing this.
let a_orig_name = a_name.map_or_else(|| a.ident.as_str(), rustc_span::Symbol::as_str);
let b_orig_name = b_name.map_or_else(|| b.ident.as_str(), rustc_span::Symbol::as_str);
let result = a_orig_name.cmp(&b_orig_name);
if result!= Ordering::Equal {
return result;
}
// `extern crate foo as bar;`
// ^^^ Comparing this.
match (a_name, b_name) {
(Some(..), None) => Ordering::Greater,
(None, Some(..)) => Ordering::Less,
(None, None) => Ordering::Equal,
(Some(..), Some(..)) => a.ident.as_str().cmp(&b.ident.as_str()),
}
}
_ => unreachable!(),
}
}
fn wrap_reorderable_items(
context: &RewriteContext<'_>,
list_items: &[ListItem],
shape: Shape,
) -> Option<String> {
let fmt = ListFormatting::new(shape, context.config)
.separator("")
.align_comments(false);
write_list(list_items, &fmt)
}
fn rewrite_reorderable_item(
context: &RewriteContext<'_>,
item: &ast::Item,
shape: Shape,
) -> Option<String> {
match item.kind {
ast::ItemKind::ExternCrate(..) => rewrite_extern_crate(context, item, shape),
ast::ItemKind::Mod(..) => rewrite_mod(context, item, shape),
_ => None,
}
}
/// Rewrite a list of items with reordering and/or regrouping. Every item
/// in `items` must have the same `ast::ItemKind`. Whether reordering, regrouping,
/// or both are done is determined from the `context`.
fn rewrite_reorderable_or_regroupable_items(
context: &RewriteContext<'_>,
reorderable_items: &[&ast::Item],
shape: Shape,
span: Span,
) -> Option<String> {
match reorderable_items[0].kind {
// FIXME: Remove duplicated code.
ast::ItemKind::Use(..) => {
let mut normalized_items: Vec<_> = reorderable_items
.iter()
.filter_map(|item| UseTree::from_ast_with_normalization(context, item))
.collect();
let cloned = normalized_items.clone();
// Add comments before merging.
let list_items = itemize_list(
context.snippet_provider,
cloned.iter(),
"",
";",
|item| item.span().lo(),
|item| item.span().hi(),
|_item| Some("".to_owned()),
span.lo(),
span.hi(),
false,
);
for (item, list_item) in normalized_items.iter_mut().zip(list_items) {
item.list_item = Some(list_item.clone());
}
normalized_items = match context.config.imports_granularity() {
ImportGranularity::Crate => merge_use_trees(normalized_items, SharedPrefix::Crate),
ImportGranularity::Module => {
merge_use_trees(normalized_items, SharedPrefix::Module)
}
ImportGranularity::Item => flatten_use_trees(normalized_items),
ImportGranularity::Preserve => normalized_items,
};
let mut regrouped_items = match context.config.group_imports() {
GroupImportsTactic::Preserve => vec![normalized_items],
GroupImportsTactic::StdExternalCrate => group_imports(normalized_items),
};
if context.config.reorder_imports() {
regrouped_items.iter_mut().for_each(|items| items.sort())
}
// 4 = "use ", 1 = ";"
let nested_shape = shape.offset_left(4)?.sub_width(1)?;
let item_vec: Vec<_> = regrouped_items
.into_iter()
.filter(|use_group|!use_group.is_empty())
.map(|use_group| {
let item_vec: Vec<_> = use_group
.into_iter()
.map(|use_tree| ListItem {
item: use_tree.rewrite_top_level(context, nested_shape),
..use_tree.list_item.unwrap_or_else(ListItem::empty)
})
.collect();
wrap_reorderable_items(context, &item_vec, nested_shape)
})
.collect::<Option<Vec<_>>>()?;
let join_string = format!("\n\n{}", shape.indent.to_string(context.config));
Some(item_vec.join(&join_string))
}
_ => {
let list_items = itemize_list(
context.snippet_provider,
reorderable_items.iter(),
"",
";",
|item| item.span().lo(),
|item| item.span().hi(),
|item| rewrite_reorderable_item(context, item, shape),
span.lo(),
span.hi(),
false,
);
let mut item_pair_vec: Vec<_> = list_items.zip(reorderable_items.iter()).collect();
item_pair_vec.sort_by(|a, b| compare_items(a.1, b.1));
let item_vec: Vec<_> = item_pair_vec.into_iter().map(|pair| pair.0).collect();
wrap_reorderable_items(context, &item_vec, shape)
}
}
}
fn contains_macro_use_attr(item: &ast::Item) -> bool
|
/// Divides imports into three groups, corresponding to standard, external
/// and local imports. Sorts each subgroup.
fn group_imports(uts: Vec<UseTree>) -> Vec<Vec<UseTree>> {
let mut std_imports = Vec::new();
let mut external_imports = Vec::new();
let mut local_imports = Vec::new();
for ut in uts.into_iter() {
if ut.path.is_empty() {
external_imports.push(ut);
continue;
}
match &ut.path[0] {
UseSegment::Ident(id, _) => match id.as_ref() {
"std" | "alloc" | "core" => std_imports.push(ut),
_ => external_imports.push(ut),
},
UseSegment::Slf(_) | UseSegment::Super(_) | UseSegment::Crate(_) => {
local_imports.push(ut)
}
// These are probably illegal here
UseSegment::Glob | UseSegment::List(_) => external_imports.push(ut),
}
}
vec![std_imports, external_imports, local_imports]
}
/// A simplified version of `ast::ItemKind`.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum ReorderableItemKind {
ExternCrate,
Mod,
Use,
/// An item that cannot be reordered. Either has an unreorderable item kind
/// or an `macro_use` attribute.
Other,
}
impl ReorderableItemKind {
fn from(item: &ast::Item) -> Self {
match item.kind {
_ if contains_macro_use_attr(item) | contains_skip(&item.attrs) => {
ReorderableItemKind::Other
}
ast::ItemKind::ExternCrate(..) => ReorderableItemKind::ExternCrate,
ast::ItemKind::Mod(..) if is_mod_decl(item) => ReorderableItemKind::Mod,
ast::ItemKind::Use(..) => ReorderableItemKind::Use,
_ => ReorderableItemKind::Other,
}
}
fn is_same_item_kind(self, item: &ast::Item) -> bool {
ReorderableItemKind::from(item) == self
}
fn is_reorderable(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate => config.reorder_imports(),
ReorderableItemKind::Mod => config.reorder_modules(),
ReorderableItemKind::Use => config.reorder_imports(),
ReorderableItemKind::Other => false,
}
}
fn is_regroupable(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate
| ReorderableItemKind::Mod
| ReorderableItemKind::Other => false,
ReorderableItemKind::Use => config.group_imports()!= GroupImportsTactic::Preserve,
}
}
fn in_group(self, config: &Config) -> bool {
match self {
ReorderableItemKind::ExternCrate | ReorderableItemKind::Mod => true,
ReorderableItemKind::Use => config.group_imports() == GroupImportsTactic::Preserve,
ReorderableItemKind::Other => false,
}
}
}
impl<'b, 'a: 'b> FmtVisitor<'a> {
/// Format items with the same item kind and reorder them, regroup them, or
/// both. If `in_group` is `true`, then the items separated by an empty line
/// will not be reordered together.
fn walk_reorderable_or_regroupable_items(
&mut self,
items: &[&ast::Item],
item_kind: ReorderableItemKind,
in_group: bool,
) -> usize {
let mut last = self.parse_sess.lookup_line_range(items[0].span());
let item_length = items
.iter()
.take_while(|ppi| {
item_kind.is_same_item_kind(&***ppi)
&& (!in_group || {
let current = self.parse_sess.lookup_line_range(ppi.span());
let in_same_group = current.lo < last.hi + 2;
last = current;
in_same_group
})
})
.count();
let items = &items[..item_length];
let at_least_one_in_file_lines = items
.iter()
.any(|item|!out_of_file_lines_range!(self, item.span));
if at_least_one_in_file_lines &&!items.is_empty() {
let lo = items.first().unwrap().span().lo();
let hi = items.last().unwrap().span().hi();
let span = mk_sp(lo, hi);
let rw = rewrite_reorderable_or_regroupable_items(
&self.get_context(),
items,
self.shape(),
span,
);
self.push_rewrite(span, rw);
} else {
for item in items {
self.push_rewrite(item.span, None);
}
}
item_length
}
/// Visits and format the given items. Items are reordered If they are
/// consecutive and reorderable.
pub(crate) fn visit_items_with_reordering(&mut self, mut items: &[&ast::Item]) {
while!items.is_empty() {
// If the next item is a `use`, `extern crate` or `mod`, then extract it and any
// subsequent items that have the same item kind to be reordered within
// `walk_reorderable_items`. Otherwise, just format the next item for output.
let item_kind = ReorderableItemKind::from(items[0]);
if item_kind.is_reorderable(self.config) || item_kind.is_regroupable(self.config) {
let visited_items_num = self.walk_reorderable_or_regroupable_items(
items,
item_kind,
item_kind.in_group(self.config),
);
let (_, rest) = items.split_at(visited_items_num);
items = rest;
} else {
// Reaching here means items were not reordered. There must be at least
// one item left in `items`, so calling `unwrap()` here is safe.
let (item, rest) = items.split_first().unwrap();
self.visit_item(item);
items = rest;
}
}
}
}
|
{
crate::attr::contains_name(&item.attrs, sym::macro_use)
}
|
identifier_body
|
control-flow.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(generators, generator_trait)]
use std::ops::{GeneratorState, Generator};
fn
|
<T>(mut amt: usize, mut t: T) -> T::Return
where T: Generator<Yield = ()>
{
loop {
match unsafe { t.resume() } {
GeneratorState::Yielded(()) => amt = amt.checked_sub(1).unwrap(),
GeneratorState::Complete(ret) => {
assert_eq!(amt, 0);
return ret
}
}
}
}
fn main() {
finish(1, || yield);
finish(8, || {
for _ in 0..8 {
yield;
}
});
finish(1, || {
if true {
yield;
} else {
}
});
finish(1, || {
if false {
} else {
yield;
}
});
finish(2, || {
if { yield; false } {
yield;
panic!()
}
yield
});
}
|
finish
|
identifier_name
|
control-flow.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(generators, generator_trait)]
use std::ops::{GeneratorState, Generator};
fn finish<T>(mut amt: usize, mut t: T) -> T::Return
where T: Generator<Yield = ()>
{
loop {
match unsafe { t.resume() } {
GeneratorState::Yielded(()) => amt = amt.checked_sub(1).unwrap(),
GeneratorState::Complete(ret) => {
assert_eq!(amt, 0);
return ret
}
}
}
}
fn main() {
finish(1, || yield);
finish(8, || {
for _ in 0..8 {
yield;
}
});
finish(1, || {
if true {
yield;
} else {
}
});
finish(1, || {
if false {
} else {
yield;
}
});
finish(2, || {
if { yield; false } {
|
}
|
yield;
panic!()
}
yield
});
|
random_line_split
|
control-flow.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(generators, generator_trait)]
use std::ops::{GeneratorState, Generator};
fn finish<T>(mut amt: usize, mut t: T) -> T::Return
where T: Generator<Yield = ()>
{
loop {
match unsafe { t.resume() } {
GeneratorState::Yielded(()) => amt = amt.checked_sub(1).unwrap(),
GeneratorState::Complete(ret) => {
assert_eq!(amt, 0);
return ret
}
}
}
}
fn main() {
finish(1, || yield);
finish(8, || {
for _ in 0..8 {
yield;
}
});
finish(1, || {
if true {
yield;
} else {
}
});
finish(1, || {
if false {
} else {
yield;
}
});
finish(2, || {
if { yield; false }
|
yield
});
}
|
{
yield;
panic!()
}
|
conditional_block
|
control-flow.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(generators, generator_trait)]
use std::ops::{GeneratorState, Generator};
fn finish<T>(mut amt: usize, mut t: T) -> T::Return
where T: Generator<Yield = ()>
|
fn main() {
finish(1, || yield);
finish(8, || {
for _ in 0..8 {
yield;
}
});
finish(1, || {
if true {
yield;
} else {
}
});
finish(1, || {
if false {
} else {
yield;
}
});
finish(2, || {
if { yield; false } {
yield;
panic!()
}
yield
});
}
|
{
loop {
match unsafe { t.resume() } {
GeneratorState::Yielded(()) => amt = amt.checked_sub(1).unwrap(),
GeneratorState::Complete(ret) => {
assert_eq!(amt, 0);
return ret
}
}
}
}
|
identifier_body
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![deny(warnings)]
#![deny(missing_docs)]
// FIXME(bholley): We need to blanket-allow unsafe code in order to make the
// gecko atom!() macro work. When Rust 1.14 is released [1], we can uncomment
// the commented-out attributes in regen_atoms.py and go back to denying unsafe
// code by default.
//
// [1] https://github.com/rust-lang/rust/issues/15701#issuecomment-251900615
//#![deny(unsafe_code)]
#![allow(unused_unsafe)]
#![recursion_limit = "500"] // For define_css_keyword_enum! in -moz-appearance
extern crate app_units;
extern crate atomic_refcell;
#[macro_use]
extern crate bitflags;
#[cfg(feature = "gecko")] #[macro_use] #[no_link] extern crate cfg_if;
#[macro_use] extern crate cssparser;
extern crate euclid;
extern crate fnv;
#[cfg(feature = "gecko")] #[macro_use] pub mod gecko_string_cache;
extern crate heapsize;
#[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate html5ever_atoms;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
#[cfg(feature = "gecko")] extern crate nsstring_vendor as nsstring;
extern crate num_integer;
extern crate num_traits;
extern crate ordered_float;
extern crate owning_ref;
extern crate parking_lot;
extern crate pdqsort;
extern crate rayon;
extern crate selectors;
#[cfg(feature = "servo")] #[macro_use] extern crate serde_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style_traits;
extern crate time;
#[allow(unused_extern_crates)]
extern crate unicode_segmentation;
pub mod animation;
#[allow(missing_docs)] // TODO.
pub mod attr;
pub mod bezier;
pub mod bloom;
pub mod cache;
pub mod cascade_info;
pub mod context;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod element_state;
#[cfg(feature = "servo")] mod encoding_support;
pub mod error_reporting;
pub mod font_face;
pub mod font_metrics;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings;
pub mod keyframes;
#[allow(missing_docs)] // TODO.
pub mod logical_geometry;
pub mod matching;
pub mod media_queries;
pub mod owning_handle;
pub mod parallel;
pub mod parser;
pub mod restyle_hints;
pub mod rule_tree;
pub mod scoped_tls;
pub mod selector_parser;
pub mod stylist;
#[cfg(feature = "servo")] #[allow(unsafe_code)] pub mod servo;
pub mod sequential;
pub mod sink;
pub mod str;
pub mod stylesheets;
pub mod supports;
pub mod thread_state;
pub mod timer;
pub mod traversal;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
pub mod viewport;
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
#[cfg(feature = "gecko")] pub use gecko_string_cache as string_cache;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Namespace;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as Prefix;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as LocalName;
#[cfg(feature = "servo")] pub use servo_atoms::Atom;
#[cfg(feature = "servo")] pub use html5ever_atoms::Prefix;
#[cfg(feature = "servo")] pub use html5ever_atoms::LocalName;
#[cfg(feature = "servo")] pub use html5ever_atoms::Namespace;
/// The CSS properties supported by the style system.
/// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
#[deny(missing_docs)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code, missing_docs)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( $name: ident )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
/// Returns whether the two arguments point to the same value.
#[inline]
pub fn arc_ptr_eq<T:'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}
/// Serializes as CSS a comma-separated list of any `T` that supports being
/// serialized as CSS.
pub fn serialize_comma_separated_list<W, T>(dest: &mut W,
list: &[T])
-> fmt::Result
where W: fmt::Write,
T: ToCss,
{
if list.is_empty()
|
try!(list[0].to_css(dest));
for item in list.iter().skip(1) {
try!(write!(dest, ", "));
try!(item.to_css(dest));
}
Ok(())
}
|
{
return Ok(());
}
|
conditional_block
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![deny(warnings)]
#![deny(missing_docs)]
// FIXME(bholley): We need to blanket-allow unsafe code in order to make the
// gecko atom!() macro work. When Rust 1.14 is released [1], we can uncomment
// the commented-out attributes in regen_atoms.py and go back to denying unsafe
// code by default.
//
// [1] https://github.com/rust-lang/rust/issues/15701#issuecomment-251900615
//#![deny(unsafe_code)]
#![allow(unused_unsafe)]
#![recursion_limit = "500"] // For define_css_keyword_enum! in -moz-appearance
extern crate app_units;
extern crate atomic_refcell;
#[macro_use]
extern crate bitflags;
#[cfg(feature = "gecko")] #[macro_use] #[no_link] extern crate cfg_if;
#[macro_use] extern crate cssparser;
extern crate euclid;
extern crate fnv;
#[cfg(feature = "gecko")] #[macro_use] pub mod gecko_string_cache;
extern crate heapsize;
#[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate html5ever_atoms;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
#[cfg(feature = "gecko")] extern crate nsstring_vendor as nsstring;
extern crate num_integer;
extern crate num_traits;
extern crate ordered_float;
extern crate owning_ref;
extern crate parking_lot;
extern crate pdqsort;
extern crate rayon;
extern crate selectors;
#[cfg(feature = "servo")] #[macro_use] extern crate serde_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style_traits;
extern crate time;
#[allow(unused_extern_crates)]
extern crate unicode_segmentation;
pub mod animation;
#[allow(missing_docs)] // TODO.
pub mod attr;
pub mod bezier;
pub mod bloom;
pub mod cache;
pub mod cascade_info;
pub mod context;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod element_state;
#[cfg(feature = "servo")] mod encoding_support;
pub mod error_reporting;
pub mod font_face;
pub mod font_metrics;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings;
pub mod keyframes;
#[allow(missing_docs)] // TODO.
pub mod logical_geometry;
pub mod matching;
pub mod media_queries;
pub mod owning_handle;
pub mod parallel;
pub mod parser;
pub mod restyle_hints;
pub mod rule_tree;
pub mod scoped_tls;
pub mod selector_parser;
pub mod stylist;
#[cfg(feature = "servo")] #[allow(unsafe_code)] pub mod servo;
pub mod sequential;
pub mod sink;
pub mod str;
pub mod stylesheets;
pub mod supports;
pub mod thread_state;
pub mod timer;
pub mod traversal;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
pub mod viewport;
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
#[cfg(feature = "gecko")] pub use gecko_string_cache as string_cache;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Namespace;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as Prefix;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as LocalName;
#[cfg(feature = "servo")] pub use servo_atoms::Atom;
#[cfg(feature = "servo")] pub use html5ever_atoms::Prefix;
#[cfg(feature = "servo")] pub use html5ever_atoms::LocalName;
#[cfg(feature = "servo")] pub use html5ever_atoms::Namespace;
/// The CSS properties supported by the style system.
/// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
#[deny(missing_docs)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code, missing_docs)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( $name: ident )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
/// Returns whether the two arguments point to the same value.
#[inline]
pub fn arc_ptr_eq<T:'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}
/// Serializes as CSS a comma-separated list of any `T` that supports being
/// serialized as CSS.
pub fn serialize_comma_separated_list<W, T>(dest: &mut W,
list: &[T])
-> fmt::Result
where W: fmt::Write,
T: ToCss,
|
{
if list.is_empty() {
return Ok(());
}
try!(list[0].to_css(dest));
for item in list.iter().skip(1) {
try!(write!(dest, ", "));
try!(item.to_css(dest));
}
Ok(())
}
|
identifier_body
|
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![deny(warnings)]
#![deny(missing_docs)]
// FIXME(bholley): We need to blanket-allow unsafe code in order to make the
// gecko atom!() macro work. When Rust 1.14 is released [1], we can uncomment
// the commented-out attributes in regen_atoms.py and go back to denying unsafe
// code by default.
//
// [1] https://github.com/rust-lang/rust/issues/15701#issuecomment-251900615
//#![deny(unsafe_code)]
#![allow(unused_unsafe)]
#![recursion_limit = "500"] // For define_css_keyword_enum! in -moz-appearance
extern crate app_units;
extern crate atomic_refcell;
#[macro_use]
extern crate bitflags;
#[cfg(feature = "gecko")] #[macro_use] #[no_link] extern crate cfg_if;
#[macro_use] extern crate cssparser;
extern crate euclid;
extern crate fnv;
#[cfg(feature = "gecko")] #[macro_use] pub mod gecko_string_cache;
extern crate heapsize;
#[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate html5ever_atoms;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
#[cfg(feature = "gecko")] extern crate nsstring_vendor as nsstring;
extern crate num_integer;
extern crate num_traits;
extern crate ordered_float;
extern crate owning_ref;
extern crate parking_lot;
extern crate pdqsort;
extern crate rayon;
extern crate selectors;
#[cfg(feature = "servo")] #[macro_use] extern crate serde_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style_traits;
extern crate time;
#[allow(unused_extern_crates)]
extern crate unicode_segmentation;
pub mod animation;
#[allow(missing_docs)] // TODO.
pub mod attr;
pub mod bezier;
pub mod bloom;
pub mod cache;
pub mod cascade_info;
pub mod context;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod element_state;
#[cfg(feature = "servo")] mod encoding_support;
pub mod error_reporting;
pub mod font_face;
pub mod font_metrics;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings;
pub mod keyframes;
#[allow(missing_docs)] // TODO.
pub mod logical_geometry;
pub mod matching;
pub mod media_queries;
pub mod owning_handle;
pub mod parallel;
pub mod parser;
pub mod restyle_hints;
pub mod rule_tree;
pub mod scoped_tls;
pub mod selector_parser;
pub mod stylist;
#[cfg(feature = "servo")] #[allow(unsafe_code)] pub mod servo;
pub mod sequential;
pub mod sink;
pub mod str;
pub mod stylesheets;
pub mod supports;
pub mod thread_state;
pub mod timer;
pub mod traversal;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
pub mod viewport;
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
#[cfg(feature = "gecko")] pub use gecko_string_cache as string_cache;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Namespace;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as Prefix;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as LocalName;
#[cfg(feature = "servo")] pub use servo_atoms::Atom;
#[cfg(feature = "servo")] pub use html5ever_atoms::Prefix;
#[cfg(feature = "servo")] pub use html5ever_atoms::LocalName;
#[cfg(feature = "servo")] pub use html5ever_atoms::Namespace;
/// The CSS properties supported by the style system.
/// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
#[deny(missing_docs)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code, missing_docs)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( $name: ident )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
/// Returns whether the two arguments point to the same value.
#[inline]
pub fn arc_ptr_eq<T:'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}
/// Serializes as CSS a comma-separated list of any `T` that supports being
/// serialized as CSS.
pub fn
|
<W, T>(dest: &mut W,
list: &[T])
-> fmt::Result
where W: fmt::Write,
T: ToCss,
{
if list.is_empty() {
return Ok(());
}
try!(list[0].to_css(dest));
for item in list.iter().skip(1) {
try!(write!(dest, ", "));
try!(item.to_css(dest));
}
Ok(())
}
|
serialize_comma_separated_list
|
identifier_name
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![deny(warnings)]
#![deny(missing_docs)]
// FIXME(bholley): We need to blanket-allow unsafe code in order to make the
// gecko atom!() macro work. When Rust 1.14 is released [1], we can uncomment
// the commented-out attributes in regen_atoms.py and go back to denying unsafe
// code by default.
//
// [1] https://github.com/rust-lang/rust/issues/15701#issuecomment-251900615
//#![deny(unsafe_code)]
#![allow(unused_unsafe)]
#![recursion_limit = "500"] // For define_css_keyword_enum! in -moz-appearance
extern crate app_units;
extern crate atomic_refcell;
#[macro_use]
extern crate bitflags;
#[cfg(feature = "gecko")] #[macro_use] #[no_link] extern crate cfg_if;
#[macro_use] extern crate cssparser;
extern crate euclid;
extern crate fnv;
#[cfg(feature = "gecko")] #[macro_use] pub mod gecko_string_cache;
extern crate heapsize;
#[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate html5ever_atoms;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
#[cfg(feature = "gecko")] extern crate nsstring_vendor as nsstring;
extern crate num_integer;
extern crate num_traits;
extern crate ordered_float;
extern crate owning_ref;
extern crate parking_lot;
extern crate pdqsort;
extern crate rayon;
extern crate selectors;
#[cfg(feature = "servo")] #[macro_use] extern crate serde_derive;
#[cfg(feature = "servo")] #[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style_traits;
extern crate time;
#[allow(unused_extern_crates)]
extern crate unicode_segmentation;
pub mod animation;
#[allow(missing_docs)] // TODO.
pub mod attr;
pub mod bezier;
pub mod bloom;
pub mod cache;
pub mod cascade_info;
|
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod element_state;
#[cfg(feature = "servo")] mod encoding_support;
pub mod error_reporting;
pub mod font_face;
pub mod font_metrics;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings;
pub mod keyframes;
#[allow(missing_docs)] // TODO.
pub mod logical_geometry;
pub mod matching;
pub mod media_queries;
pub mod owning_handle;
pub mod parallel;
pub mod parser;
pub mod restyle_hints;
pub mod rule_tree;
pub mod scoped_tls;
pub mod selector_parser;
pub mod stylist;
#[cfg(feature = "servo")] #[allow(unsafe_code)] pub mod servo;
pub mod sequential;
pub mod sink;
pub mod str;
pub mod stylesheets;
pub mod supports;
pub mod thread_state;
pub mod timer;
pub mod traversal;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
pub mod viewport;
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
#[cfg(feature = "gecko")] pub use gecko_string_cache as string_cache;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Namespace;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as Prefix;
#[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as LocalName;
#[cfg(feature = "servo")] pub use servo_atoms::Atom;
#[cfg(feature = "servo")] pub use html5ever_atoms::Prefix;
#[cfg(feature = "servo")] pub use html5ever_atoms::LocalName;
#[cfg(feature = "servo")] pub use html5ever_atoms::Namespace;
/// The CSS properties supported by the style system.
/// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
#[deny(missing_docs)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code, missing_docs)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( $name: ident )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
/// Returns whether the two arguments point to the same value.
#[inline]
pub fn arc_ptr_eq<T:'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}
/// Serializes as CSS a comma-separated list of any `T` that supports being
/// serialized as CSS.
pub fn serialize_comma_separated_list<W, T>(dest: &mut W,
list: &[T])
-> fmt::Result
where W: fmt::Write,
T: ToCss,
{
if list.is_empty() {
return Ok(());
}
try!(list[0].to_css(dest));
for item in list.iter().skip(1) {
try!(write!(dest, ", "));
try!(item.to_css(dest));
}
Ok(())
}
|
pub mod context;
|
random_line_split
|
deque.rs
|
use giftr::refs::*;
//use giftr::refs::functional::Ref as Ref;
use giftr::refs::imperative::Ref as Ref;
use std::iter::Iterator;
use std::mem::swap;
use list::List;
#[derive(Clone, Debug)]
pub struct Deque<T: Clone> {
front : Ref<List<T>>,
back : Ref<List<T>>,
}
impl <T: Clone> Deque<T> {
pub fn new() -> Deque<T> {
Deque { front: Ref::new(List::new()), back: Ref::new(List::new())}
}
pub fn push_front(&mut self, x: T) {
self.front.prepend(x);
}
pub fn push_back(&mut self, x: T) {
self.back.prepend(x);
}
pub fn pop_front(&mut self) -> Option<T> {
let front = self.front.pop_front();
if front.is_some() {
front
} else {
// FIXME we should reshuffle here, right?
self.back.pop_back()
}
}
pub fn pop_back(&mut self) -> Option<T> {
let back = self.back.pop_front();
if back.is_some() {
back
} else {
// FIXME we should reshuffle here, right?
self.front.pop_back()
}
}
pub fn reverse(&mut self) {
swap(&mut self.front, &mut self.back);
}
fn to_iter(self) -> Iter<T> {
Iter { deque: Ref::new(self) }
}
pub fn iter(&self) -> Iter<T> {
self.clone().to_iter()
}
pub fn rev_iter(&self) -> Iter<T> {
let mut cln = self.clone();
cln.reverse();
cln.to_iter()
}
}
pub struct Iter<T: Clone> {
deque : Ref<Deque<T>>,
}
impl <T: Clone> Iterator for Iter<T> {
type Item = T;
fn next(&mut self) -> Option<T> {
self.deque.pop_front()
}
}
#[test]
fn
|
() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
assert_eq!(Some(1), d.pop_front());
assert_eq!(Some(2), d.pop_front());
assert_eq!(Some(3), d.pop_front());
assert_eq!(None, d.pop_front());
}
#[test]
fn deque_pop_back() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
assert_eq!(Some(3), d.pop_back());
assert_eq!(Some(2), d.pop_back());
assert_eq!(Some(1), d.pop_back());
assert_eq!(None, d.pop_back());
}
#[test]
fn deque_reverse() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
d.push_back(4);
d.reverse();
assert_eq!(Some(4), d.pop_front());
assert_eq!(Some(3), d.pop_front());
assert_eq!(Some(2), d.pop_front());
assert_eq!(Some(1), d.pop_front());
assert_eq!(None, d.pop_back());
}
#[test]
fn deque_iter() {
let mut d = Ref::new(Deque::new());
d.push_front(2);
d.push_front(1);
d.push_back(3);
d.push_back(4);
let mut i = 1;
for v in d.iter() {
assert_eq!(i, v);
i += 1;
}
//
// let mut i = 4;
// for v in d.rev_iter() {
// assert_eq!(i, v);
// i -= 1;
// }
}
|
deque_pop_front
|
identifier_name
|
deque.rs
|
use giftr::refs::*;
//use giftr::refs::functional::Ref as Ref;
use giftr::refs::imperative::Ref as Ref;
use std::iter::Iterator;
use std::mem::swap;
use list::List;
#[derive(Clone, Debug)]
pub struct Deque<T: Clone> {
front : Ref<List<T>>,
back : Ref<List<T>>,
}
impl <T: Clone> Deque<T> {
pub fn new() -> Deque<T> {
Deque { front: Ref::new(List::new()), back: Ref::new(List::new())}
}
pub fn push_front(&mut self, x: T) {
self.front.prepend(x);
}
pub fn push_back(&mut self, x: T) {
self.back.prepend(x);
}
pub fn pop_front(&mut self) -> Option<T> {
let front = self.front.pop_front();
if front.is_some() {
front
} else {
// FIXME we should reshuffle here, right?
self.back.pop_back()
}
}
pub fn pop_back(&mut self) -> Option<T> {
let back = self.back.pop_front();
if back.is_some() {
back
} else {
// FIXME we should reshuffle here, right?
self.front.pop_back()
}
}
pub fn reverse(&mut self) {
swap(&mut self.front, &mut self.back);
}
fn to_iter(self) -> Iter<T> {
Iter { deque: Ref::new(self) }
}
|
}
pub fn rev_iter(&self) -> Iter<T> {
let mut cln = self.clone();
cln.reverse();
cln.to_iter()
}
}
pub struct Iter<T: Clone> {
deque : Ref<Deque<T>>,
}
impl <T: Clone> Iterator for Iter<T> {
type Item = T;
fn next(&mut self) -> Option<T> {
self.deque.pop_front()
}
}
#[test]
fn deque_pop_front() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
assert_eq!(Some(1), d.pop_front());
assert_eq!(Some(2), d.pop_front());
assert_eq!(Some(3), d.pop_front());
assert_eq!(None, d.pop_front());
}
#[test]
fn deque_pop_back() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
assert_eq!(Some(3), d.pop_back());
assert_eq!(Some(2), d.pop_back());
assert_eq!(Some(1), d.pop_back());
assert_eq!(None, d.pop_back());
}
#[test]
fn deque_reverse() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
d.push_back(4);
d.reverse();
assert_eq!(Some(4), d.pop_front());
assert_eq!(Some(3), d.pop_front());
assert_eq!(Some(2), d.pop_front());
assert_eq!(Some(1), d.pop_front());
assert_eq!(None, d.pop_back());
}
#[test]
fn deque_iter() {
let mut d = Ref::new(Deque::new());
d.push_front(2);
d.push_front(1);
d.push_back(3);
d.push_back(4);
let mut i = 1;
for v in d.iter() {
assert_eq!(i, v);
i += 1;
}
//
// let mut i = 4;
// for v in d.rev_iter() {
// assert_eq!(i, v);
// i -= 1;
// }
}
|
pub fn iter(&self) -> Iter<T> {
self.clone().to_iter()
|
random_line_split
|
deque.rs
|
use giftr::refs::*;
//use giftr::refs::functional::Ref as Ref;
use giftr::refs::imperative::Ref as Ref;
use std::iter::Iterator;
use std::mem::swap;
use list::List;
#[derive(Clone, Debug)]
pub struct Deque<T: Clone> {
front : Ref<List<T>>,
back : Ref<List<T>>,
}
impl <T: Clone> Deque<T> {
pub fn new() -> Deque<T> {
Deque { front: Ref::new(List::new()), back: Ref::new(List::new())}
}
pub fn push_front(&mut self, x: T) {
self.front.prepend(x);
}
pub fn push_back(&mut self, x: T) {
self.back.prepend(x);
}
pub fn pop_front(&mut self) -> Option<T> {
let front = self.front.pop_front();
if front.is_some() {
front
} else
|
}
pub fn pop_back(&mut self) -> Option<T> {
let back = self.back.pop_front();
if back.is_some() {
back
} else {
// FIXME we should reshuffle here, right?
self.front.pop_back()
}
}
pub fn reverse(&mut self) {
swap(&mut self.front, &mut self.back);
}
fn to_iter(self) -> Iter<T> {
Iter { deque: Ref::new(self) }
}
pub fn iter(&self) -> Iter<T> {
self.clone().to_iter()
}
pub fn rev_iter(&self) -> Iter<T> {
let mut cln = self.clone();
cln.reverse();
cln.to_iter()
}
}
pub struct Iter<T: Clone> {
deque : Ref<Deque<T>>,
}
impl <T: Clone> Iterator for Iter<T> {
type Item = T;
fn next(&mut self) -> Option<T> {
self.deque.pop_front()
}
}
#[test]
fn deque_pop_front() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
assert_eq!(Some(1), d.pop_front());
assert_eq!(Some(2), d.pop_front());
assert_eq!(Some(3), d.pop_front());
assert_eq!(None, d.pop_front());
}
#[test]
fn deque_pop_back() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
assert_eq!(Some(3), d.pop_back());
assert_eq!(Some(2), d.pop_back());
assert_eq!(Some(1), d.pop_back());
assert_eq!(None, d.pop_back());
}
#[test]
fn deque_reverse() {
let mut d = Deque::new();
d.push_front(2);
d.push_front(1);
d.push_back(3);
d.push_back(4);
d.reverse();
assert_eq!(Some(4), d.pop_front());
assert_eq!(Some(3), d.pop_front());
assert_eq!(Some(2), d.pop_front());
assert_eq!(Some(1), d.pop_front());
assert_eq!(None, d.pop_back());
}
#[test]
fn deque_iter() {
let mut d = Ref::new(Deque::new());
d.push_front(2);
d.push_front(1);
d.push_back(3);
d.push_back(4);
let mut i = 1;
for v in d.iter() {
assert_eq!(i, v);
i += 1;
}
//
// let mut i = 4;
// for v in d.rev_iter() {
// assert_eq!(i, v);
// i -= 1;
// }
}
|
{
// FIXME we should reshuffle here, right?
self.back.pop_back()
}
|
conditional_block
|
mod.rs
|
/*
Copyright (c) 2016 Saurav Sachidanand
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
//! Some useful constants
pub mod vmcs;
pub mod vmx_cap;
pub mod vmx_exit;
pub mod irq;
use libc::*;
pub const VMX_BASIC_TRUE_CTLS: uint64_t = 1 << 55;
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
random_line_split
|
issue-39559.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Dim {
fn dim() -> usize;
}
enum Dim3 {}
impl Dim for Dim3 {
fn dim() -> usize {
3
}
}
pub struct Vector<T, D: Dim> {
entries: [T; D::dim()],
|
fn main() {}
|
//~^ ERROR no function or associated item named `dim` found for type `D` in the current scope
_dummy: D,
}
|
random_line_split
|
issue-39559.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Dim {
fn dim() -> usize;
}
enum Dim3 {}
impl Dim for Dim3 {
fn dim() -> usize {
3
}
}
pub struct
|
<T, D: Dim> {
entries: [T; D::dim()],
//~^ ERROR no function or associated item named `dim` found for type `D` in the current scope
_dummy: D,
}
fn main() {}
|
Vector
|
identifier_name
|
ifd.rs
|
//! Function for reading TIFF tags
use std::io::{self, Read, Seek};
use std::collections::{HashMap};
use super::stream::{ByteOrder, SmartReader, EndianReader};
use self::Value::{Unsigned, List};
macro_rules! tags {
{$(
$tag:ident
$val:expr;
)*} => {
/// TIFF tag
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Tag {
$($tag,)*
Unknown(u16)
}
impl Tag {
pub fn from_u16(n: u16) -> Tag {
$(if n == $val { Tag::$tag } else)* {
Tag::Unknown(n)
}
}
}
}
}
// Note: These tags appear in the order they are mentioned in the TIFF reference
tags!{
// Baseline tags:
Artist 315; // TODO add support
// grayscale images PhotometricInterpretation 1 or 3
BitsPerSample 258;
CellLength 265; // TODO add support
CellWidth 264; // TODO add support
// palette-color images (PhotometricInterpretation 3)
ColorMap 320; // TODO add support
Compression 259; // TODO add support for 2 and 32773
Copyright 33432; // TODO add support
DateTime 306; // TODO add support
ExtraSamples 338; // TODO add support
FillOrder 266; // TODO add support
FreeByteCounts 289; // TODO add support
FreeOffsets 288; // TODO add support
GrayResponseCurve 291; // TODO add support
GrayResponseUnit 290; // TODO add support
HostComputer 316; // TODO add support
ImageDescription 270; // TODO add support
ImageLength 257;
ImageWidth 256;
Make 271; // TODO add support
MaxSampleValue 281; // TODO add support
MinSampleValue 280; // TODO add support
Model 272; // TODO add support
NewSubfileType 254; // TODO add support
Orientation 274; // TODO add support
PhotometricInterpretation 262;
PlanarConfiguration 284;
ResolutionUnit 296; // TODO add support
RowsPerStrip 278;
SamplesPerPixel 277;
Software 305;
StripByteCounts 279;
StripOffsets 273;
SubfileType 255; // TODO add support
Threshholding 263; // TODO add support
XResolution 282;
YResolution 283;
// Advanced tags
Predictor 317;
}
enum_from_primitive! {
#[derive(Clone, Copy, Debug)]
pub enum Type {
BYTE = 1,
ASCII = 2,
SHORT = 3,
LONG = 4,
RATIONAL = 5,
}
}
#[allow(unused_qualifications)]
#[derive(Debug)]
pub enum Value {
//Signed(i32),
Unsigned(u32),
List(Vec<Value>)
}
impl Value {
pub fn into_u32(self) -> ::image::ImageResult<u32> {
match self {
Unsigned(val) => Ok(val),
val => Err(::image::ImageError::FormatError(format!(
"Expected unsigned integer, {:?} found.", val
)))
}
}
pub fn into_u32_vec(self) -> ::image::ImageResult<Vec<u32>> {
match self {
List(vec) => {
let mut new_vec = Vec::with_capacity(vec.len());
for v in vec {
new_vec.push(v.into_u32()?)
}
Ok(new_vec)
},
Unsigned(val) => Ok(vec![val]),
//_ => Err(::image::FormatError("Tag data malformed.".to_string()))
}
}
}
pub struct Entry {
type_: Type,
count: u32,
offset: [u8; 4],
}
impl ::std::fmt::Debug for Entry {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
fmt.write_str(&format!("Entry {{ type_: {:?}, count: {:?}, offset: {:?} }}",
self.type_,
self.count,
&self.offset
))
}
}
impl Entry {
pub fn new(type_: Type, count: u32, offset: [u8; 4]) -> Entry {
Entry {
type_: type_,
count: count,
offset: offset
}
}
/// Returns a mem_reader for the offset/value field
fn r(&self, byte_order: ByteOrder) -> SmartReader<io::Cursor<Vec<u8>>> {
SmartReader::wrap(
io::Cursor::new(self.offset.to_vec()),
byte_order
)
}
pub fn
|
<R: Read + Seek>(&self, decoder: &mut super::TIFFDecoder<R>)
-> ::image::ImageResult<Value> {
let bo = decoder.byte_order();
match (self.type_, self.count) {
// TODO check if this could give wrong results
// at a different endianess of file/computer.
(Type::BYTE, 1) => Ok(Unsigned(u32::from(self.offset[0]))),
(Type::SHORT, 1) => Ok(Unsigned(u32::from(self.r(bo).read_u16()?))),
(Type::SHORT, 2) => {
let mut r = self.r(bo);
Ok(List(vec![
Unsigned(u32::from(r.read_u16()?)),
Unsigned(u32::from(r.read_u16()?))
]))
},
(Type::SHORT, n) => {
let mut v = Vec::with_capacity(n as usize);
try!(decoder.goto_offset(try!(self.r(bo).read_u32())));
for _ in 0.. n {
v.push(Unsigned(u32::from(decoder.read_short()?)))
}
Ok(List(v))
},
(Type::LONG, 1) => Ok(Unsigned(try!(self.r(bo).read_u32()))),
(Type::LONG, n) => {
let mut v = Vec::with_capacity(n as usize);
try!(decoder.goto_offset(try!(self.r(bo).read_u32())));
for _ in 0.. n {
v.push(Unsigned(try!(decoder.read_long())))
}
Ok(List(v))
}
_ => Err(::image::ImageError::UnsupportedError("Unsupported data type.".to_string()))
}
}
}
/// Type representing an Image File Directory
pub type Directory = HashMap<Tag, Entry>;
|
val
|
identifier_name
|
ifd.rs
|
//! Function for reading TIFF tags
use std::io::{self, Read, Seek};
use std::collections::{HashMap};
use super::stream::{ByteOrder, SmartReader, EndianReader};
use self::Value::{Unsigned, List};
macro_rules! tags {
{$(
$tag:ident
$val:expr;
)*} => {
/// TIFF tag
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Tag {
$($tag,)*
Unknown(u16)
}
impl Tag {
pub fn from_u16(n: u16) -> Tag {
$(if n == $val { Tag::$tag } else)* {
Tag::Unknown(n)
}
}
}
}
}
// Note: These tags appear in the order they are mentioned in the TIFF reference
tags!{
// Baseline tags:
Artist 315; // TODO add support
// grayscale images PhotometricInterpretation 1 or 3
BitsPerSample 258;
CellLength 265; // TODO add support
CellWidth 264; // TODO add support
// palette-color images (PhotometricInterpretation 3)
ColorMap 320; // TODO add support
Compression 259; // TODO add support for 2 and 32773
Copyright 33432; // TODO add support
DateTime 306; // TODO add support
ExtraSamples 338; // TODO add support
FillOrder 266; // TODO add support
FreeByteCounts 289; // TODO add support
FreeOffsets 288; // TODO add support
GrayResponseCurve 291; // TODO add support
GrayResponseUnit 290; // TODO add support
HostComputer 316; // TODO add support
ImageDescription 270; // TODO add support
ImageLength 257;
ImageWidth 256;
Make 271; // TODO add support
MaxSampleValue 281; // TODO add support
MinSampleValue 280; // TODO add support
Model 272; // TODO add support
NewSubfileType 254; // TODO add support
Orientation 274; // TODO add support
PhotometricInterpretation 262;
PlanarConfiguration 284;
ResolutionUnit 296; // TODO add support
RowsPerStrip 278;
SamplesPerPixel 277;
Software 305;
StripByteCounts 279;
StripOffsets 273;
SubfileType 255; // TODO add support
Threshholding 263; // TODO add support
XResolution 282;
YResolution 283;
// Advanced tags
Predictor 317;
}
enum_from_primitive! {
#[derive(Clone, Copy, Debug)]
pub enum Type {
BYTE = 1,
ASCII = 2,
SHORT = 3,
LONG = 4,
RATIONAL = 5,
}
}
#[allow(unused_qualifications)]
#[derive(Debug)]
pub enum Value {
//Signed(i32),
Unsigned(u32),
List(Vec<Value>)
}
impl Value {
pub fn into_u32(self) -> ::image::ImageResult<u32> {
match self {
Unsigned(val) => Ok(val),
val => Err(::image::ImageError::FormatError(format!(
"Expected unsigned integer, {:?} found.", val
)))
}
}
pub fn into_u32_vec(self) -> ::image::ImageResult<Vec<u32>> {
match self {
List(vec) => {
let mut new_vec = Vec::with_capacity(vec.len());
for v in vec {
new_vec.push(v.into_u32()?)
}
Ok(new_vec)
},
Unsigned(val) => Ok(vec![val]),
//_ => Err(::image::FormatError("Tag data malformed.".to_string()))
}
}
}
pub struct Entry {
type_: Type,
count: u32,
offset: [u8; 4],
}
impl ::std::fmt::Debug for Entry {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
fmt.write_str(&format!("Entry {{ type_: {:?}, count: {:?}, offset: {:?} }}",
self.type_,
self.count,
&self.offset
))
}
}
impl Entry {
pub fn new(type_: Type, count: u32, offset: [u8; 4]) -> Entry {
Entry {
type_: type_,
count: count,
offset: offset
}
}
/// Returns a mem_reader for the offset/value field
fn r(&self, byte_order: ByteOrder) -> SmartReader<io::Cursor<Vec<u8>>>
|
pub fn val<R: Read + Seek>(&self, decoder: &mut super::TIFFDecoder<R>)
-> ::image::ImageResult<Value> {
let bo = decoder.byte_order();
match (self.type_, self.count) {
// TODO check if this could give wrong results
// at a different endianess of file/computer.
(Type::BYTE, 1) => Ok(Unsigned(u32::from(self.offset[0]))),
(Type::SHORT, 1) => Ok(Unsigned(u32::from(self.r(bo).read_u16()?))),
(Type::SHORT, 2) => {
let mut r = self.r(bo);
Ok(List(vec![
Unsigned(u32::from(r.read_u16()?)),
Unsigned(u32::from(r.read_u16()?))
]))
},
(Type::SHORT, n) => {
let mut v = Vec::with_capacity(n as usize);
try!(decoder.goto_offset(try!(self.r(bo).read_u32())));
for _ in 0.. n {
v.push(Unsigned(u32::from(decoder.read_short()?)))
}
Ok(List(v))
},
(Type::LONG, 1) => Ok(Unsigned(try!(self.r(bo).read_u32()))),
(Type::LONG, n) => {
let mut v = Vec::with_capacity(n as usize);
try!(decoder.goto_offset(try!(self.r(bo).read_u32())));
for _ in 0.. n {
v.push(Unsigned(try!(decoder.read_long())))
}
Ok(List(v))
}
_ => Err(::image::ImageError::UnsupportedError("Unsupported data type.".to_string()))
}
}
}
/// Type representing an Image File Directory
pub type Directory = HashMap<Tag, Entry>;
|
{
SmartReader::wrap(
io::Cursor::new(self.offset.to_vec()),
byte_order
)
}
|
identifier_body
|
ifd.rs
|
//! Function for reading TIFF tags
use std::io::{self, Read, Seek};
use std::collections::{HashMap};
use super::stream::{ByteOrder, SmartReader, EndianReader};
use self::Value::{Unsigned, List};
macro_rules! tags {
{$(
$tag:ident
$val:expr;
)*} => {
/// TIFF tag
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
|
}
impl Tag {
pub fn from_u16(n: u16) -> Tag {
$(if n == $val { Tag::$tag } else)* {
Tag::Unknown(n)
}
}
}
}
}
// Note: These tags appear in the order they are mentioned in the TIFF reference
tags!{
// Baseline tags:
Artist 315; // TODO add support
// grayscale images PhotometricInterpretation 1 or 3
BitsPerSample 258;
CellLength 265; // TODO add support
CellWidth 264; // TODO add support
// palette-color images (PhotometricInterpretation 3)
ColorMap 320; // TODO add support
Compression 259; // TODO add support for 2 and 32773
Copyright 33432; // TODO add support
DateTime 306; // TODO add support
ExtraSamples 338; // TODO add support
FillOrder 266; // TODO add support
FreeByteCounts 289; // TODO add support
FreeOffsets 288; // TODO add support
GrayResponseCurve 291; // TODO add support
GrayResponseUnit 290; // TODO add support
HostComputer 316; // TODO add support
ImageDescription 270; // TODO add support
ImageLength 257;
ImageWidth 256;
Make 271; // TODO add support
MaxSampleValue 281; // TODO add support
MinSampleValue 280; // TODO add support
Model 272; // TODO add support
NewSubfileType 254; // TODO add support
Orientation 274; // TODO add support
PhotometricInterpretation 262;
PlanarConfiguration 284;
ResolutionUnit 296; // TODO add support
RowsPerStrip 278;
SamplesPerPixel 277;
Software 305;
StripByteCounts 279;
StripOffsets 273;
SubfileType 255; // TODO add support
Threshholding 263; // TODO add support
XResolution 282;
YResolution 283;
// Advanced tags
Predictor 317;
}
enum_from_primitive! {
#[derive(Clone, Copy, Debug)]
pub enum Type {
BYTE = 1,
ASCII = 2,
SHORT = 3,
LONG = 4,
RATIONAL = 5,
}
}
#[allow(unused_qualifications)]
#[derive(Debug)]
pub enum Value {
//Signed(i32),
Unsigned(u32),
List(Vec<Value>)
}
impl Value {
pub fn into_u32(self) -> ::image::ImageResult<u32> {
match self {
Unsigned(val) => Ok(val),
val => Err(::image::ImageError::FormatError(format!(
"Expected unsigned integer, {:?} found.", val
)))
}
}
pub fn into_u32_vec(self) -> ::image::ImageResult<Vec<u32>> {
match self {
List(vec) => {
let mut new_vec = Vec::with_capacity(vec.len());
for v in vec {
new_vec.push(v.into_u32()?)
}
Ok(new_vec)
},
Unsigned(val) => Ok(vec![val]),
//_ => Err(::image::FormatError("Tag data malformed.".to_string()))
}
}
}
pub struct Entry {
type_: Type,
count: u32,
offset: [u8; 4],
}
impl ::std::fmt::Debug for Entry {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
fmt.write_str(&format!("Entry {{ type_: {:?}, count: {:?}, offset: {:?} }}",
self.type_,
self.count,
&self.offset
))
}
}
impl Entry {
pub fn new(type_: Type, count: u32, offset: [u8; 4]) -> Entry {
Entry {
type_: type_,
count: count,
offset: offset
}
}
/// Returns a mem_reader for the offset/value field
fn r(&self, byte_order: ByteOrder) -> SmartReader<io::Cursor<Vec<u8>>> {
SmartReader::wrap(
io::Cursor::new(self.offset.to_vec()),
byte_order
)
}
pub fn val<R: Read + Seek>(&self, decoder: &mut super::TIFFDecoder<R>)
-> ::image::ImageResult<Value> {
let bo = decoder.byte_order();
match (self.type_, self.count) {
// TODO check if this could give wrong results
// at a different endianess of file/computer.
(Type::BYTE, 1) => Ok(Unsigned(u32::from(self.offset[0]))),
(Type::SHORT, 1) => Ok(Unsigned(u32::from(self.r(bo).read_u16()?))),
(Type::SHORT, 2) => {
let mut r = self.r(bo);
Ok(List(vec![
Unsigned(u32::from(r.read_u16()?)),
Unsigned(u32::from(r.read_u16()?))
]))
},
(Type::SHORT, n) => {
let mut v = Vec::with_capacity(n as usize);
try!(decoder.goto_offset(try!(self.r(bo).read_u32())));
for _ in 0.. n {
v.push(Unsigned(u32::from(decoder.read_short()?)))
}
Ok(List(v))
},
(Type::LONG, 1) => Ok(Unsigned(try!(self.r(bo).read_u32()))),
(Type::LONG, n) => {
let mut v = Vec::with_capacity(n as usize);
try!(decoder.goto_offset(try!(self.r(bo).read_u32())));
for _ in 0.. n {
v.push(Unsigned(try!(decoder.read_long())))
}
Ok(List(v))
}
_ => Err(::image::ImageError::UnsupportedError("Unsupported data type.".to_string()))
}
}
}
/// Type representing an Image File Directory
pub type Directory = HashMap<Tag, Entry>;
|
pub enum Tag {
$($tag,)*
Unknown(u16)
|
random_line_split
|
blake2s.rs
|
pub const BLOCK_BYTES : usize = 64;
pub const OUT_BYTES : usize = 32;
pub const KEY_BYTES : usize = 32;
static IV : [u32; 8] = [
0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
];
static SIGMA : [[u8; 16]; 10] = [
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ],
[ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 ],
[ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 ],
[ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 ],
[ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 ],
[ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 ],
[ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 ],
[ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 ],
[ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 ],
[ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 ],
];
pub struct Blake2s {
h: [u32; 8],
t: [u32; 2],
f: [u32; 2],
buf: [u8; 2*BLOCK_BYTES],
buf_len: usize,
}
impl Copy for Blake2s {}
impl Clone for Blake2s { fn clone(&self) -> Blake2s { *self } }
impl Blake2s {
pub fn new(size: usize) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
let param = encode_params(size as u8, 0);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
}
}
pub fn new_with_key(size: usize, key: &[u8]) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
assert!(key.len() > 0 && key.len() <= KEY_BYTES);
let param = encode_params(size as u8, key.len() as u8);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
let mut b = Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
};
let mut block = [0u8; BLOCK_BYTES];
for i in 0..key.len() {
block[i] = key[i];
}
b.update(block.as_ref());
b
}
pub fn update(&mut self, m: &[u8]) {
let mut m = m;
while m.len() > 0 {
let left = self.buf_len;
let fill = 2 * BLOCK_BYTES - left;
if m.len() > fill {
for i in 0..fill {
self.buf[left+i] = m[i];
}
self.buf_len += fill;
m = &m[fill..];
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
} else {
for i in 0..m.len() {
self.buf[left+i] = m[i];
}
self.buf_len += m.len();
m = &m[m.len()..];
}
}
}
pub fn finalize(&mut self, out: &mut [u8]) {
let mut buf = [0u8; OUT_BYTES];
if self.buf_len > BLOCK_BYTES {
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
}
let n = self.buf_len as u32;
self.increment_counter(n);
self.f[0] =!0;
for i in self.buf_len..self.buf.len() {
self.buf[i] = 0;
}
self.compress();
for i in 0..self.h.len() {
store32(&mut buf[i*4..], self.h[i]);
}
for i in 0..::std::cmp::min(out.len(), OUT_BYTES) {
out[i] = buf[i];
}
}
fn increment_counter(&mut self, inc: u32) {
self.t[0] += inc;
self.t[1] += if self.t[0] < inc {1} else
|
;
}
fn compress(&mut self) {
let mut m = [0u32; 16];
let mut v = [0u32; 16];
let block = self.buf.as_ref();
assert!(block.len() >= BLOCK_BYTES);
for i in 0..m.len() {
m[i] = load32(&block[i*4..]);
}
for i in 0..8 {
v[i] = self.h[i];
}
v[ 8] = IV[0];
v[ 9] = IV[1];
v[10] = IV[2];
v[11] = IV[3];
v[12] = self.t[0] ^ IV[4];
v[13] = self.t[1] ^ IV[5];
v[14] = self.f[0] ^ IV[6];
v[15] = self.f[1] ^ IV[7];
macro_rules! g(
($r: expr, $i: expr, $a: expr, $b: expr, $c: expr, $d: expr) => ({
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+0] as usize]);
$d = ($d ^ $a).rotate_right(16);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(12);
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+1] as usize]);
$d = ($d ^ $a).rotate_right(8);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(7);
});
);
macro_rules! round(
($r: expr) => ({
g!($r, 0, v[ 0], v[ 4], v[ 8], v[12]);
g!($r, 1, v[ 1], v[ 5], v[ 9], v[13]);
g!($r, 2, v[ 2], v[ 6], v[10], v[14]);
g!($r, 3, v[ 3], v[ 7], v[11], v[15]);
g!($r, 4, v[ 0], v[ 5], v[10], v[15]);
g!($r, 5, v[ 1], v[ 6], v[11], v[12]);
g!($r, 6, v[ 2], v[ 7], v[ 8], v[13]);
g!($r, 7, v[ 3], v[ 4], v[ 9], v[14]);
});
);
for i in 0..10 {
round!(i);
}
for i in 0..8 {
self.h[i] = self.h[i] ^ v[i] ^ v[i+8];
}
}
}
fn encode_params(size: u8, keylen: u8) -> [u8; 64] {
let mut param = [0u8; 64];
param[0] = size as u8;
param[1] = keylen as u8;
param[2] = 1; // fanout
param[3] = 1; // depth
param
}
fn load32(b: &[u8]) -> u32{
let mut v = 0u32;
for i in 0..4 {
v |= (b[i] as u32) << (8*i);
}
v
}
fn store32(b: &mut [u8], v: u32) {
let mut w = v;
for i in 0..4 {
b[i] = w as u8;
w >>= 8;
}
}
#[cfg(test)]
mod tests {
use super::{Blake2s, KEY_BYTES, OUT_BYTES};
use super::super::kat;
#[test]
fn test_blake2s_out_size() {
let input = [0u8; 256];
for i in 0..kat::BLAKE2S_KAT_OUT_SIZE.len() {
let out_size = i+1;
let mut out = [0u8; OUT_BYTES];
let mut h = Blake2s::new(out_size);
h.update(input.as_ref());
h.finalize(&mut out[..out_size]);
assert_eq!(&out[..out_size], kat::BLAKE2S_KAT_OUT_SIZE[i]);
}
}
#[test]
fn test_blake2s_kat() {
let mut input = [0u8; 256];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KAT.len() {
let mut h = Blake2s::new(OUT_BYTES);
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KAT[i].as_ref());
}
}
#[test]
fn test_blake2s_keyed_kat() {
let mut input = [0u8; 256];
let mut key = [0u8; KEY_BYTES];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..key.len() {
key[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KEYED_KAT.len() {
let mut h = Blake2s::new_with_key(OUT_BYTES, key.as_ref());
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KEYED_KAT[i].as_ref());
}
}
}
#[cfg(test)]
mod bench {
use std::iter::repeat;
use super::{Blake2s, OUT_BYTES};
use test::Bencher;
fn bench_chunk_size(b: &mut Bencher, n: usize) {
let mut h = Blake2s::new(OUT_BYTES);
let input : Vec<u8> = repeat(0).take(n).collect();
b.bytes = input.len() as u64;
b.iter(|| {
h.update(input.as_ref());
});
}
#[bench]
fn bench_blake2s_16(b: &mut Bencher) {
bench_chunk_size(b, 16);
}
#[bench]
fn bench_blake2s_1k(b: &mut Bencher) {
bench_chunk_size(b, 1 << 10);
}
#[bench]
fn bench_blake2s_64k(b: &mut Bencher) {
bench_chunk_size(b, 1 << 16);
}
}
|
{0}
|
conditional_block
|
blake2s.rs
|
pub const BLOCK_BYTES : usize = 64;
pub const OUT_BYTES : usize = 32;
pub const KEY_BYTES : usize = 32;
static IV : [u32; 8] = [
0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
];
static SIGMA : [[u8; 16]; 10] = [
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ],
[ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 ],
[ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 ],
[ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 ],
[ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 ],
[ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 ],
[ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 ],
[ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 ],
[ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 ],
[ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 ],
];
pub struct Blake2s {
h: [u32; 8],
t: [u32; 2],
f: [u32; 2],
buf: [u8; 2*BLOCK_BYTES],
buf_len: usize,
}
impl Copy for Blake2s {}
impl Clone for Blake2s { fn clone(&self) -> Blake2s { *self } }
impl Blake2s {
pub fn new(size: usize) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
let param = encode_params(size as u8, 0);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
}
}
pub fn new_with_key(size: usize, key: &[u8]) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
assert!(key.len() > 0 && key.len() <= KEY_BYTES);
let param = encode_params(size as u8, key.len() as u8);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
let mut b = Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
};
let mut block = [0u8; BLOCK_BYTES];
for i in 0..key.len() {
block[i] = key[i];
}
b.update(block.as_ref());
b
}
pub fn update(&mut self, m: &[u8]) {
let mut m = m;
while m.len() > 0 {
let left = self.buf_len;
let fill = 2 * BLOCK_BYTES - left;
if m.len() > fill {
for i in 0..fill {
self.buf[left+i] = m[i];
}
self.buf_len += fill;
m = &m[fill..];
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
} else {
for i in 0..m.len() {
self.buf[left+i] = m[i];
}
self.buf_len += m.len();
m = &m[m.len()..];
}
}
}
pub fn finalize(&mut self, out: &mut [u8]) {
let mut buf = [0u8; OUT_BYTES];
if self.buf_len > BLOCK_BYTES {
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
}
let n = self.buf_len as u32;
self.increment_counter(n);
self.f[0] =!0;
for i in self.buf_len..self.buf.len() {
self.buf[i] = 0;
}
self.compress();
for i in 0..self.h.len() {
store32(&mut buf[i*4..], self.h[i]);
}
for i in 0..::std::cmp::min(out.len(), OUT_BYTES) {
out[i] = buf[i];
}
}
fn increment_counter(&mut self, inc: u32) {
self.t[0] += inc;
self.t[1] += if self.t[0] < inc {1} else {0};
}
fn compress(&mut self) {
let mut m = [0u32; 16];
let mut v = [0u32; 16];
let block = self.buf.as_ref();
assert!(block.len() >= BLOCK_BYTES);
for i in 0..m.len() {
m[i] = load32(&block[i*4..]);
}
for i in 0..8 {
v[i] = self.h[i];
}
v[ 8] = IV[0];
v[ 9] = IV[1];
v[10] = IV[2];
v[11] = IV[3];
v[12] = self.t[0] ^ IV[4];
v[13] = self.t[1] ^ IV[5];
v[14] = self.f[0] ^ IV[6];
v[15] = self.f[1] ^ IV[7];
macro_rules! g(
($r: expr, $i: expr, $a: expr, $b: expr, $c: expr, $d: expr) => ({
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+0] as usize]);
$d = ($d ^ $a).rotate_right(16);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(12);
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+1] as usize]);
$d = ($d ^ $a).rotate_right(8);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(7);
});
);
macro_rules! round(
($r: expr) => ({
g!($r, 0, v[ 0], v[ 4], v[ 8], v[12]);
g!($r, 1, v[ 1], v[ 5], v[ 9], v[13]);
g!($r, 2, v[ 2], v[ 6], v[10], v[14]);
g!($r, 3, v[ 3], v[ 7], v[11], v[15]);
g!($r, 4, v[ 0], v[ 5], v[10], v[15]);
g!($r, 5, v[ 1], v[ 6], v[11], v[12]);
g!($r, 6, v[ 2], v[ 7], v[ 8], v[13]);
g!($r, 7, v[ 3], v[ 4], v[ 9], v[14]);
});
);
for i in 0..10 {
round!(i);
}
for i in 0..8 {
self.h[i] = self.h[i] ^ v[i] ^ v[i+8];
}
}
}
fn encode_params(size: u8, keylen: u8) -> [u8; 64] {
let mut param = [0u8; 64];
param[0] = size as u8;
param[1] = keylen as u8;
param[2] = 1; // fanout
param[3] = 1; // depth
param
}
fn load32(b: &[u8]) -> u32{
let mut v = 0u32;
for i in 0..4 {
v |= (b[i] as u32) << (8*i);
}
v
}
fn store32(b: &mut [u8], v: u32) {
let mut w = v;
for i in 0..4 {
b[i] = w as u8;
w >>= 8;
}
}
#[cfg(test)]
mod tests {
use super::{Blake2s, KEY_BYTES, OUT_BYTES};
use super::super::kat;
#[test]
fn test_blake2s_out_size() {
let input = [0u8; 256];
for i in 0..kat::BLAKE2S_KAT_OUT_SIZE.len() {
let out_size = i+1;
let mut out = [0u8; OUT_BYTES];
let mut h = Blake2s::new(out_size);
h.update(input.as_ref());
h.finalize(&mut out[..out_size]);
assert_eq!(&out[..out_size], kat::BLAKE2S_KAT_OUT_SIZE[i]);
}
}
#[test]
fn test_blake2s_kat() {
let mut input = [0u8; 256];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KAT.len() {
let mut h = Blake2s::new(OUT_BYTES);
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KAT[i].as_ref());
}
}
#[test]
fn test_blake2s_keyed_kat() {
let mut input = [0u8; 256];
let mut key = [0u8; KEY_BYTES];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..key.len() {
key[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KEYED_KAT.len() {
let mut h = Blake2s::new_with_key(OUT_BYTES, key.as_ref());
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KEYED_KAT[i].as_ref());
}
}
}
#[cfg(test)]
mod bench {
use std::iter::repeat;
use super::{Blake2s, OUT_BYTES};
use test::Bencher;
fn bench_chunk_size(b: &mut Bencher, n: usize) {
let mut h = Blake2s::new(OUT_BYTES);
let input : Vec<u8> = repeat(0).take(n).collect();
b.bytes = input.len() as u64;
b.iter(|| {
h.update(input.as_ref());
});
}
#[bench]
fn bench_blake2s_16(b: &mut Bencher) {
bench_chunk_size(b, 16);
}
#[bench]
fn bench_blake2s_1k(b: &mut Bencher)
|
#[bench]
fn bench_blake2s_64k(b: &mut Bencher) {
bench_chunk_size(b, 1 << 16);
}
}
|
{
bench_chunk_size(b, 1 << 10);
}
|
identifier_body
|
blake2s.rs
|
pub const BLOCK_BYTES : usize = 64;
pub const OUT_BYTES : usize = 32;
pub const KEY_BYTES : usize = 32;
static IV : [u32; 8] = [
0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
];
static SIGMA : [[u8; 16]; 10] = [
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ],
[ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 ],
[ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 ],
[ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 ],
[ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 ],
[ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 ],
[ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 ],
[ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 ],
[ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 ],
[ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 ],
];
pub struct Blake2s {
h: [u32; 8],
t: [u32; 2],
f: [u32; 2],
buf: [u8; 2*BLOCK_BYTES],
buf_len: usize,
}
impl Copy for Blake2s {}
impl Clone for Blake2s { fn clone(&self) -> Blake2s { *self } }
impl Blake2s {
pub fn new(size: usize) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
let param = encode_params(size as u8, 0);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
}
}
pub fn new_with_key(size: usize, key: &[u8]) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
assert!(key.len() > 0 && key.len() <= KEY_BYTES);
let param = encode_params(size as u8, key.len() as u8);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
let mut b = Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
};
let mut block = [0u8; BLOCK_BYTES];
for i in 0..key.len() {
block[i] = key[i];
}
b.update(block.as_ref());
b
}
pub fn update(&mut self, m: &[u8]) {
let mut m = m;
while m.len() > 0 {
let left = self.buf_len;
let fill = 2 * BLOCK_BYTES - left;
if m.len() > fill {
for i in 0..fill {
self.buf[left+i] = m[i];
}
self.buf_len += fill;
m = &m[fill..];
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
} else {
for i in 0..m.len() {
self.buf[left+i] = m[i];
}
self.buf_len += m.len();
m = &m[m.len()..];
}
}
}
pub fn finalize(&mut self, out: &mut [u8]) {
let mut buf = [0u8; OUT_BYTES];
if self.buf_len > BLOCK_BYTES {
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
}
let n = self.buf_len as u32;
self.increment_counter(n);
self.f[0] =!0;
for i in self.buf_len..self.buf.len() {
self.buf[i] = 0;
}
self.compress();
for i in 0..self.h.len() {
store32(&mut buf[i*4..], self.h[i]);
}
for i in 0..::std::cmp::min(out.len(), OUT_BYTES) {
out[i] = buf[i];
}
}
fn
|
(&mut self, inc: u32) {
self.t[0] += inc;
self.t[1] += if self.t[0] < inc {1} else {0};
}
fn compress(&mut self) {
let mut m = [0u32; 16];
let mut v = [0u32; 16];
let block = self.buf.as_ref();
assert!(block.len() >= BLOCK_BYTES);
for i in 0..m.len() {
m[i] = load32(&block[i*4..]);
}
for i in 0..8 {
v[i] = self.h[i];
}
v[ 8] = IV[0];
v[ 9] = IV[1];
v[10] = IV[2];
v[11] = IV[3];
v[12] = self.t[0] ^ IV[4];
v[13] = self.t[1] ^ IV[5];
v[14] = self.f[0] ^ IV[6];
v[15] = self.f[1] ^ IV[7];
macro_rules! g(
($r: expr, $i: expr, $a: expr, $b: expr, $c: expr, $d: expr) => ({
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+0] as usize]);
$d = ($d ^ $a).rotate_right(16);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(12);
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+1] as usize]);
$d = ($d ^ $a).rotate_right(8);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(7);
});
);
macro_rules! round(
($r: expr) => ({
g!($r, 0, v[ 0], v[ 4], v[ 8], v[12]);
g!($r, 1, v[ 1], v[ 5], v[ 9], v[13]);
g!($r, 2, v[ 2], v[ 6], v[10], v[14]);
g!($r, 3, v[ 3], v[ 7], v[11], v[15]);
g!($r, 4, v[ 0], v[ 5], v[10], v[15]);
g!($r, 5, v[ 1], v[ 6], v[11], v[12]);
g!($r, 6, v[ 2], v[ 7], v[ 8], v[13]);
g!($r, 7, v[ 3], v[ 4], v[ 9], v[14]);
});
);
for i in 0..10 {
round!(i);
}
for i in 0..8 {
self.h[i] = self.h[i] ^ v[i] ^ v[i+8];
}
}
}
fn encode_params(size: u8, keylen: u8) -> [u8; 64] {
let mut param = [0u8; 64];
param[0] = size as u8;
param[1] = keylen as u8;
param[2] = 1; // fanout
param[3] = 1; // depth
param
}
fn load32(b: &[u8]) -> u32{
let mut v = 0u32;
for i in 0..4 {
v |= (b[i] as u32) << (8*i);
}
v
}
fn store32(b: &mut [u8], v: u32) {
let mut w = v;
for i in 0..4 {
b[i] = w as u8;
w >>= 8;
}
}
#[cfg(test)]
mod tests {
use super::{Blake2s, KEY_BYTES, OUT_BYTES};
use super::super::kat;
#[test]
fn test_blake2s_out_size() {
let input = [0u8; 256];
for i in 0..kat::BLAKE2S_KAT_OUT_SIZE.len() {
let out_size = i+1;
let mut out = [0u8; OUT_BYTES];
let mut h = Blake2s::new(out_size);
h.update(input.as_ref());
h.finalize(&mut out[..out_size]);
assert_eq!(&out[..out_size], kat::BLAKE2S_KAT_OUT_SIZE[i]);
}
}
#[test]
fn test_blake2s_kat() {
let mut input = [0u8; 256];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KAT.len() {
let mut h = Blake2s::new(OUT_BYTES);
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KAT[i].as_ref());
}
}
#[test]
fn test_blake2s_keyed_kat() {
let mut input = [0u8; 256];
let mut key = [0u8; KEY_BYTES];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..key.len() {
key[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KEYED_KAT.len() {
let mut h = Blake2s::new_with_key(OUT_BYTES, key.as_ref());
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KEYED_KAT[i].as_ref());
}
}
}
#[cfg(test)]
mod bench {
use std::iter::repeat;
use super::{Blake2s, OUT_BYTES};
use test::Bencher;
fn bench_chunk_size(b: &mut Bencher, n: usize) {
let mut h = Blake2s::new(OUT_BYTES);
let input : Vec<u8> = repeat(0).take(n).collect();
b.bytes = input.len() as u64;
b.iter(|| {
h.update(input.as_ref());
});
}
#[bench]
fn bench_blake2s_16(b: &mut Bencher) {
bench_chunk_size(b, 16);
}
#[bench]
fn bench_blake2s_1k(b: &mut Bencher) {
bench_chunk_size(b, 1 << 10);
}
#[bench]
fn bench_blake2s_64k(b: &mut Bencher) {
bench_chunk_size(b, 1 << 16);
}
}
|
increment_counter
|
identifier_name
|
blake2s.rs
|
pub const BLOCK_BYTES : usize = 64;
pub const OUT_BYTES : usize = 32;
pub const KEY_BYTES : usize = 32;
static IV : [u32; 8] = [
0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
];
static SIGMA : [[u8; 16]; 10] = [
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ],
[ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 ],
[ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 ],
[ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 ],
[ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 ],
[ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 ],
[ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 ],
[ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 ],
[ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 ],
[ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 ],
];
pub struct Blake2s {
h: [u32; 8],
t: [u32; 2],
f: [u32; 2],
buf: [u8; 2*BLOCK_BYTES],
buf_len: usize,
}
impl Copy for Blake2s {}
impl Clone for Blake2s { fn clone(&self) -> Blake2s { *self } }
impl Blake2s {
pub fn new(size: usize) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
let param = encode_params(size as u8, 0);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
}
}
pub fn new_with_key(size: usize, key: &[u8]) -> Blake2s {
assert!(size > 0 && size <= OUT_BYTES);
assert!(key.len() > 0 && key.len() <= KEY_BYTES);
let param = encode_params(size as u8, key.len() as u8);
let mut state = IV;
for i in 0..state.len() {
state[i] ^= load32(¶m[i*4..]);
}
let mut b = Blake2s{
h: state,
t: [0, 0],
f: [0, 0],
buf: [0u8; 2*BLOCK_BYTES],
buf_len: 0,
};
let mut block = [0u8; BLOCK_BYTES];
for i in 0..key.len() {
block[i] = key[i];
}
b.update(block.as_ref());
b
}
pub fn update(&mut self, m: &[u8]) {
let mut m = m;
while m.len() > 0 {
let left = self.buf_len;
let fill = 2 * BLOCK_BYTES - left;
if m.len() > fill {
for i in 0..fill {
self.buf[left+i] = m[i];
}
self.buf_len += fill;
m = &m[fill..];
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
} else {
for i in 0..m.len() {
self.buf[left+i] = m[i];
}
self.buf_len += m.len();
m = &m[m.len()..];
}
}
}
pub fn finalize(&mut self, out: &mut [u8]) {
let mut buf = [0u8; OUT_BYTES];
if self.buf_len > BLOCK_BYTES {
self.increment_counter(BLOCK_BYTES as u32);
self.compress();
for i in 0..BLOCK_BYTES {
self.buf[i] = self.buf[i+BLOCK_BYTES];
}
self.buf_len -= BLOCK_BYTES;
}
let n = self.buf_len as u32;
self.increment_counter(n);
self.f[0] =!0;
for i in self.buf_len..self.buf.len() {
self.buf[i] = 0;
}
self.compress();
for i in 0..self.h.len() {
store32(&mut buf[i*4..], self.h[i]);
}
for i in 0..::std::cmp::min(out.len(), OUT_BYTES) {
out[i] = buf[i];
}
}
fn increment_counter(&mut self, inc: u32) {
self.t[0] += inc;
self.t[1] += if self.t[0] < inc {1} else {0};
}
fn compress(&mut self) {
let mut m = [0u32; 16];
let mut v = [0u32; 16];
let block = self.buf.as_ref();
assert!(block.len() >= BLOCK_BYTES);
for i in 0..m.len() {
m[i] = load32(&block[i*4..]);
}
for i in 0..8 {
v[i] = self.h[i];
}
v[ 8] = IV[0];
v[ 9] = IV[1];
v[10] = IV[2];
v[11] = IV[3];
v[12] = self.t[0] ^ IV[4];
v[13] = self.t[1] ^ IV[5];
|
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+0] as usize]);
$d = ($d ^ $a).rotate_right(16);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(12);
$a = $a.wrapping_add($b).wrapping_add(m[SIGMA[$r][2*$i+1] as usize]);
$d = ($d ^ $a).rotate_right(8);
$c = $c.wrapping_add($d);
$b = ($b ^ $c).rotate_right(7);
});
);
macro_rules! round(
($r: expr) => ({
g!($r, 0, v[ 0], v[ 4], v[ 8], v[12]);
g!($r, 1, v[ 1], v[ 5], v[ 9], v[13]);
g!($r, 2, v[ 2], v[ 6], v[10], v[14]);
g!($r, 3, v[ 3], v[ 7], v[11], v[15]);
g!($r, 4, v[ 0], v[ 5], v[10], v[15]);
g!($r, 5, v[ 1], v[ 6], v[11], v[12]);
g!($r, 6, v[ 2], v[ 7], v[ 8], v[13]);
g!($r, 7, v[ 3], v[ 4], v[ 9], v[14]);
});
);
for i in 0..10 {
round!(i);
}
for i in 0..8 {
self.h[i] = self.h[i] ^ v[i] ^ v[i+8];
}
}
}
fn encode_params(size: u8, keylen: u8) -> [u8; 64] {
let mut param = [0u8; 64];
param[0] = size as u8;
param[1] = keylen as u8;
param[2] = 1; // fanout
param[3] = 1; // depth
param
}
fn load32(b: &[u8]) -> u32{
let mut v = 0u32;
for i in 0..4 {
v |= (b[i] as u32) << (8*i);
}
v
}
fn store32(b: &mut [u8], v: u32) {
let mut w = v;
for i in 0..4 {
b[i] = w as u8;
w >>= 8;
}
}
#[cfg(test)]
mod tests {
use super::{Blake2s, KEY_BYTES, OUT_BYTES};
use super::super::kat;
#[test]
fn test_blake2s_out_size() {
let input = [0u8; 256];
for i in 0..kat::BLAKE2S_KAT_OUT_SIZE.len() {
let out_size = i+1;
let mut out = [0u8; OUT_BYTES];
let mut h = Blake2s::new(out_size);
h.update(input.as_ref());
h.finalize(&mut out[..out_size]);
assert_eq!(&out[..out_size], kat::BLAKE2S_KAT_OUT_SIZE[i]);
}
}
#[test]
fn test_blake2s_kat() {
let mut input = [0u8; 256];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KAT.len() {
let mut h = Blake2s::new(OUT_BYTES);
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KAT[i].as_ref());
}
}
#[test]
fn test_blake2s_keyed_kat() {
let mut input = [0u8; 256];
let mut key = [0u8; KEY_BYTES];
for i in 0..input.len() {
input[i] = i as u8;
}
for i in 0..key.len() {
key[i] = i as u8;
}
for i in 0..kat::BLAKE2S_KEYED_KAT.len() {
let mut h = Blake2s::new_with_key(OUT_BYTES, key.as_ref());
let mut out = [0u8; OUT_BYTES];
h.update(&input[..i]);
h.finalize(&mut out);
assert_eq!(out.as_ref(), kat::BLAKE2S_KEYED_KAT[i].as_ref());
}
}
}
#[cfg(test)]
mod bench {
use std::iter::repeat;
use super::{Blake2s, OUT_BYTES};
use test::Bencher;
fn bench_chunk_size(b: &mut Bencher, n: usize) {
let mut h = Blake2s::new(OUT_BYTES);
let input : Vec<u8> = repeat(0).take(n).collect();
b.bytes = input.len() as u64;
b.iter(|| {
h.update(input.as_ref());
});
}
#[bench]
fn bench_blake2s_16(b: &mut Bencher) {
bench_chunk_size(b, 16);
}
#[bench]
fn bench_blake2s_1k(b: &mut Bencher) {
bench_chunk_size(b, 1 << 10);
}
#[bench]
fn bench_blake2s_64k(b: &mut Bencher) {
bench_chunk_size(b, 1 << 16);
}
}
|
v[14] = self.f[0] ^ IV[6];
v[15] = self.f[1] ^ IV[7];
macro_rules! g(
($r: expr, $i: expr, $a: expr, $b: expr, $c: expr, $d: expr) => ({
|
random_line_split
|
lib.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#![crate_name = "lrs_hashmap"]
#![crate_type = "lib"]
#![feature(custom_derive)]
#![no_std]
extern crate lrs_base as base;
extern crate lrs_alloc as alloc;
extern crate lrs_hash as hash;
extern crate lrs_fmt as fmt;
use base::prelude::*;
use core::ops::{Eq};
use base::undef::{UndefState};
use alloc::{Heap};
use hash::{Hash};
use hash::xx_hash::{XxHash32};
use bucket::compact::{CompactBucket};
|
pub use table::{Entry, VacantEntry, OccupiedEntry};
mod std { pub use fmt::std::*; }
mod bucket;
mod table;
pub type CompactMap<Key, Value, Hasher = XxHash32, Seed = (), Allocator = Heap>
where Allocator: alloc::MemPool,
Hasher: hash::Hasher,
Seed: Into<Hasher::Seed>+To,
Key: Eq + Hash + UndefState
= GenericMap<Key, Value, CompactBucket<Key, Value>, Hasher, Seed, Allocator>;
pub type HashMap<Key, Value, Hasher = XxHash32, Seed = (), Allocator = Heap>
where Allocator: alloc::MemPool,
Hasher: hash::Hasher,
Seed: Into<Hasher::Seed>+To,
Key: Eq + Hash + UndefState
= GenericMap<Key, Value, LooseBucket<Key, Value>, Hasher, Seed, Allocator>;
|
use bucket::loose::{LooseBucket};
use table::{GenericMap};
|
random_line_split
|
performanceentry.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::PerformanceEntryBinding;
use dom::bindings::codegen::Bindings::PerformanceEntryBinding::PerformanceEntryMethods;
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
#[dom_struct]
pub struct PerformanceEntry {
reflector_: Reflector,
name: DOMString,
entry_type: DOMString,
start_time: f64,
duration: f64,
}
impl PerformanceEntry {
pub fn new_inherited(name: DOMString,
entry_type: DOMString,
start_time: f64,
duration: f64) -> PerformanceEntry {
PerformanceEntry {
reflector_: Reflector::new(),
name,
entry_type,
start_time,
duration,
}
}
#[allow(unrooted_must_root)]
pub fn new(global: &GlobalScope,
name: DOMString,
entry_type: DOMString,
start_time: f64,
duration: f64) -> Root<PerformanceEntry> {
let entry = PerformanceEntry::new_inherited(name, entry_type, start_time, duration);
reflect_dom_object(box entry, global, PerformanceEntryBinding::Wrap)
}
pub fn entry_type(&self) -> &DOMString {
&self.entry_type
}
|
&self.name
}
pub fn start_time(&self) -> f64 {
self.start_time
}
}
impl PerformanceEntryMethods for PerformanceEntry {
// https://w3c.github.io/performance-timeline/#dom-performanceentry-name
fn Name(&self) -> DOMString {
DOMString::from(self.name.clone())
}
// https://w3c.github.io/performance-timeline/#dom-performanceentry-entrytype
fn EntryType(&self) -> DOMString {
DOMString::from(self.entry_type.clone())
}
// https://w3c.github.io/performance-timeline/#dom-performanceentry-starttime
fn StartTime(&self) -> Finite<f64> {
Finite::wrap(self.start_time)
}
// https://w3c.github.io/performance-timeline/#dom-performanceentry-duration
fn Duration(&self) -> Finite<f64> {
Finite::wrap(self.duration)
}
}
|
pub fn name(&self) -> &DOMString {
|
random_line_split
|
performanceentry.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::PerformanceEntryBinding;
use dom::bindings::codegen::Bindings::PerformanceEntryBinding::PerformanceEntryMethods;
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
#[dom_struct]
pub struct PerformanceEntry {
reflector_: Reflector,
name: DOMString,
entry_type: DOMString,
start_time: f64,
duration: f64,
}
impl PerformanceEntry {
pub fn new_inherited(name: DOMString,
entry_type: DOMString,
start_time: f64,
duration: f64) -> PerformanceEntry {
PerformanceEntry {
reflector_: Reflector::new(),
name,
entry_type,
start_time,
duration,
}
}
#[allow(unrooted_must_root)]
pub fn new(global: &GlobalScope,
name: DOMString,
entry_type: DOMString,
start_time: f64,
duration: f64) -> Root<PerformanceEntry> {
let entry = PerformanceEntry::new_inherited(name, entry_type, start_time, duration);
reflect_dom_object(box entry, global, PerformanceEntryBinding::Wrap)
}
pub fn entry_type(&self) -> &DOMString {
&self.entry_type
}
pub fn
|
(&self) -> &DOMString {
&self.name
}
pub fn start_time(&self) -> f64 {
self.start_time
}
}
impl PerformanceEntryMethods for PerformanceEntry {
// https://w3c.github.io/performance-timeline/#dom-performanceentry-name
fn Name(&self) -> DOMString {
DOMString::from(self.name.clone())
}
// https://w3c.github.io/performance-timeline/#dom-performanceentry-entrytype
fn EntryType(&self) -> DOMString {
DOMString::from(self.entry_type.clone())
}
// https://w3c.github.io/performance-timeline/#dom-performanceentry-starttime
fn StartTime(&self) -> Finite<f64> {
Finite::wrap(self.start_time)
}
// https://w3c.github.io/performance-timeline/#dom-performanceentry-duration
fn Duration(&self) -> Finite<f64> {
Finite::wrap(self.duration)
}
}
|
name
|
identifier_name
|
iter.rs
|
//! Utilities for iteration with duration objects.
use duration::FloatDuration;
use std::iter;
/// An iterator over an evenly spaced lattice of `FloatDuration`s.
///
/// This type is returned by `subdivide` and friends, and it not meant to be
/// instantiated directly.
#[derive(Debug, Clone)]
pub struct Subdivide {
start: FloatDuration,
|
impl Subdivide {
fn new(start: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
assert!(steps >= 2, "subdivide requires at least two steps");
let step_size = (end - start) / (steps - 1) as f64;
Subdivide {
start: start,
step_size: step_size,
len: steps,
index: 0,
}
}
/// The distance between steps in the iteration.
pub fn step_size(&self) -> FloatDuration {
self.step_size
}
}
impl Iterator for Subdivide {
type Item = FloatDuration;
#[inline]
fn next(&mut self) -> Option<FloatDuration> {
if self.index >= self.len {
None
} else {
let index = self.index;
self.index += 1;
Some(self.start + self.step_size * (index as f64))
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let left = self.len - self.index;
(left, Some(left))
}
}
impl DoubleEndedIterator for Subdivide {
fn next_back(&mut self) -> Option<FloatDuration> {
if self.index >= self.len {
None
} else {
self.len -= 1;
let index = self.len;
Some(self.start + self.step_size * (index as f64))
}
}
}
impl ExactSizeIterator for Subdivide {}
/// Subdivide the distance between two duration into `steps` evenly spaced points.
///
/// `subdivide` returns an iterator that lazily computes and returns exactly `steps`
/// evenly spaced
/// points between `begin` and `end`. This iterator is *inclusive* in that it
/// returns `begin` as the first element and `end` as the final element.
///
/// The returned iterator [`Subdivide`](struct.Subdivide.html) implements
/// `DoubleEndedIterator`, and thus can be reversed or consumed from both sides.
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide;
///
/// fn cost_function(t: &FloatDuration) -> f64 {
/// return 0.5*t.as_seconds()*t.as_seconds()
/// }
/// fn main() {
/// let start = FloatDuration::zero();
/// let end = FloatDuration::minutes(10.0);
/// let total: f64 = subdivide(start, end, 100).map(|x| cost_function(&x)).sum();
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
pub fn subdivide(begin: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
Subdivide::new(begin, end, steps)
}
/// Subdivide the distance between two duration into `steps` evenly spaced points
/// and include a timestep.
///
/// `subdivide_with_step` is equivalent to [`subdivide`](fn.subdivide.html)
/// except that it returns the
/// step size with the current time in each iteration. It is mainly a convenience
/// function for the common case of running a simulation over discrete time steps.
///
/// It is exactly equivalent to:
///
/// ```rust
/// # use float_duration::FloatDuration;
/// # use float_duration::iter::subdivide;
/// use std::iter;
///
/// let steps = 100;
/// # let begin = FloatDuration::zero();
/// # let end = FloatDuration::minutes(5.0);
///
/// let sub = subdivide(begin, end, steps);
/// let step_size = sub.step_size();
/// let my_iter = sub.zip(iter::repeat(step_size));
/// ```
///
/// Example usage in a simulation:
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide_with_step;
///
/// let start = FloatDuration::zero();
/// let end = FloatDuration::hours(1.0);
///
/// let mut x = 5.0;
/// let mut v = 0.0;
///
/// for (t, dt) in subdivide_with_step(start, end, 100) {
/// let a = x*x - v*x;
/// let v = a*dt.as_seconds();
/// let x = v*dt.as_seconds();
///
/// println!("Position: {}", x);
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
///
pub fn subdivide_with_step(begin: FloatDuration,
end: FloatDuration,
steps: usize)
-> iter::Zip<Subdivide, iter::Repeat<FloatDuration>> {
let sub = subdivide(begin, end, steps);
let step_size = sub.step_size();
sub.zip(iter::repeat(step_size))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_subdivide() {
let s = subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 3);
let s_rev = s.clone().rev();
assert_eq!(s.collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::seconds(30.0),
FloatDuration::minutes(1.0)]);
assert_eq!(s_rev.collect::<Vec<_>>(),
vec![FloatDuration::minutes(1.0),
FloatDuration::seconds(30.0),
FloatDuration::zero()]);
assert_eq!(subdivide(FloatDuration::zero(), FloatDuration::zero(), 3).collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::zero(),
FloatDuration::zero()]);
}
#[should_panic]
#[test]
fn test_subdivide_panic() {
subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 1);
}
}
|
step_size: FloatDuration,
len: usize,
index: usize,
}
|
random_line_split
|
iter.rs
|
//! Utilities for iteration with duration objects.
use duration::FloatDuration;
use std::iter;
/// An iterator over an evenly spaced lattice of `FloatDuration`s.
///
/// This type is returned by `subdivide` and friends, and it not meant to be
/// instantiated directly.
#[derive(Debug, Clone)]
pub struct Subdivide {
start: FloatDuration,
step_size: FloatDuration,
len: usize,
index: usize,
}
impl Subdivide {
fn new(start: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
assert!(steps >= 2, "subdivide requires at least two steps");
let step_size = (end - start) / (steps - 1) as f64;
Subdivide {
start: start,
step_size: step_size,
len: steps,
index: 0,
}
}
/// The distance between steps in the iteration.
pub fn step_size(&self) -> FloatDuration {
self.step_size
}
}
impl Iterator for Subdivide {
type Item = FloatDuration;
#[inline]
fn next(&mut self) -> Option<FloatDuration> {
if self.index >= self.len {
None
} else {
let index = self.index;
self.index += 1;
Some(self.start + self.step_size * (index as f64))
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let left = self.len - self.index;
(left, Some(left))
}
}
impl DoubleEndedIterator for Subdivide {
fn next_back(&mut self) -> Option<FloatDuration> {
if self.index >= self.len {
None
} else {
self.len -= 1;
let index = self.len;
Some(self.start + self.step_size * (index as f64))
}
}
}
impl ExactSizeIterator for Subdivide {}
/// Subdivide the distance between two duration into `steps` evenly spaced points.
///
/// `subdivide` returns an iterator that lazily computes and returns exactly `steps`
/// evenly spaced
/// points between `begin` and `end`. This iterator is *inclusive* in that it
/// returns `begin` as the first element and `end` as the final element.
///
/// The returned iterator [`Subdivide`](struct.Subdivide.html) implements
/// `DoubleEndedIterator`, and thus can be reversed or consumed from both sides.
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide;
///
/// fn cost_function(t: &FloatDuration) -> f64 {
/// return 0.5*t.as_seconds()*t.as_seconds()
/// }
/// fn main() {
/// let start = FloatDuration::zero();
/// let end = FloatDuration::minutes(10.0);
/// let total: f64 = subdivide(start, end, 100).map(|x| cost_function(&x)).sum();
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
pub fn subdivide(begin: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
Subdivide::new(begin, end, steps)
}
/// Subdivide the distance between two duration into `steps` evenly spaced points
/// and include a timestep.
///
/// `subdivide_with_step` is equivalent to [`subdivide`](fn.subdivide.html)
/// except that it returns the
/// step size with the current time in each iteration. It is mainly a convenience
/// function for the common case of running a simulation over discrete time steps.
///
/// It is exactly equivalent to:
///
/// ```rust
/// # use float_duration::FloatDuration;
/// # use float_duration::iter::subdivide;
/// use std::iter;
///
/// let steps = 100;
/// # let begin = FloatDuration::zero();
/// # let end = FloatDuration::minutes(5.0);
///
/// let sub = subdivide(begin, end, steps);
/// let step_size = sub.step_size();
/// let my_iter = sub.zip(iter::repeat(step_size));
/// ```
///
/// Example usage in a simulation:
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide_with_step;
///
/// let start = FloatDuration::zero();
/// let end = FloatDuration::hours(1.0);
///
/// let mut x = 5.0;
/// let mut v = 0.0;
///
/// for (t, dt) in subdivide_with_step(start, end, 100) {
/// let a = x*x - v*x;
/// let v = a*dt.as_seconds();
/// let x = v*dt.as_seconds();
///
/// println!("Position: {}", x);
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
///
pub fn subdivide_with_step(begin: FloatDuration,
end: FloatDuration,
steps: usize)
-> iter::Zip<Subdivide, iter::Repeat<FloatDuration>>
|
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_subdivide() {
let s = subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 3);
let s_rev = s.clone().rev();
assert_eq!(s.collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::seconds(30.0),
FloatDuration::minutes(1.0)]);
assert_eq!(s_rev.collect::<Vec<_>>(),
vec![FloatDuration::minutes(1.0),
FloatDuration::seconds(30.0),
FloatDuration::zero()]);
assert_eq!(subdivide(FloatDuration::zero(), FloatDuration::zero(), 3).collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::zero(),
FloatDuration::zero()]);
}
#[should_panic]
#[test]
fn test_subdivide_panic() {
subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 1);
}
}
|
{
let sub = subdivide(begin, end, steps);
let step_size = sub.step_size();
sub.zip(iter::repeat(step_size))
}
|
identifier_body
|
iter.rs
|
//! Utilities for iteration with duration objects.
use duration::FloatDuration;
use std::iter;
/// An iterator over an evenly spaced lattice of `FloatDuration`s.
///
/// This type is returned by `subdivide` and friends, and it not meant to be
/// instantiated directly.
#[derive(Debug, Clone)]
pub struct Subdivide {
start: FloatDuration,
step_size: FloatDuration,
len: usize,
index: usize,
}
impl Subdivide {
fn new(start: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
assert!(steps >= 2, "subdivide requires at least two steps");
let step_size = (end - start) / (steps - 1) as f64;
Subdivide {
start: start,
step_size: step_size,
len: steps,
index: 0,
}
}
/// The distance between steps in the iteration.
pub fn step_size(&self) -> FloatDuration {
self.step_size
}
}
impl Iterator for Subdivide {
type Item = FloatDuration;
#[inline]
fn next(&mut self) -> Option<FloatDuration> {
if self.index >= self.len {
None
} else {
let index = self.index;
self.index += 1;
Some(self.start + self.step_size * (index as f64))
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let left = self.len - self.index;
(left, Some(left))
}
}
impl DoubleEndedIterator for Subdivide {
fn next_back(&mut self) -> Option<FloatDuration> {
if self.index >= self.len
|
else {
self.len -= 1;
let index = self.len;
Some(self.start + self.step_size * (index as f64))
}
}
}
impl ExactSizeIterator for Subdivide {}
/// Subdivide the distance between two duration into `steps` evenly spaced points.
///
/// `subdivide` returns an iterator that lazily computes and returns exactly `steps`
/// evenly spaced
/// points between `begin` and `end`. This iterator is *inclusive* in that it
/// returns `begin` as the first element and `end` as the final element.
///
/// The returned iterator [`Subdivide`](struct.Subdivide.html) implements
/// `DoubleEndedIterator`, and thus can be reversed or consumed from both sides.
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide;
///
/// fn cost_function(t: &FloatDuration) -> f64 {
/// return 0.5*t.as_seconds()*t.as_seconds()
/// }
/// fn main() {
/// let start = FloatDuration::zero();
/// let end = FloatDuration::minutes(10.0);
/// let total: f64 = subdivide(start, end, 100).map(|x| cost_function(&x)).sum();
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
pub fn subdivide(begin: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
Subdivide::new(begin, end, steps)
}
/// Subdivide the distance between two duration into `steps` evenly spaced points
/// and include a timestep.
///
/// `subdivide_with_step` is equivalent to [`subdivide`](fn.subdivide.html)
/// except that it returns the
/// step size with the current time in each iteration. It is mainly a convenience
/// function for the common case of running a simulation over discrete time steps.
///
/// It is exactly equivalent to:
///
/// ```rust
/// # use float_duration::FloatDuration;
/// # use float_duration::iter::subdivide;
/// use std::iter;
///
/// let steps = 100;
/// # let begin = FloatDuration::zero();
/// # let end = FloatDuration::minutes(5.0);
///
/// let sub = subdivide(begin, end, steps);
/// let step_size = sub.step_size();
/// let my_iter = sub.zip(iter::repeat(step_size));
/// ```
///
/// Example usage in a simulation:
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide_with_step;
///
/// let start = FloatDuration::zero();
/// let end = FloatDuration::hours(1.0);
///
/// let mut x = 5.0;
/// let mut v = 0.0;
///
/// for (t, dt) in subdivide_with_step(start, end, 100) {
/// let a = x*x - v*x;
/// let v = a*dt.as_seconds();
/// let x = v*dt.as_seconds();
///
/// println!("Position: {}", x);
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
///
pub fn subdivide_with_step(begin: FloatDuration,
end: FloatDuration,
steps: usize)
-> iter::Zip<Subdivide, iter::Repeat<FloatDuration>> {
let sub = subdivide(begin, end, steps);
let step_size = sub.step_size();
sub.zip(iter::repeat(step_size))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_subdivide() {
let s = subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 3);
let s_rev = s.clone().rev();
assert_eq!(s.collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::seconds(30.0),
FloatDuration::minutes(1.0)]);
assert_eq!(s_rev.collect::<Vec<_>>(),
vec![FloatDuration::minutes(1.0),
FloatDuration::seconds(30.0),
FloatDuration::zero()]);
assert_eq!(subdivide(FloatDuration::zero(), FloatDuration::zero(), 3).collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::zero(),
FloatDuration::zero()]);
}
#[should_panic]
#[test]
fn test_subdivide_panic() {
subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 1);
}
}
|
{
None
}
|
conditional_block
|
iter.rs
|
//! Utilities for iteration with duration objects.
use duration::FloatDuration;
use std::iter;
/// An iterator over an evenly spaced lattice of `FloatDuration`s.
///
/// This type is returned by `subdivide` and friends, and it not meant to be
/// instantiated directly.
#[derive(Debug, Clone)]
pub struct
|
{
start: FloatDuration,
step_size: FloatDuration,
len: usize,
index: usize,
}
impl Subdivide {
fn new(start: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
assert!(steps >= 2, "subdivide requires at least two steps");
let step_size = (end - start) / (steps - 1) as f64;
Subdivide {
start: start,
step_size: step_size,
len: steps,
index: 0,
}
}
/// The distance between steps in the iteration.
pub fn step_size(&self) -> FloatDuration {
self.step_size
}
}
impl Iterator for Subdivide {
type Item = FloatDuration;
#[inline]
fn next(&mut self) -> Option<FloatDuration> {
if self.index >= self.len {
None
} else {
let index = self.index;
self.index += 1;
Some(self.start + self.step_size * (index as f64))
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let left = self.len - self.index;
(left, Some(left))
}
}
impl DoubleEndedIterator for Subdivide {
fn next_back(&mut self) -> Option<FloatDuration> {
if self.index >= self.len {
None
} else {
self.len -= 1;
let index = self.len;
Some(self.start + self.step_size * (index as f64))
}
}
}
impl ExactSizeIterator for Subdivide {}
/// Subdivide the distance between two duration into `steps` evenly spaced points.
///
/// `subdivide` returns an iterator that lazily computes and returns exactly `steps`
/// evenly spaced
/// points between `begin` and `end`. This iterator is *inclusive* in that it
/// returns `begin` as the first element and `end` as the final element.
///
/// The returned iterator [`Subdivide`](struct.Subdivide.html) implements
/// `DoubleEndedIterator`, and thus can be reversed or consumed from both sides.
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide;
///
/// fn cost_function(t: &FloatDuration) -> f64 {
/// return 0.5*t.as_seconds()*t.as_seconds()
/// }
/// fn main() {
/// let start = FloatDuration::zero();
/// let end = FloatDuration::minutes(10.0);
/// let total: f64 = subdivide(start, end, 100).map(|x| cost_function(&x)).sum();
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
pub fn subdivide(begin: FloatDuration, end: FloatDuration, steps: usize) -> Subdivide {
Subdivide::new(begin, end, steps)
}
/// Subdivide the distance between two duration into `steps` evenly spaced points
/// and include a timestep.
///
/// `subdivide_with_step` is equivalent to [`subdivide`](fn.subdivide.html)
/// except that it returns the
/// step size with the current time in each iteration. It is mainly a convenience
/// function for the common case of running a simulation over discrete time steps.
///
/// It is exactly equivalent to:
///
/// ```rust
/// # use float_duration::FloatDuration;
/// # use float_duration::iter::subdivide;
/// use std::iter;
///
/// let steps = 100;
/// # let begin = FloatDuration::zero();
/// # let end = FloatDuration::minutes(5.0);
///
/// let sub = subdivide(begin, end, steps);
/// let step_size = sub.step_size();
/// let my_iter = sub.zip(iter::repeat(step_size));
/// ```
///
/// Example usage in a simulation:
///
/// ```rust
/// use float_duration::FloatDuration;
/// use float_duration::iter::subdivide_with_step;
///
/// let start = FloatDuration::zero();
/// let end = FloatDuration::hours(1.0);
///
/// let mut x = 5.0;
/// let mut v = 0.0;
///
/// for (t, dt) in subdivide_with_step(start, end, 100) {
/// let a = x*x - v*x;
/// let v = a*dt.as_seconds();
/// let x = v*dt.as_seconds();
///
/// println!("Position: {}", x);
/// }
/// ```
///
/// # Panics
/// This function panics if `steps < 2` as this would violate the property
/// that the iterator visits both endpoints.
///
pub fn subdivide_with_step(begin: FloatDuration,
end: FloatDuration,
steps: usize)
-> iter::Zip<Subdivide, iter::Repeat<FloatDuration>> {
let sub = subdivide(begin, end, steps);
let step_size = sub.step_size();
sub.zip(iter::repeat(step_size))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_subdivide() {
let s = subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 3);
let s_rev = s.clone().rev();
assert_eq!(s.collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::seconds(30.0),
FloatDuration::minutes(1.0)]);
assert_eq!(s_rev.collect::<Vec<_>>(),
vec![FloatDuration::minutes(1.0),
FloatDuration::seconds(30.0),
FloatDuration::zero()]);
assert_eq!(subdivide(FloatDuration::zero(), FloatDuration::zero(), 3).collect::<Vec<_>>(),
vec![FloatDuration::zero(),
FloatDuration::zero(),
FloatDuration::zero()]);
}
#[should_panic]
#[test]
fn test_subdivide_panic() {
subdivide(FloatDuration::zero(), FloatDuration::minutes(1.0), 1);
}
}
|
Subdivide
|
identifier_name
|
resource-cycle2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Don't leak the unique pointers
use std::cast;
struct
|
{
a: int,
b: int,
c: *int
}
struct r {
v: U,
}
impl Drop for r {
fn drop(&mut self) {
unsafe {
let _v2: ~int = cast::transmute(self.v.c);
}
}
}
fn r(v: U) -> r {
r {
v: v
}
}
struct t(Node);
struct Node {
next: Option<@mut t>,
r: r
}
pub fn main() {
unsafe {
let i1 = ~0xA;
let i1p = cast::transmute_copy(&i1);
cast::forget(i1);
let i2 = ~0xA;
let i2p = cast::transmute_copy(&i2);
cast::forget(i2);
let u1 = U {a: 0xB, b: 0xC, c: i1p};
let u2 = U {a: 0xB, b: 0xC, c: i2p};
let x1 = @mut t(Node {
next: None,
r: r(u1)
});
let x2 = @mut t(Node {
next: None,
r: r(u2)
});
x1.next = Some(x2);
x2.next = Some(x1);
}
}
|
U
|
identifier_name
|
resource-cycle2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Don't leak the unique pointers
use std::cast;
struct U {
a: int,
b: int,
c: *int
}
struct r {
v: U,
}
impl Drop for r {
fn drop(&mut self) {
unsafe {
let _v2: ~int = cast::transmute(self.v.c);
}
}
}
fn r(v: U) -> r {
r {
v: v
}
}
struct t(Node);
struct Node {
next: Option<@mut t>,
|
r: r
}
pub fn main() {
unsafe {
let i1 = ~0xA;
let i1p = cast::transmute_copy(&i1);
cast::forget(i1);
let i2 = ~0xA;
let i2p = cast::transmute_copy(&i2);
cast::forget(i2);
let u1 = U {a: 0xB, b: 0xC, c: i1p};
let u2 = U {a: 0xB, b: 0xC, c: i2p};
let x1 = @mut t(Node {
next: None,
r: r(u1)
});
let x2 = @mut t(Node {
next: None,
r: r(u2)
});
x1.next = Some(x2);
x2.next = Some(x1);
}
}
|
random_line_split
|
|
resource-cycle2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Don't leak the unique pointers
use std::cast;
struct U {
a: int,
b: int,
c: *int
}
struct r {
v: U,
}
impl Drop for r {
fn drop(&mut self) {
unsafe {
let _v2: ~int = cast::transmute(self.v.c);
}
}
}
fn r(v: U) -> r {
r {
v: v
}
}
struct t(Node);
struct Node {
next: Option<@mut t>,
r: r
}
pub fn main()
|
x1.next = Some(x2);
x2.next = Some(x1);
}
}
|
{
unsafe {
let i1 = ~0xA;
let i1p = cast::transmute_copy(&i1);
cast::forget(i1);
let i2 = ~0xA;
let i2p = cast::transmute_copy(&i2);
cast::forget(i2);
let u1 = U {a: 0xB, b: 0xC, c: i1p};
let u2 = U {a: 0xB, b: 0xC, c: i2p};
let x1 = @mut t(Node {
next: None,
r: r(u1)
});
let x2 = @mut t(Node {
next: None,
r: r(u2)
});
|
identifier_body
|
event.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
use dom::bindings::codegen::Bindings::EventBinding;
use dom::bindings::codegen::Bindings::EventBinding::{EventConstants, EventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{MutNullableJS, JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::eventtarget::{EventTarget, EventTargetHelpers};
use util::str::DOMString;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::default::Default;
use time;
#[jstraceable]
#[derive(Copy)]
#[repr(u16)]
pub enum EventPhase {
None = EventConstants::NONE,
Capturing = EventConstants::CAPTURING_PHASE,
AtTarget = EventConstants::AT_TARGET,
Bubbling = EventConstants::BUBBLING_PHASE,
}
#[derive(PartialEq)]
#[jstraceable]
pub enum EventTypeId {
CustomEvent,
HTMLEvent,
KeyboardEvent,
MessageEvent,
MouseEvent,
ProgressEvent,
StorageEvent,
UIEvent,
ErrorEvent
}
#[derive(PartialEq)]
pub enum EventBubbles {
Bubbles,
DoesNotBubble
}
#[derive(PartialEq)]
pub enum EventCancelable {
Cancelable,
NotCancelable
}
#[dom_struct]
pub struct Event {
reflector_: Reflector,
type_id: EventTypeId,
current_target: MutNullableJS<EventTarget>,
target: MutNullableJS<EventTarget>,
type_: DOMRefCell<DOMString>,
phase: Cell<EventPhase>,
canceled: Cell<bool>,
stop_propagation: Cell<bool>,
stop_immediate: Cell<bool>,
cancelable: Cell<bool>,
bubbles: Cell<bool>,
trusted: Cell<bool>,
dispatching: Cell<bool>,
initialized: Cell<bool>,
timestamp: u64,
}
impl Event {
pub fn new_inherited(type_id: EventTypeId) -> Event {
Event {
reflector_: Reflector::new(),
type_id: type_id,
current_target: Default::default(),
target: Default::default(),
phase: Cell::new(EventPhase::None),
type_: DOMRefCell::new("".to_owned()),
canceled: Cell::new(false),
cancelable: Cell::new(false),
bubbles: Cell::new(false),
trusted: Cell::new(false),
dispatching: Cell::new(false),
stop_propagation: Cell::new(false),
stop_immediate: Cell::new(false),
initialized: Cell::new(false),
timestamp: time::get_time().sec as u64,
}
}
pub fn new_uninitialized(global: GlobalRef) -> Temporary<Event> {
reflect_dom_object(box Event::new_inherited(EventTypeId::HTMLEvent),
global,
EventBinding::Wrap)
}
pub fn new(global: GlobalRef,
type_: DOMString,
bubbles: EventBubbles,
cancelable: EventCancelable) -> Temporary<Event> {
let event = Event::new_uninitialized(global).root();
event.r().InitEvent(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
Temporary::from_rooted(event.r())
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &EventBinding::EventInit) -> Fallible<Temporary<Event>> {
let bubbles = if init.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
let cancelable = if init.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable };
Ok(Event::new(global, type_, bubbles, cancelable))
}
#[inline]
pub fn type_id<'a>(&'a self) -> &'a EventTypeId {
&self.type_id
}
#[inline]
pub fn clear_current_target(&self) {
self.current_target.clear();
}
#[inline]
pub fn set_current_target(&self, val: JSRef<EventTarget>) {
self.current_target.assign(Some(val));
}
#[inline]
pub fn set_target(&self, val: JSRef<EventTarget>) {
self.target.assign(Some(val));
}
#[inline]
pub fn set_phase(&self, val: EventPhase) {
self.phase.set(val)
}
#[inline]
pub fn stop_propagation(&self) -> bool {
self.stop_propagation.get()
}
#[inline]
pub fn stop_immediate(&self) -> bool {
self.stop_immediate.get()
}
#[inline]
pub fn bubbles(&self) -> bool {
self.bubbles.get()
}
#[inline]
pub fn dispatching(&self) -> bool {
self.dispatching.get()
}
#[inline]
pub fn set_dispatching(&self, val: bool) {
self.dispatching.set(val)
}
#[inline]
pub fn initialized(&self) -> bool {
self.initialized.get()
}
}
impl<'a> EventMethods for JSRef<'a, Event> {
fn EventPhase(self) -> u16 {
self.phase.get() as u16
}
fn Type(self) -> DOMString {
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let type_ = self.type_.borrow();
type_.clone()
}
fn GetTarget(self) -> Option<Temporary<EventTarget>> {
self.target.get()
}
fn GetCurrentTarget(self) -> Option<Temporary<EventTarget>> {
self.current_target.get()
}
fn DefaultPrevented(self) -> bool {
self.canceled.get()
}
fn PreventDefault(self) {
if self.cancelable.get() {
self.canceled.set(true)
}
}
fn StopPropagation(self) {
self.stop_propagation.set(true);
}
fn StopImmediatePropagation(self) {
self.stop_immediate.set(true);
self.stop_propagation.set(true);
}
fn Bubbles(self) -> bool {
self.bubbles.get()
}
fn Cancelable(self) -> bool {
self.cancelable.get()
}
fn TimeStamp(self) -> u64 {
self.timestamp
}
fn InitEvent(self,
type_: DOMString,
bubbles: bool,
cancelable: bool) {
if self.dispatching.get() {
return;
}
self.initialized.set(true);
self.stop_propagation.set(false);
self.stop_immediate.set(false);
self.canceled.set(false);
self.trusted.set(false);
self.target.clear();
*self.type_.borrow_mut() = type_;
self.bubbles.set(bubbles);
self.cancelable.set(cancelable);
}
fn IsTrusted(self) -> bool {
self.trusted.get()
}
}
pub trait EventHelpers {
fn set_trusted(self, trusted: bool);
fn fire(self, target: JSRef<EventTarget>) -> bool;
}
impl<'a> EventHelpers for JSRef<'a, Event> {
fn set_trusted(self, trusted: bool) {
self.trusted.set(trusted);
}
// https://html.spec.whatwg.org/multipage/webappapis.html#fire-a-simple-event
fn fire(self, target: JSRef<EventTarget>) -> bool {
self.set_trusted(true);
target.dispatch_event(self)
}
}
|
use dom::bindings::cell::DOMRefCell;
|
random_line_split
|
event.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::EventBinding;
use dom::bindings::codegen::Bindings::EventBinding::{EventConstants, EventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{MutNullableJS, JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::eventtarget::{EventTarget, EventTargetHelpers};
use util::str::DOMString;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::default::Default;
use time;
#[jstraceable]
#[derive(Copy)]
#[repr(u16)]
pub enum EventPhase {
None = EventConstants::NONE,
Capturing = EventConstants::CAPTURING_PHASE,
AtTarget = EventConstants::AT_TARGET,
Bubbling = EventConstants::BUBBLING_PHASE,
}
#[derive(PartialEq)]
#[jstraceable]
pub enum EventTypeId {
CustomEvent,
HTMLEvent,
KeyboardEvent,
MessageEvent,
MouseEvent,
ProgressEvent,
StorageEvent,
UIEvent,
ErrorEvent
}
#[derive(PartialEq)]
pub enum EventBubbles {
Bubbles,
DoesNotBubble
}
#[derive(PartialEq)]
pub enum EventCancelable {
Cancelable,
NotCancelable
}
#[dom_struct]
pub struct Event {
reflector_: Reflector,
type_id: EventTypeId,
current_target: MutNullableJS<EventTarget>,
target: MutNullableJS<EventTarget>,
type_: DOMRefCell<DOMString>,
phase: Cell<EventPhase>,
canceled: Cell<bool>,
stop_propagation: Cell<bool>,
stop_immediate: Cell<bool>,
cancelable: Cell<bool>,
bubbles: Cell<bool>,
trusted: Cell<bool>,
dispatching: Cell<bool>,
initialized: Cell<bool>,
timestamp: u64,
}
impl Event {
pub fn new_inherited(type_id: EventTypeId) -> Event {
Event {
reflector_: Reflector::new(),
type_id: type_id,
current_target: Default::default(),
target: Default::default(),
phase: Cell::new(EventPhase::None),
type_: DOMRefCell::new("".to_owned()),
canceled: Cell::new(false),
cancelable: Cell::new(false),
bubbles: Cell::new(false),
trusted: Cell::new(false),
dispatching: Cell::new(false),
stop_propagation: Cell::new(false),
stop_immediate: Cell::new(false),
initialized: Cell::new(false),
timestamp: time::get_time().sec as u64,
}
}
pub fn new_uninitialized(global: GlobalRef) -> Temporary<Event> {
reflect_dom_object(box Event::new_inherited(EventTypeId::HTMLEvent),
global,
EventBinding::Wrap)
}
pub fn new(global: GlobalRef,
type_: DOMString,
bubbles: EventBubbles,
cancelable: EventCancelable) -> Temporary<Event> {
let event = Event::new_uninitialized(global).root();
event.r().InitEvent(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
Temporary::from_rooted(event.r())
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &EventBinding::EventInit) -> Fallible<Temporary<Event>> {
let bubbles = if init.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
let cancelable = if init.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable };
Ok(Event::new(global, type_, bubbles, cancelable))
}
#[inline]
pub fn type_id<'a>(&'a self) -> &'a EventTypeId {
&self.type_id
}
#[inline]
pub fn clear_current_target(&self) {
self.current_target.clear();
}
#[inline]
pub fn set_current_target(&self, val: JSRef<EventTarget>) {
self.current_target.assign(Some(val));
}
#[inline]
pub fn set_target(&self, val: JSRef<EventTarget>) {
self.target.assign(Some(val));
}
#[inline]
pub fn set_phase(&self, val: EventPhase)
|
#[inline]
pub fn stop_propagation(&self) -> bool {
self.stop_propagation.get()
}
#[inline]
pub fn stop_immediate(&self) -> bool {
self.stop_immediate.get()
}
#[inline]
pub fn bubbles(&self) -> bool {
self.bubbles.get()
}
#[inline]
pub fn dispatching(&self) -> bool {
self.dispatching.get()
}
#[inline]
pub fn set_dispatching(&self, val: bool) {
self.dispatching.set(val)
}
#[inline]
pub fn initialized(&self) -> bool {
self.initialized.get()
}
}
impl<'a> EventMethods for JSRef<'a, Event> {
fn EventPhase(self) -> u16 {
self.phase.get() as u16
}
fn Type(self) -> DOMString {
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let type_ = self.type_.borrow();
type_.clone()
}
fn GetTarget(self) -> Option<Temporary<EventTarget>> {
self.target.get()
}
fn GetCurrentTarget(self) -> Option<Temporary<EventTarget>> {
self.current_target.get()
}
fn DefaultPrevented(self) -> bool {
self.canceled.get()
}
fn PreventDefault(self) {
if self.cancelable.get() {
self.canceled.set(true)
}
}
fn StopPropagation(self) {
self.stop_propagation.set(true);
}
fn StopImmediatePropagation(self) {
self.stop_immediate.set(true);
self.stop_propagation.set(true);
}
fn Bubbles(self) -> bool {
self.bubbles.get()
}
fn Cancelable(self) -> bool {
self.cancelable.get()
}
fn TimeStamp(self) -> u64 {
self.timestamp
}
fn InitEvent(self,
type_: DOMString,
bubbles: bool,
cancelable: bool) {
if self.dispatching.get() {
return;
}
self.initialized.set(true);
self.stop_propagation.set(false);
self.stop_immediate.set(false);
self.canceled.set(false);
self.trusted.set(false);
self.target.clear();
*self.type_.borrow_mut() = type_;
self.bubbles.set(bubbles);
self.cancelable.set(cancelable);
}
fn IsTrusted(self) -> bool {
self.trusted.get()
}
}
pub trait EventHelpers {
fn set_trusted(self, trusted: bool);
fn fire(self, target: JSRef<EventTarget>) -> bool;
}
impl<'a> EventHelpers for JSRef<'a, Event> {
fn set_trusted(self, trusted: bool) {
self.trusted.set(trusted);
}
// https://html.spec.whatwg.org/multipage/webappapis.html#fire-a-simple-event
fn fire(self, target: JSRef<EventTarget>) -> bool {
self.set_trusted(true);
target.dispatch_event(self)
}
}
|
{
self.phase.set(val)
}
|
identifier_body
|
event.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::EventBinding;
use dom::bindings::codegen::Bindings::EventBinding::{EventConstants, EventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{MutNullableJS, JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::eventtarget::{EventTarget, EventTargetHelpers};
use util::str::DOMString;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::default::Default;
use time;
#[jstraceable]
#[derive(Copy)]
#[repr(u16)]
pub enum EventPhase {
None = EventConstants::NONE,
Capturing = EventConstants::CAPTURING_PHASE,
AtTarget = EventConstants::AT_TARGET,
Bubbling = EventConstants::BUBBLING_PHASE,
}
#[derive(PartialEq)]
#[jstraceable]
pub enum EventTypeId {
CustomEvent,
HTMLEvent,
KeyboardEvent,
MessageEvent,
MouseEvent,
ProgressEvent,
StorageEvent,
UIEvent,
ErrorEvent
}
#[derive(PartialEq)]
pub enum EventBubbles {
Bubbles,
DoesNotBubble
}
#[derive(PartialEq)]
pub enum EventCancelable {
Cancelable,
NotCancelable
}
#[dom_struct]
pub struct Event {
reflector_: Reflector,
type_id: EventTypeId,
current_target: MutNullableJS<EventTarget>,
target: MutNullableJS<EventTarget>,
type_: DOMRefCell<DOMString>,
phase: Cell<EventPhase>,
canceled: Cell<bool>,
stop_propagation: Cell<bool>,
stop_immediate: Cell<bool>,
cancelable: Cell<bool>,
bubbles: Cell<bool>,
trusted: Cell<bool>,
dispatching: Cell<bool>,
initialized: Cell<bool>,
timestamp: u64,
}
impl Event {
pub fn new_inherited(type_id: EventTypeId) -> Event {
Event {
reflector_: Reflector::new(),
type_id: type_id,
current_target: Default::default(),
target: Default::default(),
phase: Cell::new(EventPhase::None),
type_: DOMRefCell::new("".to_owned()),
canceled: Cell::new(false),
cancelable: Cell::new(false),
bubbles: Cell::new(false),
trusted: Cell::new(false),
dispatching: Cell::new(false),
stop_propagation: Cell::new(false),
stop_immediate: Cell::new(false),
initialized: Cell::new(false),
timestamp: time::get_time().sec as u64,
}
}
pub fn new_uninitialized(global: GlobalRef) -> Temporary<Event> {
reflect_dom_object(box Event::new_inherited(EventTypeId::HTMLEvent),
global,
EventBinding::Wrap)
}
pub fn new(global: GlobalRef,
type_: DOMString,
bubbles: EventBubbles,
cancelable: EventCancelable) -> Temporary<Event> {
let event = Event::new_uninitialized(global).root();
event.r().InitEvent(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
Temporary::from_rooted(event.r())
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &EventBinding::EventInit) -> Fallible<Temporary<Event>> {
let bubbles = if init.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
let cancelable = if init.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable };
Ok(Event::new(global, type_, bubbles, cancelable))
}
#[inline]
pub fn type_id<'a>(&'a self) -> &'a EventTypeId {
&self.type_id
}
#[inline]
pub fn clear_current_target(&self) {
self.current_target.clear();
}
#[inline]
pub fn set_current_target(&self, val: JSRef<EventTarget>) {
self.current_target.assign(Some(val));
}
#[inline]
pub fn set_target(&self, val: JSRef<EventTarget>) {
self.target.assign(Some(val));
}
#[inline]
pub fn set_phase(&self, val: EventPhase) {
self.phase.set(val)
}
#[inline]
pub fn stop_propagation(&self) -> bool {
self.stop_propagation.get()
}
#[inline]
pub fn stop_immediate(&self) -> bool {
self.stop_immediate.get()
}
#[inline]
pub fn bubbles(&self) -> bool {
self.bubbles.get()
}
#[inline]
pub fn dispatching(&self) -> bool {
self.dispatching.get()
}
#[inline]
pub fn set_dispatching(&self, val: bool) {
self.dispatching.set(val)
}
#[inline]
pub fn initialized(&self) -> bool {
self.initialized.get()
}
}
impl<'a> EventMethods for JSRef<'a, Event> {
fn EventPhase(self) -> u16 {
self.phase.get() as u16
}
fn Type(self) -> DOMString {
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let type_ = self.type_.borrow();
type_.clone()
}
fn GetTarget(self) -> Option<Temporary<EventTarget>> {
self.target.get()
}
fn GetCurrentTarget(self) -> Option<Temporary<EventTarget>> {
self.current_target.get()
}
fn DefaultPrevented(self) -> bool {
self.canceled.get()
}
fn PreventDefault(self) {
if self.cancelable.get() {
self.canceled.set(true)
}
}
fn StopPropagation(self) {
self.stop_propagation.set(true);
}
fn StopImmediatePropagation(self) {
self.stop_immediate.set(true);
self.stop_propagation.set(true);
}
fn Bubbles(self) -> bool {
self.bubbles.get()
}
fn Cancelable(self) -> bool {
self.cancelable.get()
}
fn TimeStamp(self) -> u64 {
self.timestamp
}
fn
|
(self,
type_: DOMString,
bubbles: bool,
cancelable: bool) {
if self.dispatching.get() {
return;
}
self.initialized.set(true);
self.stop_propagation.set(false);
self.stop_immediate.set(false);
self.canceled.set(false);
self.trusted.set(false);
self.target.clear();
*self.type_.borrow_mut() = type_;
self.bubbles.set(bubbles);
self.cancelable.set(cancelable);
}
fn IsTrusted(self) -> bool {
self.trusted.get()
}
}
pub trait EventHelpers {
fn set_trusted(self, trusted: bool);
fn fire(self, target: JSRef<EventTarget>) -> bool;
}
impl<'a> EventHelpers for JSRef<'a, Event> {
fn set_trusted(self, trusted: bool) {
self.trusted.set(trusted);
}
// https://html.spec.whatwg.org/multipage/webappapis.html#fire-a-simple-event
fn fire(self, target: JSRef<EventTarget>) -> bool {
self.set_trusted(true);
target.dispatch_event(self)
}
}
|
InitEvent
|
identifier_name
|
event.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::EventBinding;
use dom::bindings::codegen::Bindings::EventBinding::{EventConstants, EventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{MutNullableJS, JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::eventtarget::{EventTarget, EventTargetHelpers};
use util::str::DOMString;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::default::Default;
use time;
#[jstraceable]
#[derive(Copy)]
#[repr(u16)]
pub enum EventPhase {
None = EventConstants::NONE,
Capturing = EventConstants::CAPTURING_PHASE,
AtTarget = EventConstants::AT_TARGET,
Bubbling = EventConstants::BUBBLING_PHASE,
}
#[derive(PartialEq)]
#[jstraceable]
pub enum EventTypeId {
CustomEvent,
HTMLEvent,
KeyboardEvent,
MessageEvent,
MouseEvent,
ProgressEvent,
StorageEvent,
UIEvent,
ErrorEvent
}
#[derive(PartialEq)]
pub enum EventBubbles {
Bubbles,
DoesNotBubble
}
#[derive(PartialEq)]
pub enum EventCancelable {
Cancelable,
NotCancelable
}
#[dom_struct]
pub struct Event {
reflector_: Reflector,
type_id: EventTypeId,
current_target: MutNullableJS<EventTarget>,
target: MutNullableJS<EventTarget>,
type_: DOMRefCell<DOMString>,
phase: Cell<EventPhase>,
canceled: Cell<bool>,
stop_propagation: Cell<bool>,
stop_immediate: Cell<bool>,
cancelable: Cell<bool>,
bubbles: Cell<bool>,
trusted: Cell<bool>,
dispatching: Cell<bool>,
initialized: Cell<bool>,
timestamp: u64,
}
impl Event {
pub fn new_inherited(type_id: EventTypeId) -> Event {
Event {
reflector_: Reflector::new(),
type_id: type_id,
current_target: Default::default(),
target: Default::default(),
phase: Cell::new(EventPhase::None),
type_: DOMRefCell::new("".to_owned()),
canceled: Cell::new(false),
cancelable: Cell::new(false),
bubbles: Cell::new(false),
trusted: Cell::new(false),
dispatching: Cell::new(false),
stop_propagation: Cell::new(false),
stop_immediate: Cell::new(false),
initialized: Cell::new(false),
timestamp: time::get_time().sec as u64,
}
}
pub fn new_uninitialized(global: GlobalRef) -> Temporary<Event> {
reflect_dom_object(box Event::new_inherited(EventTypeId::HTMLEvent),
global,
EventBinding::Wrap)
}
pub fn new(global: GlobalRef,
type_: DOMString,
bubbles: EventBubbles,
cancelable: EventCancelable) -> Temporary<Event> {
let event = Event::new_uninitialized(global).root();
event.r().InitEvent(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
Temporary::from_rooted(event.r())
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &EventBinding::EventInit) -> Fallible<Temporary<Event>> {
let bubbles = if init.bubbles { EventBubbles::Bubbles } else
|
;
let cancelable = if init.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable };
Ok(Event::new(global, type_, bubbles, cancelable))
}
#[inline]
pub fn type_id<'a>(&'a self) -> &'a EventTypeId {
&self.type_id
}
#[inline]
pub fn clear_current_target(&self) {
self.current_target.clear();
}
#[inline]
pub fn set_current_target(&self, val: JSRef<EventTarget>) {
self.current_target.assign(Some(val));
}
#[inline]
pub fn set_target(&self, val: JSRef<EventTarget>) {
self.target.assign(Some(val));
}
#[inline]
pub fn set_phase(&self, val: EventPhase) {
self.phase.set(val)
}
#[inline]
pub fn stop_propagation(&self) -> bool {
self.stop_propagation.get()
}
#[inline]
pub fn stop_immediate(&self) -> bool {
self.stop_immediate.get()
}
#[inline]
pub fn bubbles(&self) -> bool {
self.bubbles.get()
}
#[inline]
pub fn dispatching(&self) -> bool {
self.dispatching.get()
}
#[inline]
pub fn set_dispatching(&self, val: bool) {
self.dispatching.set(val)
}
#[inline]
pub fn initialized(&self) -> bool {
self.initialized.get()
}
}
impl<'a> EventMethods for JSRef<'a, Event> {
fn EventPhase(self) -> u16 {
self.phase.get() as u16
}
fn Type(self) -> DOMString {
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let type_ = self.type_.borrow();
type_.clone()
}
fn GetTarget(self) -> Option<Temporary<EventTarget>> {
self.target.get()
}
fn GetCurrentTarget(self) -> Option<Temporary<EventTarget>> {
self.current_target.get()
}
fn DefaultPrevented(self) -> bool {
self.canceled.get()
}
fn PreventDefault(self) {
if self.cancelable.get() {
self.canceled.set(true)
}
}
fn StopPropagation(self) {
self.stop_propagation.set(true);
}
fn StopImmediatePropagation(self) {
self.stop_immediate.set(true);
self.stop_propagation.set(true);
}
fn Bubbles(self) -> bool {
self.bubbles.get()
}
fn Cancelable(self) -> bool {
self.cancelable.get()
}
fn TimeStamp(self) -> u64 {
self.timestamp
}
fn InitEvent(self,
type_: DOMString,
bubbles: bool,
cancelable: bool) {
if self.dispatching.get() {
return;
}
self.initialized.set(true);
self.stop_propagation.set(false);
self.stop_immediate.set(false);
self.canceled.set(false);
self.trusted.set(false);
self.target.clear();
*self.type_.borrow_mut() = type_;
self.bubbles.set(bubbles);
self.cancelable.set(cancelable);
}
fn IsTrusted(self) -> bool {
self.trusted.get()
}
}
pub trait EventHelpers {
fn set_trusted(self, trusted: bool);
fn fire(self, target: JSRef<EventTarget>) -> bool;
}
impl<'a> EventHelpers for JSRef<'a, Event> {
fn set_trusted(self, trusted: bool) {
self.trusted.set(trusted);
}
// https://html.spec.whatwg.org/multipage/webappapis.html#fire-a-simple-event
fn fire(self, target: JSRef<EventTarget>) -> bool {
self.set_trusted(true);
target.dispatch_event(self)
}
}
|
{ EventBubbles::DoesNotBubble }
|
conditional_block
|
blockchain.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::str::{FromStr, from_utf8};
use std::{io, fs};
use std::io::{BufReader, BufRead};
use std::time::{Instant, Duration};
use std::thread::sleep;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use util::{ToPretty, U256, H256, Address, Hashable};
use rlp::PayloadInfo;
use ethcore::service::ClientService;
use ethcore::client::{Mode, DatabaseCompactionProfile, VMType, BlockImportError, BlockChainClient, BlockId};
use ethcore::error::ImportError;
use ethcore::miner::Miner;
use ethcore::verification::queue::VerifierSettings;
use cache::CacheConfig;
use informant::{Informant, MillisecondDuration};
use params::{SpecType, Pruning, Switch, tracing_switch_to_bool, fatdb_switch_to_bool};
use helpers::{to_client_config, execute_upgrades};
use dir::Directories;
use user_defaults::UserDefaults;
use fdlimit;
#[derive(Debug, PartialEq)]
pub enum DataFormat {
Hex,
Binary,
}
impl Default for DataFormat {
fn default() -> Self {
DataFormat::Binary
}
}
impl FromStr for DataFormat {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"binary" | "bin" => Ok(DataFormat::Binary),
"hex" => Ok(DataFormat::Hex),
x => Err(format!("Invalid format: {}", x))
}
}
}
#[derive(Debug, PartialEq)]
pub enum BlockchainCmd {
Kill(KillBlockchain),
Import(ImportBlockchain),
Export(ExportBlockchain),
ExportState(ExportState),
}
#[derive(Debug, PartialEq)]
pub struct KillBlockchain {
pub spec: SpecType,
pub dirs: Directories,
pub pruning: Pruning,
}
#[derive(Debug, PartialEq)]
pub struct ImportBlockchain {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub tracing: Switch,
pub fat_db: Switch,
pub vm_type: VMType,
pub check_seal: bool,
pub with_color: bool,
pub verifier_settings: VerifierSettings,
}
#[derive(Debug, PartialEq)]
pub struct ExportBlockchain {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub fat_db: Switch,
pub tracing: Switch,
pub from_block: BlockId,
pub to_block: BlockId,
pub check_seal: bool,
}
#[derive(Debug, PartialEq)]
pub struct ExportState {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub fat_db: Switch,
pub tracing: Switch,
pub at: BlockId,
pub storage: bool,
pub code: bool,
pub min_balance: Option<U256>,
pub max_balance: Option<U256>,
}
pub fn execute(cmd: BlockchainCmd) -> Result<(), String> {
match cmd {
BlockchainCmd::Kill(kill_cmd) => kill_db(kill_cmd),
BlockchainCmd::Import(import_cmd) => execute_import(import_cmd),
BlockchainCmd::Export(export_cmd) => execute_export(export_cmd),
BlockchainCmd::ExportState(export_cmd) => execute_export_state(export_cmd),
}
}
fn execute_import(cmd: ImportBlockchain) -> Result<(), String> {
let timer = Instant::now();
// load spec file
let spec = cmd.spec.spec()?;
// load genesis hash
let genesis_hash = spec.genesis_header().hash();
// database paths
let db_dirs = cmd.dirs.database(genesis_hash, None, spec.data_dir.clone());
// user defaults path
let user_defaults_path = db_dirs.user_defaults_path();
// load user defaults
let mut user_defaults = UserDefaults::load(&user_defaults_path)?;
fdlimit::raise_fd_limit();
// select pruning algorithm
let algorithm = cmd.pruning.to_algorithm(&user_defaults);
// check if tracing is on
let tracing = tracing_switch_to_bool(cmd.tracing, &user_defaults)?;
// check if fatdb is on
let fat_db = fatdb_switch_to_bool(cmd.fat_db, &user_defaults, algorithm)?;
// prepare client and snapshot paths.
let client_path = db_dirs.client_path(algorithm);
let snapshot_path = db_dirs.snapshot_path();
// execute upgrades
execute_upgrades(&cmd.dirs.base, &db_dirs, algorithm, cmd.compaction.compaction_profile(db_dirs.db_root_path().as_path()))?;
// create dirs used by parity
cmd.dirs.create_dirs(false, false, false)?;
// prepare client config
let mut client_config = to_client_config(
&cmd.cache_config,
spec.name.to_lowercase(),
Mode::Active,
tracing,
fat_db,
cmd.compaction,
cmd.wal,
cmd.vm_type,
"".into(),
algorithm,
cmd.pruning_history,
cmd.pruning_memory,
cmd.check_seal
);
client_config.queue.verifier_settings = cmd.verifier_settings;
// build client
let service = ClientService::start(
client_config,
&spec,
&client_path,
&snapshot_path,
&cmd.dirs.ipc_path(),
Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))?;
// free up the spec in memory.
drop(spec);
let client = service.client();
let mut instream: Box<io::Read> = match cmd.file_path {
Some(f) => Box::new(fs::File::open(&f).map_err(|_| format!("Cannot open given file: {}", f))?),
None => Box::new(io::stdin()),
};
const READAHEAD_BYTES: usize = 8;
let mut first_bytes: Vec<u8> = vec![0; READAHEAD_BYTES];
let mut first_read = 0;
let format = match cmd.format {
Some(format) => format,
None => {
first_read = instream.read(&mut first_bytes).map_err(|_| "Error reading from the file/stream.")?;
match first_bytes[0] {
0xf9 => DataFormat::Binary,
_ => DataFormat::Hex,
}
}
};
let informant = Arc::new(Informant::new(client.clone(), None, None, None, None, cmd.with_color));
service.register_io_handler(informant).map_err(|_| "Unable to register informant handler".to_owned())?;
let do_import = |bytes| {
while client.queue_info().is_full() { sleep(Duration::from_secs(1)); }
match client.import_block(bytes) {
Err(BlockImportError::Import(ImportError::AlreadyInChain)) => {
trace!("Skipping block already in chain.");
}
Err(e) => {
return Err(format!("Cannot import block: {:?}", e));
},
Ok(_) => {},
}
Ok(())
};
match format {
DataFormat::Binary => {
loop {
let mut bytes = if first_read > 0 {first_bytes.clone()} else {vec![0; READAHEAD_BYTES]};
let n = if first_read > 0 {
first_read
} else {
instream.read(&mut bytes).map_err(|_| "Error reading from the file/stream.")?
};
if n == 0 { break; }
first_read = 0;
let s = PayloadInfo::from(&bytes).map_err(|e| format!("Invalid RLP in the file/stream: {:?}", e))?.total();
bytes.resize(s, 0);
instream.read_exact(&mut bytes[n..]).map_err(|_| "Error reading from the file/stream.")?;
do_import(bytes)?;
}
}
DataFormat::Hex => {
for line in BufReader::new(instream).lines() {
let s = line.map_err(|_| "Error reading from the file/stream.")?;
let s = if first_read > 0 {from_utf8(&first_bytes).unwrap().to_owned() + &(s[..])} else {s};
first_read = 0;
let bytes = s.from_hex().map_err(|_| "Invalid hex in file/stream.")?;
do_import(bytes)?;
}
}
}
client.flush_queue();
// save user defaults
user_defaults.pruning = algorithm;
user_defaults.tracing = tracing;
user_defaults.fat_db = fat_db;
user_defaults.save(&user_defaults_path)?;
let report = client.report();
let ms = timer.elapsed().as_milliseconds();
info!("Import completed in {} seconds, {} blocks, {} blk/s, {} transactions, {} tx/s, {} Mgas, {} Mgas/s",
ms / 1000,
report.blocks_imported,
(report.blocks_imported * 1000) as u64 / ms,
report.transactions_applied,
(report.transactions_applied * 1000) as u64 / ms,
report.gas_processed / From::from(1_000_000),
(report.gas_processed / From::from(ms * 1000)).low_u64(),
);
Ok(())
}
fn start_client(
dirs: Directories,
spec: SpecType,
pruning: Pruning,
pruning_history: u64,
pruning_memory: usize,
tracing: Switch,
fat_db: Switch,
compaction: DatabaseCompactionProfile,
wal: bool,
cache_config: CacheConfig,
require_fat_db: bool,
) -> Result<ClientService, String> {
// load spec file
let spec = spec.spec()?;
// load genesis hash
let genesis_hash = spec.genesis_header().hash();
// database paths
let db_dirs = dirs.database(genesis_hash, None, spec.data_dir.clone());
// user defaults path
let user_defaults_path = db_dirs.user_defaults_path();
// load user defaults
let user_defaults = UserDefaults::load(&user_defaults_path)?;
fdlimit::raise_fd_limit();
// select pruning algorithm
let algorithm = pruning.to_algorithm(&user_defaults);
// check if tracing is on
let tracing = tracing_switch_to_bool(tracing, &user_defaults)?;
// check if fatdb is on
let fat_db = fatdb_switch_to_bool(fat_db, &user_defaults, algorithm)?;
if!fat_db && require_fat_db {
return Err("This command requires Parity to be synced with --fat-db on.".to_owned());
}
// prepare client and snapshot paths.
let client_path = db_dirs.client_path(algorithm);
let snapshot_path = db_dirs.snapshot_path();
// execute upgrades
execute_upgrades(&dirs.base, &db_dirs, algorithm, compaction.compaction_profile(db_dirs.db_root_path().as_path()))?;
// create dirs used by parity
dirs.create_dirs(false, false, false)?;
// prepare client config
let client_config = to_client_config(
&cache_config,
spec.name.to_lowercase(),
Mode::Active,
tracing,
fat_db,
compaction,
wal,
VMType::default(),
"".into(),
algorithm,
pruning_history,
pruning_memory,
true,
);
let service = ClientService::start(
client_config,
&spec,
&client_path,
&snapshot_path,
&dirs.ipc_path(),
Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))?;
drop(spec);
Ok(service)
}
fn execute_export(cmd: ExportBlockchain) -> Result<(), String> {
let service = start_client(
cmd.dirs,
cmd.spec,
cmd.pruning,
cmd.pruning_history,
cmd.pruning_memory,
cmd.tracing,
cmd.fat_db,
cmd.compaction,
cmd.wal,
cmd.cache_config,
false,
)?;
let format = cmd.format.unwrap_or_default();
let client = service.client();
let mut out: Box<io::Write> = match cmd.file_path {
Some(f) => Box::new(fs::File::create(&f).map_err(|_| format!("Cannot write to file given: {}", f))?),
None => Box::new(io::stdout()),
};
let from = client.block_number(cmd.from_block).ok_or("From block could not be found")?;
let to = client.block_number(cmd.to_block).ok_or("To block could not be found")?;
for i in from..(to + 1) {
if i % 10000 == 0 {
info!("#{}", i);
}
let b = client.block(BlockId::Number(i)).ok_or("Error exporting incomplete chain")?.into_inner();
match format {
DataFormat::Binary => { out.write(&b).expect("Couldn't write to stream."); }
DataFormat::Hex => { out.write_fmt(format_args!("{}", b.pretty())).expect("Couldn't write to stream."); }
}
}
info!("Export completed.");
Ok(())
}
fn execute_export_state(cmd: ExportState) -> Result<(), String> {
let service = start_client(
cmd.dirs,
cmd.spec,
cmd.pruning,
cmd.pruning_history,
cmd.pruning_memory,
cmd.tracing,
cmd.fat_db,
cmd.compaction,
cmd.wal,
cmd.cache_config,
true
)?;
let client = service.client();
let mut out: Box<io::Write> = match cmd.file_path {
Some(f) => Box::new(fs::File::create(&f).map_err(|_| format!("Cannot write to file given: {}", f))?),
None => Box::new(io::stdout()),
};
let mut last: Option<Address> = None;
let at = cmd.at;
let mut i = 0usize;
out.write_fmt(format_args!("{{ \"state\": {{", )).expect("Couldn't write to stream.");
loop {
let accounts = client.list_accounts(at, last.as_ref(), 1000).ok_or("Specified block not found")?;
if accounts.is_empty() {
break;
}
for account in accounts.into_iter() {
let balance = client.balance(&account, at).unwrap_or_else(U256::zero);
if cmd.min_balance.map_or(false, |m| balance < m) || cmd.max_balance.map_or(false, |m| balance > m) {
last = Some(account);
continue; //filtered out
}
if i!= 0
|
out.write_fmt(format_args!("\n\"0x{}\": {{\"balance\": \"{:x}\", \"nonce\": \"{:x}\"", account.hex(), balance, client.nonce(&account, at).unwrap_or_else(U256::zero))).expect("Write error");
let code = client.code(&account, at).unwrap_or(None).unwrap_or_else(Vec::new);
if!code.is_empty() {
out.write_fmt(format_args!(", \"code_hash\": \"0x{}\"", code.sha3().hex())).expect("Write error");
if cmd.code {
out.write_fmt(format_args!(", \"code\": \"{}\"", code.to_hex())).expect("Write error");
}
}
let storage_root = client.storage_root(&account, at).unwrap_or(::util::SHA3_NULL_RLP);
if storage_root!= ::util::SHA3_NULL_RLP {
out.write_fmt(format_args!(", \"storage_root\": \"0x{}\"", storage_root.hex())).expect("Write error");
if cmd.storage {
out.write_fmt(format_args!(", \"storage\": {{")).expect("Write error");
let mut last_storage: Option<H256> = None;
loop {
let keys = client.list_storage(at, &account, last_storage.as_ref(), 1000).ok_or("Specified block not found")?;
if keys.is_empty() {
break;
}
for key in keys.into_iter() {
if last_storage.is_some() {
out.write(b",").expect("Write error");
}
out.write_fmt(format_args!("\n\t\"0x{}\": \"0x{}\"", key.hex(), client.storage_at(&account, &key, at).unwrap_or_else(Default::default).hex())).expect("Write error");
last_storage = Some(key);
}
}
out.write(b"\n}").expect("Write error");
}
}
out.write(b"}").expect("Write error");
i += 1;
if i % 10000 == 0 {
info!("Account #{}", i);
}
last = Some(account);
}
}
out.write_fmt(format_args!("\n}}}}")).expect("Write error");
info!("Export completed.");
Ok(())
}
pub fn kill_db(cmd: KillBlockchain) -> Result<(), String> {
let spec = cmd.spec.spec()?;
let genesis_hash = spec.genesis_header().hash();
let db_dirs = cmd.dirs.database(genesis_hash, None, spec.data_dir);
let user_defaults_path = db_dirs.user_defaults_path();
let user_defaults = UserDefaults::load(&user_defaults_path)?;
let algorithm = cmd.pruning.to_algorithm(&user_defaults);
let dir = db_dirs.db_path(algorithm);
fs::remove_dir_all(&dir).map_err(|e| format!("Error removing database: {:?}", e))?;
info!("Database deleted.");
Ok(())
}
#[cfg(test)]
mod test {
use super::DataFormat;
#[test]
fn test_data_format_parsing() {
assert_eq!(DataFormat::Binary, "binary".parse().unwrap());
assert_eq!(DataFormat::Binary, "bin".parse().unwrap());
assert_eq!(DataFormat::Hex, "hex".parse().unwrap());
}
}
|
{
out.write(b",").expect("Write error");
}
|
conditional_block
|
blockchain.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::str::{FromStr, from_utf8};
use std::{io, fs};
use std::io::{BufReader, BufRead};
use std::time::{Instant, Duration};
use std::thread::sleep;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use util::{ToPretty, U256, H256, Address, Hashable};
use rlp::PayloadInfo;
use ethcore::service::ClientService;
use ethcore::client::{Mode, DatabaseCompactionProfile, VMType, BlockImportError, BlockChainClient, BlockId};
use ethcore::error::ImportError;
use ethcore::miner::Miner;
use ethcore::verification::queue::VerifierSettings;
use cache::CacheConfig;
use informant::{Informant, MillisecondDuration};
use params::{SpecType, Pruning, Switch, tracing_switch_to_bool, fatdb_switch_to_bool};
use helpers::{to_client_config, execute_upgrades};
use dir::Directories;
use user_defaults::UserDefaults;
use fdlimit;
#[derive(Debug, PartialEq)]
pub enum DataFormat {
Hex,
Binary,
}
impl Default for DataFormat {
fn default() -> Self {
DataFormat::Binary
}
}
impl FromStr for DataFormat {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"binary" | "bin" => Ok(DataFormat::Binary),
"hex" => Ok(DataFormat::Hex),
x => Err(format!("Invalid format: {}", x))
}
}
}
#[derive(Debug, PartialEq)]
pub enum BlockchainCmd {
Kill(KillBlockchain),
Import(ImportBlockchain),
Export(ExportBlockchain),
ExportState(ExportState),
}
#[derive(Debug, PartialEq)]
pub struct KillBlockchain {
pub spec: SpecType,
pub dirs: Directories,
pub pruning: Pruning,
}
#[derive(Debug, PartialEq)]
pub struct ImportBlockchain {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub tracing: Switch,
pub fat_db: Switch,
pub vm_type: VMType,
pub check_seal: bool,
pub with_color: bool,
pub verifier_settings: VerifierSettings,
}
#[derive(Debug, PartialEq)]
pub struct ExportBlockchain {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub fat_db: Switch,
pub tracing: Switch,
pub from_block: BlockId,
pub to_block: BlockId,
pub check_seal: bool,
}
#[derive(Debug, PartialEq)]
pub struct ExportState {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub fat_db: Switch,
pub tracing: Switch,
pub at: BlockId,
pub storage: bool,
pub code: bool,
pub min_balance: Option<U256>,
pub max_balance: Option<U256>,
}
pub fn execute(cmd: BlockchainCmd) -> Result<(), String> {
match cmd {
BlockchainCmd::Kill(kill_cmd) => kill_db(kill_cmd),
BlockchainCmd::Import(import_cmd) => execute_import(import_cmd),
BlockchainCmd::Export(export_cmd) => execute_export(export_cmd),
BlockchainCmd::ExportState(export_cmd) => execute_export_state(export_cmd),
}
}
fn execute_import(cmd: ImportBlockchain) -> Result<(), String> {
let timer = Instant::now();
// load spec file
let spec = cmd.spec.spec()?;
// load genesis hash
let genesis_hash = spec.genesis_header().hash();
// database paths
let db_dirs = cmd.dirs.database(genesis_hash, None, spec.data_dir.clone());
// user defaults path
let user_defaults_path = db_dirs.user_defaults_path();
// load user defaults
let mut user_defaults = UserDefaults::load(&user_defaults_path)?;
fdlimit::raise_fd_limit();
// select pruning algorithm
let algorithm = cmd.pruning.to_algorithm(&user_defaults);
// check if tracing is on
let tracing = tracing_switch_to_bool(cmd.tracing, &user_defaults)?;
// check if fatdb is on
let fat_db = fatdb_switch_to_bool(cmd.fat_db, &user_defaults, algorithm)?;
// prepare client and snapshot paths.
let client_path = db_dirs.client_path(algorithm);
let snapshot_path = db_dirs.snapshot_path();
// execute upgrades
execute_upgrades(&cmd.dirs.base, &db_dirs, algorithm, cmd.compaction.compaction_profile(db_dirs.db_root_path().as_path()))?;
// create dirs used by parity
cmd.dirs.create_dirs(false, false, false)?;
// prepare client config
let mut client_config = to_client_config(
&cmd.cache_config,
spec.name.to_lowercase(),
Mode::Active,
tracing,
fat_db,
cmd.compaction,
cmd.wal,
cmd.vm_type,
"".into(),
algorithm,
cmd.pruning_history,
cmd.pruning_memory,
cmd.check_seal
);
client_config.queue.verifier_settings = cmd.verifier_settings;
// build client
let service = ClientService::start(
client_config,
&spec,
&client_path,
&snapshot_path,
&cmd.dirs.ipc_path(),
Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))?;
// free up the spec in memory.
drop(spec);
let client = service.client();
let mut instream: Box<io::Read> = match cmd.file_path {
Some(f) => Box::new(fs::File::open(&f).map_err(|_| format!("Cannot open given file: {}", f))?),
None => Box::new(io::stdin()),
};
const READAHEAD_BYTES: usize = 8;
let mut first_bytes: Vec<u8> = vec![0; READAHEAD_BYTES];
let mut first_read = 0;
let format = match cmd.format {
Some(format) => format,
None => {
first_read = instream.read(&mut first_bytes).map_err(|_| "Error reading from the file/stream.")?;
match first_bytes[0] {
0xf9 => DataFormat::Binary,
_ => DataFormat::Hex,
}
}
};
let informant = Arc::new(Informant::new(client.clone(), None, None, None, None, cmd.with_color));
service.register_io_handler(informant).map_err(|_| "Unable to register informant handler".to_owned())?;
let do_import = |bytes| {
while client.queue_info().is_full() { sleep(Duration::from_secs(1)); }
match client.import_block(bytes) {
Err(BlockImportError::Import(ImportError::AlreadyInChain)) => {
trace!("Skipping block already in chain.");
}
Err(e) => {
return Err(format!("Cannot import block: {:?}", e));
},
Ok(_) => {},
}
Ok(())
};
match format {
DataFormat::Binary => {
loop {
let mut bytes = if first_read > 0 {first_bytes.clone()} else {vec![0; READAHEAD_BYTES]};
let n = if first_read > 0 {
first_read
} else {
instream.read(&mut bytes).map_err(|_| "Error reading from the file/stream.")?
};
if n == 0 { break; }
first_read = 0;
let s = PayloadInfo::from(&bytes).map_err(|e| format!("Invalid RLP in the file/stream: {:?}", e))?.total();
bytes.resize(s, 0);
instream.read_exact(&mut bytes[n..]).map_err(|_| "Error reading from the file/stream.")?;
do_import(bytes)?;
}
}
DataFormat::Hex => {
for line in BufReader::new(instream).lines() {
let s = line.map_err(|_| "Error reading from the file/stream.")?;
let s = if first_read > 0 {from_utf8(&first_bytes).unwrap().to_owned() + &(s[..])} else {s};
first_read = 0;
let bytes = s.from_hex().map_err(|_| "Invalid hex in file/stream.")?;
do_import(bytes)?;
}
}
}
client.flush_queue();
// save user defaults
user_defaults.pruning = algorithm;
user_defaults.tracing = tracing;
user_defaults.fat_db = fat_db;
user_defaults.save(&user_defaults_path)?;
let report = client.report();
let ms = timer.elapsed().as_milliseconds();
info!("Import completed in {} seconds, {} blocks, {} blk/s, {} transactions, {} tx/s, {} Mgas, {} Mgas/s",
ms / 1000,
report.blocks_imported,
(report.blocks_imported * 1000) as u64 / ms,
report.transactions_applied,
(report.transactions_applied * 1000) as u64 / ms,
report.gas_processed / From::from(1_000_000),
(report.gas_processed / From::from(ms * 1000)).low_u64(),
);
Ok(())
}
fn start_client(
dirs: Directories,
spec: SpecType,
pruning: Pruning,
pruning_history: u64,
pruning_memory: usize,
tracing: Switch,
fat_db: Switch,
compaction: DatabaseCompactionProfile,
wal: bool,
cache_config: CacheConfig,
require_fat_db: bool,
) -> Result<ClientService, String> {
// load spec file
let spec = spec.spec()?;
// load genesis hash
let genesis_hash = spec.genesis_header().hash();
// database paths
let db_dirs = dirs.database(genesis_hash, None, spec.data_dir.clone());
// user defaults path
let user_defaults_path = db_dirs.user_defaults_path();
// load user defaults
let user_defaults = UserDefaults::load(&user_defaults_path)?;
|
// select pruning algorithm
let algorithm = pruning.to_algorithm(&user_defaults);
// check if tracing is on
let tracing = tracing_switch_to_bool(tracing, &user_defaults)?;
// check if fatdb is on
let fat_db = fatdb_switch_to_bool(fat_db, &user_defaults, algorithm)?;
if!fat_db && require_fat_db {
return Err("This command requires Parity to be synced with --fat-db on.".to_owned());
}
// prepare client and snapshot paths.
let client_path = db_dirs.client_path(algorithm);
let snapshot_path = db_dirs.snapshot_path();
// execute upgrades
execute_upgrades(&dirs.base, &db_dirs, algorithm, compaction.compaction_profile(db_dirs.db_root_path().as_path()))?;
// create dirs used by parity
dirs.create_dirs(false, false, false)?;
// prepare client config
let client_config = to_client_config(
&cache_config,
spec.name.to_lowercase(),
Mode::Active,
tracing,
fat_db,
compaction,
wal,
VMType::default(),
"".into(),
algorithm,
pruning_history,
pruning_memory,
true,
);
let service = ClientService::start(
client_config,
&spec,
&client_path,
&snapshot_path,
&dirs.ipc_path(),
Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))?;
drop(spec);
Ok(service)
}
fn execute_export(cmd: ExportBlockchain) -> Result<(), String> {
let service = start_client(
cmd.dirs,
cmd.spec,
cmd.pruning,
cmd.pruning_history,
cmd.pruning_memory,
cmd.tracing,
cmd.fat_db,
cmd.compaction,
cmd.wal,
cmd.cache_config,
false,
)?;
let format = cmd.format.unwrap_or_default();
let client = service.client();
let mut out: Box<io::Write> = match cmd.file_path {
Some(f) => Box::new(fs::File::create(&f).map_err(|_| format!("Cannot write to file given: {}", f))?),
None => Box::new(io::stdout()),
};
let from = client.block_number(cmd.from_block).ok_or("From block could not be found")?;
let to = client.block_number(cmd.to_block).ok_or("To block could not be found")?;
for i in from..(to + 1) {
if i % 10000 == 0 {
info!("#{}", i);
}
let b = client.block(BlockId::Number(i)).ok_or("Error exporting incomplete chain")?.into_inner();
match format {
DataFormat::Binary => { out.write(&b).expect("Couldn't write to stream."); }
DataFormat::Hex => { out.write_fmt(format_args!("{}", b.pretty())).expect("Couldn't write to stream."); }
}
}
info!("Export completed.");
Ok(())
}
fn execute_export_state(cmd: ExportState) -> Result<(), String> {
let service = start_client(
cmd.dirs,
cmd.spec,
cmd.pruning,
cmd.pruning_history,
cmd.pruning_memory,
cmd.tracing,
cmd.fat_db,
cmd.compaction,
cmd.wal,
cmd.cache_config,
true
)?;
let client = service.client();
let mut out: Box<io::Write> = match cmd.file_path {
Some(f) => Box::new(fs::File::create(&f).map_err(|_| format!("Cannot write to file given: {}", f))?),
None => Box::new(io::stdout()),
};
let mut last: Option<Address> = None;
let at = cmd.at;
let mut i = 0usize;
out.write_fmt(format_args!("{{ \"state\": {{", )).expect("Couldn't write to stream.");
loop {
let accounts = client.list_accounts(at, last.as_ref(), 1000).ok_or("Specified block not found")?;
if accounts.is_empty() {
break;
}
for account in accounts.into_iter() {
let balance = client.balance(&account, at).unwrap_or_else(U256::zero);
if cmd.min_balance.map_or(false, |m| balance < m) || cmd.max_balance.map_or(false, |m| balance > m) {
last = Some(account);
continue; //filtered out
}
if i!= 0 {
out.write(b",").expect("Write error");
}
out.write_fmt(format_args!("\n\"0x{}\": {{\"balance\": \"{:x}\", \"nonce\": \"{:x}\"", account.hex(), balance, client.nonce(&account, at).unwrap_or_else(U256::zero))).expect("Write error");
let code = client.code(&account, at).unwrap_or(None).unwrap_or_else(Vec::new);
if!code.is_empty() {
out.write_fmt(format_args!(", \"code_hash\": \"0x{}\"", code.sha3().hex())).expect("Write error");
if cmd.code {
out.write_fmt(format_args!(", \"code\": \"{}\"", code.to_hex())).expect("Write error");
}
}
let storage_root = client.storage_root(&account, at).unwrap_or(::util::SHA3_NULL_RLP);
if storage_root!= ::util::SHA3_NULL_RLP {
out.write_fmt(format_args!(", \"storage_root\": \"0x{}\"", storage_root.hex())).expect("Write error");
if cmd.storage {
out.write_fmt(format_args!(", \"storage\": {{")).expect("Write error");
let mut last_storage: Option<H256> = None;
loop {
let keys = client.list_storage(at, &account, last_storage.as_ref(), 1000).ok_or("Specified block not found")?;
if keys.is_empty() {
break;
}
for key in keys.into_iter() {
if last_storage.is_some() {
out.write(b",").expect("Write error");
}
out.write_fmt(format_args!("\n\t\"0x{}\": \"0x{}\"", key.hex(), client.storage_at(&account, &key, at).unwrap_or_else(Default::default).hex())).expect("Write error");
last_storage = Some(key);
}
}
out.write(b"\n}").expect("Write error");
}
}
out.write(b"}").expect("Write error");
i += 1;
if i % 10000 == 0 {
info!("Account #{}", i);
}
last = Some(account);
}
}
out.write_fmt(format_args!("\n}}}}")).expect("Write error");
info!("Export completed.");
Ok(())
}
pub fn kill_db(cmd: KillBlockchain) -> Result<(), String> {
let spec = cmd.spec.spec()?;
let genesis_hash = spec.genesis_header().hash();
let db_dirs = cmd.dirs.database(genesis_hash, None, spec.data_dir);
let user_defaults_path = db_dirs.user_defaults_path();
let user_defaults = UserDefaults::load(&user_defaults_path)?;
let algorithm = cmd.pruning.to_algorithm(&user_defaults);
let dir = db_dirs.db_path(algorithm);
fs::remove_dir_all(&dir).map_err(|e| format!("Error removing database: {:?}", e))?;
info!("Database deleted.");
Ok(())
}
#[cfg(test)]
mod test {
use super::DataFormat;
#[test]
fn test_data_format_parsing() {
assert_eq!(DataFormat::Binary, "binary".parse().unwrap());
assert_eq!(DataFormat::Binary, "bin".parse().unwrap());
assert_eq!(DataFormat::Hex, "hex".parse().unwrap());
}
}
|
fdlimit::raise_fd_limit();
|
random_line_split
|
blockchain.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::str::{FromStr, from_utf8};
use std::{io, fs};
use std::io::{BufReader, BufRead};
use std::time::{Instant, Duration};
use std::thread::sleep;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use util::{ToPretty, U256, H256, Address, Hashable};
use rlp::PayloadInfo;
use ethcore::service::ClientService;
use ethcore::client::{Mode, DatabaseCompactionProfile, VMType, BlockImportError, BlockChainClient, BlockId};
use ethcore::error::ImportError;
use ethcore::miner::Miner;
use ethcore::verification::queue::VerifierSettings;
use cache::CacheConfig;
use informant::{Informant, MillisecondDuration};
use params::{SpecType, Pruning, Switch, tracing_switch_to_bool, fatdb_switch_to_bool};
use helpers::{to_client_config, execute_upgrades};
use dir::Directories;
use user_defaults::UserDefaults;
use fdlimit;
#[derive(Debug, PartialEq)]
pub enum
|
{
Hex,
Binary,
}
impl Default for DataFormat {
fn default() -> Self {
DataFormat::Binary
}
}
impl FromStr for DataFormat {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"binary" | "bin" => Ok(DataFormat::Binary),
"hex" => Ok(DataFormat::Hex),
x => Err(format!("Invalid format: {}", x))
}
}
}
#[derive(Debug, PartialEq)]
pub enum BlockchainCmd {
Kill(KillBlockchain),
Import(ImportBlockchain),
Export(ExportBlockchain),
ExportState(ExportState),
}
#[derive(Debug, PartialEq)]
pub struct KillBlockchain {
pub spec: SpecType,
pub dirs: Directories,
pub pruning: Pruning,
}
#[derive(Debug, PartialEq)]
pub struct ImportBlockchain {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub tracing: Switch,
pub fat_db: Switch,
pub vm_type: VMType,
pub check_seal: bool,
pub with_color: bool,
pub verifier_settings: VerifierSettings,
}
#[derive(Debug, PartialEq)]
pub struct ExportBlockchain {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub fat_db: Switch,
pub tracing: Switch,
pub from_block: BlockId,
pub to_block: BlockId,
pub check_seal: bool,
}
#[derive(Debug, PartialEq)]
pub struct ExportState {
pub spec: SpecType,
pub cache_config: CacheConfig,
pub dirs: Directories,
pub file_path: Option<String>,
pub format: Option<DataFormat>,
pub pruning: Pruning,
pub pruning_history: u64,
pub pruning_memory: usize,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub fat_db: Switch,
pub tracing: Switch,
pub at: BlockId,
pub storage: bool,
pub code: bool,
pub min_balance: Option<U256>,
pub max_balance: Option<U256>,
}
pub fn execute(cmd: BlockchainCmd) -> Result<(), String> {
match cmd {
BlockchainCmd::Kill(kill_cmd) => kill_db(kill_cmd),
BlockchainCmd::Import(import_cmd) => execute_import(import_cmd),
BlockchainCmd::Export(export_cmd) => execute_export(export_cmd),
BlockchainCmd::ExportState(export_cmd) => execute_export_state(export_cmd),
}
}
fn execute_import(cmd: ImportBlockchain) -> Result<(), String> {
let timer = Instant::now();
// load spec file
let spec = cmd.spec.spec()?;
// load genesis hash
let genesis_hash = spec.genesis_header().hash();
// database paths
let db_dirs = cmd.dirs.database(genesis_hash, None, spec.data_dir.clone());
// user defaults path
let user_defaults_path = db_dirs.user_defaults_path();
// load user defaults
let mut user_defaults = UserDefaults::load(&user_defaults_path)?;
fdlimit::raise_fd_limit();
// select pruning algorithm
let algorithm = cmd.pruning.to_algorithm(&user_defaults);
// check if tracing is on
let tracing = tracing_switch_to_bool(cmd.tracing, &user_defaults)?;
// check if fatdb is on
let fat_db = fatdb_switch_to_bool(cmd.fat_db, &user_defaults, algorithm)?;
// prepare client and snapshot paths.
let client_path = db_dirs.client_path(algorithm);
let snapshot_path = db_dirs.snapshot_path();
// execute upgrades
execute_upgrades(&cmd.dirs.base, &db_dirs, algorithm, cmd.compaction.compaction_profile(db_dirs.db_root_path().as_path()))?;
// create dirs used by parity
cmd.dirs.create_dirs(false, false, false)?;
// prepare client config
let mut client_config = to_client_config(
&cmd.cache_config,
spec.name.to_lowercase(),
Mode::Active,
tracing,
fat_db,
cmd.compaction,
cmd.wal,
cmd.vm_type,
"".into(),
algorithm,
cmd.pruning_history,
cmd.pruning_memory,
cmd.check_seal
);
client_config.queue.verifier_settings = cmd.verifier_settings;
// build client
let service = ClientService::start(
client_config,
&spec,
&client_path,
&snapshot_path,
&cmd.dirs.ipc_path(),
Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))?;
// free up the spec in memory.
drop(spec);
let client = service.client();
let mut instream: Box<io::Read> = match cmd.file_path {
Some(f) => Box::new(fs::File::open(&f).map_err(|_| format!("Cannot open given file: {}", f))?),
None => Box::new(io::stdin()),
};
const READAHEAD_BYTES: usize = 8;
let mut first_bytes: Vec<u8> = vec![0; READAHEAD_BYTES];
let mut first_read = 0;
let format = match cmd.format {
Some(format) => format,
None => {
first_read = instream.read(&mut first_bytes).map_err(|_| "Error reading from the file/stream.")?;
match first_bytes[0] {
0xf9 => DataFormat::Binary,
_ => DataFormat::Hex,
}
}
};
let informant = Arc::new(Informant::new(client.clone(), None, None, None, None, cmd.with_color));
service.register_io_handler(informant).map_err(|_| "Unable to register informant handler".to_owned())?;
let do_import = |bytes| {
while client.queue_info().is_full() { sleep(Duration::from_secs(1)); }
match client.import_block(bytes) {
Err(BlockImportError::Import(ImportError::AlreadyInChain)) => {
trace!("Skipping block already in chain.");
}
Err(e) => {
return Err(format!("Cannot import block: {:?}", e));
},
Ok(_) => {},
}
Ok(())
};
match format {
DataFormat::Binary => {
loop {
let mut bytes = if first_read > 0 {first_bytes.clone()} else {vec![0; READAHEAD_BYTES]};
let n = if first_read > 0 {
first_read
} else {
instream.read(&mut bytes).map_err(|_| "Error reading from the file/stream.")?
};
if n == 0 { break; }
first_read = 0;
let s = PayloadInfo::from(&bytes).map_err(|e| format!("Invalid RLP in the file/stream: {:?}", e))?.total();
bytes.resize(s, 0);
instream.read_exact(&mut bytes[n..]).map_err(|_| "Error reading from the file/stream.")?;
do_import(bytes)?;
}
}
DataFormat::Hex => {
for line in BufReader::new(instream).lines() {
let s = line.map_err(|_| "Error reading from the file/stream.")?;
let s = if first_read > 0 {from_utf8(&first_bytes).unwrap().to_owned() + &(s[..])} else {s};
first_read = 0;
let bytes = s.from_hex().map_err(|_| "Invalid hex in file/stream.")?;
do_import(bytes)?;
}
}
}
client.flush_queue();
// save user defaults
user_defaults.pruning = algorithm;
user_defaults.tracing = tracing;
user_defaults.fat_db = fat_db;
user_defaults.save(&user_defaults_path)?;
let report = client.report();
let ms = timer.elapsed().as_milliseconds();
info!("Import completed in {} seconds, {} blocks, {} blk/s, {} transactions, {} tx/s, {} Mgas, {} Mgas/s",
ms / 1000,
report.blocks_imported,
(report.blocks_imported * 1000) as u64 / ms,
report.transactions_applied,
(report.transactions_applied * 1000) as u64 / ms,
report.gas_processed / From::from(1_000_000),
(report.gas_processed / From::from(ms * 1000)).low_u64(),
);
Ok(())
}
fn start_client(
dirs: Directories,
spec: SpecType,
pruning: Pruning,
pruning_history: u64,
pruning_memory: usize,
tracing: Switch,
fat_db: Switch,
compaction: DatabaseCompactionProfile,
wal: bool,
cache_config: CacheConfig,
require_fat_db: bool,
) -> Result<ClientService, String> {
// load spec file
let spec = spec.spec()?;
// load genesis hash
let genesis_hash = spec.genesis_header().hash();
// database paths
let db_dirs = dirs.database(genesis_hash, None, spec.data_dir.clone());
// user defaults path
let user_defaults_path = db_dirs.user_defaults_path();
// load user defaults
let user_defaults = UserDefaults::load(&user_defaults_path)?;
fdlimit::raise_fd_limit();
// select pruning algorithm
let algorithm = pruning.to_algorithm(&user_defaults);
// check if tracing is on
let tracing = tracing_switch_to_bool(tracing, &user_defaults)?;
// check if fatdb is on
let fat_db = fatdb_switch_to_bool(fat_db, &user_defaults, algorithm)?;
if!fat_db && require_fat_db {
return Err("This command requires Parity to be synced with --fat-db on.".to_owned());
}
// prepare client and snapshot paths.
let client_path = db_dirs.client_path(algorithm);
let snapshot_path = db_dirs.snapshot_path();
// execute upgrades
execute_upgrades(&dirs.base, &db_dirs, algorithm, compaction.compaction_profile(db_dirs.db_root_path().as_path()))?;
// create dirs used by parity
dirs.create_dirs(false, false, false)?;
// prepare client config
let client_config = to_client_config(
&cache_config,
spec.name.to_lowercase(),
Mode::Active,
tracing,
fat_db,
compaction,
wal,
VMType::default(),
"".into(),
algorithm,
pruning_history,
pruning_memory,
true,
);
let service = ClientService::start(
client_config,
&spec,
&client_path,
&snapshot_path,
&dirs.ipc_path(),
Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))?;
drop(spec);
Ok(service)
}
fn execute_export(cmd: ExportBlockchain) -> Result<(), String> {
let service = start_client(
cmd.dirs,
cmd.spec,
cmd.pruning,
cmd.pruning_history,
cmd.pruning_memory,
cmd.tracing,
cmd.fat_db,
cmd.compaction,
cmd.wal,
cmd.cache_config,
false,
)?;
let format = cmd.format.unwrap_or_default();
let client = service.client();
let mut out: Box<io::Write> = match cmd.file_path {
Some(f) => Box::new(fs::File::create(&f).map_err(|_| format!("Cannot write to file given: {}", f))?),
None => Box::new(io::stdout()),
};
let from = client.block_number(cmd.from_block).ok_or("From block could not be found")?;
let to = client.block_number(cmd.to_block).ok_or("To block could not be found")?;
for i in from..(to + 1) {
if i % 10000 == 0 {
info!("#{}", i);
}
let b = client.block(BlockId::Number(i)).ok_or("Error exporting incomplete chain")?.into_inner();
match format {
DataFormat::Binary => { out.write(&b).expect("Couldn't write to stream."); }
DataFormat::Hex => { out.write_fmt(format_args!("{}", b.pretty())).expect("Couldn't write to stream."); }
}
}
info!("Export completed.");
Ok(())
}
fn execute_export_state(cmd: ExportState) -> Result<(), String> {
let service = start_client(
cmd.dirs,
cmd.spec,
cmd.pruning,
cmd.pruning_history,
cmd.pruning_memory,
cmd.tracing,
cmd.fat_db,
cmd.compaction,
cmd.wal,
cmd.cache_config,
true
)?;
let client = service.client();
let mut out: Box<io::Write> = match cmd.file_path {
Some(f) => Box::new(fs::File::create(&f).map_err(|_| format!("Cannot write to file given: {}", f))?),
None => Box::new(io::stdout()),
};
let mut last: Option<Address> = None;
let at = cmd.at;
let mut i = 0usize;
out.write_fmt(format_args!("{{ \"state\": {{", )).expect("Couldn't write to stream.");
loop {
let accounts = client.list_accounts(at, last.as_ref(), 1000).ok_or("Specified block not found")?;
if accounts.is_empty() {
break;
}
for account in accounts.into_iter() {
let balance = client.balance(&account, at).unwrap_or_else(U256::zero);
if cmd.min_balance.map_or(false, |m| balance < m) || cmd.max_balance.map_or(false, |m| balance > m) {
last = Some(account);
continue; //filtered out
}
if i!= 0 {
out.write(b",").expect("Write error");
}
out.write_fmt(format_args!("\n\"0x{}\": {{\"balance\": \"{:x}\", \"nonce\": \"{:x}\"", account.hex(), balance, client.nonce(&account, at).unwrap_or_else(U256::zero))).expect("Write error");
let code = client.code(&account, at).unwrap_or(None).unwrap_or_else(Vec::new);
if!code.is_empty() {
out.write_fmt(format_args!(", \"code_hash\": \"0x{}\"", code.sha3().hex())).expect("Write error");
if cmd.code {
out.write_fmt(format_args!(", \"code\": \"{}\"", code.to_hex())).expect("Write error");
}
}
let storage_root = client.storage_root(&account, at).unwrap_or(::util::SHA3_NULL_RLP);
if storage_root!= ::util::SHA3_NULL_RLP {
out.write_fmt(format_args!(", \"storage_root\": \"0x{}\"", storage_root.hex())).expect("Write error");
if cmd.storage {
out.write_fmt(format_args!(", \"storage\": {{")).expect("Write error");
let mut last_storage: Option<H256> = None;
loop {
let keys = client.list_storage(at, &account, last_storage.as_ref(), 1000).ok_or("Specified block not found")?;
if keys.is_empty() {
break;
}
for key in keys.into_iter() {
if last_storage.is_some() {
out.write(b",").expect("Write error");
}
out.write_fmt(format_args!("\n\t\"0x{}\": \"0x{}\"", key.hex(), client.storage_at(&account, &key, at).unwrap_or_else(Default::default).hex())).expect("Write error");
last_storage = Some(key);
}
}
out.write(b"\n}").expect("Write error");
}
}
out.write(b"}").expect("Write error");
i += 1;
if i % 10000 == 0 {
info!("Account #{}", i);
}
last = Some(account);
}
}
out.write_fmt(format_args!("\n}}}}")).expect("Write error");
info!("Export completed.");
Ok(())
}
pub fn kill_db(cmd: KillBlockchain) -> Result<(), String> {
let spec = cmd.spec.spec()?;
let genesis_hash = spec.genesis_header().hash();
let db_dirs = cmd.dirs.database(genesis_hash, None, spec.data_dir);
let user_defaults_path = db_dirs.user_defaults_path();
let user_defaults = UserDefaults::load(&user_defaults_path)?;
let algorithm = cmd.pruning.to_algorithm(&user_defaults);
let dir = db_dirs.db_path(algorithm);
fs::remove_dir_all(&dir).map_err(|e| format!("Error removing database: {:?}", e))?;
info!("Database deleted.");
Ok(())
}
#[cfg(test)]
mod test {
use super::DataFormat;
#[test]
fn test_data_format_parsing() {
assert_eq!(DataFormat::Binary, "binary".parse().unwrap());
assert_eq!(DataFormat::Binary, "bin".parse().unwrap());
assert_eq!(DataFormat::Hex, "hex".parse().unwrap());
}
}
|
DataFormat
|
identifier_name
|
vinsertf128.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vinsertf128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(XMM0)), operand4: Some(Literal8(17)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 77, 24, 192, 17], OperandSize::Dword)
}
fn vinsertf128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledIndexedDisplaced(EDX, EDI, Four, 353225365, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(81)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 93, 24, 156, 186, 149, 202, 13, 21, 81], OperandSize::Dword)
}
fn vinsertf128_3() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM3)), operand3: Some(Direct(XMM2)), operand4: Some(Literal8(36)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 101, 24, 194, 36], OperandSize::Qword)
}
fn
|
() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM7)), operand3: Some(IndirectDisplaced(RAX, 1337666297, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(119)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 69, 24, 152, 249, 42, 187, 79, 119], OperandSize::Qword)
}
|
vinsertf128_4
|
identifier_name
|
vinsertf128.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vinsertf128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(XMM0)), operand4: Some(Literal8(17)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 77, 24, 192, 17], OperandSize::Dword)
}
fn vinsertf128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledIndexedDisplaced(EDX, EDI, Four, 353225365, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(81)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 93, 24, 156, 186, 149, 202, 13, 21, 81], OperandSize::Dword)
}
fn vinsertf128_3() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM3)), operand3: Some(Direct(XMM2)), operand4: Some(Literal8(36)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 101, 24, 194, 36], OperandSize::Qword)
}
fn vinsertf128_4() {
|
}
|
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM7)), operand3: Some(IndirectDisplaced(RAX, 1337666297, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(119)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 69, 24, 152, 249, 42, 187, 79, 119], OperandSize::Qword)
|
random_line_split
|
vinsertf128.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vinsertf128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(XMM0)), operand4: Some(Literal8(17)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 77, 24, 192, 17], OperandSize::Dword)
}
fn vinsertf128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledIndexedDisplaced(EDX, EDI, Four, 353225365, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(81)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 93, 24, 156, 186, 149, 202, 13, 21, 81], OperandSize::Dword)
}
fn vinsertf128_3()
|
fn vinsertf128_4() {
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM7)), operand3: Some(IndirectDisplaced(RAX, 1337666297, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(119)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 69, 24, 152, 249, 42, 187, 79, 119], OperandSize::Qword)
}
|
{
run_test(&Instruction { mnemonic: Mnemonic::VINSERTF128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM3)), operand3: Some(Direct(XMM2)), operand4: Some(Literal8(36)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 101, 24, 194, 36], OperandSize::Qword)
}
|
identifier_body
|
cnvs.rs
|
CNV, given as Kass Raftery scores: \
N: none, B: barely, P: positive, S: strong, V: very strong \">"
.as_bytes(),
);
header.push_record(
"##FORMAT=<ID=LOCI_DP,Number=.,Type=Integer,Description=\"Depths of contained loci.\">"
.as_bytes(),
);
header.push_record(
"##FORMAT=<ID=LOCI_VAF,Number=.,Type=Integer,Description=\"VAFs of contained loci.\">"
.as_bytes(),
);
let mut contig_lens = HashMap::new();
// register sequences
for rec in bcf_reader.header().header_records() {
match rec {
bcf::header::HeaderRecord::Contig { values,.. } => {
let name = values.get("ID").unwrap();
let len = values.get("length").unwrap();
contig_lens.insert(name.clone().into_bytes(), len.parse()?);
header.push_record(format!("##contig=<ID={},length={}>", name, len).as_bytes());
}
_ => (),
}
}
self = self.contig_lens(contig_lens);
Ok(self.bcf_writer(if let Some(path) = out_path {
bcf::Writer::from_path(path, &header, false, bcf::Format::BCF)?
} else {
bcf::Writer::from_stdout(&header, false, bcf::Format::BCF)?
}))
}
}
impl Caller {
pub(crate) fn call(&mut self) -> Result<()> {
// obtain records
let calls = {
let mut record = self.bcf_reader.empty_record();
let mut last_call: Option<&Call> = None;
let mut curr_region = None;
let mut _calls = Vec::new();
loop {
if!self.bcf_reader.read(&mut record)? {
break;
}
if let Some(call) = Call::new(&mut record)? {
if call.depth_normal >= MIN_DEPTH {
_calls.push(call);
}
}
}
// add next and prev pos to calls
for i in 0.._calls.len() {
if i > 0 {
_calls[i].prev_start = Some(_calls[i - 1].start);
}
if i < _calls.len() - 1 {
_calls[i].next_start = Some(_calls[i + 1].start);
}
}
let mut calls = HashMap::new();
for call in _calls {
let region = if let Some(last_call) = last_call {
if call.rid == last_call.rid && (call.start - last_call.start) <= self.max_dist
{
curr_region.unwrap()
} else {
Region {
rid: call.rid,
start: call.start,
}
}
} else {
Region {
rid: call.rid,
start: call.start,
}
};
curr_region = Some(region);
calls.entry(region).or_insert_with(Vec::new).push(call);
last_call = Some(calls.get(®ion).unwrap().last().unwrap());
}
calls
};
// normalization
let mean_depth = |filter: &dyn Fn(&Call) -> u32| {
calls.values().flatten().map(filter).sum::<u32>() as f64 / calls.len() as f64
};
let mean_depth_tumor = mean_depth(&|call: &Call| call.depth_tumor);
let mean_depth_normal = mean_depth(&|call: &Call| call.depth_normal);
let depth_norm_factor = mean_depth_tumor / mean_depth_normal;
let min_bayes_factor = self.min_bayes_factor;
let purity = self.purity;
let cnv_calls: BTreeMap<_, _> = calls
.par_iter()
.map(|(region, calls)| {
let hmm = HMM::new(depth_norm_factor, min_bayes_factor, purity);
let (states, _prob) = hmm::viterbi(&hmm, calls);
(
region,
states
.iter()
.zip(calls.iter())
.group_by(|item| item.0)
.into_iter()
.filter_map(|(&state, group)| {
let cnv = hmm.states[*state];
if cnv.gain == 0 {
return None;
}
let group = group.into_iter().map(|item| item.1).collect_vec();
let first_call = group[0];
if group.len() > 1 {
let last_call = group[group.len() - 1];
// calculate posterior probability of no CNV
let prob_no_cnv = hmm.prob_no_cnv(&group);
let bayes_factors = hmm.bayes_factors(state, &group);
Some(CNVCall {
prev_pos: first_call.prev_start,
next_pos: last_call.next_start,
pos: first_call.start,
end: last_call.start + 1,
cnv: cnv,
prob_no_cnv,
calls: group,
bayes_factors: bayes_factors,
})
} else {
None
}
})
.collect_vec(),
)
})
.collect();
for (region, calls) in cnv_calls {
let contig = self.bcf_reader.header().rid2name(region.rid)?;
let rid = self.bcf_writer.header().name2rid(contig)?;
for call in calls {
let mut record = self.bcf_writer.empty_record();
call.write(
rid,
&mut record,
depth_norm_factor,
*self.contig_lens.get(contig).unwrap(),
)?;
self.bcf_writer.write(&record)?;
}
}
Ok(())
}
}
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub(crate) struct Region {
rid: u32,
start: u64,
}
pub(crate) struct CNVCall<'a> {
prev_pos: Option<u64>,
next_pos: Option<u64>,
pos: u64,
end: u64,
cnv: CNV,
prob_no_cnv: LogProb,
calls: Vec<&'a Call>,
bayes_factors: Vec<BayesFactor>,
}
impl<'a> CNVCall<'a> {
pub(crate) fn write(
&self,
rid: u32,
record: &mut bcf::Record,
depth_norm_factor: f64,
contig_len: u64,
) -> Result<()> {
record.set_rid(Some(rid));
record.set_pos(self.pos as i64);
record.set_alleles(&[b"N", b"<CNV>"])?;
record.push_info_integer(b"END", &[self.end as i32])?;
record.push_info_integer(b"SVLEN", &[self.len() as i32])?;
record.push_info_integer(b"CN", &[2 + self.cnv.gain])?;
record.push_info_float(b"VAF", &[*self.cnv.allele_freq as f32])?;
record.push_info_integer(b"LOCI", &[self.calls.len() as i32])?;
record.push_info_string(b"SVTYPE", &[b"CNV"])?;
record.push_info_flag(b"IMPRECISE")?;
record.push_info_integer(
b"CIPOS",
&[
if let Some(prev_pos) = self.prev_pos {
-((self.pos - prev_pos) as i32)
} else {
-(self.pos as i32)
},
0,
],
)?;
record.push_info_integer(
b"CIEND",
&[
0,
if let Some(next_pos) = self.next_pos {
(next_pos - self.end) as i32
} else {
(contig_len - self.end) as i32
},
],
)?;
let mut loci_dp = Vec::new();
loci_dp.extend(self.calls.iter().map(|call| call.depth_tumor as i32));
loci_dp.extend(
self.calls
.iter()
.map(|call| (call.depth_normal as f64 * depth_norm_factor).round() as i32),
);
record.push_format_integer(b"LOCI_DP", &loci_dp)?;
let mut loci_vaf = Vec::new();
loci_vaf.extend(self.calls.iter().map(|call| *call.allele_freq_tumor as f32));
loci_vaf.extend(
self.calls
.iter()
.map(|call| *call.allele_freq_normal as f32),
);
record.push_format_float(b"LOCI_VAF", &loci_vaf)?;
record.set_qual(*PHREDProb::from(self.prob_no_cnv) as f32);
let obs = join(
self.bayes_factors
.iter()
.map(|bf| utils::evidence_kass_raftery_to_letter(bf.evidence_kass_raftery())),
"",
);
record.push_info_string(b"OBS", &[obs.as_bytes()])?;
Ok(())
}
pub(crate) fn len(&self) -> u32 {
self.end - self.pos + 1
}
}
pub(crate) struct HMM {
states: Vec<CNV>,
state_by_gain: HashMap<i32, Vec<hmm::State>>,
depth_norm_factor: f64,
prob_keep_state: LogProb,
prob_change_state: LogProb,
}
impl HMM {
#[allow(clippy::float_cmp)]
fn new(depth_norm_factor: f64, min_bayes_factor: f64, purity: f64) -> Self {
let n_allele_freqs = 10;
let mut states = Vec::new();
let mut state_by_gain = HashMap::new();
for allele_freq in linspace(0.1, 1.0, n_allele_freqs) {
for gain in -2..MAX_GAIN {
if gain!= 0 || allele_freq == 1.0 {
let cnv = CNV {
gain: gain,
allele_freq: AlleleFreq(allele_freq),
purity,
};
state_by_gain
.entry(gain)
.or_insert_with(Vec::new)
.push(hmm::State(states.len()));
states.push(cnv);
}
}
}
// METHOD:
// We choose the probability to keep a state to be higher than the probability
// to switch a state. In addition, we want the switch to the null state to be as likely
// as keeping the state. The amount is defined by an epsilon, which is derived from the
// minimum bayes factor over the products of emission probabilities between
// two stretches of two different states.
assert!(min_bayes_factor > 1.0);
let n = states.len() as f64;
let epsilon = min_bayes_factor - 1.0;
let denominator = n + epsilon;
let prob_keep_state = LogProb::from(Prob((1.0 + epsilon) / denominator));
let prob_change_state = LogProb::from(Prob(1.0 / denominator));
HMM {
states,
state_by_gain,
depth_norm_factor,
prob_keep_state,
prob_change_state,
}
}
pub(crate) fn prob_no_cnv(&self, observations: &[&Call]) -> LogProb {
let likelihood_no_cnv = likelihood(
self,
iter::repeat(self.state_by_gain.get(&0).unwrap()[0]),
observations.iter().cloned(),
);
let mut likelihoods = vec![likelihood_no_cnv];
for gain in -2..MAX_GAIN {
if gain!= 0 {
let af_spectrum = self.state_by_gain.get(&gain).unwrap();
likelihoods.push(LogProb::ln_simpsons_integrate_exp(
|i, _| {
let state = af_spectrum[i];
likelihood(self, iter::repeat(state), observations.iter().cloned())
},
0.0,
1.0,
af_spectrum.len() - 1,
));
}
}
LogProb::ln_sum_exp(&likelihoods)
}
pub(crate) fn null_state(&self) -> hmm::State {
self.state_by_gain.get(&0).unwrap()[0]
}
pub(crate) fn bayes_factors(&self, state: hmm::State, observations: &[&Call]) -> Vec<BayesFactor> {
let null_state = self.null_state();
observations
.into_iter()
.map(|obs| {
BayesFactor::new(
self.observation_prob(state, obs),
self.observation_prob(null_state, obs),
)
})
.collect()
}
fn prob_af_depth(&self, state: hmm::State, call: &Call) -> LogProb {
let cnv = self.states[*state];
let prob05 = LogProb(0.5f64.ln());
// handle allele freq changes
let prob_af = if let Some(alt_af) = cnv.expected_allele_freq_alt_affected() {
let ref_af = cnv.expected_allele_freq_ref_affected().unwrap();
prob05
+ call
.prob_allele_freq_tumor(alt_af)
.ln_add_exp(call.prob_allele_freq_tumor(ref_af))
} else {
LogProb::ln_one()
};
// handle depth changes
let prob_depth = call.prob_depth_tumor(
call.depth_normal as f64 * self.depth_norm_factor * cnv.expected_depth_factor(),
);
prob_af + prob_depth
}
}
impl hmm::Model<Call> for HMM {
fn num_states(&self) -> usize {
self.states.len()
}
fn states(&self) -> hmm::StateIter {
hmm::StateIter::new(self.num_states())
}
fn transitions(&self) -> hmm::StateTransitionIter {
hmm::StateTransitionIter::new(self.num_states())
}
fn transition_prob(&self, from: hmm::State, to: hmm::State) -> LogProb {
if from == to {
self.prob_keep_state
} else {
self.prob_change_state
}
}
fn initial_prob(&self, _: hmm::State) -> LogProb {
LogProb((1.0 / self.states.len() as f64).ln())
}
fn observation_prob(&self, state: hmm::State, call: &Call) -> LogProb {
let prob_af_depth = self.prob_af_depth(state, call);
let prob_null = self.prob_af_depth(self.null_state(), call);
(call.prob_germline_het + prob_af_depth)
.ln_add_exp(call.prob_germline_het.ln_one_minus_exp() + prob_null)
}
}
pub(crate) fn likelihood<'a, O: 'a>(
hmm: &dyn hmm::Model<O>,
states: impl IntoIterator<Item = hmm::State>,
observations: impl Iterator<Item = &'a O>,
) -> LogProb {
|
let mut p = LogProb::ln_one();
for (state, obs) in states.into_iter().zip(observations) {
p += hmm.observation_prob(state, obs);
}
p
}
|
identifier_body
|
|
cnvs.rs
|
.map(|(region, calls)| {
let hmm = HMM::new(depth_norm_factor, min_bayes_factor, purity);
let (states, _prob) = hmm::viterbi(&hmm, calls);
(
region,
states
.iter()
.zip(calls.iter())
.group_by(|item| item.0)
.into_iter()
.filter_map(|(&state, group)| {
let cnv = hmm.states[*state];
if cnv.gain == 0 {
return None;
}
let group = group.into_iter().map(|item| item.1).collect_vec();
let first_call = group[0];
if group.len() > 1 {
let last_call = group[group.len() - 1];
// calculate posterior probability of no CNV
let prob_no_cnv = hmm.prob_no_cnv(&group);
let bayes_factors = hmm.bayes_factors(state, &group);
Some(CNVCall {
prev_pos: first_call.prev_start,
next_pos: last_call.next_start,
pos: first_call.start,
end: last_call.start + 1,
cnv: cnv,
prob_no_cnv,
calls: group,
bayes_factors: bayes_factors,
})
} else {
None
}
})
.collect_vec(),
)
})
.collect();
for (region, calls) in cnv_calls {
let contig = self.bcf_reader.header().rid2name(region.rid)?;
let rid = self.bcf_writer.header().name2rid(contig)?;
for call in calls {
let mut record = self.bcf_writer.empty_record();
call.write(
rid,
&mut record,
depth_norm_factor,
*self.contig_lens.get(contig).unwrap(),
)?;
self.bcf_writer.write(&record)?;
}
}
Ok(())
}
}
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub(crate) struct Region {
rid: u32,
start: u64,
}
pub(crate) struct CNVCall<'a> {
prev_pos: Option<u64>,
next_pos: Option<u64>,
pos: u64,
end: u64,
cnv: CNV,
prob_no_cnv: LogProb,
calls: Vec<&'a Call>,
bayes_factors: Vec<BayesFactor>,
}
impl<'a> CNVCall<'a> {
pub(crate) fn write(
&self,
rid: u32,
record: &mut bcf::Record,
depth_norm_factor: f64,
contig_len: u64,
) -> Result<()> {
record.set_rid(Some(rid));
record.set_pos(self.pos as i64);
record.set_alleles(&[b"N", b"<CNV>"])?;
record.push_info_integer(b"END", &[self.end as i32])?;
record.push_info_integer(b"SVLEN", &[self.len() as i32])?;
record.push_info_integer(b"CN", &[2 + self.cnv.gain])?;
record.push_info_float(b"VAF", &[*self.cnv.allele_freq as f32])?;
record.push_info_integer(b"LOCI", &[self.calls.len() as i32])?;
record.push_info_string(b"SVTYPE", &[b"CNV"])?;
record.push_info_flag(b"IMPRECISE")?;
record.push_info_integer(
b"CIPOS",
&[
if let Some(prev_pos) = self.prev_pos {
-((self.pos - prev_pos) as i32)
} else {
-(self.pos as i32)
},
0,
],
)?;
record.push_info_integer(
b"CIEND",
&[
0,
if let Some(next_pos) = self.next_pos {
(next_pos - self.end) as i32
} else {
(contig_len - self.end) as i32
},
],
)?;
let mut loci_dp = Vec::new();
loci_dp.extend(self.calls.iter().map(|call| call.depth_tumor as i32));
loci_dp.extend(
self.calls
.iter()
.map(|call| (call.depth_normal as f64 * depth_norm_factor).round() as i32),
);
record.push_format_integer(b"LOCI_DP", &loci_dp)?;
let mut loci_vaf = Vec::new();
loci_vaf.extend(self.calls.iter().map(|call| *call.allele_freq_tumor as f32));
loci_vaf.extend(
self.calls
.iter()
.map(|call| *call.allele_freq_normal as f32),
);
record.push_format_float(b"LOCI_VAF", &loci_vaf)?;
record.set_qual(*PHREDProb::from(self.prob_no_cnv) as f32);
let obs = join(
self.bayes_factors
.iter()
.map(|bf| utils::evidence_kass_raftery_to_letter(bf.evidence_kass_raftery())),
"",
);
record.push_info_string(b"OBS", &[obs.as_bytes()])?;
Ok(())
}
pub(crate) fn len(&self) -> u32 {
self.end - self.pos + 1
}
}
pub(crate) struct HMM {
states: Vec<CNV>,
state_by_gain: HashMap<i32, Vec<hmm::State>>,
depth_norm_factor: f64,
prob_keep_state: LogProb,
prob_change_state: LogProb,
}
impl HMM {
#[allow(clippy::float_cmp)]
fn new(depth_norm_factor: f64, min_bayes_factor: f64, purity: f64) -> Self {
let n_allele_freqs = 10;
let mut states = Vec::new();
let mut state_by_gain = HashMap::new();
for allele_freq in linspace(0.1, 1.0, n_allele_freqs) {
for gain in -2..MAX_GAIN {
if gain!= 0 || allele_freq == 1.0 {
let cnv = CNV {
gain: gain,
allele_freq: AlleleFreq(allele_freq),
purity,
};
state_by_gain
.entry(gain)
.or_insert_with(Vec::new)
.push(hmm::State(states.len()));
states.push(cnv);
}
}
}
// METHOD:
// We choose the probability to keep a state to be higher than the probability
// to switch a state. In addition, we want the switch to the null state to be as likely
// as keeping the state. The amount is defined by an epsilon, which is derived from the
// minimum bayes factor over the products of emission probabilities between
// two stretches of two different states.
assert!(min_bayes_factor > 1.0);
let n = states.len() as f64;
let epsilon = min_bayes_factor - 1.0;
let denominator = n + epsilon;
let prob_keep_state = LogProb::from(Prob((1.0 + epsilon) / denominator));
let prob_change_state = LogProb::from(Prob(1.0 / denominator));
HMM {
states,
state_by_gain,
depth_norm_factor,
prob_keep_state,
prob_change_state,
}
}
pub(crate) fn prob_no_cnv(&self, observations: &[&Call]) -> LogProb {
let likelihood_no_cnv = likelihood(
self,
iter::repeat(self.state_by_gain.get(&0).unwrap()[0]),
observations.iter().cloned(),
);
let mut likelihoods = vec![likelihood_no_cnv];
for gain in -2..MAX_GAIN {
if gain!= 0 {
let af_spectrum = self.state_by_gain.get(&gain).unwrap();
likelihoods.push(LogProb::ln_simpsons_integrate_exp(
|i, _| {
let state = af_spectrum[i];
likelihood(self, iter::repeat(state), observations.iter().cloned())
},
0.0,
1.0,
af_spectrum.len() - 1,
));
}
}
LogProb::ln_sum_exp(&likelihoods)
}
pub(crate) fn null_state(&self) -> hmm::State {
self.state_by_gain.get(&0).unwrap()[0]
}
pub(crate) fn bayes_factors(&self, state: hmm::State, observations: &[&Call]) -> Vec<BayesFactor> {
let null_state = self.null_state();
observations
.into_iter()
.map(|obs| {
BayesFactor::new(
self.observation_prob(state, obs),
self.observation_prob(null_state, obs),
)
})
.collect()
}
fn prob_af_depth(&self, state: hmm::State, call: &Call) -> LogProb {
let cnv = self.states[*state];
let prob05 = LogProb(0.5f64.ln());
// handle allele freq changes
let prob_af = if let Some(alt_af) = cnv.expected_allele_freq_alt_affected() {
let ref_af = cnv.expected_allele_freq_ref_affected().unwrap();
prob05
+ call
.prob_allele_freq_tumor(alt_af)
.ln_add_exp(call.prob_allele_freq_tumor(ref_af))
} else {
LogProb::ln_one()
};
// handle depth changes
let prob_depth = call.prob_depth_tumor(
call.depth_normal as f64 * self.depth_norm_factor * cnv.expected_depth_factor(),
);
prob_af + prob_depth
}
}
impl hmm::Model<Call> for HMM {
fn num_states(&self) -> usize {
self.states.len()
}
fn states(&self) -> hmm::StateIter {
hmm::StateIter::new(self.num_states())
}
fn transitions(&self) -> hmm::StateTransitionIter {
hmm::StateTransitionIter::new(self.num_states())
}
fn transition_prob(&self, from: hmm::State, to: hmm::State) -> LogProb {
if from == to {
self.prob_keep_state
} else {
self.prob_change_state
}
}
fn initial_prob(&self, _: hmm::State) -> LogProb {
LogProb((1.0 / self.states.len() as f64).ln())
}
fn observation_prob(&self, state: hmm::State, call: &Call) -> LogProb {
let prob_af_depth = self.prob_af_depth(state, call);
let prob_null = self.prob_af_depth(self.null_state(), call);
(call.prob_germline_het + prob_af_depth)
.ln_add_exp(call.prob_germline_het.ln_one_minus_exp() + prob_null)
}
}
pub(crate) fn likelihood<'a, O: 'a>(
hmm: &dyn hmm::Model<O>,
states: impl IntoIterator<Item = hmm::State>,
observations: impl Iterator<Item = &'a O>,
) -> LogProb {
let mut p = LogProb::ln_one();
for (state, obs) in states.into_iter().zip(observations) {
p += hmm.observation_prob(state, obs);
}
p
}
pub(crate) fn marginal<'a, O: 'a>(
hmm: &dyn hmm::Model<O>,
observations: impl IntoIterator<Item = &'a O>,
) -> LogProb {
let mut prev = vec![LogProb::ln_zero(); hmm.num_states()];
let mut curr = prev.clone();
for (i, obs) in observations.into_iter().enumerate() {
for to in hmm.states() {
let prob_obs = hmm.observation_prob(to, obs);
curr[*to] = if i == 0 {
hmm.initial_prob(to)
} else {
prob_obs
+ LogProb::ln_sum_exp(
&hmm.states()
.map(|from| prev[*from] + hmm.transition_prob(from, to))
.collect_vec(),
)
};
}
mem::swap(&mut prev, &mut curr);
}
LogProb::ln_sum_exp(&prev.into_iter().collect_vec())
.cap_numerical_overshoot(utils::NUMERICAL_EPSILON)
}
#[derive(Debug)]
pub(crate) struct Call {
prob_germline_het: LogProb,
allele_freq_tumor: AlleleFreq,
allele_freq_normal: AlleleFreq,
depth_tumor: u32,
depth_normal: u32,
start: u64,
rid: u32,
prev_start: Option<u64>,
next_start: Option<u64>,
}
impl Call {
pub(crate) fn new(record: &mut bcf::Record) -> Result<Option<Self>> {
let pos = record.pos();
let prob_germline_het = record.info(b"PROB_GERMLINE_HET").float()?;
if let Some(_prob_germline_het) = prob_germline_het {
if!_prob_germline_het[0].is_missing() &&!_prob_germline_het[0].is_nan() {
let prob_germline_het = LogProb::from(PHREDProb(_prob_germline_het[0] as f64));
assert!(
*prob_germline_het <= 0.0,
"invalid prob_germline_het: {}, POS: {}",
_prob_germline_het[0],
pos
);
let depths = record
.format(b"DP")
.integer()?
.into_iter()
.map(|d| d[0] as u32)
.collect_vec();
let allele_freqs = record.format(b"AF").float()?;
if prob_germline_het >= LogProb::from(Prob(0.5)) {
return Ok(Some(Call {
allele_freq_tumor: AlleleFreq(allele_freqs.tumor()[0] as f64),
allele_freq_normal: AlleleFreq(allele_freqs.normal()[0] as f64),
depth_tumor: *depths.tumor(),
depth_normal: *depths.normal(),
prob_germline_het: prob_germline_het,
start: record.pos() as u64,
rid: record.rid().unwrap(),
prev_start: None,
next_start: None,
}));
}
}
}
Ok(None)
}
pub(crate) fn prob_allele_freq_tumor(&self, true_allele_freq: AlleleFreq) -> LogProb {
allele_freq_pdf(self.allele_freq_tumor, true_allele_freq, self.depth_tumor)
}
pub(crate) fn prob_allele_freq_normal_het(&self) -> LogProb {
allele_freq_pdf(self.allele_freq_normal, AlleleFreq(0.5), self.depth_normal)
}
pub(crate) fn prob_depth_tumor(&self, true_depth: f64) -> LogProb {
depth_pmf(self.depth_tumor, true_depth)
}
}
#[derive(PartialEq, Copy, Clone, Debug)]
pub(crate) struct CNV {
gain: i32,
allele_freq: AlleleFreq,
purity: f64,
}
impl CNV {
pub(crate) fn expected_allele_freq_alt_affected(&self) -> Option<AlleleFreq> {
if self.gain > -2 {
Some(AlleleFreq(
*self.allele_freq * (1.0 + self.gain as f64) / (2.0 + self.gain as f64)
+ (1.0 - *self.allele_freq) * 0.5,
))
} else if self.purity < 1.0 {
|
// gain = -2: all lost in tumor cells, hence 100% normal cells at this locus.
// Therefore VAF=0.5.
Some(AlleleFreq(0.5))
}
|
conditional_block
|
|
cnvs.rs
|
"##INFO=<ID=LOCI,Number=1,Type=Integer,Description=\"Number of contained loci.\">"
.as_bytes(),
);
header.push_record(
"##INFO=<ID=OBS,Number=1,Type=String,Description=\"Bayes factors for per-locus \
support for no CNV, given as Kass Raftery scores: \
N: none, B: barely, P: positive, S: strong, V: very strong \">"
.as_bytes(),
);
header.push_record(
"##FORMAT=<ID=LOCI_DP,Number=.,Type=Integer,Description=\"Depths of contained loci.\">"
.as_bytes(),
);
header.push_record(
"##FORMAT=<ID=LOCI_VAF,Number=.,Type=Integer,Description=\"VAFs of contained loci.\">"
.as_bytes(),
);
let mut contig_lens = HashMap::new();
// register sequences
for rec in bcf_reader.header().header_records() {
match rec {
bcf::header::HeaderRecord::Contig { values,.. } => {
let name = values.get("ID").unwrap();
let len = values.get("length").unwrap();
contig_lens.insert(name.clone().into_bytes(), len.parse()?);
header.push_record(format!("##contig=<ID={},length={}>", name, len).as_bytes());
}
_ => (),
}
}
self = self.contig_lens(contig_lens);
Ok(self.bcf_writer(if let Some(path) = out_path {
bcf::Writer::from_path(path, &header, false, bcf::Format::BCF)?
} else {
bcf::Writer::from_stdout(&header, false, bcf::Format::BCF)?
}))
}
}
impl Caller {
pub(crate) fn call(&mut self) -> Result<()> {
// obtain records
let calls = {
let mut record = self.bcf_reader.empty_record();
let mut last_call: Option<&Call> = None;
let mut curr_region = None;
let mut _calls = Vec::new();
loop {
if!self.bcf_reader.read(&mut record)? {
break;
}
if let Some(call) = Call::new(&mut record)? {
if call.depth_normal >= MIN_DEPTH {
_calls.push(call);
}
}
}
// add next and prev pos to calls
for i in 0.._calls.len() {
if i > 0 {
_calls[i].prev_start = Some(_calls[i - 1].start);
}
if i < _calls.len() - 1 {
_calls[i].next_start = Some(_calls[i + 1].start);
}
}
let mut calls = HashMap::new();
for call in _calls {
let region = if let Some(last_call) = last_call {
if call.rid == last_call.rid && (call.start - last_call.start) <= self.max_dist
{
curr_region.unwrap()
} else {
Region {
rid: call.rid,
start: call.start,
}
}
} else {
Region {
rid: call.rid,
start: call.start,
}
};
curr_region = Some(region);
calls.entry(region).or_insert_with(Vec::new).push(call);
last_call = Some(calls.get(®ion).unwrap().last().unwrap());
}
calls
};
// normalization
let mean_depth = |filter: &dyn Fn(&Call) -> u32| {
calls.values().flatten().map(filter).sum::<u32>() as f64 / calls.len() as f64
};
let mean_depth_tumor = mean_depth(&|call: &Call| call.depth_tumor);
let mean_depth_normal = mean_depth(&|call: &Call| call.depth_normal);
let depth_norm_factor = mean_depth_tumor / mean_depth_normal;
let min_bayes_factor = self.min_bayes_factor;
let purity = self.purity;
let cnv_calls: BTreeMap<_, _> = calls
.par_iter()
.map(|(region, calls)| {
let hmm = HMM::new(depth_norm_factor, min_bayes_factor, purity);
let (states, _prob) = hmm::viterbi(&hmm, calls);
(
region,
states
.iter()
.zip(calls.iter())
.group_by(|item| item.0)
.into_iter()
.filter_map(|(&state, group)| {
let cnv = hmm.states[*state];
if cnv.gain == 0 {
return None;
}
let group = group.into_iter().map(|item| item.1).collect_vec();
let first_call = group[0];
if group.len() > 1 {
let last_call = group[group.len() - 1];
// calculate posterior probability of no CNV
let prob_no_cnv = hmm.prob_no_cnv(&group);
let bayes_factors = hmm.bayes_factors(state, &group);
Some(CNVCall {
prev_pos: first_call.prev_start,
next_pos: last_call.next_start,
pos: first_call.start,
end: last_call.start + 1,
cnv: cnv,
prob_no_cnv,
calls: group,
bayes_factors: bayes_factors,
})
} else {
None
}
})
.collect_vec(),
)
})
.collect();
for (region, calls) in cnv_calls {
let contig = self.bcf_reader.header().rid2name(region.rid)?;
let rid = self.bcf_writer.header().name2rid(contig)?;
for call in calls {
let mut record = self.bcf_writer.empty_record();
call.write(
rid,
&mut record,
depth_norm_factor,
*self.contig_lens.get(contig).unwrap(),
)?;
self.bcf_writer.write(&record)?;
}
}
Ok(())
}
}
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub(crate) struct Region {
rid: u32,
start: u64,
}
pub(crate) struct CNVCall<'a> {
prev_pos: Option<u64>,
next_pos: Option<u64>,
pos: u64,
end: u64,
cnv: CNV,
prob_no_cnv: LogProb,
calls: Vec<&'a Call>,
bayes_factors: Vec<BayesFactor>,
}
impl<'a> CNVCall<'a> {
pub(crate) fn write(
&self,
rid: u32,
record: &mut bcf::Record,
depth_norm_factor: f64,
contig_len: u64,
) -> Result<()> {
record.set_rid(Some(rid));
record.set_pos(self.pos as i64);
record.set_alleles(&[b"N", b"<CNV>"])?;
record.push_info_integer(b"END", &[self.end as i32])?;
record.push_info_integer(b"SVLEN", &[self.len() as i32])?;
record.push_info_integer(b"CN", &[2 + self.cnv.gain])?;
record.push_info_float(b"VAF", &[*self.cnv.allele_freq as f32])?;
record.push_info_integer(b"LOCI", &[self.calls.len() as i32])?;
record.push_info_string(b"SVTYPE", &[b"CNV"])?;
record.push_info_flag(b"IMPRECISE")?;
record.push_info_integer(
b"CIPOS",
&[
if let Some(prev_pos) = self.prev_pos {
-((self.pos - prev_pos) as i32)
} else {
-(self.pos as i32)
},
0,
],
)?;
record.push_info_integer(
b"CIEND",
&[
0,
if let Some(next_pos) = self.next_pos {
(next_pos - self.end) as i32
} else {
(contig_len - self.end) as i32
},
],
)?;
let mut loci_dp = Vec::new();
loci_dp.extend(self.calls.iter().map(|call| call.depth_tumor as i32));
loci_dp.extend(
self.calls
.iter()
.map(|call| (call.depth_normal as f64 * depth_norm_factor).round() as i32),
);
record.push_format_integer(b"LOCI_DP", &loci_dp)?;
let mut loci_vaf = Vec::new();
loci_vaf.extend(self.calls.iter().map(|call| *call.allele_freq_tumor as f32));
loci_vaf.extend(
self.calls
.iter()
.map(|call| *call.allele_freq_normal as f32),
);
record.push_format_float(b"LOCI_VAF", &loci_vaf)?;
record.set_qual(*PHREDProb::from(self.prob_no_cnv) as f32);
let obs = join(
self.bayes_factors
.iter()
.map(|bf| utils::evidence_kass_raftery_to_letter(bf.evidence_kass_raftery())),
"",
);
record.push_info_string(b"OBS", &[obs.as_bytes()])?;
Ok(())
}
pub(crate) fn len(&self) -> u32 {
self.end - self.pos + 1
}
}
pub(crate) struct HMM {
states: Vec<CNV>,
state_by_gain: HashMap<i32, Vec<hmm::State>>,
depth_norm_factor: f64,
prob_keep_state: LogProb,
prob_change_state: LogProb,
}
impl HMM {
#[allow(clippy::float_cmp)]
fn new(depth_norm_factor: f64, min_bayes_factor: f64, purity: f64) -> Self {
let n_allele_freqs = 10;
let mut states = Vec::new();
let mut state_by_gain = HashMap::new();
for allele_freq in linspace(0.1, 1.0, n_allele_freqs) {
for gain in -2..MAX_GAIN {
if gain!= 0 || allele_freq == 1.0 {
let cnv = CNV {
gain: gain,
allele_freq: AlleleFreq(allele_freq),
purity,
};
state_by_gain
.entry(gain)
.or_insert_with(Vec::new)
.push(hmm::State(states.len()));
states.push(cnv);
}
}
}
// METHOD:
// We choose the probability to keep a state to be higher than the probability
// to switch a state. In addition, we want the switch to the null state to be as likely
// as keeping the state. The amount is defined by an epsilon, which is derived from the
// minimum bayes factor over the products of emission probabilities between
// two stretches of two different states.
assert!(min_bayes_factor > 1.0);
let n = states.len() as f64;
let epsilon = min_bayes_factor - 1.0;
let denominator = n + epsilon;
let prob_keep_state = LogProb::from(Prob((1.0 + epsilon) / denominator));
let prob_change_state = LogProb::from(Prob(1.0 / denominator));
HMM {
states,
state_by_gain,
depth_norm_factor,
prob_keep_state,
prob_change_state,
}
}
pub(crate) fn prob_no_cnv(&self, observations: &[&Call]) -> LogProb {
let likelihood_no_cnv = likelihood(
self,
iter::repeat(self.state_by_gain.get(&0).unwrap()[0]),
observations.iter().cloned(),
);
let mut likelihoods = vec![likelihood_no_cnv];
for gain in -2..MAX_GAIN {
if gain!= 0 {
let af_spectrum = self.state_by_gain.get(&gain).unwrap();
likelihoods.push(LogProb::ln_simpsons_integrate_exp(
|i, _| {
let state = af_spectrum[i];
likelihood(self, iter::repeat(state), observations.iter().cloned())
},
0.0,
1.0,
af_spectrum.len() - 1,
));
}
}
LogProb::ln_sum_exp(&likelihoods)
}
pub(crate) fn null_state(&self) -> hmm::State {
self.state_by_gain.get(&0).unwrap()[0]
}
pub(crate) fn bayes_factors(&self, state: hmm::State, observations: &[&Call]) -> Vec<BayesFactor> {
let null_state = self.null_state();
observations
.into_iter()
.map(|obs| {
BayesFactor::new(
self.observation_prob(state, obs),
self.observation_prob(null_state, obs),
)
})
.collect()
}
fn prob_af_depth(&self, state: hmm::State, call: &Call) -> LogProb {
let cnv = self.states[*state];
let prob05 = LogProb(0.5f64.ln());
// handle allele freq changes
let prob_af = if let Some(alt_af) = cnv.expected_allele_freq_alt_affected() {
let ref_af = cnv.expected_allele_freq_ref_affected().unwrap();
prob05
+ call
.prob_allele_freq_tumor(alt_af)
.ln_add_exp(call.prob_allele_freq_tumor(ref_af))
} else {
LogProb::ln_one()
};
// handle depth changes
let prob_depth = call.prob_depth_tumor(
call.depth_normal as f64 * self.depth_norm_factor * cnv.expected_depth_factor(),
);
prob_af + prob_depth
}
}
impl hmm::Model<Call> for HMM {
fn num_states(&self) -> usize {
self.states.len()
}
fn states(&self) -> hmm::StateIter {
hmm::StateIter::new(self.num_states())
}
fn transitions(&self) -> hmm::StateTransitionIter {
hmm::StateTransitionIter::new(self.num_states())
}
fn transition_prob(&self, from: hmm::State, to: hmm::State) -> LogProb {
if from == to {
self.prob_keep_state
} else {
self.prob_change_state
}
}
fn initial_prob(&self, _: hmm::State) -> LogProb {
LogProb((1.0 / self.states.len() as f64).ln())
}
fn observation_prob(&self, state: hmm::State, call: &Call) -> LogProb {
let prob_af_depth = self.prob_af_depth(state, call);
let prob_null = self.prob_af_depth(self.null_state(), call);
(call.prob_germline_het + prob_af_depth)
.ln_add_exp(call.prob_germline_het.ln_one_minus_exp() + prob_null)
}
}
pub(crate) fn l
|
ikelihood<
|
identifier_name
|
|
cnvs.rs
|
header.push_record(
"##INFO=<ID=SVLEN,Number=1,Type=Integer,Description=\"CNV length.\">".as_bytes(),
);
header.push_record(
"##INFO=<ID=SVTYPE,Number=1,Type=Integer,Description=\"SV type.\">".as_bytes(),
);
header.push_record(
"##INFO=<ID=LOCI,Number=1,Type=Integer,Description=\"Number of contained loci.\">"
.as_bytes(),
);
header.push_record(
"##INFO=<ID=OBS,Number=1,Type=String,Description=\"Bayes factors for per-locus \
support for no CNV, given as Kass Raftery scores: \
N: none, B: barely, P: positive, S: strong, V: very strong \">"
.as_bytes(),
);
header.push_record(
"##FORMAT=<ID=LOCI_DP,Number=.,Type=Integer,Description=\"Depths of contained loci.\">"
.as_bytes(),
);
header.push_record(
"##FORMAT=<ID=LOCI_VAF,Number=.,Type=Integer,Description=\"VAFs of contained loci.\">"
.as_bytes(),
);
let mut contig_lens = HashMap::new();
// register sequences
for rec in bcf_reader.header().header_records() {
match rec {
bcf::header::HeaderRecord::Contig { values,.. } => {
let name = values.get("ID").unwrap();
let len = values.get("length").unwrap();
contig_lens.insert(name.clone().into_bytes(), len.parse()?);
header.push_record(format!("##contig=<ID={},length={}>", name, len).as_bytes());
}
_ => (),
}
}
self = self.contig_lens(contig_lens);
Ok(self.bcf_writer(if let Some(path) = out_path {
bcf::Writer::from_path(path, &header, false, bcf::Format::BCF)?
} else {
bcf::Writer::from_stdout(&header, false, bcf::Format::BCF)?
}))
}
}
impl Caller {
pub(crate) fn call(&mut self) -> Result<()> {
// obtain records
let calls = {
let mut record = self.bcf_reader.empty_record();
let mut last_call: Option<&Call> = None;
let mut curr_region = None;
let mut _calls = Vec::new();
loop {
if!self.bcf_reader.read(&mut record)? {
break;
}
if let Some(call) = Call::new(&mut record)? {
if call.depth_normal >= MIN_DEPTH {
_calls.push(call);
}
}
}
// add next and prev pos to calls
for i in 0.._calls.len() {
if i > 0 {
_calls[i].prev_start = Some(_calls[i - 1].start);
}
if i < _calls.len() - 1 {
_calls[i].next_start = Some(_calls[i + 1].start);
}
}
let mut calls = HashMap::new();
for call in _calls {
let region = if let Some(last_call) = last_call {
if call.rid == last_call.rid && (call.start - last_call.start) <= self.max_dist
{
curr_region.unwrap()
} else {
Region {
rid: call.rid,
start: call.start,
}
}
} else {
Region {
rid: call.rid,
start: call.start,
}
};
curr_region = Some(region);
calls.entry(region).or_insert_with(Vec::new).push(call);
last_call = Some(calls.get(®ion).unwrap().last().unwrap());
}
calls
};
// normalization
let mean_depth = |filter: &dyn Fn(&Call) -> u32| {
calls.values().flatten().map(filter).sum::<u32>() as f64 / calls.len() as f64
};
let mean_depth_tumor = mean_depth(&|call: &Call| call.depth_tumor);
let mean_depth_normal = mean_depth(&|call: &Call| call.depth_normal);
let depth_norm_factor = mean_depth_tumor / mean_depth_normal;
let min_bayes_factor = self.min_bayes_factor;
let purity = self.purity;
let cnv_calls: BTreeMap<_, _> = calls
.par_iter()
.map(|(region, calls)| {
let hmm = HMM::new(depth_norm_factor, min_bayes_factor, purity);
let (states, _prob) = hmm::viterbi(&hmm, calls);
(
region,
states
.iter()
.zip(calls.iter())
.group_by(|item| item.0)
.into_iter()
.filter_map(|(&state, group)| {
let cnv = hmm.states[*state];
if cnv.gain == 0 {
return None;
}
let group = group.into_iter().map(|item| item.1).collect_vec();
let first_call = group[0];
if group.len() > 1 {
let last_call = group[group.len() - 1];
// calculate posterior probability of no CNV
let prob_no_cnv = hmm.prob_no_cnv(&group);
let bayes_factors = hmm.bayes_factors(state, &group);
Some(CNVCall {
prev_pos: first_call.prev_start,
next_pos: last_call.next_start,
pos: first_call.start,
end: last_call.start + 1,
cnv: cnv,
prob_no_cnv,
calls: group,
bayes_factors: bayes_factors,
})
} else {
None
}
})
.collect_vec(),
)
})
.collect();
for (region, calls) in cnv_calls {
let contig = self.bcf_reader.header().rid2name(region.rid)?;
let rid = self.bcf_writer.header().name2rid(contig)?;
for call in calls {
let mut record = self.bcf_writer.empty_record();
call.write(
rid,
&mut record,
depth_norm_factor,
*self.contig_lens.get(contig).unwrap(),
)?;
self.bcf_writer.write(&record)?;
}
}
Ok(())
}
}
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub(crate) struct Region {
rid: u32,
start: u64,
}
pub(crate) struct CNVCall<'a> {
prev_pos: Option<u64>,
next_pos: Option<u64>,
pos: u64,
end: u64,
cnv: CNV,
prob_no_cnv: LogProb,
calls: Vec<&'a Call>,
bayes_factors: Vec<BayesFactor>,
}
impl<'a> CNVCall<'a> {
pub(crate) fn write(
&self,
rid: u32,
record: &mut bcf::Record,
depth_norm_factor: f64,
contig_len: u64,
) -> Result<()> {
record.set_rid(Some(rid));
record.set_pos(self.pos as i64);
record.set_alleles(&[b"N", b"<CNV>"])?;
record.push_info_integer(b"END", &[self.end as i32])?;
record.push_info_integer(b"SVLEN", &[self.len() as i32])?;
record.push_info_integer(b"CN", &[2 + self.cnv.gain])?;
record.push_info_float(b"VAF", &[*self.cnv.allele_freq as f32])?;
record.push_info_integer(b"LOCI", &[self.calls.len() as i32])?;
record.push_info_string(b"SVTYPE", &[b"CNV"])?;
record.push_info_flag(b"IMPRECISE")?;
record.push_info_integer(
b"CIPOS",
&[
if let Some(prev_pos) = self.prev_pos {
-((self.pos - prev_pos) as i32)
} else {
-(self.pos as i32)
},
0,
],
)?;
record.push_info_integer(
b"CIEND",
&[
0,
if let Some(next_pos) = self.next_pos {
(next_pos - self.end) as i32
} else {
(contig_len - self.end) as i32
},
],
)?;
let mut loci_dp = Vec::new();
loci_dp.extend(self.calls.iter().map(|call| call.depth_tumor as i32));
loci_dp.extend(
self.calls
.iter()
.map(|call| (call.depth_normal as f64 * depth_norm_factor).round() as i32),
);
record.push_format_integer(b"LOCI_DP", &loci_dp)?;
let mut loci_vaf = Vec::new();
loci_vaf.extend(self.calls.iter().map(|call| *call.allele_freq_tumor as f32));
loci_vaf.extend(
self.calls
.iter()
.map(|call| *call.allele_freq_normal as f32),
);
record.push_format_float(b"LOCI_VAF", &loci_vaf)?;
record.set_qual(*PHREDProb::from(self.prob_no_cnv) as f32);
let obs = join(
self.bayes_factors
.iter()
.map(|bf| utils::evidence_kass_raftery_to_letter(bf.evidence_kass_raftery())),
"",
);
record.push_info_string(b"OBS", &[obs.as_bytes()])?;
Ok(())
}
pub(crate) fn len(&self) -> u32 {
self.end - self.pos + 1
}
}
pub(crate) struct HMM {
states: Vec<CNV>,
state_by_gain: HashMap<i32, Vec<hmm::State>>,
depth_norm_factor: f64,
prob_keep_state: LogProb,
prob_change_state: LogProb,
}
impl HMM {
#[allow(clippy::float_cmp)]
fn new(depth_norm_factor: f64, min_bayes_factor: f64, purity: f64) -> Self {
let n_allele_freqs = 10;
let mut states = Vec::new();
let mut state_by_gain = HashMap::new();
for allele_freq in linspace(0.1, 1.0, n_allele_freqs) {
for gain in -2..MAX_GAIN {
if gain!= 0 || allele_freq == 1.0 {
let cnv = CNV {
gain: gain,
allele_freq: AlleleFreq(allele_freq),
purity,
};
state_by_gain
.entry(gain)
.or_insert_with(Vec::new)
.push(hmm::State(states.len()));
states.push(cnv);
}
}
}
// METHOD:
// We choose the probability to keep a state to be higher than the probability
// to switch a state. In addition, we want the switch to the null state to be as likely
// as keeping the state. The amount is defined by an epsilon, which is derived from the
// minimum bayes factor over the products of emission probabilities between
// two stretches of two different states.
assert!(min_bayes_factor > 1.0);
let n = states.len() as f64;
let epsilon = min_bayes_factor - 1.0;
let denominator = n + epsilon;
let prob_keep_state = LogProb::from(Prob((1.0 + epsilon) / denominator));
let prob_change_state = LogProb::from(Prob(1.0 / denominator));
HMM {
states,
state_by_gain,
depth_norm_factor,
prob_keep_state,
prob_change_state,
}
}
pub(crate) fn prob_no_cnv(&self, observations: &[&Call]) -> LogProb {
let likelihood_no_cnv = likelihood(
self,
iter::repeat(self.state_by_gain.get(&0).unwrap()[0]),
observations.iter().cloned(),
);
let mut likelihoods = vec![likelihood_no_cnv];
for gain in -2..MAX_GAIN {
if gain!= 0 {
let af_spectrum = self.state_by_gain.get(&gain).unwrap();
likelihoods.push(LogProb::ln_simpsons_integrate_exp(
|i, _| {
let state = af_spectrum[i];
likelihood(self, iter::repeat(state), observations.iter().cloned())
},
0.0,
1.0,
af_spectrum.len() - 1,
));
}
}
LogProb::ln_sum_exp(&likelihoods)
|
pub(crate) fn null_state(&self) -> hmm::State {
self.state_by_gain.get(&0).unwrap()[0]
}
pub(crate) fn bayes_factors(&self, state: hmm::State, observations: &[&Call]) -> Vec<BayesFactor> {
let null_state = self.null_state();
observations
.into_iter()
.map(|obs| {
BayesFactor::new(
self.observation_prob(state, obs),
self.observation_prob(null_state, obs),
)
})
.collect()
}
fn prob_af_depth(&self, state: hmm::State, call: &Call) -> LogProb {
let cnv = self.states[*state];
let prob05 = LogProb(0.5f64.ln());
// handle allele freq changes
let prob_af = if let Some(alt_af) = cnv.expected_allele_freq_alt_affected() {
let ref_af = cnv.expected_allele_freq_ref_affected().unwrap();
prob05
+ call
.prob_allele_freq_tumor(alt_af)
.ln_add_exp(call.prob_allele_freq_tumor(ref_af))
} else {
LogProb::ln_one()
};
// handle depth changes
let prob_depth = call.prob_depth_tumor(
call.depth_normal as f64 * self.depth_norm_factor * cnv.expected_depth_factor(),
);
prob_af + prob_depth
}
}
impl hmm::Model<Call> for HMM {
fn num_states(&self) -> usize {
self.states.len()
}
fn states(&self) -> hmm::StateIter {
hmm::StateIter::new(self.num_states())
}
fn transitions(&self) -> hmm::StateTransitionIter {
hmm::StateTransitionIter::new(self.num_states())
}
fn transition_prob(&self, from: hmm::State, to: hmm::State) -> LogProb {
if from == to {
self.prob_keep_state
} else {
self.prob_change_state
}
}
fn initial_prob(&self, _: hmm::State) -> LogProb {
LogProb((1.0 / self.states.len() as f64).ln())
}
fn observation_prob(&self, state: hmm::State, call: &Call) -> LogProb
|
}
|
random_line_split
|
main.rs
|
extern crate graphics;
extern crate freetype as ft;
extern crate sdl2_window;
extern crate opengl_graphics;
extern crate piston;
use std::path::Path;
use sdl2_window::Sdl2Window;
use opengl_graphics::{ GlGraphics, Texture, OpenGL };
use graphics::math::Matrix2d;
use piston::window::WindowSettings;
use piston::event::*;
fn render_text(face: &mut ft::Face, gl: &mut GlGraphics, t: Matrix2d, text: &str) {
use graphics::*;
let mut x = 10;
let mut y = 0;
for ch in text.chars() {
face.load_char(ch as usize, ft::face::RENDER).unwrap();
let g = face.glyph();
let bitmap = g.bitmap();
let texture = Texture::from_memory_alpha(bitmap.buffer(),
bitmap.width() as u32,
bitmap.rows() as u32).unwrap();
Image::new_colored(color::BLACK).draw(
&texture,
default_draw_state(),
t.trans((x + g.bitmap_left()) as f64, (y - g.bitmap_top()) as f64),
gl
);
x += (g.advance().x >> 6) as i32;
y += (g.advance().y >> 6) as i32;
}
}
fn
|
() {
let opengl = OpenGL::_3_2;
let window = Sdl2Window::new(
opengl,
WindowSettings::new("piston-example-freetype", [300, 300])
.exit_on_esc(true)
);
let freetype = ft::Library::init().unwrap();
let font = Path::new("./bin/assets/FiraSans-Regular.ttf");
let mut face = freetype.new_face(&font, 0).unwrap();
face.set_pixel_sizes(0, 48).unwrap();
let ref mut gl = GlGraphics::new(opengl);
for e in window.events() {
if let Some(args) = e.render_args() {
use graphics::*;
gl.draw(args.viewport(), |c, gl| {
let transform = c.transform.trans(0.0, 100.0);
clear(color::WHITE, gl);
render_text(&mut face, gl, transform, "Hello Piston!");
});
}
}
}
|
main
|
identifier_name
|
main.rs
|
extern crate graphics;
extern crate freetype as ft;
extern crate sdl2_window;
extern crate opengl_graphics;
extern crate piston;
use std::path::Path;
use sdl2_window::Sdl2Window;
use opengl_graphics::{ GlGraphics, Texture, OpenGL };
use graphics::math::Matrix2d;
use piston::window::WindowSettings;
use piston::event::*;
fn render_text(face: &mut ft::Face, gl: &mut GlGraphics, t: Matrix2d, text: &str) {
use graphics::*;
let mut x = 10;
let mut y = 0;
for ch in text.chars() {
face.load_char(ch as usize, ft::face::RENDER).unwrap();
let g = face.glyph();
let bitmap = g.bitmap();
let texture = Texture::from_memory_alpha(bitmap.buffer(),
bitmap.width() as u32,
bitmap.rows() as u32).unwrap();
Image::new_colored(color::BLACK).draw(
&texture,
default_draw_state(),
t.trans((x + g.bitmap_left()) as f64, (y - g.bitmap_top()) as f64),
gl
);
x += (g.advance().x >> 6) as i32;
y += (g.advance().y >> 6) as i32;
}
}
fn main() {
let opengl = OpenGL::_3_2;
let window = Sdl2Window::new(
opengl,
WindowSettings::new("piston-example-freetype", [300, 300])
.exit_on_esc(true)
);
let freetype = ft::Library::init().unwrap();
|
let font = Path::new("./bin/assets/FiraSans-Regular.ttf");
let mut face = freetype.new_face(&font, 0).unwrap();
face.set_pixel_sizes(0, 48).unwrap();
let ref mut gl = GlGraphics::new(opengl);
for e in window.events() {
if let Some(args) = e.render_args() {
use graphics::*;
gl.draw(args.viewport(), |c, gl| {
let transform = c.transform.trans(0.0, 100.0);
clear(color::WHITE, gl);
render_text(&mut face, gl, transform, "Hello Piston!");
});
}
}
}
|
random_line_split
|
|
main.rs
|
extern crate graphics;
extern crate freetype as ft;
extern crate sdl2_window;
extern crate opengl_graphics;
extern crate piston;
use std::path::Path;
use sdl2_window::Sdl2Window;
use opengl_graphics::{ GlGraphics, Texture, OpenGL };
use graphics::math::Matrix2d;
use piston::window::WindowSettings;
use piston::event::*;
fn render_text(face: &mut ft::Face, gl: &mut GlGraphics, t: Matrix2d, text: &str)
|
x += (g.advance().x >> 6) as i32;
y += (g.advance().y >> 6) as i32;
}
}
fn main() {
let opengl = OpenGL::_3_2;
let window = Sdl2Window::new(
opengl,
WindowSettings::new("piston-example-freetype", [300, 300])
.exit_on_esc(true)
);
let freetype = ft::Library::init().unwrap();
let font = Path::new("./bin/assets/FiraSans-Regular.ttf");
let mut face = freetype.new_face(&font, 0).unwrap();
face.set_pixel_sizes(0, 48).unwrap();
let ref mut gl = GlGraphics::new(opengl);
for e in window.events() {
if let Some(args) = e.render_args() {
use graphics::*;
gl.draw(args.viewport(), |c, gl| {
let transform = c.transform.trans(0.0, 100.0);
clear(color::WHITE, gl);
render_text(&mut face, gl, transform, "Hello Piston!");
});
}
}
}
|
{
use graphics::*;
let mut x = 10;
let mut y = 0;
for ch in text.chars() {
face.load_char(ch as usize, ft::face::RENDER).unwrap();
let g = face.glyph();
let bitmap = g.bitmap();
let texture = Texture::from_memory_alpha(bitmap.buffer(),
bitmap.width() as u32,
bitmap.rows() as u32).unwrap();
Image::new_colored(color::BLACK).draw(
&texture,
default_draw_state(),
t.trans((x + g.bitmap_left()) as f64, (y - g.bitmap_top()) as f64),
gl
);
|
identifier_body
|
main.rs
|
extern crate graphics;
extern crate freetype as ft;
extern crate sdl2_window;
extern crate opengl_graphics;
extern crate piston;
use std::path::Path;
use sdl2_window::Sdl2Window;
use opengl_graphics::{ GlGraphics, Texture, OpenGL };
use graphics::math::Matrix2d;
use piston::window::WindowSettings;
use piston::event::*;
fn render_text(face: &mut ft::Face, gl: &mut GlGraphics, t: Matrix2d, text: &str) {
use graphics::*;
let mut x = 10;
let mut y = 0;
for ch in text.chars() {
face.load_char(ch as usize, ft::face::RENDER).unwrap();
let g = face.glyph();
let bitmap = g.bitmap();
let texture = Texture::from_memory_alpha(bitmap.buffer(),
bitmap.width() as u32,
bitmap.rows() as u32).unwrap();
Image::new_colored(color::BLACK).draw(
&texture,
default_draw_state(),
t.trans((x + g.bitmap_left()) as f64, (y - g.bitmap_top()) as f64),
gl
);
x += (g.advance().x >> 6) as i32;
y += (g.advance().y >> 6) as i32;
}
}
fn main() {
let opengl = OpenGL::_3_2;
let window = Sdl2Window::new(
opengl,
WindowSettings::new("piston-example-freetype", [300, 300])
.exit_on_esc(true)
);
let freetype = ft::Library::init().unwrap();
let font = Path::new("./bin/assets/FiraSans-Regular.ttf");
let mut face = freetype.new_face(&font, 0).unwrap();
face.set_pixel_sizes(0, 48).unwrap();
let ref mut gl = GlGraphics::new(opengl);
for e in window.events() {
if let Some(args) = e.render_args()
|
}
}
|
{
use graphics::*;
gl.draw(args.viewport(), |c, gl| {
let transform = c.transform.trans(0.0, 100.0);
clear(color::WHITE, gl);
render_text(&mut face, gl, transform, "Hello Piston!");
});
}
|
conditional_block
|
options.rs
|
#![ allow( unused_imports ) ]
#![ allow( unused_mut ) ]
#![ allow( dead_code ) ]
#![ allow( unused_variables ) ]
#![ warn( missing_docs ) ]
#![ warn( missing_debug_implementations ) ]
use meta_tools::*;
use quote::{ quote, ToTokens, TokenStreamExt };
use syn::parse::*;
use syn::spanned::Spanned;
use proc_macro_tools::*;
use std::collections::HashMap;
use iter_tools::{ Itertools, process_results };
pub type Result< T > = std::result::Result< T, syn::Error >;
///
/// Descriptor of a function with a body, body of which is not parsed.
///
pub struct FnQuick
{
pub attrs : Vec< syn::Attribute >,
pub vis : syn::Visibility,
pub sig : syn::Signature,
pub block : Option< proc_macro2::TokenStream >,
}
impl quote::ToTokens for FnQuick
{
fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream )
{
for attr in self.attrs.iter()
{
attr.to_tokens( tokens );
}
self.sig.to_tokens( tokens );
match &self.block
{
Some( block ) => tokens.append( proc_macro2::Group::new( proc_macro2::Delimiter::Brace, block.to_token_stream() ) ),
None => tokens.append( proc_macro2::Punct::new( ';', proc_macro2::Spacing::Alone ) ),
}
}
}
///
/// Descriptor of element of options.
///
pub enum Element
{
Fn( FnQuick ),
Signature( FnQuick ),
Field( syn::Field ),
}
impl Parse for Element
{
fn parse( input : ParseStream ) -> Result< Self >
{
let attrs : Vec< syn::Attribute > = input.call( syn::Attribute::parse_outer )?;
let vis : syn::Visibility = input.parse()?;
let lookahead1 = input.lookahead1();
if lookahead1.peek( syn::Token!{ fn } )
{
let sig : syn::Signature = input.parse()?;
let lookahead2 = input.lookahead1();
if lookahead2.peek( syn::token::Brace )
{
let input2;
let brace_token : syn::token::Brace = syn::braced!( input2 in input );
let block : proc_macro2::TokenStream = input2.parse()?;
let fn_desc = FnQuick
{
attrs,
vis,
sig,
block : Some( block ),
};
return Ok( Element::Fn( fn_desc ) );
}
else
{
let fn_desc = FnQuick
{
attrs,
vis,
sig,
block : None,
};
return Ok( Element::Fn( fn_desc ) );
}
}
else
{
input.call( syn::Field::parse_named ).map( | mut e |
{
e.vis = vis;
e.attrs = attrs;
Element::Field( e )
})
}
}
}
///
/// Descriptor of attribute options.
///
#[allow( dead_code )]
struct OptionsDescriptor
{
attrs : Vec< syn::Attribute >,
vis : syn::Visibility,
ident : syn::Ident,
generics: syn::Generics,
brace_token : syn::token::Brace,
methods_map : HashMap< String, FnQuick >,
signatures_map : HashMap< String, FnQuick >,
fields_map : HashMap< String, syn::Field >,
}
impl Parse for OptionsDescriptor
{
fn parse( input : ParseStream ) -> Result< Self >
{
let input2;
let vis = input.parse()?;
let ident = input.parse()?;
let generics = input.parse()?;
let brace_token = syn::braced!( input2 in input );
let mut attrs = input2.call( syn::Attribute::parse_inner )?;
let elements : syn::punctuated::Punctuated< Element, syn::Token!{ ; } > = input2.parse_terminated( Element::parse )?;
let mut methods_map = hmap!{};
let mut signatures_map = hmap!{};
let mut fields_map = hmap!{};
for attr in attrs.iter_mut()
{
attr.style = syn::AttrStyle::Outer;
}
for element in elements.into_iter()
{
match element
{
Element::Fn( f ) =>
{
methods_map.insert( f.sig.ident.to_string(), f );
},
Element::Signature( f ) =>
{
signatures_map.insert( f.sig.ident.to_string(), f );
},
Element::Field( f ) =>
{
let key = f.ident.as_ref().ok_or_else( || syn_err!( &f.clone(), "Field does not have name: {}", quote!{ #f } ) )?.to_string();
fields_map.insert( key, f );
},
}
}
let mut result = OptionsDescriptor
{
vis,
ident,
generics,
brace_token,
attrs,
methods_map,
signatures_map,
fields_map,
};
Ok( result )
}
}
///
/// Getter descriptor.
///
#[ derive( Debug ) ]
pub struct GetterDescriptor
{
attr : proc_macro2::TokenStream,
signature : proc_macro2::TokenStream,
body : proc_macro2::TokenStream,
}
//
impl quote::ToTokens for GetterDescriptor
{
fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream )
{
self.attr.to_tokens( tokens );
self.signature.to_tokens( tokens );
self.body.to_tokens( tokens );
}
}
///
/// Generate a getter for a field.
///
fn
|
( name : &str, field : &syn::Field ) -> Result< GetterDescriptor >
{
let name_ident = syn::Ident::new( &name, field.span() );
let ty = &field.ty;
// tree_print!( ty );
let ty_is_ref = matches!( ty, syn::Type::Reference( _ ) );
let ty2 = if ty_is_ref
{
ty.to_token_stream()
}
else
{
quote!{ & #ty }
};
let attr = quote!
{
#[ inline ]
};
let signature = quote!
{
fn #name_ident( &self ) -> #ty2
};
let body = quote!
{
{
&self.#name_ident
}
};
let result = GetterDescriptor
{
attr,
signature,
body,
};
Ok( result )
}
///
///
///
fn perform_gen( options_descriptor : &OptionsDescriptor ) -> ( proc_macro2::TokenStream, proc_macro2::TokenStream )
{
let mut perform = quote!{};
let mut attr_form_after = quote!{};
if let Some( perform_fn ) = options_descriptor.methods_map.get( "perform" )
{
let sig = &perform_fn.sig;
attr_form_after = quote!{ #[ form_after( #sig ) ] };
perform = quote!
{
#[ allow( unused_attributes ) ]
#[ inline ]
#perform_fn
}
}
( perform, attr_form_after )
}
///
/// Options macro handler.
///
pub fn options( attr : proc_macro::TokenStream, item : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream >
{
let options_descriptor = match syn::parse::< OptionsDescriptor >( item )
{
Ok( syntax_tree ) => syntax_tree,
Err( err ) => return Err( err ),
};
let name_ident = &options_descriptor.ident;
let generics = &options_descriptor.generics;
let attrs = &options_descriptor.attrs;
let mut fields_define = Vec::< &syn::Field >::new();
for ( name, field ) in options_descriptor.fields_map.iter()
{
fields_define.push( field );
}
let ( perform, attr_form_after ) = perform_gen( &options_descriptor );
let getters = options_descriptor.fields_map.iter().map( | ( key, field ) | getter_gen( key, field ) );
let getters : Vec< _ > = process_results( getters, | iter | iter.collect() )?;
let getters_signatures : Vec< _ > = getters.iter().map( | e | e.signature.clone() ).collect();
let result = quote!
{
mod #name_ident
{
#[cfg( feature = "in_wtools" )]
use ::wtools::options::*;
#[cfg( not( feature = "in_wtools" ) )]
use ::woptions::*;
#( #attrs )*
#[ derive( Former, PartialEq, Debug ) ]
#attr_form_after
pub struct Options #generics
{
#( #fields_define, )*
}
pub trait OptionsAdapter #generics
{
#( #getters_signatures ; )*
#perform
}
impl #generics OptionsAdapter #generics for Options #generics
{
#( #getters )*
}
#[ inline ]
pub fn former #generics() -> OptionsFormer #generics
{
Options::#generics::former()
}
}
#[ inline ]
fn #name_ident #generics () -> #name_ident::OptionsFormer #generics
{
#name_ident::former::#generics()
}
};
Ok( result )
}
//
// = Input :
//
// Options!{ split< 'a >
// {
// #![ derive( PartialOrd ) ]
//
// pub src : &'a str;
// pub delimeter : &'a str;
// #[ default( true ) ]
// pub left : bool;
//
// fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) >
// where
// Self : Sized,
// {
// if *self.left()
// {
// Box::new( self.src().split( self.delimeter() ) )
// }
// else
// {
// Box::new( self.src().rsplit( self.delimeter() ) )
// }
// }
//
// }}
//
//
// = Output:
//
// #[ derive( PartialOrd ) ]
// #[ derive( Former, PartialEq, Debug ) ]
// #[ form_after( fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) > ) ]
// pub struct Options< 'a >
// {
// pub src : &'a str,
// pub delimeter : &'a str,
// #[ default( true ) ]
// pub left : bool,
// }
//
// pub trait OptionsAdapter< 'a >
// {
// fn src( &self ) -> &'a str;
// fn delimeter( &self ) -> &'a str;
// fn left( &self ) -> &bool;
// #[ inline ]
// fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) >
// where
// Self : Sized,
// {
// if *self.left()
// {
// Box::new( self.src().split( self.delimeter() ) )
// }
// else
// {
// Box::new( self.src().rsplit( self.delimeter() ) )
// }
// }
// }
//
// impl< 'a > OptionsAdapter< 'a > for Options< 'a >
// {
// #[ inline ]
// fn src( &self ) -> &'a str
// {
// &self.src
// }
// #[ inline ]
// fn delimeter( &self ) -> &'a str
// {
// &self.delimeter
// }
// #[ inline ]
// fn left( &self ) -> &bool
// {
// &self.left
// }
// }
//
// #[ inline ]
// pub fn former< 'a >() -> OptionsFormer< 'a >
// {
// Options::< 'a >::former()
// }
//
// }
//
// #[ inline ]
// fn split< 'a >() -> split::OptionsFormer< 'a >
// {
// split::former::< 'a >()
// }
//
|
getter_gen
|
identifier_name
|
options.rs
|
#![ allow( unused_imports ) ]
#![ allow( unused_mut ) ]
#![ allow( dead_code ) ]
#![ allow( unused_variables ) ]
#![ warn( missing_docs ) ]
#![ warn( missing_debug_implementations ) ]
use meta_tools::*;
use quote::{ quote, ToTokens, TokenStreamExt };
use syn::parse::*;
use syn::spanned::Spanned;
use proc_macro_tools::*;
use std::collections::HashMap;
use iter_tools::{ Itertools, process_results };
pub type Result< T > = std::result::Result< T, syn::Error >;
///
/// Descriptor of a function with a body, body of which is not parsed.
///
pub struct FnQuick
{
pub attrs : Vec< syn::Attribute >,
pub vis : syn::Visibility,
pub sig : syn::Signature,
pub block : Option< proc_macro2::TokenStream >,
}
impl quote::ToTokens for FnQuick
{
fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream )
{
for attr in self.attrs.iter()
{
attr.to_tokens( tokens );
}
self.sig.to_tokens( tokens );
match &self.block
{
Some( block ) => tokens.append( proc_macro2::Group::new( proc_macro2::Delimiter::Brace, block.to_token_stream() ) ),
None => tokens.append( proc_macro2::Punct::new( ';', proc_macro2::Spacing::Alone ) ),
}
}
}
///
/// Descriptor of element of options.
///
pub enum Element
{
Fn( FnQuick ),
Signature( FnQuick ),
Field( syn::Field ),
}
impl Parse for Element
{
fn parse( input : ParseStream ) -> Result< Self >
{
let attrs : Vec< syn::Attribute > = input.call( syn::Attribute::parse_outer )?;
let vis : syn::Visibility = input.parse()?;
let lookahead1 = input.lookahead1();
if lookahead1.peek( syn::Token!{ fn } )
|
let fn_desc = FnQuick
{
attrs,
vis,
sig,
block : None,
};
return Ok( Element::Fn( fn_desc ) );
}
}
else
{
input.call( syn::Field::parse_named ).map( | mut e |
{
e.vis = vis;
e.attrs = attrs;
Element::Field( e )
})
}
}
}
///
/// Descriptor of attribute options.
///
#[allow( dead_code )]
struct OptionsDescriptor
{
attrs : Vec< syn::Attribute >,
vis : syn::Visibility,
ident : syn::Ident,
generics: syn::Generics,
brace_token : syn::token::Brace,
methods_map : HashMap< String, FnQuick >,
signatures_map : HashMap< String, FnQuick >,
fields_map : HashMap< String, syn::Field >,
}
impl Parse for OptionsDescriptor
{
fn parse( input : ParseStream ) -> Result< Self >
{
let input2;
let vis = input.parse()?;
let ident = input.parse()?;
let generics = input.parse()?;
let brace_token = syn::braced!( input2 in input );
let mut attrs = input2.call( syn::Attribute::parse_inner )?;
let elements : syn::punctuated::Punctuated< Element, syn::Token!{ ; } > = input2.parse_terminated( Element::parse )?;
let mut methods_map = hmap!{};
let mut signatures_map = hmap!{};
let mut fields_map = hmap!{};
for attr in attrs.iter_mut()
{
attr.style = syn::AttrStyle::Outer;
}
for element in elements.into_iter()
{
match element
{
Element::Fn( f ) =>
{
methods_map.insert( f.sig.ident.to_string(), f );
},
Element::Signature( f ) =>
{
signatures_map.insert( f.sig.ident.to_string(), f );
},
Element::Field( f ) =>
{
let key = f.ident.as_ref().ok_or_else( || syn_err!( &f.clone(), "Field does not have name: {}", quote!{ #f } ) )?.to_string();
fields_map.insert( key, f );
},
}
}
let mut result = OptionsDescriptor
{
vis,
ident,
generics,
brace_token,
attrs,
methods_map,
signatures_map,
fields_map,
};
Ok( result )
}
}
///
/// Getter descriptor.
///
#[ derive( Debug ) ]
pub struct GetterDescriptor
{
attr : proc_macro2::TokenStream,
signature : proc_macro2::TokenStream,
body : proc_macro2::TokenStream,
}
//
impl quote::ToTokens for GetterDescriptor
{
fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream )
{
self.attr.to_tokens( tokens );
self.signature.to_tokens( tokens );
self.body.to_tokens( tokens );
}
}
///
/// Generate a getter for a field.
///
fn getter_gen( name : &str, field : &syn::Field ) -> Result< GetterDescriptor >
{
let name_ident = syn::Ident::new( &name, field.span() );
let ty = &field.ty;
// tree_print!( ty );
let ty_is_ref = matches!( ty, syn::Type::Reference( _ ) );
let ty2 = if ty_is_ref
{
ty.to_token_stream()
}
else
{
quote!{ & #ty }
};
let attr = quote!
{
#[ inline ]
};
let signature = quote!
{
fn #name_ident( &self ) -> #ty2
};
let body = quote!
{
{
&self.#name_ident
}
};
let result = GetterDescriptor
{
attr,
signature,
body,
};
Ok( result )
}
///
///
///
fn perform_gen( options_descriptor : &OptionsDescriptor ) -> ( proc_macro2::TokenStream, proc_macro2::TokenStream )
{
let mut perform = quote!{};
let mut attr_form_after = quote!{};
if let Some( perform_fn ) = options_descriptor.methods_map.get( "perform" )
{
let sig = &perform_fn.sig;
attr_form_after = quote!{ #[ form_after( #sig ) ] };
perform = quote!
{
#[ allow( unused_attributes ) ]
#[ inline ]
#perform_fn
}
}
( perform, attr_form_after )
}
///
/// Options macro handler.
///
pub fn options( attr : proc_macro::TokenStream, item : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream >
{
let options_descriptor = match syn::parse::< OptionsDescriptor >( item )
{
Ok( syntax_tree ) => syntax_tree,
Err( err ) => return Err( err ),
};
let name_ident = &options_descriptor.ident;
let generics = &options_descriptor.generics;
let attrs = &options_descriptor.attrs;
let mut fields_define = Vec::< &syn::Field >::new();
for ( name, field ) in options_descriptor.fields_map.iter()
{
fields_define.push( field );
}
let ( perform, attr_form_after ) = perform_gen( &options_descriptor );
let getters = options_descriptor.fields_map.iter().map( | ( key, field ) | getter_gen( key, field ) );
let getters : Vec< _ > = process_results( getters, | iter | iter.collect() )?;
let getters_signatures : Vec< _ > = getters.iter().map( | e | e.signature.clone() ).collect();
let result = quote!
{
mod #name_ident
{
#[cfg( feature = "in_wtools" )]
use ::wtools::options::*;
#[cfg( not( feature = "in_wtools" ) )]
use ::woptions::*;
#( #attrs )*
#[ derive( Former, PartialEq, Debug ) ]
#attr_form_after
pub struct Options #generics
{
#( #fields_define, )*
}
pub trait OptionsAdapter #generics
{
#( #getters_signatures ; )*
#perform
}
impl #generics OptionsAdapter #generics for Options #generics
{
#( #getters )*
}
#[ inline ]
pub fn former #generics() -> OptionsFormer #generics
{
Options::#generics::former()
}
}
#[ inline ]
fn #name_ident #generics () -> #name_ident::OptionsFormer #generics
{
#name_ident::former::#generics()
}
};
Ok( result )
}
//
// = Input :
//
// Options!{ split< 'a >
// {
// #![ derive( PartialOrd ) ]
//
// pub src : &'a str;
// pub delimeter : &'a str;
// #[ default( true ) ]
// pub left : bool;
//
// fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) >
// where
// Self : Sized,
// {
// if *self.left()
// {
// Box::new( self.src().split( self.delimeter() ) )
// }
// else
// {
// Box::new( self.src().rsplit( self.delimeter() ) )
// }
// }
//
// }}
//
//
// = Output:
//
// #[ derive( PartialOrd ) ]
// #[ derive( Former, PartialEq, Debug ) ]
// #[ form_after( fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) > ) ]
// pub struct Options< 'a >
// {
// pub src : &'a str,
// pub delimeter : &'a str,
// #[ default( true ) ]
// pub left : bool,
// }
//
// pub trait OptionsAdapter< 'a >
// {
// fn src( &self ) -> &'a str;
// fn delimeter( &self ) -> &'a str;
// fn left( &self ) -> &bool;
// #[ inline ]
// fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) >
// where
// Self : Sized,
// {
// if *self.left()
// {
// Box::new( self.src().split( self.delimeter() ) )
// }
// else
// {
// Box::new( self.src().rsplit( self.delimeter() ) )
// }
// }
// }
//
// impl< 'a > OptionsAdapter< 'a > for Options< 'a >
// {
// #[ inline ]
// fn src( &self ) -> &'a str
// {
// &self.src
// }
// #[ inline ]
// fn delimeter( &self ) -> &'a str
// {
// &self.delimeter
// }
// #[ inline ]
// fn left( &self ) -> &bool
// {
// &self.left
// }
// }
//
// #[ inline ]
// pub fn former< 'a >() -> OptionsFormer< 'a >
// {
// Options::< 'a >::former()
// }
//
// }
//
// #[ inline ]
// fn split< 'a >() -> split::OptionsFormer< 'a >
// {
// split::former::< 'a >()
// }
//
|
{
let sig : syn::Signature = input.parse()?;
let lookahead2 = input.lookahead1();
if lookahead2.peek( syn::token::Brace )
{
let input2;
let brace_token : syn::token::Brace = syn::braced!( input2 in input );
let block : proc_macro2::TokenStream = input2.parse()?;
let fn_desc = FnQuick
{
attrs,
vis,
sig,
block : Some( block ),
};
return Ok( Element::Fn( fn_desc ) );
}
else
{
|
conditional_block
|
options.rs
|
#![ allow( unused_imports ) ]
#![ allow( unused_mut ) ]
#![ allow( dead_code ) ]
#![ allow( unused_variables ) ]
#![ warn( missing_docs ) ]
#![ warn( missing_debug_implementations ) ]
use meta_tools::*;
use quote::{ quote, ToTokens, TokenStreamExt };
use syn::parse::*;
use syn::spanned::Spanned;
use proc_macro_tools::*;
use std::collections::HashMap;
use iter_tools::{ Itertools, process_results };
pub type Result< T > = std::result::Result< T, syn::Error >;
///
/// Descriptor of a function with a body, body of which is not parsed.
///
pub struct FnQuick
{
pub attrs : Vec< syn::Attribute >,
pub vis : syn::Visibility,
pub sig : syn::Signature,
pub block : Option< proc_macro2::TokenStream >,
}
impl quote::ToTokens for FnQuick
{
fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream )
{
|
for attr in self.attrs.iter()
{
attr.to_tokens( tokens );
}
self.sig.to_tokens( tokens );
match &self.block
{
Some( block ) => tokens.append( proc_macro2::Group::new( proc_macro2::Delimiter::Brace, block.to_token_stream() ) ),
None => tokens.append( proc_macro2::Punct::new( ';', proc_macro2::Spacing::Alone ) ),
}
}
}
///
/// Descriptor of element of options.
///
pub enum Element
{
Fn( FnQuick ),
Signature( FnQuick ),
Field( syn::Field ),
}
impl Parse for Element
{
fn parse( input : ParseStream ) -> Result< Self >
{
let attrs : Vec< syn::Attribute > = input.call( syn::Attribute::parse_outer )?;
let vis : syn::Visibility = input.parse()?;
let lookahead1 = input.lookahead1();
if lookahead1.peek( syn::Token!{ fn } )
{
let sig : syn::Signature = input.parse()?;
let lookahead2 = input.lookahead1();
if lookahead2.peek( syn::token::Brace )
{
let input2;
let brace_token : syn::token::Brace = syn::braced!( input2 in input );
let block : proc_macro2::TokenStream = input2.parse()?;
let fn_desc = FnQuick
{
attrs,
vis,
sig,
block : Some( block ),
};
return Ok( Element::Fn( fn_desc ) );
}
else
{
let fn_desc = FnQuick
{
attrs,
vis,
sig,
block : None,
};
return Ok( Element::Fn( fn_desc ) );
}
}
else
{
input.call( syn::Field::parse_named ).map( | mut e |
{
e.vis = vis;
e.attrs = attrs;
Element::Field( e )
})
}
}
}
///
/// Descriptor of attribute options.
///
#[allow( dead_code )]
struct OptionsDescriptor
{
attrs : Vec< syn::Attribute >,
vis : syn::Visibility,
ident : syn::Ident,
generics: syn::Generics,
brace_token : syn::token::Brace,
methods_map : HashMap< String, FnQuick >,
signatures_map : HashMap< String, FnQuick >,
fields_map : HashMap< String, syn::Field >,
}
impl Parse for OptionsDescriptor
{
fn parse( input : ParseStream ) -> Result< Self >
{
let input2;
let vis = input.parse()?;
let ident = input.parse()?;
let generics = input.parse()?;
let brace_token = syn::braced!( input2 in input );
let mut attrs = input2.call( syn::Attribute::parse_inner )?;
let elements : syn::punctuated::Punctuated< Element, syn::Token!{ ; } > = input2.parse_terminated( Element::parse )?;
let mut methods_map = hmap!{};
let mut signatures_map = hmap!{};
let mut fields_map = hmap!{};
for attr in attrs.iter_mut()
{
attr.style = syn::AttrStyle::Outer;
}
for element in elements.into_iter()
{
match element
{
Element::Fn( f ) =>
{
methods_map.insert( f.sig.ident.to_string(), f );
},
Element::Signature( f ) =>
{
signatures_map.insert( f.sig.ident.to_string(), f );
},
Element::Field( f ) =>
{
let key = f.ident.as_ref().ok_or_else( || syn_err!( &f.clone(), "Field does not have name: {}", quote!{ #f } ) )?.to_string();
fields_map.insert( key, f );
},
}
}
let mut result = OptionsDescriptor
{
vis,
ident,
generics,
brace_token,
attrs,
methods_map,
signatures_map,
fields_map,
};
Ok( result )
}
}
///
/// Getter descriptor.
///
#[ derive( Debug ) ]
pub struct GetterDescriptor
{
attr : proc_macro2::TokenStream,
signature : proc_macro2::TokenStream,
body : proc_macro2::TokenStream,
}
//
impl quote::ToTokens for GetterDescriptor
{
fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream )
{
self.attr.to_tokens( tokens );
self.signature.to_tokens( tokens );
self.body.to_tokens( tokens );
}
}
///
/// Generate a getter for a field.
///
fn getter_gen( name : &str, field : &syn::Field ) -> Result< GetterDescriptor >
{
let name_ident = syn::Ident::new( &name, field.span() );
let ty = &field.ty;
// tree_print!( ty );
let ty_is_ref = matches!( ty, syn::Type::Reference( _ ) );
let ty2 = if ty_is_ref
{
ty.to_token_stream()
}
else
{
quote!{ & #ty }
};
let attr = quote!
{
#[ inline ]
};
let signature = quote!
{
fn #name_ident( &self ) -> #ty2
};
let body = quote!
{
{
&self.#name_ident
}
};
let result = GetterDescriptor
{
attr,
signature,
body,
};
Ok( result )
}
///
///
///
fn perform_gen( options_descriptor : &OptionsDescriptor ) -> ( proc_macro2::TokenStream, proc_macro2::TokenStream )
{
let mut perform = quote!{};
let mut attr_form_after = quote!{};
if let Some( perform_fn ) = options_descriptor.methods_map.get( "perform" )
{
let sig = &perform_fn.sig;
attr_form_after = quote!{ #[ form_after( #sig ) ] };
perform = quote!
{
#[ allow( unused_attributes ) ]
#[ inline ]
#perform_fn
}
}
( perform, attr_form_after )
}
///
/// Options macro handler.
///
pub fn options( attr : proc_macro::TokenStream, item : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream >
{
let options_descriptor = match syn::parse::< OptionsDescriptor >( item )
{
Ok( syntax_tree ) => syntax_tree,
Err( err ) => return Err( err ),
};
let name_ident = &options_descriptor.ident;
let generics = &options_descriptor.generics;
let attrs = &options_descriptor.attrs;
let mut fields_define = Vec::< &syn::Field >::new();
for ( name, field ) in options_descriptor.fields_map.iter()
{
fields_define.push( field );
}
let ( perform, attr_form_after ) = perform_gen( &options_descriptor );
let getters = options_descriptor.fields_map.iter().map( | ( key, field ) | getter_gen( key, field ) );
let getters : Vec< _ > = process_results( getters, | iter | iter.collect() )?;
let getters_signatures : Vec< _ > = getters.iter().map( | e | e.signature.clone() ).collect();
let result = quote!
{
mod #name_ident
{
#[cfg( feature = "in_wtools" )]
use ::wtools::options::*;
#[cfg( not( feature = "in_wtools" ) )]
use ::woptions::*;
#( #attrs )*
#[ derive( Former, PartialEq, Debug ) ]
#attr_form_after
pub struct Options #generics
{
#( #fields_define, )*
}
pub trait OptionsAdapter #generics
{
#( #getters_signatures ; )*
#perform
}
impl #generics OptionsAdapter #generics for Options #generics
{
#( #getters )*
}
#[ inline ]
pub fn former #generics() -> OptionsFormer #generics
{
Options::#generics::former()
}
}
#[ inline ]
fn #name_ident #generics () -> #name_ident::OptionsFormer #generics
{
#name_ident::former::#generics()
}
};
Ok( result )
}
//
// = Input :
//
// Options!{ split< 'a >
// {
// #![ derive( PartialOrd ) ]
//
// pub src : &'a str;
// pub delimeter : &'a str;
// #[ default( true ) ]
// pub left : bool;
//
// fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) >
// where
// Self : Sized,
// {
// if *self.left()
// {
// Box::new( self.src().split( self.delimeter() ) )
// }
// else
// {
// Box::new( self.src().rsplit( self.delimeter() ) )
// }
// }
//
// }}
//
//
// = Output:
//
// #[ derive( PartialOrd ) ]
// #[ derive( Former, PartialEq, Debug ) ]
// #[ form_after( fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) > ) ]
// pub struct Options< 'a >
// {
// pub src : &'a str,
// pub delimeter : &'a str,
// #[ default( true ) ]
// pub left : bool,
// }
//
// pub trait OptionsAdapter< 'a >
// {
// fn src( &self ) -> &'a str;
// fn delimeter( &self ) -> &'a str;
// fn left( &self ) -> &bool;
// #[ inline ]
// fn perform( self ) -> Box< ( dyn std::iter::Iterator< Item = &'a str > + 'a ) >
// where
// Self : Sized,
// {
// if *self.left()
// {
// Box::new( self.src().split( self.delimeter() ) )
// }
// else
// {
// Box::new( self.src().rsplit( self.delimeter() ) )
// }
// }
// }
//
// impl< 'a > OptionsAdapter< 'a > for Options< 'a >
// {
// #[ inline ]
// fn src( &self ) -> &'a str
// {
// &self.src
// }
// #[ inline ]
// fn delimeter( &self ) -> &'a str
// {
// &self.delimeter
// }
// #[ inline ]
// fn left( &self ) -> &bool
// {
// &self.left
// }
// }
//
// #[ inline ]
// pub fn former< 'a >() -> OptionsFormer< 'a >
// {
// Options::< 'a >::former()
// }
//
// }
//
// #[ inline ]
// fn split< 'a >() -> split::OptionsFormer< 'a >
// {
// split::former::< 'a >()
// }
//
|
random_line_split
|
|
reftest.rs
|
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(unused_imports)]
#![deny(unused_variables)]
extern crate png;
extern crate test;
extern crate regex;
extern crate url;
use std::ascii::AsciiExt;
use std::io;
use std::io::{File, Reader, Command};
use std::io::process::ExitStatus;
use std::io::fs::PathExtensions;
use std::os;
use std::path::Path;
use test::{AutoColor, DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn};
use test::run_tests_console;
use regex::Regex;
use url::Url;
bitflags!(
flags RenderMode: u32 {
const CPU_RENDERING = 0x00000001,
const GPU_RENDERING = 0x00000010,
const LINUX_TARGET = 0x00000100,
const MACOS_TARGET = 0x00001000,
const ANDROID_TARGET = 0x00010000
}
)
fn main() {
let args = os::args();
let mut parts = args.tail().split(|e| "--" == e.as_slice());
let harness_args = parts.next().unwrap(); //.split() is never empty
let servo_args = parts.next().unwrap_or(&[]);
let (render_mode_string, base_path, testname) = match harness_args {
[] | [_] => panic!("USAGE: cpu|gpu base_path [testname regex]"),
[ref render_mode_string, ref base_path] => (render_mode_string, base_path, None),
[ref render_mode_string, ref base_path, ref testname,..] => (render_mode_string, base_path, Some(Regex::new(testname.as_slice()).unwrap())),
};
let mut render_mode = match render_mode_string.as_slice() {
"cpu" => CPU_RENDERING,
"gpu" => GPU_RENDERING,
_ => panic!("First argument must specify cpu or gpu as rendering mode")
};
if cfg!(target_os = "linux") {
render_mode.insert(LINUX_TARGET);
}
if cfg!(target_os = "macos") {
render_mode.insert(MACOS_TARGET);
}
if cfg!(target_os = "android") {
render_mode.insert(ANDROID_TARGET);
}
let mut all_tests = vec!();
println!("Scanning {} for manifests\n", base_path);
for file in io::fs::walk_dir(&Path::new(base_path.as_slice())).unwrap() {
let maybe_extension = file.extension_str();
match maybe_extension {
Some(extension) => {
if extension.to_ascii_lower().as_slice() == "list" && file.is_file() {
let tests = parse_lists(&file, servo_args, render_mode, all_tests.len());
println!("\t{} [{} tests]", file.display(), tests.len());
all_tests.extend(tests.into_iter());
}
}
_ => {}
}
}
let test_opts = TestOpts {
filter: testname,
run_ignored: false,
logfile: None,
run_tests: true,
run_benchmarks: false,
ratchet_noise_percent: None,
ratchet_metrics: None,
save_metrics: None,
test_shard: None,
nocapture: false,
color: AutoColor
};
match run_tests_console(&test_opts, all_tests) {
Ok(false) => os::set_exit_status(1), // tests failed
Err(_) => os::set_exit_status(2), // I/O-related failure
_ => (),
}
}
#[deriving(PartialEq)]
enum ReftestKind {
Same,
Different,
}
struct Reftest {
name: String,
kind: ReftestKind,
files: [Path,..2],
id: uint,
servo_args: Vec<String>,
render_mode: RenderMode,
is_flaky: bool,
experimental: bool,
fragment_identifier: Option<String>,
}
struct
|
<'a> {
conditions: &'a str,
kind: &'a str,
file_left: &'a str,
file_right: &'a str,
}
fn parse_lists(file: &Path, servo_args: &[String], render_mode: RenderMode, id_offset: uint) -> Vec<TestDescAndFn> {
let mut tests = Vec::new();
let contents = File::open_mode(file, io::Open, io::Read)
.and_then(|mut f| f.read_to_string())
.ok().expect("Could not read file");
for line in contents.as_slice().lines() {
// ignore comments or empty lines
if line.starts_with("#") || line.is_empty() {
continue;
}
let parts: Vec<&str> = line.split(' ').filter(|p|!p.is_empty()).collect();
let test_line = match parts.len() {
3 => TestLine {
conditions: "",
kind: parts[0],
file_left: parts[1],
file_right: parts[2],
},
4 => TestLine {
conditions: parts[0],
kind: parts[1],
file_left: parts[2],
file_right: parts[3],
},
_ => panic!("reftest line: '{:s}' doesn't match '[CONDITIONS] KIND LEFT RIGHT'", line),
};
let kind = match test_line.kind {
"==" => Same,
"!=" => Different,
part => panic!("reftest line: '{:s}' has invalid kind '{:s}'", line, part)
};
let base = file.dir_path();
let file_left = base.join(test_line.file_left);
let file_right = base.join(test_line.file_right);
let mut conditions_list = test_line.conditions.split(',');
let mut flakiness = RenderMode::empty();
let mut experimental = false;
let mut fragment_identifier = None;
for condition in conditions_list {
match condition {
"flaky_cpu" => flakiness.insert(CPU_RENDERING),
"flaky_gpu" => flakiness.insert(GPU_RENDERING),
"flaky_linux" => flakiness.insert(LINUX_TARGET),
"flaky_macos" => flakiness.insert(MACOS_TARGET),
"experimental" => experimental = true,
_ => (),
}
if condition.starts_with("fragment=") {
fragment_identifier = Some(condition.slice_from("fragment=".len()).to_string());
}
}
let reftest = Reftest {
name: format!("{} {} {}", test_line.file_left, test_line.kind, test_line.file_right),
kind: kind,
files: [file_left, file_right],
id: id_offset + tests.len(),
render_mode: render_mode,
servo_args: servo_args.iter().map(|x| x.clone()).collect(),
is_flaky: render_mode.intersects(flakiness),
experimental: experimental,
fragment_identifier: fragment_identifier,
};
tests.push(make_test(reftest));
}
tests
}
fn make_test(reftest: Reftest) -> TestDescAndFn {
let name = reftest.name.clone();
TestDescAndFn {
desc: TestDesc {
name: DynTestName(name),
ignore: false,
should_fail: false,
},
testfn: DynTestFn(proc() {
check_reftest(reftest);
}),
}
}
fn capture(reftest: &Reftest, side: uint) -> (u32, u32, Vec<u8>) {
let png_filename = format!("/tmp/servo-reftest-{:06u}-{:u}.png", reftest.id, side);
let mut command = Command::new("target/servo");
command
.args(reftest.servo_args.as_slice())
// Allows pixel perfect rendering of Ahem font for reftests.
.arg("-Z")
.arg("disable-text-aa")
.args(["-f", "-o"])
.arg(png_filename.as_slice())
.arg({
let mut url = Url::from_file_path(&reftest.files[side]).unwrap();
url.fragment = reftest.fragment_identifier.clone();
url.to_string()
});
// CPU rendering is the default
if reftest.render_mode.contains(CPU_RENDERING) {
command.arg("-c");
}
if reftest.render_mode.contains(GPU_RENDERING) {
command.arg("-g");
}
if reftest.experimental {
command.arg("--experimental");
}
let retval = match command.status() {
Ok(status) => status,
Err(e) => panic!("failed to execute process: {}", e),
};
assert_eq!(retval, ExitStatus(0));
let image = png::load_png(&from_str::<Path>(png_filename.as_slice()).unwrap()).unwrap();
let rgba8_bytes = match image.pixels {
png::RGBA8(pixels) => pixels,
_ => panic!(),
};
(image.width, image.height, rgba8_bytes)
}
fn check_reftest(reftest: Reftest) {
let (left_width, left_height, left_bytes) = capture(&reftest, 0);
let (right_width, right_height, right_bytes) = capture(&reftest, 1);
assert_eq!(left_width, right_width);
assert_eq!(left_height, right_height);
let left_all_white = left_bytes.iter().all(|&p| p == 255);
let right_all_white = right_bytes.iter().all(|&p| p == 255);
if left_all_white && right_all_white {
panic!("Both renderings are empty")
}
let pixels = left_bytes.iter().zip(right_bytes.iter()).map(|(&a, &b)| {
if a as i8 - b as i8 == 0 {
// White for correct
0xFF
} else {
// "1100" in the RGBA channel with an error for an incorrect value
// This results in some number of C0 and FFs, which is much more
// readable (and distinguishable) than the previous difference-wise
// scaling but does not require reconstructing the actual RGBA pixel.
0xC0
}
}).collect::<Vec<u8>>();
if pixels.iter().any(|&a| a < 255) {
let output_str = format!("/tmp/servo-reftest-{:06u}-diff.png", reftest.id);
let output = from_str::<Path>(output_str.as_slice()).unwrap();
let mut img = png::Image {
width: left_width,
height: left_height,
pixels: png::RGBA8(pixels),
};
let res = png::store_png(&mut img, &output);
assert!(res.is_ok());
match (reftest.kind, reftest.is_flaky) {
(Same, true) => println!("flaky test - rendering difference: {}", output_str),
(Same, false) => panic!("rendering difference: {}", output_str),
(Different, _) => {} // Result was different and that's what was expected
}
} else {
assert!(reftest.is_flaky || reftest.kind == Same);
}
}
|
TestLine
|
identifier_name
|
reftest.rs
|
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(unused_imports)]
#![deny(unused_variables)]
extern crate png;
extern crate test;
extern crate regex;
extern crate url;
use std::ascii::AsciiExt;
use std::io;
use std::io::{File, Reader, Command};
use std::io::process::ExitStatus;
use std::io::fs::PathExtensions;
use std::os;
use std::path::Path;
use test::{AutoColor, DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn};
use test::run_tests_console;
use regex::Regex;
use url::Url;
bitflags!(
flags RenderMode: u32 {
const CPU_RENDERING = 0x00000001,
const GPU_RENDERING = 0x00000010,
const LINUX_TARGET = 0x00000100,
const MACOS_TARGET = 0x00001000,
const ANDROID_TARGET = 0x00010000
}
)
fn main() {
let args = os::args();
let mut parts = args.tail().split(|e| "--" == e.as_slice());
let harness_args = parts.next().unwrap(); //.split() is never empty
let servo_args = parts.next().unwrap_or(&[]);
let (render_mode_string, base_path, testname) = match harness_args {
[] | [_] => panic!("USAGE: cpu|gpu base_path [testname regex]"),
[ref render_mode_string, ref base_path] => (render_mode_string, base_path, None),
[ref render_mode_string, ref base_path, ref testname,..] => (render_mode_string, base_path, Some(Regex::new(testname.as_slice()).unwrap())),
};
let mut render_mode = match render_mode_string.as_slice() {
"cpu" => CPU_RENDERING,
"gpu" => GPU_RENDERING,
_ => panic!("First argument must specify cpu or gpu as rendering mode")
};
if cfg!(target_os = "linux") {
render_mode.insert(LINUX_TARGET);
}
if cfg!(target_os = "macos") {
render_mode.insert(MACOS_TARGET);
}
if cfg!(target_os = "android") {
render_mode.insert(ANDROID_TARGET);
}
let mut all_tests = vec!();
println!("Scanning {} for manifests\n", base_path);
for file in io::fs::walk_dir(&Path::new(base_path.as_slice())).unwrap() {
let maybe_extension = file.extension_str();
match maybe_extension {
Some(extension) => {
if extension.to_ascii_lower().as_slice() == "list" && file.is_file() {
let tests = parse_lists(&file, servo_args, render_mode, all_tests.len());
println!("\t{} [{} tests]", file.display(), tests.len());
all_tests.extend(tests.into_iter());
}
}
_ => {}
}
}
let test_opts = TestOpts {
filter: testname,
run_ignored: false,
logfile: None,
run_tests: true,
run_benchmarks: false,
ratchet_noise_percent: None,
ratchet_metrics: None,
save_metrics: None,
test_shard: None,
nocapture: false,
color: AutoColor
};
match run_tests_console(&test_opts, all_tests) {
Ok(false) => os::set_exit_status(1), // tests failed
Err(_) => os::set_exit_status(2), // I/O-related failure
_ => (),
}
}
#[deriving(PartialEq)]
enum ReftestKind {
Same,
Different,
}
struct Reftest {
name: String,
kind: ReftestKind,
files: [Path,..2],
id: uint,
servo_args: Vec<String>,
render_mode: RenderMode,
is_flaky: bool,
experimental: bool,
fragment_identifier: Option<String>,
}
struct TestLine<'a> {
conditions: &'a str,
kind: &'a str,
file_left: &'a str,
file_right: &'a str,
}
fn parse_lists(file: &Path, servo_args: &[String], render_mode: RenderMode, id_offset: uint) -> Vec<TestDescAndFn> {
let mut tests = Vec::new();
let contents = File::open_mode(file, io::Open, io::Read)
.and_then(|mut f| f.read_to_string())
.ok().expect("Could not read file");
for line in contents.as_slice().lines() {
// ignore comments or empty lines
if line.starts_with("#") || line.is_empty() {
continue;
}
let parts: Vec<&str> = line.split(' ').filter(|p|!p.is_empty()).collect();
let test_line = match parts.len() {
3 => TestLine {
conditions: "",
kind: parts[0],
file_left: parts[1],
file_right: parts[2],
},
4 => TestLine {
conditions: parts[0],
kind: parts[1],
file_left: parts[2],
file_right: parts[3],
},
_ => panic!("reftest line: '{:s}' doesn't match '[CONDITIONS] KIND LEFT RIGHT'", line),
};
let kind = match test_line.kind {
"==" => Same,
"!=" => Different,
part => panic!("reftest line: '{:s}' has invalid kind '{:s}'", line, part)
};
let base = file.dir_path();
let file_left = base.join(test_line.file_left);
let file_right = base.join(test_line.file_right);
let mut conditions_list = test_line.conditions.split(',');
let mut flakiness = RenderMode::empty();
let mut experimental = false;
let mut fragment_identifier = None;
for condition in conditions_list {
match condition {
"flaky_cpu" => flakiness.insert(CPU_RENDERING),
"flaky_gpu" => flakiness.insert(GPU_RENDERING),
"flaky_linux" => flakiness.insert(LINUX_TARGET),
"flaky_macos" => flakiness.insert(MACOS_TARGET),
"experimental" => experimental = true,
_ => (),
}
if condition.starts_with("fragment=") {
fragment_identifier = Some(condition.slice_from("fragment=".len()).to_string());
}
}
let reftest = Reftest {
name: format!("{} {} {}", test_line.file_left, test_line.kind, test_line.file_right),
kind: kind,
files: [file_left, file_right],
id: id_offset + tests.len(),
render_mode: render_mode,
servo_args: servo_args.iter().map(|x| x.clone()).collect(),
is_flaky: render_mode.intersects(flakiness),
experimental: experimental,
fragment_identifier: fragment_identifier,
};
tests.push(make_test(reftest));
}
tests
}
fn make_test(reftest: Reftest) -> TestDescAndFn {
let name = reftest.name.clone();
TestDescAndFn {
desc: TestDesc {
name: DynTestName(name),
ignore: false,
should_fail: false,
},
testfn: DynTestFn(proc() {
check_reftest(reftest);
}),
}
}
fn capture(reftest: &Reftest, side: uint) -> (u32, u32, Vec<u8>) {
let png_filename = format!("/tmp/servo-reftest-{:06u}-{:u}.png", reftest.id, side);
let mut command = Command::new("target/servo");
command
.args(reftest.servo_args.as_slice())
// Allows pixel perfect rendering of Ahem font for reftests.
.arg("-Z")
.arg("disable-text-aa")
.args(["-f", "-o"])
.arg(png_filename.as_slice())
.arg({
let mut url = Url::from_file_path(&reftest.files[side]).unwrap();
url.fragment = reftest.fragment_identifier.clone();
url.to_string()
});
// CPU rendering is the default
if reftest.render_mode.contains(CPU_RENDERING) {
command.arg("-c");
|
}
if reftest.render_mode.contains(GPU_RENDERING) {
command.arg("-g");
}
if reftest.experimental {
command.arg("--experimental");
}
let retval = match command.status() {
Ok(status) => status,
Err(e) => panic!("failed to execute process: {}", e),
};
assert_eq!(retval, ExitStatus(0));
let image = png::load_png(&from_str::<Path>(png_filename.as_slice()).unwrap()).unwrap();
let rgba8_bytes = match image.pixels {
png::RGBA8(pixels) => pixels,
_ => panic!(),
};
(image.width, image.height, rgba8_bytes)
}
fn check_reftest(reftest: Reftest) {
let (left_width, left_height, left_bytes) = capture(&reftest, 0);
let (right_width, right_height, right_bytes) = capture(&reftest, 1);
assert_eq!(left_width, right_width);
assert_eq!(left_height, right_height);
let left_all_white = left_bytes.iter().all(|&p| p == 255);
let right_all_white = right_bytes.iter().all(|&p| p == 255);
if left_all_white && right_all_white {
panic!("Both renderings are empty")
}
let pixels = left_bytes.iter().zip(right_bytes.iter()).map(|(&a, &b)| {
if a as i8 - b as i8 == 0 {
// White for correct
0xFF
} else {
// "1100" in the RGBA channel with an error for an incorrect value
// This results in some number of C0 and FFs, which is much more
// readable (and distinguishable) than the previous difference-wise
// scaling but does not require reconstructing the actual RGBA pixel.
0xC0
}
}).collect::<Vec<u8>>();
if pixels.iter().any(|&a| a < 255) {
let output_str = format!("/tmp/servo-reftest-{:06u}-diff.png", reftest.id);
let output = from_str::<Path>(output_str.as_slice()).unwrap();
let mut img = png::Image {
width: left_width,
height: left_height,
pixels: png::RGBA8(pixels),
};
let res = png::store_png(&mut img, &output);
assert!(res.is_ok());
match (reftest.kind, reftest.is_flaky) {
(Same, true) => println!("flaky test - rendering difference: {}", output_str),
(Same, false) => panic!("rendering difference: {}", output_str),
(Different, _) => {} // Result was different and that's what was expected
}
} else {
assert!(reftest.is_flaky || reftest.kind == Same);
}
}
|
random_line_split
|
|
bind_instead_of_map_multipart.rs
|
#![deny(clippy::bind_instead_of_map)]
#![allow(clippy::blocks_in_if_conditions)]
pub fn main() {
let _ = Some("42").and_then(|s| if s.len() < 42 { Some(0) } else { Some(s.len()) });
let _ = Some("42").and_then(|s| if s.len() < 42 { None } else { Some(s.len()) });
let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Ok(0) } else { Ok(s.len()) });
let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Err(()) } else { Ok(s.len()) });
let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Err(s.len() + 20) } else { Err(s.len()) });
let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Ok(()) } else { Err(s.len()) });
hard_example();
macro_example();
|
if s == "43" {
return Some(43);
}
s == "42"
} {
return Some(45);
}
match s.len() {
10 => Some(2),
20 => {
if foo() {
return {
if foo() {
return Some(20);
}
println!("foo");
Some(3)
};
}
Some(20)
},
40 => Some(30),
_ => Some(1),
}
});
}
fn foo() -> bool {
true
}
macro_rules! m {
() => {
Some(10)
};
}
fn macro_example() {
let _ = Some("").and_then(|s| if s.len() == 20 { m!() } else { Some(20) });
let _ = Some("").and_then(|s| if s.len() == 20 { Some(m!()) } else { Some(Some(20)) });
}
|
}
fn hard_example() {
Some("42").and_then(|s| {
if {
|
random_line_split
|
bind_instead_of_map_multipart.rs
|
#![deny(clippy::bind_instead_of_map)]
#![allow(clippy::blocks_in_if_conditions)]
pub fn main() {
let _ = Some("42").and_then(|s| if s.len() < 42 { Some(0) } else { Some(s.len()) });
let _ = Some("42").and_then(|s| if s.len() < 42 { None } else { Some(s.len()) });
let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Ok(0) } else { Ok(s.len()) });
let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Err(()) } else { Ok(s.len()) });
let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Err(s.len() + 20) } else { Err(s.len()) });
let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Ok(()) } else { Err(s.len()) });
hard_example();
macro_example();
}
fn hard_example() {
Some("42").and_then(|s| {
if {
if s == "43" {
return Some(43);
}
s == "42"
} {
return Some(45);
}
match s.len() {
10 => Some(2),
20 => {
if foo() {
return {
if foo() {
return Some(20);
}
println!("foo");
Some(3)
};
}
Some(20)
},
40 => Some(30),
_ => Some(1),
}
});
}
fn foo() -> bool {
true
}
macro_rules! m {
() => {
Some(10)
};
}
fn macro_example()
|
{
let _ = Some("").and_then(|s| if s.len() == 20 { m!() } else { Some(20) });
let _ = Some("").and_then(|s| if s.len() == 20 { Some(m!()) } else { Some(Some(20)) });
}
|
identifier_body
|
|
bind_instead_of_map_multipart.rs
|
#![deny(clippy::bind_instead_of_map)]
#![allow(clippy::blocks_in_if_conditions)]
pub fn main() {
let _ = Some("42").and_then(|s| if s.len() < 42 { Some(0) } else { Some(s.len()) });
let _ = Some("42").and_then(|s| if s.len() < 42 { None } else { Some(s.len()) });
let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Ok(0) } else { Ok(s.len()) });
let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Err(()) } else { Ok(s.len()) });
let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Err(s.len() + 20) } else { Err(s.len()) });
let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Ok(()) } else { Err(s.len()) });
hard_example();
macro_example();
}
fn hard_example() {
Some("42").and_then(|s| {
if {
if s == "43" {
return Some(43);
}
s == "42"
} {
return Some(45);
}
match s.len() {
10 => Some(2),
20 => {
if foo() {
return {
if foo() {
return Some(20);
}
println!("foo");
Some(3)
};
}
Some(20)
},
40 => Some(30),
_ => Some(1),
}
});
}
fn foo() -> bool {
true
}
macro_rules! m {
() => {
Some(10)
};
}
fn
|
() {
let _ = Some("").and_then(|s| if s.len() == 20 { m!() } else { Some(20) });
let _ = Some("").and_then(|s| if s.len() == 20 { Some(m!()) } else { Some(Some(20)) });
}
|
macro_example
|
identifier_name
|
lib.rs
|
#![feature(braced_empty_structs)]
extern crate html5ever;
extern crate hyper;
extern crate libc;
extern crate string_cache;
extern crate tendril;
extern crate url;
use libc::c_char;
use std::collections::{HashMap, HashSet, VecDeque};
use std::default::Default;
use std::ffi::CStr;
use std::io::Read;
use std::result::Result;
use std::string::String;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;
use std::thread::{current, park, Thread};
use string_cache::Atom;
use tendril::Tendril;
use tendril::stream::TendrilSink;
use hyper::Client;
use hyper::header::Connection;
use html5ever::parse_document;
use html5ever::tree_builder::TreeBuilderOpts;
use html5ever::driver::{ParseOpts, BytesOpts};
use html5ever::rcdom::{Document, Doctype, Comment, Element, RcDom, Handle, Text};
use url::Url;
trait AtomicProcess {
fn process_this(&self) -> Vec<ProcessOutputs>;
}
enum ProcessOutputs {
Processes(Box<AtomicProcess>),
Output(i64),
}
struct TaskQueue {
threads: Arc<Mutex<Vec<Thread>>>,
queue: Arc<Mutex<VecDeque<Box<AtomicProcess>>>>,
output_channel: Sender<i64>,
}
struct PageDownloader {
thread_url: String,
}
struct WalkDom {
handle: Handle,
count: i64,
}
struct PauseThread;
// The plan here is to have a set of tasks sitting of a FIFO queue
// each thread gets 1 task and processes it. Each new task that is created
// is added to the queue.
impl AtomicProcess for PauseThread {
fn process_this(&self) -> Vec<ProcessOutputs> {
park();
vec![ProcessOutputs::Output(0i64)]
}
}
impl TaskQueue {
pub fn nextTask(&self) -> Box<AtomicProcess> {
match self.queue.clone().lock().unwrap().pop_back() {
Some(res) => res,
None => Box::new(PauseThread),
}
}
pub fn addTask(&self, new_task: Box<AtomicProcess>) {
self.queue.clone().lock().unwrap().push_front(new_task);
self.threads.clone().lock().unwrap().iter().map(|t| t.unpark());
}
pub fn addThreadToWorkers(&self, t_handle: Thread) {
self.threads.clone().lock().unwrap().push(t_handle);
loop {
for process in self.nextTask().process_this() {
match process {
ProcessOutputs::Processes(res) => self.addTask(res),
ProcessOutputs::Output(res) => self.output_channel.send(res).unwrap(),
}
}
}
}
}
impl AtomicProcess for WalkDom {
fn process_this(&self) -> Vec<ProcessOutputs> {
let node = self.handle.borrow();
match node.node {
Element(ref name, _, _) => {
if name.local == Atom::from("article") {
self.count += 1
}
}
Document => (),
Doctype(_, _, _) => (),
Text(_) => (),
Comment(_) => (),
};
let dom_steps: Vec<ProcessOutputs> = node.children
.iter()
.map(|child| {
ProcessOutputs::Processes(Box::new(WalkDom {
handle: *child,
count: self.count,
}))
})
.collect();
if dom_steps.len() == 0
|
dom_steps
}
}
impl AtomicProcess for PageDownloader {
fn process_this(&self) -> Vec<ProcessOutputs> {
let client = Client::new();
// Read the Response.
let mut body = String::new();
let res = client.get(&self.thread_url[..])
.header(Connection::close())
.send()
.unwrap();
let _ = res.read_to_string(&mut body);
let mut dom = parse_document(RcDom::default(), Default::default())
.from_bytes(BytesOpts::default())
.read_from(&mut body.as_bytes())
.unwrap();
vec![ProcessOutputs::Processes(Box::new(WalkDom {
handle: dom.document,
count: 0i64,
}))]
}
}
fn start_read_thread(url: String) {
let (sender, receiver) = channel::<i64>();
let queue_controller = TaskQueue {
threads: Arc::new(Mutex::new(vec![])),
queue: Arc::new(Mutex::new(VecDeque::new())),
output_channel: sender,
};
(0..4)
.map(|_| {
let worker_thread = thread::spawn(move || {});
queue_controller.addThreadToWorkers(*worker_thread.thread());
})
.collect::<Vec<_>>();
queue_controller.addTask(Box::new(PageDownloader { thread_url: url }));
loop {
println!("Thread finished with count={:?}", receiver.recv().unwrap());
}
}
#[no_mangle]
pub extern "C" fn process(url: *const c_char) {
let c_value = Some(unsafe { CStr::from_ptr(url).to_string_lossy().into_owned() });
match c_value {
Some(value) => start_read_thread(String::from(&value[..])),
None => (),
}
}
|
{
dom_steps.push(ProcessOutputs::Output(self.count));
}
|
conditional_block
|
lib.rs
|
#![feature(braced_empty_structs)]
extern crate html5ever;
extern crate hyper;
extern crate libc;
extern crate string_cache;
extern crate tendril;
extern crate url;
use libc::c_char;
use std::collections::{HashMap, HashSet, VecDeque};
use std::default::Default;
use std::ffi::CStr;
use std::io::Read;
use std::result::Result;
use std::string::String;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;
use std::thread::{current, park, Thread};
use string_cache::Atom;
use tendril::Tendril;
use tendril::stream::TendrilSink;
use hyper::Client;
use hyper::header::Connection;
use html5ever::parse_document;
use html5ever::tree_builder::TreeBuilderOpts;
use html5ever::driver::{ParseOpts, BytesOpts};
use html5ever::rcdom::{Document, Doctype, Comment, Element, RcDom, Handle, Text};
use url::Url;
trait AtomicProcess {
fn process_this(&self) -> Vec<ProcessOutputs>;
}
enum ProcessOutputs {
Processes(Box<AtomicProcess>),
Output(i64),
}
struct TaskQueue {
threads: Arc<Mutex<Vec<Thread>>>,
queue: Arc<Mutex<VecDeque<Box<AtomicProcess>>>>,
output_channel: Sender<i64>,
}
struct PageDownloader {
thread_url: String,
}
struct WalkDom {
handle: Handle,
count: i64,
}
struct PauseThread;
// The plan here is to have a set of tasks sitting of a FIFO queue
// each thread gets 1 task and processes it. Each new task that is created
// is added to the queue.
impl AtomicProcess for PauseThread {
fn process_this(&self) -> Vec<ProcessOutputs> {
park();
vec![ProcessOutputs::Output(0i64)]
}
}
impl TaskQueue {
pub fn nextTask(&self) -> Box<AtomicProcess> {
match self.queue.clone().lock().unwrap().pop_back() {
Some(res) => res,
None => Box::new(PauseThread),
}
}
pub fn addTask(&self, new_task: Box<AtomicProcess>) {
self.queue.clone().lock().unwrap().push_front(new_task);
self.threads.clone().lock().unwrap().iter().map(|t| t.unpark());
}
pub fn addThreadToWorkers(&self, t_handle: Thread) {
self.threads.clone().lock().unwrap().push(t_handle);
loop {
for process in self.nextTask().process_this() {
match process {
ProcessOutputs::Processes(res) => self.addTask(res),
ProcessOutputs::Output(res) => self.output_channel.send(res).unwrap(),
}
}
}
}
}
impl AtomicProcess for WalkDom {
|
if name.local == Atom::from("article") {
self.count += 1
}
}
Document => (),
Doctype(_, _, _) => (),
Text(_) => (),
Comment(_) => (),
};
let dom_steps: Vec<ProcessOutputs> = node.children
.iter()
.map(|child| {
ProcessOutputs::Processes(Box::new(WalkDom {
handle: *child,
count: self.count,
}))
})
.collect();
if dom_steps.len() == 0 {
dom_steps.push(ProcessOutputs::Output(self.count));
}
dom_steps
}
}
impl AtomicProcess for PageDownloader {
fn process_this(&self) -> Vec<ProcessOutputs> {
let client = Client::new();
// Read the Response.
let mut body = String::new();
let res = client.get(&self.thread_url[..])
.header(Connection::close())
.send()
.unwrap();
let _ = res.read_to_string(&mut body);
let mut dom = parse_document(RcDom::default(), Default::default())
.from_bytes(BytesOpts::default())
.read_from(&mut body.as_bytes())
.unwrap();
vec![ProcessOutputs::Processes(Box::new(WalkDom {
handle: dom.document,
count: 0i64,
}))]
}
}
fn start_read_thread(url: String) {
let (sender, receiver) = channel::<i64>();
let queue_controller = TaskQueue {
threads: Arc::new(Mutex::new(vec![])),
queue: Arc::new(Mutex::new(VecDeque::new())),
output_channel: sender,
};
(0..4)
.map(|_| {
let worker_thread = thread::spawn(move || {});
queue_controller.addThreadToWorkers(*worker_thread.thread());
})
.collect::<Vec<_>>();
queue_controller.addTask(Box::new(PageDownloader { thread_url: url }));
loop {
println!("Thread finished with count={:?}", receiver.recv().unwrap());
}
}
#[no_mangle]
pub extern "C" fn process(url: *const c_char) {
let c_value = Some(unsafe { CStr::from_ptr(url).to_string_lossy().into_owned() });
match c_value {
Some(value) => start_read_thread(String::from(&value[..])),
None => (),
}
}
|
fn process_this(&self) -> Vec<ProcessOutputs> {
let node = self.handle.borrow();
match node.node {
Element(ref name, _, _) => {
|
random_line_split
|
lib.rs
|
#![feature(braced_empty_structs)]
extern crate html5ever;
extern crate hyper;
extern crate libc;
extern crate string_cache;
extern crate tendril;
extern crate url;
use libc::c_char;
use std::collections::{HashMap, HashSet, VecDeque};
use std::default::Default;
use std::ffi::CStr;
use std::io::Read;
use std::result::Result;
use std::string::String;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;
use std::thread::{current, park, Thread};
use string_cache::Atom;
use tendril::Tendril;
use tendril::stream::TendrilSink;
use hyper::Client;
use hyper::header::Connection;
use html5ever::parse_document;
use html5ever::tree_builder::TreeBuilderOpts;
use html5ever::driver::{ParseOpts, BytesOpts};
use html5ever::rcdom::{Document, Doctype, Comment, Element, RcDom, Handle, Text};
use url::Url;
trait AtomicProcess {
fn process_this(&self) -> Vec<ProcessOutputs>;
}
enum ProcessOutputs {
Processes(Box<AtomicProcess>),
Output(i64),
}
struct TaskQueue {
threads: Arc<Mutex<Vec<Thread>>>,
queue: Arc<Mutex<VecDeque<Box<AtomicProcess>>>>,
output_channel: Sender<i64>,
}
struct PageDownloader {
thread_url: String,
}
struct WalkDom {
handle: Handle,
count: i64,
}
struct PauseThread;
// The plan here is to have a set of tasks sitting of a FIFO queue
// each thread gets 1 task and processes it. Each new task that is created
// is added to the queue.
impl AtomicProcess for PauseThread {
fn
|
(&self) -> Vec<ProcessOutputs> {
park();
vec![ProcessOutputs::Output(0i64)]
}
}
impl TaskQueue {
pub fn nextTask(&self) -> Box<AtomicProcess> {
match self.queue.clone().lock().unwrap().pop_back() {
Some(res) => res,
None => Box::new(PauseThread),
}
}
pub fn addTask(&self, new_task: Box<AtomicProcess>) {
self.queue.clone().lock().unwrap().push_front(new_task);
self.threads.clone().lock().unwrap().iter().map(|t| t.unpark());
}
pub fn addThreadToWorkers(&self, t_handle: Thread) {
self.threads.clone().lock().unwrap().push(t_handle);
loop {
for process in self.nextTask().process_this() {
match process {
ProcessOutputs::Processes(res) => self.addTask(res),
ProcessOutputs::Output(res) => self.output_channel.send(res).unwrap(),
}
}
}
}
}
impl AtomicProcess for WalkDom {
fn process_this(&self) -> Vec<ProcessOutputs> {
let node = self.handle.borrow();
match node.node {
Element(ref name, _, _) => {
if name.local == Atom::from("article") {
self.count += 1
}
}
Document => (),
Doctype(_, _, _) => (),
Text(_) => (),
Comment(_) => (),
};
let dom_steps: Vec<ProcessOutputs> = node.children
.iter()
.map(|child| {
ProcessOutputs::Processes(Box::new(WalkDom {
handle: *child,
count: self.count,
}))
})
.collect();
if dom_steps.len() == 0 {
dom_steps.push(ProcessOutputs::Output(self.count));
}
dom_steps
}
}
impl AtomicProcess for PageDownloader {
fn process_this(&self) -> Vec<ProcessOutputs> {
let client = Client::new();
// Read the Response.
let mut body = String::new();
let res = client.get(&self.thread_url[..])
.header(Connection::close())
.send()
.unwrap();
let _ = res.read_to_string(&mut body);
let mut dom = parse_document(RcDom::default(), Default::default())
.from_bytes(BytesOpts::default())
.read_from(&mut body.as_bytes())
.unwrap();
vec![ProcessOutputs::Processes(Box::new(WalkDom {
handle: dom.document,
count: 0i64,
}))]
}
}
fn start_read_thread(url: String) {
let (sender, receiver) = channel::<i64>();
let queue_controller = TaskQueue {
threads: Arc::new(Mutex::new(vec![])),
queue: Arc::new(Mutex::new(VecDeque::new())),
output_channel: sender,
};
(0..4)
.map(|_| {
let worker_thread = thread::spawn(move || {});
queue_controller.addThreadToWorkers(*worker_thread.thread());
})
.collect::<Vec<_>>();
queue_controller.addTask(Box::new(PageDownloader { thread_url: url }));
loop {
println!("Thread finished with count={:?}", receiver.recv().unwrap());
}
}
#[no_mangle]
pub extern "C" fn process(url: *const c_char) {
let c_value = Some(unsafe { CStr::from_ptr(url).to_string_lossy().into_owned() });
match c_value {
Some(value) => start_read_thread(String::from(&value[..])),
None => (),
}
}
|
process_this
|
identifier_name
|
command.rs
|
use super::{CommandType};
#[derive(Clone, Debug, PartialEq)]
pub enum Sender {
User(String, Option<String>, Option<String>),
Server(String)
}
impl Sender {
fn to_cmd(&self) -> String {
match *self {
Sender::User(ref n, None, None) => format!(":{} ", n),
Sender::User(ref n, Some(ref u), None) => format!(":{}!{} ", n, u),
Sender::User(ref n, None, Some(ref h)) => format!(":{}@{} ", n, h),
Sender::User(ref n, Some(ref u), Some(ref h)) => format!(":{}!{}@{} ", n, u, h),
Sender::Server(ref s) => format!(":{} ", s),
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Tag {
pub key: String,
pub value: String
}
impl Tag {
fn to_cmd(&self) -> String {
fn escape_tag_value(value: &str) -> String {
let escape_seqs =
vec![("\\\\", "\\"), ("\\:", ";"), ("\\s", " "), ("\\r", "\r"), ("\\n", "\n")];
escape_seqs.iter().fold(value.into(), |a, x| a.replace(x.1, x.0))
}
format!("{}={}", self.key, escape_tag_value(&*self.value))
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Tags {
pub data: Vec<Tag>,
}
impl Tags {
fn to_cmd(&self) -> String {
if self.data.len() == 0 {
return "".to_string();
}
let mut iter = self.data.iter();
let first = iter.next().unwrap().to_cmd();
let mut buf = String::new();
buf.push('@');
buf.push_str(&*first);
for i in iter {
buf.push_str(&*format!(";{}", i.to_cmd()));
}
buf.push(' ');
buf
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Params {
pub data: Vec<String>,
}
impl Params {
fn to_cmd(&self) -> String
|
}
#[derive(Clone, Debug, PartialEq)]
pub struct Command {
pub tags: Option<Tags>,
pub prefix: Option<Sender>,
pub command: CommandType,
pub params: Params,
}
impl Command {
pub fn to_string(&self) -> String {
let cmd: &str = self.command.into();
format!("{}{}{}{}\r\n", self.tags.as_ref().map(|x|x.to_cmd()).unwrap_or("".to_string()),
self.prefix.as_ref().map(|x|x.to_cmd()).unwrap_or("".to_string()),
cmd,
self.params.to_cmd())
}
pub fn get_param(&self, index: usize) -> Option<&str> {
self.params.data.get(index).map(|x| &**x)
}
}
|
{
let mut buf = String::new();
buf.push(' ');
let n = self.data.len();
if n > 0 {
for i in 0..n-1 {
buf.push_str(&*format!("{} ", self.data[i]));
}
buf.push_str(&*format!(":{}", self.data[n-1]));
}
buf
}
|
identifier_body
|
command.rs
|
use super::{CommandType};
#[derive(Clone, Debug, PartialEq)]
pub enum Sender {
User(String, Option<String>, Option<String>),
Server(String)
}
impl Sender {
fn to_cmd(&self) -> String {
match *self {
Sender::User(ref n, None, None) => format!(":{} ", n),
Sender::User(ref n, Some(ref u), None) => format!(":{}!{} ", n, u),
Sender::User(ref n, None, Some(ref h)) => format!(":{}@{} ", n, h),
Sender::User(ref n, Some(ref u), Some(ref h)) => format!(":{}!{}@{} ", n, u, h),
Sender::Server(ref s) => format!(":{} ", s),
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Tag {
pub key: String,
pub value: String
}
impl Tag {
fn to_cmd(&self) -> String {
fn escape_tag_value(value: &str) -> String {
let escape_seqs =
vec![("\\\\", "\\"), ("\\:", ";"), ("\\s", " "), ("\\r", "\r"), ("\\n", "\n")];
escape_seqs.iter().fold(value.into(), |a, x| a.replace(x.1, x.0))
}
format!("{}={}", self.key, escape_tag_value(&*self.value))
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Tags {
pub data: Vec<Tag>,
}
impl Tags {
fn to_cmd(&self) -> String {
if self.data.len() == 0 {
return "".to_string();
}
let mut iter = self.data.iter();
let first = iter.next().unwrap().to_cmd();
let mut buf = String::new();
buf.push('@');
buf.push_str(&*first);
for i in iter {
buf.push_str(&*format!(";{}", i.to_cmd()));
}
buf.push(' ');
buf
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Params {
pub data: Vec<String>,
}
impl Params {
fn to_cmd(&self) -> String {
let mut buf = String::new();
buf.push(' ');
let n = self.data.len();
if n > 0 {
for i in 0..n-1 {
buf.push_str(&*format!("{} ", self.data[i]));
}
buf.push_str(&*format!(":{}", self.data[n-1]));
}
buf
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Command {
pub tags: Option<Tags>,
pub prefix: Option<Sender>,
pub command: CommandType,
pub params: Params,
}
impl Command {
pub fn
|
(&self) -> String {
let cmd: &str = self.command.into();
format!("{}{}{}{}\r\n", self.tags.as_ref().map(|x|x.to_cmd()).unwrap_or("".to_string()),
self.prefix.as_ref().map(|x|x.to_cmd()).unwrap_or("".to_string()),
cmd,
self.params.to_cmd())
}
pub fn get_param(&self, index: usize) -> Option<&str> {
self.params.data.get(index).map(|x| &**x)
}
}
|
to_string
|
identifier_name
|
command.rs
|
use super::{CommandType};
#[derive(Clone, Debug, PartialEq)]
pub enum Sender {
User(String, Option<String>, Option<String>),
Server(String)
}
impl Sender {
fn to_cmd(&self) -> String {
match *self {
Sender::User(ref n, None, None) => format!(":{} ", n),
Sender::User(ref n, Some(ref u), None) => format!(":{}!{} ", n, u),
Sender::User(ref n, None, Some(ref h)) => format!(":{}@{} ", n, h),
Sender::User(ref n, Some(ref u), Some(ref h)) => format!(":{}!{}@{} ", n, u, h),
Sender::Server(ref s) => format!(":{} ", s),
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Tag {
pub key: String,
pub value: String
}
impl Tag {
fn to_cmd(&self) -> String {
fn escape_tag_value(value: &str) -> String {
let escape_seqs =
vec![("\\\\", "\\"), ("\\:", ";"), ("\\s", " "), ("\\r", "\r"), ("\\n", "\n")];
escape_seqs.iter().fold(value.into(), |a, x| a.replace(x.1, x.0))
}
format!("{}={}", self.key, escape_tag_value(&*self.value))
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Tags {
pub data: Vec<Tag>,
}
impl Tags {
fn to_cmd(&self) -> String {
if self.data.len() == 0 {
return "".to_string();
}
let mut iter = self.data.iter();
let first = iter.next().unwrap().to_cmd();
let mut buf = String::new();
buf.push('@');
buf.push_str(&*first);
for i in iter {
buf.push_str(&*format!(";{}", i.to_cmd()));
}
buf.push(' ');
buf
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Params {
pub data: Vec<String>,
}
impl Params {
fn to_cmd(&self) -> String {
let mut buf = String::new();
buf.push(' ');
let n = self.data.len();
if n > 0 {
for i in 0..n-1 {
buf.push_str(&*format!("{} ", self.data[i]));
}
buf.push_str(&*format!(":{}", self.data[n-1]));
}
buf
|
}
#[derive(Clone, Debug, PartialEq)]
pub struct Command {
pub tags: Option<Tags>,
pub prefix: Option<Sender>,
pub command: CommandType,
pub params: Params,
}
impl Command {
pub fn to_string(&self) -> String {
let cmd: &str = self.command.into();
format!("{}{}{}{}\r\n", self.tags.as_ref().map(|x|x.to_cmd()).unwrap_or("".to_string()),
self.prefix.as_ref().map(|x|x.to_cmd()).unwrap_or("".to_string()),
cmd,
self.params.to_cmd())
}
pub fn get_param(&self, index: usize) -> Option<&str> {
self.params.data.get(index).map(|x| &**x)
}
}
|
}
|
random_line_split
|
errors.rs
|
// Copyright 2018 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std; // To refer to std::result::Result.
use rusqlite;
use mentat_core::{
ValueTypeSet,
};
use mentat_db;
use mentat_query::{
PlainSymbol,
};
use mentat_query_pull;
use aggregates::{
SimpleAggregationOp,
};
#[macro_export]
macro_rules! bail {
($e:expr) => (
return Err($e.into());
)
}
pub type Result<T> = std::result::Result<T, ProjectorError>;
|
/// We're just not done yet. Message that the feature is recognized but not yet
/// implemented.
#[fail(display = "not yet implemented: {}", _0)]
NotYetImplemented(String),
#[fail(display = "no possible types for value provided to {:?}", _0)]
CannotProjectImpossibleBinding(SimpleAggregationOp),
#[fail(display = "cannot apply projection operation {:?} to types {:?}", _0, _1)]
CannotApplyAggregateOperationToTypes(SimpleAggregationOp, ValueTypeSet),
#[fail(display = "invalid projection: {}", _0)]
InvalidProjection(String),
#[fail(display = "cannot project unbound variable {:?}", _0)]
UnboundVariable(PlainSymbol),
#[fail(display = "cannot find type for variable {:?}", _0)]
NoTypeAvailableForVariable(PlainSymbol),
#[fail(display = "expected {}, got {}", _0, _1)]
UnexpectedResultsType(&'static str, &'static str),
#[fail(display = "expected tuple of length {}, got tuple of length {}", _0, _1)]
UnexpectedResultsTupleLength(usize, usize),
#[fail(display = "min/max expressions: {} (max 1), corresponding: {}", _0, _1)]
AmbiguousAggregates(usize, usize),
// It would be better to capture the underlying `rusqlite::Error`, but that type doesn't
// implement many useful traits, including `Clone`, `Eq`, and `PartialEq`.
#[fail(display = "SQL error: {}", _0)]
RusqliteError(String),
#[fail(display = "{}", _0)]
DbError(#[cause] mentat_db::DbError),
#[fail(display = "{}", _0)]
PullError(#[cause] mentat_query_pull::PullError),
}
impl From<rusqlite::Error> for ProjectorError {
fn from(error: rusqlite::Error) -> ProjectorError {
ProjectorError::RusqliteError(error.to_string())
}
}
impl From<mentat_db::DbError> for ProjectorError {
fn from(error: mentat_db::DbError) -> ProjectorError {
ProjectorError::DbError(error)
}
}
impl From<mentat_query_pull::PullError> for ProjectorError {
fn from(error: mentat_query_pull::PullError) -> ProjectorError {
ProjectorError::PullError(error)
}
}
|
#[derive(Debug, Fail)]
pub enum ProjectorError {
|
random_line_split
|
errors.rs
|
// Copyright 2018 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std; // To refer to std::result::Result.
use rusqlite;
use mentat_core::{
ValueTypeSet,
};
use mentat_db;
use mentat_query::{
PlainSymbol,
};
use mentat_query_pull;
use aggregates::{
SimpleAggregationOp,
};
#[macro_export]
macro_rules! bail {
($e:expr) => (
return Err($e.into());
)
}
pub type Result<T> = std::result::Result<T, ProjectorError>;
#[derive(Debug, Fail)]
pub enum ProjectorError {
/// We're just not done yet. Message that the feature is recognized but not yet
/// implemented.
#[fail(display = "not yet implemented: {}", _0)]
NotYetImplemented(String),
#[fail(display = "no possible types for value provided to {:?}", _0)]
CannotProjectImpossibleBinding(SimpleAggregationOp),
#[fail(display = "cannot apply projection operation {:?} to types {:?}", _0, _1)]
CannotApplyAggregateOperationToTypes(SimpleAggregationOp, ValueTypeSet),
#[fail(display = "invalid projection: {}", _0)]
InvalidProjection(String),
#[fail(display = "cannot project unbound variable {:?}", _0)]
UnboundVariable(PlainSymbol),
#[fail(display = "cannot find type for variable {:?}", _0)]
NoTypeAvailableForVariable(PlainSymbol),
#[fail(display = "expected {}, got {}", _0, _1)]
UnexpectedResultsType(&'static str, &'static str),
#[fail(display = "expected tuple of length {}, got tuple of length {}", _0, _1)]
UnexpectedResultsTupleLength(usize, usize),
#[fail(display = "min/max expressions: {} (max 1), corresponding: {}", _0, _1)]
AmbiguousAggregates(usize, usize),
// It would be better to capture the underlying `rusqlite::Error`, but that type doesn't
// implement many useful traits, including `Clone`, `Eq`, and `PartialEq`.
#[fail(display = "SQL error: {}", _0)]
RusqliteError(String),
#[fail(display = "{}", _0)]
DbError(#[cause] mentat_db::DbError),
#[fail(display = "{}", _0)]
PullError(#[cause] mentat_query_pull::PullError),
}
impl From<rusqlite::Error> for ProjectorError {
fn from(error: rusqlite::Error) -> ProjectorError
|
}
impl From<mentat_db::DbError> for ProjectorError {
fn from(error: mentat_db::DbError) -> ProjectorError {
ProjectorError::DbError(error)
}
}
impl From<mentat_query_pull::PullError> for ProjectorError {
fn from(error: mentat_query_pull::PullError) -> ProjectorError {
ProjectorError::PullError(error)
}
}
|
{
ProjectorError::RusqliteError(error.to_string())
}
|
identifier_body
|
errors.rs
|
// Copyright 2018 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std; // To refer to std::result::Result.
use rusqlite;
use mentat_core::{
ValueTypeSet,
};
use mentat_db;
use mentat_query::{
PlainSymbol,
};
use mentat_query_pull;
use aggregates::{
SimpleAggregationOp,
};
#[macro_export]
macro_rules! bail {
($e:expr) => (
return Err($e.into());
)
}
pub type Result<T> = std::result::Result<T, ProjectorError>;
#[derive(Debug, Fail)]
pub enum ProjectorError {
/// We're just not done yet. Message that the feature is recognized but not yet
/// implemented.
#[fail(display = "not yet implemented: {}", _0)]
NotYetImplemented(String),
#[fail(display = "no possible types for value provided to {:?}", _0)]
CannotProjectImpossibleBinding(SimpleAggregationOp),
#[fail(display = "cannot apply projection operation {:?} to types {:?}", _0, _1)]
CannotApplyAggregateOperationToTypes(SimpleAggregationOp, ValueTypeSet),
#[fail(display = "invalid projection: {}", _0)]
InvalidProjection(String),
#[fail(display = "cannot project unbound variable {:?}", _0)]
UnboundVariable(PlainSymbol),
#[fail(display = "cannot find type for variable {:?}", _0)]
NoTypeAvailableForVariable(PlainSymbol),
#[fail(display = "expected {}, got {}", _0, _1)]
UnexpectedResultsType(&'static str, &'static str),
#[fail(display = "expected tuple of length {}, got tuple of length {}", _0, _1)]
UnexpectedResultsTupleLength(usize, usize),
#[fail(display = "min/max expressions: {} (max 1), corresponding: {}", _0, _1)]
AmbiguousAggregates(usize, usize),
// It would be better to capture the underlying `rusqlite::Error`, but that type doesn't
// implement many useful traits, including `Clone`, `Eq`, and `PartialEq`.
#[fail(display = "SQL error: {}", _0)]
RusqliteError(String),
#[fail(display = "{}", _0)]
DbError(#[cause] mentat_db::DbError),
#[fail(display = "{}", _0)]
PullError(#[cause] mentat_query_pull::PullError),
}
impl From<rusqlite::Error> for ProjectorError {
fn
|
(error: rusqlite::Error) -> ProjectorError {
ProjectorError::RusqliteError(error.to_string())
}
}
impl From<mentat_db::DbError> for ProjectorError {
fn from(error: mentat_db::DbError) -> ProjectorError {
ProjectorError::DbError(error)
}
}
impl From<mentat_query_pull::PullError> for ProjectorError {
fn from(error: mentat_query_pull::PullError) -> ProjectorError {
ProjectorError::PullError(error)
}
}
|
from
|
identifier_name
|
effect.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Enforces the Rust effect system. Currently there is just one effect,
/// `unsafe`.
use middle::def;
use middle::ty;
use middle::typeck::MethodCall;
use util::ppaux;
use syntax::ast;
use syntax::ast_util::PostExpansionMethod;
use syntax::codemap::Span;
use syntax::visit;
use syntax::visit::Visitor;
#[deriving(PartialEq)]
enum UnsafeContext {
SafeContext,
UnsafeFn,
UnsafeBlock(ast::NodeId),
}
fn type_is_unsafe_function(ty: ty::t) -> bool {
match ty::get(ty).sty {
ty::ty_bare_fn(ref f) => f.fn_style == ast::UnsafeFn,
ty::ty_closure(ref f) => f.fn_style == ast::UnsafeFn,
_ => false,
}
}
struct EffectCheckVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
/// Whether we're in an unsafe context.
unsafe_context: UnsafeContext,
}
impl<'a, 'tcx> EffectCheckVisitor<'a, 'tcx> {
fn require_unsafe(&mut self, span: Span, description: &str) {
match self.unsafe_context {
SafeContext => {
// Report an error.
span_err!(self.tcx.sess, span, E0133,
"{} requires unsafe function or block",
description);
}
UnsafeBlock(block_id) => {
// OK, but record this.
|
}
}
fn check_str_index(&mut self, e: &ast::Expr) {
let base_type = match e.node {
ast::ExprIndex(ref base, _) => ty::node_id_to_type(self.tcx, base.id),
_ => return
};
debug!("effect: checking index with base type {}",
ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty {
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty,..}) => match ty::get(ty).sty {
ty::ty_str => {
span_err!(self.tcx.sess, e.span, E0134,
"modification of string types is not allowed");
}
_ => {}
},
ty::ty_str => {
span_err!(self.tcx.sess, e.span, E0135,
"modification of string types is not allowed");
}
_ => {}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
fn visit_fn(&mut self, fn_kind: visit::FnKind<'v>, fn_decl: &'v ast::FnDecl,
block: &'v ast::Block, span: Span, _: ast::NodeId) {
let (is_item_fn, is_unsafe_fn) = match fn_kind {
visit::FkItemFn(_, _, fn_style, _) =>
(true, fn_style == ast::UnsafeFn),
visit::FkMethod(_, _, method) =>
(true, method.pe_fn_style() == ast::UnsafeFn),
_ => (false, false),
};
let old_unsafe_context = self.unsafe_context;
if is_unsafe_fn {
self.unsafe_context = UnsafeFn
} else if is_item_fn {
self.unsafe_context = SafeContext
}
visit::walk_fn(self, fn_kind, fn_decl, block, span);
self.unsafe_context = old_unsafe_context
}
fn visit_block(&mut self, block: &ast::Block) {
let old_unsafe_context = self.unsafe_context;
match block.rules {
ast::DefaultBlock => {}
ast::UnsafeBlock(source) => {
// By default only the outermost `unsafe` block is
// "used" and so nested unsafe blocks are pointless
// (the inner ones are unnecessary and we actually
// warn about them). As such, there are two cases when
// we need to create a new context, when we're
// - outside `unsafe` and found a `unsafe` block
// (normal case)
// - inside `unsafe`, found an `unsafe` block
// created internally to the compiler
//
// The second case is necessary to ensure that the
// compiler `unsafe` blocks don't accidentally "use"
// external blocks (e.g. `unsafe { println("") }`,
// expands to `unsafe {... unsafe {... } }` where
// the inner one is compiler generated).
if self.unsafe_context == SafeContext || source == ast::CompilerGenerated {
self.unsafe_context = UnsafeBlock(block.id)
}
}
}
visit::walk_block(self, block);
self.unsafe_context = old_unsafe_context
}
fn visit_expr(&mut self, expr: &ast::Expr) {
match expr.node {
ast::ExprMethodCall(_, _, _) => {
let method_call = MethodCall::expr(expr.id);
let base_type = self.tcx.method_map.borrow().get(&method_call).ty;
debug!("effect: method call case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span,
"invocation of unsafe method")
}
}
ast::ExprCall(ref base, _) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: call case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span, "call to unsafe function")
}
}
ast::ExprUnary(ast::UnDeref, ref base) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: unary case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty {
ty::ty_ptr(_) => {
self.require_unsafe(expr.span,
"dereference of unsafe pointer")
}
_ => {}
}
}
ast::ExprAssign(ref base, _) | ast::ExprAssignOp(_, ref base, _) => {
self.check_str_index(&**base);
}
ast::ExprAddrOf(ast::MutMutable, ref base) => {
self.check_str_index(&**base);
}
ast::ExprInlineAsm(..) => {
self.require_unsafe(expr.span, "use of inline assembly")
}
ast::ExprPath(..) => {
match ty::resolve_expr(self.tcx, expr) {
def::DefStatic(_, true) => {
self.require_unsafe(expr.span, "use of mutable static")
}
_ => {}
}
}
_ => {}
}
visit::walk_expr(self, expr);
}
}
pub fn check_crate(tcx: &ty::ctxt) {
let mut visitor = EffectCheckVisitor {
tcx: tcx,
unsafe_context: SafeContext,
};
visit::walk_crate(&mut visitor, tcx.map.krate());
}
|
debug!("effect: recording unsafe block as used: {}", block_id);
self.tcx.used_unsafe.borrow_mut().insert(block_id);
}
UnsafeFn => {}
|
random_line_split
|
effect.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Enforces the Rust effect system. Currently there is just one effect,
/// `unsafe`.
use middle::def;
use middle::ty;
use middle::typeck::MethodCall;
use util::ppaux;
use syntax::ast;
use syntax::ast_util::PostExpansionMethod;
use syntax::codemap::Span;
use syntax::visit;
use syntax::visit::Visitor;
#[deriving(PartialEq)]
enum UnsafeContext {
SafeContext,
UnsafeFn,
UnsafeBlock(ast::NodeId),
}
fn type_is_unsafe_function(ty: ty::t) -> bool {
match ty::get(ty).sty {
ty::ty_bare_fn(ref f) => f.fn_style == ast::UnsafeFn,
ty::ty_closure(ref f) => f.fn_style == ast::UnsafeFn,
_ => false,
}
}
struct EffectCheckVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
/// Whether we're in an unsafe context.
unsafe_context: UnsafeContext,
}
impl<'a, 'tcx> EffectCheckVisitor<'a, 'tcx> {
fn require_unsafe(&mut self, span: Span, description: &str) {
match self.unsafe_context {
SafeContext => {
// Report an error.
span_err!(self.tcx.sess, span, E0133,
"{} requires unsafe function or block",
description);
}
UnsafeBlock(block_id) => {
// OK, but record this.
debug!("effect: recording unsafe block as used: {}", block_id);
self.tcx.used_unsafe.borrow_mut().insert(block_id);
}
UnsafeFn => {}
}
}
fn check_str_index(&mut self, e: &ast::Expr) {
let base_type = match e.node {
ast::ExprIndex(ref base, _) => ty::node_id_to_type(self.tcx, base.id),
_ => return
};
debug!("effect: checking index with base type {}",
ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty {
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty,..}) => match ty::get(ty).sty {
ty::ty_str => {
span_err!(self.tcx.sess, e.span, E0134,
"modification of string types is not allowed");
}
_ => {}
},
ty::ty_str => {
span_err!(self.tcx.sess, e.span, E0135,
"modification of string types is not allowed");
}
_ => {}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
fn visit_fn(&mut self, fn_kind: visit::FnKind<'v>, fn_decl: &'v ast::FnDecl,
block: &'v ast::Block, span: Span, _: ast::NodeId) {
let (is_item_fn, is_unsafe_fn) = match fn_kind {
visit::FkItemFn(_, _, fn_style, _) =>
(true, fn_style == ast::UnsafeFn),
visit::FkMethod(_, _, method) =>
(true, method.pe_fn_style() == ast::UnsafeFn),
_ => (false, false),
};
let old_unsafe_context = self.unsafe_context;
if is_unsafe_fn {
self.unsafe_context = UnsafeFn
} else if is_item_fn {
self.unsafe_context = SafeContext
}
visit::walk_fn(self, fn_kind, fn_decl, block, span);
self.unsafe_context = old_unsafe_context
}
fn visit_block(&mut self, block: &ast::Block) {
let old_unsafe_context = self.unsafe_context;
match block.rules {
ast::DefaultBlock => {}
ast::UnsafeBlock(source) => {
// By default only the outermost `unsafe` block is
// "used" and so nested unsafe blocks are pointless
// (the inner ones are unnecessary and we actually
// warn about them). As such, there are two cases when
// we need to create a new context, when we're
// - outside `unsafe` and found a `unsafe` block
// (normal case)
// - inside `unsafe`, found an `unsafe` block
// created internally to the compiler
//
// The second case is necessary to ensure that the
// compiler `unsafe` blocks don't accidentally "use"
// external blocks (e.g. `unsafe { println("") }`,
// expands to `unsafe {... unsafe {... } }` where
// the inner one is compiler generated).
if self.unsafe_context == SafeContext || source == ast::CompilerGenerated {
self.unsafe_context = UnsafeBlock(block.id)
}
}
}
visit::walk_block(self, block);
self.unsafe_context = old_unsafe_context
}
fn
|
(&mut self, expr: &ast::Expr) {
match expr.node {
ast::ExprMethodCall(_, _, _) => {
let method_call = MethodCall::expr(expr.id);
let base_type = self.tcx.method_map.borrow().get(&method_call).ty;
debug!("effect: method call case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span,
"invocation of unsafe method")
}
}
ast::ExprCall(ref base, _) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: call case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span, "call to unsafe function")
}
}
ast::ExprUnary(ast::UnDeref, ref base) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: unary case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty {
ty::ty_ptr(_) => {
self.require_unsafe(expr.span,
"dereference of unsafe pointer")
}
_ => {}
}
}
ast::ExprAssign(ref base, _) | ast::ExprAssignOp(_, ref base, _) => {
self.check_str_index(&**base);
}
ast::ExprAddrOf(ast::MutMutable, ref base) => {
self.check_str_index(&**base);
}
ast::ExprInlineAsm(..) => {
self.require_unsafe(expr.span, "use of inline assembly")
}
ast::ExprPath(..) => {
match ty::resolve_expr(self.tcx, expr) {
def::DefStatic(_, true) => {
self.require_unsafe(expr.span, "use of mutable static")
}
_ => {}
}
}
_ => {}
}
visit::walk_expr(self, expr);
}
}
pub fn check_crate(tcx: &ty::ctxt) {
let mut visitor = EffectCheckVisitor {
tcx: tcx,
unsafe_context: SafeContext,
};
visit::walk_crate(&mut visitor, tcx.map.krate());
}
|
visit_expr
|
identifier_name
|
effect.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Enforces the Rust effect system. Currently there is just one effect,
/// `unsafe`.
use middle::def;
use middle::ty;
use middle::typeck::MethodCall;
use util::ppaux;
use syntax::ast;
use syntax::ast_util::PostExpansionMethod;
use syntax::codemap::Span;
use syntax::visit;
use syntax::visit::Visitor;
#[deriving(PartialEq)]
enum UnsafeContext {
SafeContext,
UnsafeFn,
UnsafeBlock(ast::NodeId),
}
fn type_is_unsafe_function(ty: ty::t) -> bool {
match ty::get(ty).sty {
ty::ty_bare_fn(ref f) => f.fn_style == ast::UnsafeFn,
ty::ty_closure(ref f) => f.fn_style == ast::UnsafeFn,
_ => false,
}
}
struct EffectCheckVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
/// Whether we're in an unsafe context.
unsafe_context: UnsafeContext,
}
impl<'a, 'tcx> EffectCheckVisitor<'a, 'tcx> {
fn require_unsafe(&mut self, span: Span, description: &str)
|
fn check_str_index(&mut self, e: &ast::Expr) {
let base_type = match e.node {
ast::ExprIndex(ref base, _) => ty::node_id_to_type(self.tcx, base.id),
_ => return
};
debug!("effect: checking index with base type {}",
ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty {
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty,..}) => match ty::get(ty).sty {
ty::ty_str => {
span_err!(self.tcx.sess, e.span, E0134,
"modification of string types is not allowed");
}
_ => {}
},
ty::ty_str => {
span_err!(self.tcx.sess, e.span, E0135,
"modification of string types is not allowed");
}
_ => {}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
fn visit_fn(&mut self, fn_kind: visit::FnKind<'v>, fn_decl: &'v ast::FnDecl,
block: &'v ast::Block, span: Span, _: ast::NodeId) {
let (is_item_fn, is_unsafe_fn) = match fn_kind {
visit::FkItemFn(_, _, fn_style, _) =>
(true, fn_style == ast::UnsafeFn),
visit::FkMethod(_, _, method) =>
(true, method.pe_fn_style() == ast::UnsafeFn),
_ => (false, false),
};
let old_unsafe_context = self.unsafe_context;
if is_unsafe_fn {
self.unsafe_context = UnsafeFn
} else if is_item_fn {
self.unsafe_context = SafeContext
}
visit::walk_fn(self, fn_kind, fn_decl, block, span);
self.unsafe_context = old_unsafe_context
}
fn visit_block(&mut self, block: &ast::Block) {
let old_unsafe_context = self.unsafe_context;
match block.rules {
ast::DefaultBlock => {}
ast::UnsafeBlock(source) => {
// By default only the outermost `unsafe` block is
// "used" and so nested unsafe blocks are pointless
// (the inner ones are unnecessary and we actually
// warn about them). As such, there are two cases when
// we need to create a new context, when we're
// - outside `unsafe` and found a `unsafe` block
// (normal case)
// - inside `unsafe`, found an `unsafe` block
// created internally to the compiler
//
// The second case is necessary to ensure that the
// compiler `unsafe` blocks don't accidentally "use"
// external blocks (e.g. `unsafe { println("") }`,
// expands to `unsafe {... unsafe {... } }` where
// the inner one is compiler generated).
if self.unsafe_context == SafeContext || source == ast::CompilerGenerated {
self.unsafe_context = UnsafeBlock(block.id)
}
}
}
visit::walk_block(self, block);
self.unsafe_context = old_unsafe_context
}
fn visit_expr(&mut self, expr: &ast::Expr) {
match expr.node {
ast::ExprMethodCall(_, _, _) => {
let method_call = MethodCall::expr(expr.id);
let base_type = self.tcx.method_map.borrow().get(&method_call).ty;
debug!("effect: method call case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span,
"invocation of unsafe method")
}
}
ast::ExprCall(ref base, _) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: call case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span, "call to unsafe function")
}
}
ast::ExprUnary(ast::UnDeref, ref base) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: unary case, base type is {}",
ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty {
ty::ty_ptr(_) => {
self.require_unsafe(expr.span,
"dereference of unsafe pointer")
}
_ => {}
}
}
ast::ExprAssign(ref base, _) | ast::ExprAssignOp(_, ref base, _) => {
self.check_str_index(&**base);
}
ast::ExprAddrOf(ast::MutMutable, ref base) => {
self.check_str_index(&**base);
}
ast::ExprInlineAsm(..) => {
self.require_unsafe(expr.span, "use of inline assembly")
}
ast::ExprPath(..) => {
match ty::resolve_expr(self.tcx, expr) {
def::DefStatic(_, true) => {
self.require_unsafe(expr.span, "use of mutable static")
}
_ => {}
}
}
_ => {}
}
visit::walk_expr(self, expr);
}
}
pub fn check_crate(tcx: &ty::ctxt) {
let mut visitor = EffectCheckVisitor {
tcx: tcx,
unsafe_context: SafeContext,
};
visit::walk_crate(&mut visitor, tcx.map.krate());
}
|
{
match self.unsafe_context {
SafeContext => {
// Report an error.
span_err!(self.tcx.sess, span, E0133,
"{} requires unsafe function or block",
description);
}
UnsafeBlock(block_id) => {
// OK, but record this.
debug!("effect: recording unsafe block as used: {}", block_id);
self.tcx.used_unsafe.borrow_mut().insert(block_id);
}
UnsafeFn => {}
}
}
|
identifier_body
|
lib.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Terminal formatting library.
//!
//! This crate provides the `Terminal` trait, which abstracts over an [ANSI
//! Terminal][ansi] to provide color printing, among other things. There are two implementations,
//! the `TerminfoTerminal`, which uses control characters from a
//! [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console
//! API][win].
//!
//! # Examples
//!
//! ```no_run
//! extern crate term;
//!
//! use std::io::prelude::*;
//!
//! fn main() {
//! let mut t = term::stdout().unwrap();
//!
//! t.fg(term::color::GREEN).unwrap();
//! (write!(t, "hello, ")).unwrap();
//!
//! t.fg(term::color::RED).unwrap();
//! (writeln!(t, "world!")).unwrap();
//!
//! t.reset().unwrap();
//! }
//! ```
//!
//! [ansi]: https://en.wikipedia.org/wiki/ANSI_escape_code
//! [win]: http://msdn.microsoft.com/en-us/library/windows/desktop/ms682010%28v=vs.85%29.aspx
//! [ti]: https://en.wikipedia.org/wiki/Terminfo
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "term"]
#![unstable(feature = "rustc_private",
reason = "use the crates.io `term` library instead")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![deny(missing_docs)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(int_uint)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(str_char)]
#![feature(path_ext)]
#![cfg_attr(windows, feature(libc))]
#[macro_use] extern crate log;
pub use terminfo::TerminfoTerminal;
#[cfg(windows)]
pub use win::WinConsole;
use std::io::prelude::*;
use std::io;
pub mod terminfo;
#[cfg(windows)]
mod win;
/// A hack to work around the fact that `Box<Write + Send>` does not
/// currently implement `Write`.
pub struct WriterWrapper {
wrapped: Box<Write + Send>,
}
impl Write for WriterWrapper {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.wrapped.write(buf)
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
self.wrapped.flush()
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn
|
() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
}
}
/// Terminal color definitions
pub mod color {
/// Number for a terminal color
pub type Color = u16;
pub const BLACK: Color = 0;
pub const RED: Color = 1;
pub const GREEN: Color = 2;
pub const YELLOW: Color = 3;
pub const BLUE: Color = 4;
pub const MAGENTA: Color = 5;
pub const CYAN: Color = 6;
pub const WHITE: Color = 7;
pub const BRIGHT_BLACK: Color = 8;
pub const BRIGHT_RED: Color = 9;
pub const BRIGHT_GREEN: Color = 10;
pub const BRIGHT_YELLOW: Color = 11;
pub const BRIGHT_BLUE: Color = 12;
pub const BRIGHT_MAGENTA: Color = 13;
pub const BRIGHT_CYAN: Color = 14;
pub const BRIGHT_WHITE: Color = 15;
}
/// Terminal attributes
pub mod attr {
pub use self::Attr::*;
/// Terminal attributes for use with term.attr().
///
/// Most attributes can only be turned on and must be turned off with term.reset().
/// The ones that can be turned off explicitly take a boolean value.
/// Color is also represented as an attribute for convenience.
#[derive(Copy)]
pub enum Attr {
/// Bold (or possibly bright) mode
Bold,
/// Dim mode, also called faint or half-bright. Often not supported
Dim,
/// Italics mode. Often not supported
Italic(bool),
/// Underline mode
Underline(bool),
/// Blink mode
Blink,
/// Standout mode. Often implemented as Reverse, sometimes coupled with Bold
Standout(bool),
/// Reverse mode, inverts the foreground and background colors
Reverse,
/// Secure mode, also called invis mode. Hides the printed text
Secure,
/// Convenience attribute to set the foreground color
ForegroundColor(super::color::Color),
/// Convenience attribute to set the background color
BackgroundColor(super::color::Color)
}
}
/// A terminal with similar capabilities to an ANSI Terminal
/// (foreground/background colors etc).
pub trait Terminal<T: Write>: Write {
/// Sets the foreground color to the given color.
///
/// If the color is a bright color, but the terminal only supports 8 colors,
/// the corresponding normal color will be used instead.
///
/// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
/// if there was an I/O error.
fn fg(&mut self, color: color::Color) -> io::Result<bool>;
/// Sets the background color to the given color.
///
/// If the color is a bright color, but the terminal only supports 8 colors,
/// the corresponding normal color will be used instead.
///
/// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
/// if there was an I/O error.
fn bg(&mut self, color: color::Color) -> io::Result<bool>;
/// Sets the given terminal attribute, if supported. Returns `Ok(true)`
/// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if
/// there was an I/O error.
fn attr(&mut self, attr: attr::Attr) -> io::Result<bool>;
/// Returns whether the given terminal attribute is supported.
fn supports_attr(&self, attr: attr::Attr) -> bool;
/// Resets all terminal attributes and color to the default.
/// Returns `Ok()`.
fn reset(&mut self) -> io::Result<()>;
/// Gets an immutable reference to the stream inside
fn get_ref<'a>(&'a self) -> &'a T;
/// Gets a mutable reference to the stream inside
fn get_mut<'a>(&'a mut self) -> &'a mut T;
}
/// A terminal which can be unwrapped.
pub trait UnwrappableTerminal<T: Write>: Terminal<T> {
/// Returns the contained stream, destroying the `Terminal`
fn unwrap(self) -> T;
}
|
stdout
|
identifier_name
|
lib.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Terminal formatting library.
//!
//! This crate provides the `Terminal` trait, which abstracts over an [ANSI
//! Terminal][ansi] to provide color printing, among other things. There are two implementations,
//! the `TerminfoTerminal`, which uses control characters from a
//! [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console
//! API][win].
//!
//! # Examples
//!
//! ```no_run
//! extern crate term;
//!
//! use std::io::prelude::*;
//!
//! fn main() {
//! let mut t = term::stdout().unwrap();
//!
//! t.fg(term::color::GREEN).unwrap();
//! (write!(t, "hello, ")).unwrap();
//!
//! t.fg(term::color::RED).unwrap();
//! (writeln!(t, "world!")).unwrap();
//!
//! t.reset().unwrap();
//! }
//! ```
//!
//! [ansi]: https://en.wikipedia.org/wiki/ANSI_escape_code
//! [win]: http://msdn.microsoft.com/en-us/library/windows/desktop/ms682010%28v=vs.85%29.aspx
//! [ti]: https://en.wikipedia.org/wiki/Terminfo
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "term"]
#![unstable(feature = "rustc_private",
reason = "use the crates.io `term` library instead")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![deny(missing_docs)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(int_uint)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(str_char)]
#![feature(path_ext)]
#![cfg_attr(windows, feature(libc))]
#[macro_use] extern crate log;
pub use terminfo::TerminfoTerminal;
#[cfg(windows)]
pub use win::WinConsole;
use std::io::prelude::*;
use std::io;
pub mod terminfo;
#[cfg(windows)]
mod win;
/// A hack to work around the fact that `Box<Write + Send>` does not
/// currently implement `Write`.
pub struct WriterWrapper {
wrapped: Box<Write + Send>,
}
impl Write for WriterWrapper {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize>
|
#[inline]
fn flush(&mut self) -> io::Result<()> {
self.wrapped.flush()
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
}
}
/// Terminal color definitions
pub mod color {
/// Number for a terminal color
pub type Color = u16;
pub const BLACK: Color = 0;
pub const RED: Color = 1;
pub const GREEN: Color = 2;
pub const YELLOW: Color = 3;
pub const BLUE: Color = 4;
pub const MAGENTA: Color = 5;
pub const CYAN: Color = 6;
pub const WHITE: Color = 7;
pub const BRIGHT_BLACK: Color = 8;
pub const BRIGHT_RED: Color = 9;
pub const BRIGHT_GREEN: Color = 10;
pub const BRIGHT_YELLOW: Color = 11;
pub const BRIGHT_BLUE: Color = 12;
pub const BRIGHT_MAGENTA: Color = 13;
pub const BRIGHT_CYAN: Color = 14;
pub const BRIGHT_WHITE: Color = 15;
}
/// Terminal attributes
pub mod attr {
pub use self::Attr::*;
/// Terminal attributes for use with term.attr().
///
/// Most attributes can only be turned on and must be turned off with term.reset().
/// The ones that can be turned off explicitly take a boolean value.
/// Color is also represented as an attribute for convenience.
#[derive(Copy)]
pub enum Attr {
/// Bold (or possibly bright) mode
Bold,
/// Dim mode, also called faint or half-bright. Often not supported
Dim,
/// Italics mode. Often not supported
Italic(bool),
/// Underline mode
Underline(bool),
/// Blink mode
Blink,
/// Standout mode. Often implemented as Reverse, sometimes coupled with Bold
Standout(bool),
/// Reverse mode, inverts the foreground and background colors
Reverse,
/// Secure mode, also called invis mode. Hides the printed text
Secure,
/// Convenience attribute to set the foreground color
ForegroundColor(super::color::Color),
/// Convenience attribute to set the background color
BackgroundColor(super::color::Color)
}
}
/// A terminal with similar capabilities to an ANSI Terminal
/// (foreground/background colors etc).
pub trait Terminal<T: Write>: Write {
/// Sets the foreground color to the given color.
///
/// If the color is a bright color, but the terminal only supports 8 colors,
/// the corresponding normal color will be used instead.
///
/// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
/// if there was an I/O error.
fn fg(&mut self, color: color::Color) -> io::Result<bool>;
/// Sets the background color to the given color.
///
/// If the color is a bright color, but the terminal only supports 8 colors,
/// the corresponding normal color will be used instead.
///
/// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
/// if there was an I/O error.
fn bg(&mut self, color: color::Color) -> io::Result<bool>;
/// Sets the given terminal attribute, if supported. Returns `Ok(true)`
/// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if
/// there was an I/O error.
fn attr(&mut self, attr: attr::Attr) -> io::Result<bool>;
/// Returns whether the given terminal attribute is supported.
fn supports_attr(&self, attr: attr::Attr) -> bool;
/// Resets all terminal attributes and color to the default.
/// Returns `Ok()`.
fn reset(&mut self) -> io::Result<()>;
/// Gets an immutable reference to the stream inside
fn get_ref<'a>(&'a self) -> &'a T;
/// Gets a mutable reference to the stream inside
fn get_mut<'a>(&'a mut self) -> &'a mut T;
}
/// A terminal which can be unwrapped.
pub trait UnwrappableTerminal<T: Write>: Terminal<T> {
/// Returns the contained stream, destroying the `Terminal`
fn unwrap(self) -> T;
}
|
{
self.wrapped.write(buf)
}
|
identifier_body
|
lib.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Terminal formatting library.
//!
//! This crate provides the `Terminal` trait, which abstracts over an [ANSI
//! Terminal][ansi] to provide color printing, among other things. There are two implementations,
//! the `TerminfoTerminal`, which uses control characters from a
//! [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console
//! API][win].
//!
//! # Examples
//!
//! ```no_run
//! extern crate term;
//!
//! use std::io::prelude::*;
//!
//! fn main() {
//! let mut t = term::stdout().unwrap();
//!
//! t.fg(term::color::GREEN).unwrap();
//! (write!(t, "hello, ")).unwrap();
//!
//! t.fg(term::color::RED).unwrap();
//! (writeln!(t, "world!")).unwrap();
//!
//! t.reset().unwrap();
//! }
//! ```
//!
//! [ansi]: https://en.wikipedia.org/wiki/ANSI_escape_code
//! [win]: http://msdn.microsoft.com/en-us/library/windows/desktop/ms682010%28v=vs.85%29.aspx
//! [ti]: https://en.wikipedia.org/wiki/Terminfo
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "term"]
#![unstable(feature = "rustc_private",
reason = "use the crates.io `term` library instead")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![deny(missing_docs)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(int_uint)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(str_char)]
#![feature(path_ext)]
#![cfg_attr(windows, feature(libc))]
#[macro_use] extern crate log;
pub use terminfo::TerminfoTerminal;
#[cfg(windows)]
pub use win::WinConsole;
use std::io::prelude::*;
use std::io;
pub mod terminfo;
#[cfg(windows)]
mod win;
/// A hack to work around the fact that `Box<Write + Send>` does not
/// currently implement `Write`.
pub struct WriterWrapper {
wrapped: Box<Write + Send>,
}
impl Write for WriterWrapper {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.wrapped.write(buf)
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
self.wrapped.flush()
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
}
}
|
/// Terminal color definitions
pub mod color {
/// Number for a terminal color
pub type Color = u16;
pub const BLACK: Color = 0;
pub const RED: Color = 1;
pub const GREEN: Color = 2;
pub const YELLOW: Color = 3;
pub const BLUE: Color = 4;
pub const MAGENTA: Color = 5;
pub const CYAN: Color = 6;
pub const WHITE: Color = 7;
pub const BRIGHT_BLACK: Color = 8;
pub const BRIGHT_RED: Color = 9;
pub const BRIGHT_GREEN: Color = 10;
pub const BRIGHT_YELLOW: Color = 11;
pub const BRIGHT_BLUE: Color = 12;
pub const BRIGHT_MAGENTA: Color = 13;
pub const BRIGHT_CYAN: Color = 14;
pub const BRIGHT_WHITE: Color = 15;
}
/// Terminal attributes
pub mod attr {
pub use self::Attr::*;
/// Terminal attributes for use with term.attr().
///
/// Most attributes can only be turned on and must be turned off with term.reset().
/// The ones that can be turned off explicitly take a boolean value.
/// Color is also represented as an attribute for convenience.
#[derive(Copy)]
pub enum Attr {
/// Bold (or possibly bright) mode
Bold,
/// Dim mode, also called faint or half-bright. Often not supported
Dim,
/// Italics mode. Often not supported
Italic(bool),
/// Underline mode
Underline(bool),
/// Blink mode
Blink,
/// Standout mode. Often implemented as Reverse, sometimes coupled with Bold
Standout(bool),
/// Reverse mode, inverts the foreground and background colors
Reverse,
/// Secure mode, also called invis mode. Hides the printed text
Secure,
/// Convenience attribute to set the foreground color
ForegroundColor(super::color::Color),
/// Convenience attribute to set the background color
BackgroundColor(super::color::Color)
}
}
/// A terminal with similar capabilities to an ANSI Terminal
/// (foreground/background colors etc).
pub trait Terminal<T: Write>: Write {
    /// Sets the foreground color to the given color.
    ///
    /// If the color is a bright color, but the terminal only supports 8 colors,
    /// the corresponding normal color will be used instead.
    ///
    /// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
    /// if there was an I/O error.
    fn fg(&mut self, color: color::Color) -> io::Result<bool>;
    /// Sets the background color to the given color.
    ///
    /// If the color is a bright color, but the terminal only supports 8 colors,
    /// the corresponding normal color will be used instead.
    ///
    /// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
    /// if there was an I/O error.
    fn bg(&mut self, color: color::Color) -> io::Result<bool>;
    /// Sets the given terminal attribute, if supported. Returns `Ok(true)`
    /// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if
    /// there was an I/O error.
    fn attr(&mut self, attr: attr::Attr) -> io::Result<bool>;
    /// Returns whether the given terminal attribute is supported.
    fn supports_attr(&self, attr: attr::Attr) -> bool;
    /// Resets all terminal attributes and color to the default.
    /// Returns `Ok(())` on success and `Err(e)` on an I/O error.
    fn reset(&mut self) -> io::Result<()>;
    /// Gets an immutable reference to the stream inside.
    // Lifetimes elide to the `<'a>(&'a self) -> &'a T` form; spelling them out
    // is flagged by clippy's `needless_lifetimes` and is interface-identical.
    fn get_ref(&self) -> &T;
    /// Gets a mutable reference to the stream inside.
    fn get_mut(&mut self) -> &mut T;
}
/// A terminal which can be unwrapped, recovering the wrapped stream.
pub trait UnwrappableTerminal<T: Write>: Terminal<T> {
    /// Consumes the `Terminal`, returning the contained output stream.
    fn unwrap(self) -> T;
}
|
random_line_split
|
|
state.rs
|
use ::libc;
#[cfg(feature = "task")]
use super::task::BufProc;
use super::{In, Out, Sig};
use super::control::Control;
#[derive(Copy, Clone, Debug)]
pub enum DeviceState {
/// The current task.
#[cfg(feature = "task")] Proc(BufProc),
/// Update.
Idle,
|
/// The current character.
InText(Control),
}
impl DeviceState {
#[cfg(feature = "task")]
/// The constructor method `from_task` returns a Process' event.
pub fn from_task(name: BufProc) -> Self {
DeviceState::Proc(name)
}
/// The constructor method `from_idle` returns a Update's event.
pub fn from_idle() -> Self {
DeviceState::Idle
}
/// The constructor method `from_out` returns a text Output's event.
pub fn from_out(buf: Out, len: libc::size_t) -> Self {
DeviceState::OutText(buf, len)
}
/// The constructor method `from_out` returns a key Input's event.
pub fn from_in(buf: In, len: libc::size_t) -> Self {
DeviceState::InText(Control::new(buf, len))
}
/// The constructor method `from_ig` returns a Signal's event.
pub fn from_sig(sig: libc::c_int) -> Self {
DeviceState::Sig(sig)
}
#[cfg(feature = "task")]
/// The accessor method `is_task` returns a Process' event.
pub fn is_task(self) -> Option<BufProc> {
match self {
DeviceState::Proc(name) => Some(name),
_ => None,
}
}
/// The accessor method `is_idle` returns a Option for Update's event.
pub fn is_idle(&self) -> Option<()> {
match *self {
DeviceState::Idle => Some(()),
_ => None,
}
}
/// The accessor method `is_out_text` returns a Option for Ouput's event.
pub fn is_out_text(&self) -> Option<(Out, libc::size_t)> {
match *self {
DeviceState::OutText(buf, len) => Some((buf, len)),
_ => None,
}
}
/// The accessor method `is_input` returns a Option for key
/// or mouse Input's event.
pub fn is_input(&self) -> Option<Control> {
match *self {
DeviceState::InText(event) => Some(event),
_ => None,
}
}
/// The accessor method `is_signal` returns a Option for Signal's event.
pub fn is_signal(&self) -> Option<libc::c_int> {
match *self {
DeviceState::Sig(sig) => Some(sig),
_ => None,
}
}
}
|
/// As catched a signal.
Sig(Sig),
/// The output of new lines.
OutText(Out, libc::size_t),
|
random_line_split
|
state.rs
|
use ::libc;
#[cfg(feature = "task")]
use super::task::BufProc;
use super::{In, Out, Sig};
use super::control::Control;
/// The set of events a terminal device can report.
#[derive(Copy, Clone, Debug)]
pub enum DeviceState {
    /// The current task.
    #[cfg(feature = "task")] Proc(BufProc),
    /// Update; unit variant carrying no payload.
    Idle,
    /// A signal was caught; wraps the raw signal number (see `from_sig`).
    Sig(Sig),
    /// The output of new lines: an output buffer and its length.
    OutText(Out, libc::size_t),
    /// The current character, wrapped in a `Control`.
    InText(Control),
}
impl DeviceState {
#[cfg(feature = "task")]
/// The constructor method `from_task` returns a Process' event.
pub fn from_task(name: BufProc) -> Self {
DeviceState::Proc(name)
}
/// The constructor method `from_idle` returns a Update's event.
pub fn from_idle() -> Self {
DeviceState::Idle
}
/// The constructor method `from_out` returns a text Output's event.
pub fn from_out(buf: Out, len: libc::size_t) -> Self {
DeviceState::OutText(buf, len)
}
/// The constructor method `from_in` returns a key Input's event,
/// wrapping the raw buffer and length in a `Control`.
pub fn from_in(buf: In, len: libc::size_t) -> Self {
    DeviceState::InText(Control::new(buf, len))
}
/// The constructor method `from_sig` returns a Signal's event
/// from a raw signal number.
pub fn from_sig(sig: libc::c_int) -> Self {
    DeviceState::Sig(sig)
}
#[cfg(feature = "task")]
/// The accessor method `is_task` returns a Process' event.
pub fn is_task(self) -> Option<BufProc> {
match self {
DeviceState::Proc(name) => Some(name),
_ => None,
}
}
/// The accessor method `is_idle` returns a Option for Update's event.
pub fn is_idle(&self) -> Option<()> {
match *self {
DeviceState::Idle => Some(()),
_ => None,
}
}
/// The accessor method `is_out_text` returns an Option for Output's event:
/// `Some((buffer, length))` when the state is `OutText`, `None` otherwise.
pub fn is_out_text(&self) -> Option<(Out, libc::size_t)> {
    match *self {
        DeviceState::OutText(buf, len) => Some((buf, len)),
        _ => None,
    }
}
/// The accessor method `is_input` returns a Option for key
/// or mouse Input's event.
pub fn is_input(&self) -> Option<Control>
|
/// The accessor method `is_signal` returns a Option for Signal's event.
pub fn is_signal(&self) -> Option<libc::c_int> {
match *self {
DeviceState::Sig(sig) => Some(sig),
_ => None,
}
}
}
|
{
match *self {
DeviceState::InText(event) => Some(event),
_ => None,
}
}
|
identifier_body
|
state.rs
|
use ::libc;
#[cfg(feature = "task")]
use super::task::BufProc;
use super::{In, Out, Sig};
use super::control::Control;
#[derive(Copy, Clone, Debug)]
pub enum DeviceState {
/// The current task.
#[cfg(feature = "task")] Proc(BufProc),
/// Update.
Idle,
/// As catched a signal.
Sig(Sig),
/// The output of new lines.
OutText(Out, libc::size_t),
/// The current character.
InText(Control),
}
impl DeviceState {
#[cfg(feature = "task")]
/// The constructor method `from_task` returns a Process' event.
pub fn from_task(name: BufProc) -> Self {
DeviceState::Proc(name)
}
/// The constructor method `from_idle` returns a Update's event.
pub fn from_idle() -> Self {
DeviceState::Idle
}
/// The constructor method `from_out` returns a text Output's event.
pub fn from_out(buf: Out, len: libc::size_t) -> Self {
DeviceState::OutText(buf, len)
}
/// The constructor method `from_out` returns a key Input's event.
pub fn from_in(buf: In, len: libc::size_t) -> Self {
DeviceState::InText(Control::new(buf, len))
}
/// The constructor method `from_ig` returns a Signal's event.
pub fn from_sig(sig: libc::c_int) -> Self {
DeviceState::Sig(sig)
}
#[cfg(feature = "task")]
/// The accessor method `is_task` returns a Process' event.
pub fn is_task(self) -> Option<BufProc> {
match self {
DeviceState::Proc(name) => Some(name),
_ => None,
}
}
/// The accessor method `is_idle` returns a Option for Update's event.
pub fn is_idle(&self) -> Option<()> {
match *self {
DeviceState::Idle => Some(()),
_ => None,
}
}
/// The accessor method `is_out_text` returns a Option for Ouput's event.
pub fn is_out_text(&self) -> Option<(Out, libc::size_t)> {
match *self {
DeviceState::OutText(buf, len) => Some((buf, len)),
_ => None,
}
}
/// The accessor method `is_input` returns a Option for key
/// or mouse Input's event.
pub fn
|
(&self) -> Option<Control> {
match *self {
DeviceState::InText(event) => Some(event),
_ => None,
}
}
/// The accessor method `is_signal` returns a Option for Signal's event.
pub fn is_signal(&self) -> Option<libc::c_int> {
match *self {
DeviceState::Sig(sig) => Some(sig),
_ => None,
}
}
}
|
is_input
|
identifier_name
|
main.rs
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate cargo;
extern crate rustc_serialize;
extern crate itertools;
extern crate tera;
extern crate toml;
#[cfg(test)]
#[macro_use]
extern crate hamcrest;
mod context;
mod planning;
mod rendering;
mod settings;
mod util;
mod bazel;
use bazel::BazelRenderer;
use cargo::CargoError;
use cargo::CliResult;
use cargo::util::CargoResult;
use cargo::util::Config;
use planning::BuildPlanner;
use rendering::FileOutputs;
use rendering::BuildRenderer;
use rendering::RenderDetails;
use settings::RazeSettings;
use settings::GenMode;
use std::env;
use std::fs::File;
use std::path::Path;
use std::io::Read;
use std::io::Write;
#[derive(Debug, RustcDecodable)]
struct Options {
arg_buildprefix: Option<String>,
flag_verbose: u32,
flag_quiet: Option<bool>,
flag_host: Option<String>,
flag_color: Option<String>,
flag_target: Option<String>,
flag_dryrun: Option<bool>,
}
const USAGE: &'static str = r#"
Generate BUILD files for your pre-vendored Cargo dependencies.
Usage:
cargo raze
Options:
-h, --help Print this message
-v, --verbose Use verbose output
--host HOST Registry index to sync with
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
-d, --dryrun Do not emit any files
"#;
/// Binary entry point: builds the default cargo `Config` and delegates
/// argument parsing and execution to `real_main` via cargo's CLI plumbing.
fn main() {
    // `expect` instead of a bare `unwrap` so a broken environment fails
    // with a diagnosable message rather than an anonymous panic.
    let cargo_config = Config::default().expect("failed to construct default cargo Config");
    let args = env::args().collect::<Vec<_>>();
    let result = cargo::call_main_without_stdin(real_main, &cargo_config, USAGE, &args, false);
    // On failure, let cargo print the error through its shell and exit non-zero.
    if let Err(e) = result {
        cargo::exit_with_error(e, &mut *cargo_config.shell());
    }
}
/// CLI worker: loads and validates the raze settings from Cargo.toml,
/// plans the build, renders BUILD file outputs, and writes them out
/// (or just prints them under --dryrun).
fn real_main(options: Options, cargo_config: &Config) -> CliResult {
    // Forward verbosity/quiet/color flags into the shared cargo config.
    try!(cargo_config.configure(options.flag_verbose,
                                options.flag_quiet,
                                &options.flag_color,
                                /* frozen = */ false,
                                /* locked = */ false));
    let mut settings = try!(load_settings("Cargo.toml"));
    println!("Loaded override settings: {:#?}", settings);
    // Normalizes workspace_path in place (strips a trailing '/').
    try!(validate_settings(&mut settings));
    let mut planner = try!(BuildPlanner::new(settings.clone(), cargo_config));
    if let Some(host) = options.flag_host {
        try!(planner.set_registry_from_url(host));
    }
    let planned_build = try!(planner.plan_build());
    let mut bazel_renderer = BazelRenderer::new();
    let render_details = RenderDetails {
        path_prefix: "./".to_owned(),
    };
    // NOTE(review): Vendored appears to render against local sources and
    // Remote against fetched ones — inferred from method names; confirm
    // against the rendering module.
    let bazel_file_outputs = match settings.genmode {
        GenMode::Vendored => try!(bazel_renderer.render_planned_build(&render_details, &planned_build)),
        GenMode::Remote => try!(bazel_renderer.render_remote_planned_build(&render_details, &planned_build)),
        /* exhaustive, we control the definition */
    };
    let dry_run = options.flag_dryrun.unwrap_or(false);
    for FileOutputs { path, contents } in bazel_file_outputs {
        if !dry_run {
            try!(write_to_file_loudly(&path, &contents));
        } else {
            // Dry run: echo what would have been written, keyed by path.
            println!("{}:\n{}", path, contents);
        }
    }
    Ok(())
}
/** Verifies that the provided settings make sense. */
/// Verifies that the provided settings make sense, normalizing the
/// workspace path (a single trailing '/' is stripped) as a side effect.
fn validate_settings(settings: &mut RazeSettings) -> CargoResult<()> {
    // The workspace path must be repo-root-relative.
    if !settings.workspace_path.starts_with("//") {
        return Err(CargoError::from("raze.workspace_path must start with \"//\". Paths into local repositories (such as @local//path) are currently unsupported."));
    }
    // Vendoring straight into the repository root is not supported.
    if settings.workspace_path == "//" {
        return Err(CargoError::from("raze.workspace_path must not be '//' (it is currently unsupported). Its probably not what you want anyway, as this would vendor the crates directly into //vendor."));
    }
    // Drop one trailing slash so downstream joins don't double up.
    let needs_trim = settings.workspace_path.ends_with("/");
    if needs_trim {
        settings.workspace_path.pop();
    }
    Ok(())
}
fn write_to_file_loudly(path: &str, contents: &str) -> CargoResult<()> {
try!(File::create(&path)
.and_then(|mut f| f.write_all(contents.as_bytes()))
.map_err(|_| CargoError::from(format!("failed to create {}", path))));
println!("Generated {} successfully", path);
Ok(())
}
fn
|
<T: AsRef<Path>>(cargo_toml_path: T) -> Result<RazeSettings, CargoError> {
let path = cargo_toml_path.as_ref();
let mut toml = try!(File::open(path)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("Could not load {:?}", path))
}));
let mut toml_contents = String::new();
try!(toml.read_to_string(&mut toml_contents)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("failed to read {:?}", path))
}));
toml::from_str::<settings::CargoToml>(&toml_contents)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("failed to parse {:?}", path))
})
.map(|toml| toml.raze)
}
|
load_settings
|
identifier_name
|
main.rs
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate cargo;
extern crate rustc_serialize;
extern crate itertools;
extern crate tera;
extern crate toml;
#[cfg(test)]
#[macro_use]
extern crate hamcrest;
mod context;
mod planning;
mod rendering;
mod settings;
mod util;
mod bazel;
use bazel::BazelRenderer;
use cargo::CargoError;
use cargo::CliResult;
use cargo::util::CargoResult;
use cargo::util::Config;
use planning::BuildPlanner;
use rendering::FileOutputs;
use rendering::BuildRenderer;
use rendering::RenderDetails;
use settings::RazeSettings;
use settings::GenMode;
use std::env;
use std::fs::File;
use std::path::Path;
use std::io::Read;
use std::io::Write;
#[derive(Debug, RustcDecodable)]
struct Options {
arg_buildprefix: Option<String>,
flag_verbose: u32,
flag_quiet: Option<bool>,
flag_host: Option<String>,
flag_color: Option<String>,
flag_target: Option<String>,
flag_dryrun: Option<bool>,
}
const USAGE: &'static str = r#"
Generate BUILD files for your pre-vendored Cargo dependencies.
Usage:
cargo raze
Options:
-h, --help Print this message
-v, --verbose Use verbose output
--host HOST Registry index to sync with
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
-d, --dryrun Do not emit any files
"#;
fn main() {
let cargo_config = Config::default().unwrap();
let args = env::args().collect::<Vec<_>>();
let result = cargo::call_main_without_stdin(real_main, &cargo_config, USAGE, &args, false);
if let Err(e) = result {
cargo::exit_with_error(e, &mut *cargo_config.shell());
}
}
fn real_main(options: Options, cargo_config: &Config) -> CliResult {
try!(cargo_config.configure(options.flag_verbose,
options.flag_quiet,
&options.flag_color,
/* frozen = */ false,
/* locked = */ false));
let mut settings = try!(load_settings("Cargo.toml"));
println!("Loaded override settings: {:#?}", settings);
try!(validate_settings(&mut settings));
let mut planner = try!(BuildPlanner::new(settings.clone(), cargo_config));
if let Some(host) = options.flag_host {
try!(planner.set_registry_from_url(host));
}
let planned_build = try!(planner.plan_build());
let mut bazel_renderer = BazelRenderer::new();
let render_details = RenderDetails {
path_prefix: "./".to_owned(),
};
let bazel_file_outputs = match settings.genmode {
GenMode::Vendored => try!(bazel_renderer.render_planned_build(&render_details, &planned_build)),
GenMode::Remote => try!(bazel_renderer.render_remote_planned_build(&render_details, &planned_build)),
/* exhaustive, we control the definition */
};
let dry_run = options.flag_dryrun.unwrap_or(false);
for FileOutputs { path, contents } in bazel_file_outputs {
if!dry_run {
try!(write_to_file_loudly(&path, &contents));
} else {
println!("{}:\n{}", path, contents);
}
}
Ok(())
}
/** Verifies that the provided settings make sense. */
fn validate_settings(settings: &mut RazeSettings) -> CargoResult<()> {
if!settings.workspace_path.starts_with("//") {
return Err(CargoError::from("raze.workspace_path must start with \"//\". Paths into local repositories (such as @local//path) are currently unsupported."))
}
if settings.workspace_path == "//"
|
if settings.workspace_path.ends_with("/") {
settings.workspace_path.pop();
}
return Ok(())
}
fn write_to_file_loudly(path: &str, contents: &str) -> CargoResult<()> {
try!(File::create(&path)
.and_then(|mut f| f.write_all(contents.as_bytes()))
.map_err(|_| CargoError::from(format!("failed to create {}", path))));
println!("Generated {} successfully", path);
Ok(())
}
/// Loads the raze settings table out of the Cargo.toml at `cargo_toml_path`.
///
/// Each failure (open, read, parse) is printed to stdout and then surfaced
/// as a `CargoError` naming the offending path.
fn load_settings<T: AsRef<Path>>(cargo_toml_path: T) -> Result<RazeSettings, CargoError> {
    let path = cargo_toml_path.as_ref();

    // Pull the whole manifest into memory.
    let mut toml_contents = String::new();
    let mut toml = try!(File::open(path).map_err(|e| {
        println!("{:?}", e);
        CargoError::from(format!("Could not load {:?}", path))
    }));
    try!(toml.read_to_string(&mut toml_contents).map_err(|e| {
        println!("{:?}", e);
        CargoError::from(format!("failed to read {:?}", path))
    }));

    // Deserialize and keep only the raze-specific subsection.
    let parsed = toml::from_str::<settings::CargoToml>(&toml_contents);
    parsed
        .map(|toml| toml.raze)
        .map_err(|e| {
            println!("{:?}", e);
            CargoError::from(format!("failed to parse {:?}", path))
        })
}
|
{
return Err(CargoError::from("raze.workspace_path must not be '//' (it is currently unsupported). Its probably not what you want anyway, as this would vendor the crates directly into //vendor."));
}
|
conditional_block
|
main.rs
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate cargo;
extern crate rustc_serialize;
extern crate itertools;
extern crate tera;
extern crate toml;
#[cfg(test)]
#[macro_use]
extern crate hamcrest;
mod context;
mod planning;
mod rendering;
mod settings;
mod util;
mod bazel;
use bazel::BazelRenderer;
use cargo::CargoError;
use cargo::CliResult;
use cargo::util::CargoResult;
use cargo::util::Config;
use planning::BuildPlanner;
use rendering::FileOutputs;
use rendering::BuildRenderer;
use rendering::RenderDetails;
use settings::RazeSettings;
use settings::GenMode;
use std::env;
use std::fs::File;
use std::path::Path;
use std::io::Read;
use std::io::Write;
#[derive(Debug, RustcDecodable)]
struct Options {
arg_buildprefix: Option<String>,
flag_verbose: u32,
flag_quiet: Option<bool>,
flag_host: Option<String>,
flag_color: Option<String>,
flag_target: Option<String>,
flag_dryrun: Option<bool>,
}
const USAGE: &'static str = r#"
Generate BUILD files for your pre-vendored Cargo dependencies.
Usage:
cargo raze
Options:
-h, --help Print this message
-v, --verbose Use verbose output
--host HOST Registry index to sync with
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
-d, --dryrun Do not emit any files
"#;
fn main() {
let cargo_config = Config::default().unwrap();
let args = env::args().collect::<Vec<_>>();
let result = cargo::call_main_without_stdin(real_main, &cargo_config, USAGE, &args, false);
if let Err(e) = result {
cargo::exit_with_error(e, &mut *cargo_config.shell());
}
}
fn real_main(options: Options, cargo_config: &Config) -> CliResult {
try!(cargo_config.configure(options.flag_verbose,
options.flag_quiet,
&options.flag_color,
/* frozen = */ false,
/* locked = */ false));
let mut settings = try!(load_settings("Cargo.toml"));
println!("Loaded override settings: {:#?}", settings);
try!(validate_settings(&mut settings));
let mut planner = try!(BuildPlanner::new(settings.clone(), cargo_config));
if let Some(host) = options.flag_host {
try!(planner.set_registry_from_url(host));
}
let planned_build = try!(planner.plan_build());
let mut bazel_renderer = BazelRenderer::new();
let render_details = RenderDetails {
path_prefix: "./".to_owned(),
};
let bazel_file_outputs = match settings.genmode {
GenMode::Vendored => try!(bazel_renderer.render_planned_build(&render_details, &planned_build)),
GenMode::Remote => try!(bazel_renderer.render_remote_planned_build(&render_details, &planned_build)),
/* exhaustive, we control the definition */
};
let dry_run = options.flag_dryrun.unwrap_or(false);
for FileOutputs { path, contents } in bazel_file_outputs {
if!dry_run {
try!(write_to_file_loudly(&path, &contents));
} else {
println!("{}:\n{}", path, contents);
}
}
Ok(())
}
/** Verifies that the provided settings make sense. */
fn validate_settings(settings: &mut RazeSettings) -> CargoResult<()>
|
fn write_to_file_loudly(path: &str, contents: &str) -> CargoResult<()> {
try!(File::create(&path)
.and_then(|mut f| f.write_all(contents.as_bytes()))
.map_err(|_| CargoError::from(format!("failed to create {}", path))));
println!("Generated {} successfully", path);
Ok(())
}
fn load_settings<T: AsRef<Path>>(cargo_toml_path: T) -> Result<RazeSettings, CargoError> {
let path = cargo_toml_path.as_ref();
let mut toml = try!(File::open(path)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("Could not load {:?}", path))
}));
let mut toml_contents = String::new();
try!(toml.read_to_string(&mut toml_contents)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("failed to read {:?}", path))
}));
toml::from_str::<settings::CargoToml>(&toml_contents)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("failed to parse {:?}", path))
})
.map(|toml| toml.raze)
}
|
{
if !settings.workspace_path.starts_with("//") {
return Err(CargoError::from("raze.workspace_path must start with \"//\". Paths into local repositories (such as @local//path) are currently unsupported."))
}
if settings.workspace_path == "//" {
return Err(CargoError::from("raze.workspace_path must not be '//' (it is currently unsupported). Its probably not what you want anyway, as this would vendor the crates directly into //vendor."));
}
if settings.workspace_path.ends_with("/") {
settings.workspace_path.pop();
}
return Ok(())
}
|
identifier_body
|
main.rs
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate cargo;
extern crate rustc_serialize;
extern crate itertools;
extern crate tera;
extern crate toml;
#[cfg(test)]
#[macro_use]
extern crate hamcrest;
mod context;
mod planning;
mod rendering;
mod settings;
mod util;
mod bazel;
use bazel::BazelRenderer;
use cargo::CargoError;
use cargo::CliResult;
use cargo::util::CargoResult;
use cargo::util::Config;
use planning::BuildPlanner;
use rendering::FileOutputs;
use rendering::BuildRenderer;
use rendering::RenderDetails;
use settings::RazeSettings;
use settings::GenMode;
use std::env;
use std::fs::File;
use std::path::Path;
use std::io::Read;
use std::io::Write;
#[derive(Debug, RustcDecodable)]
struct Options {
arg_buildprefix: Option<String>,
flag_verbose: u32,
flag_quiet: Option<bool>,
flag_host: Option<String>,
flag_color: Option<String>,
flag_target: Option<String>,
flag_dryrun: Option<bool>,
}
const USAGE: &'static str = r#"
Generate BUILD files for your pre-vendored Cargo dependencies.
Usage:
cargo raze
Options:
-h, --help Print this message
-v, --verbose Use verbose output
--host HOST Registry index to sync with
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
-d, --dryrun Do not emit any files
"#;
fn main() {
let cargo_config = Config::default().unwrap();
let args = env::args().collect::<Vec<_>>();
let result = cargo::call_main_without_stdin(real_main, &cargo_config, USAGE, &args, false);
if let Err(e) = result {
cargo::exit_with_error(e, &mut *cargo_config.shell());
}
}
fn real_main(options: Options, cargo_config: &Config) -> CliResult {
try!(cargo_config.configure(options.flag_verbose,
options.flag_quiet,
&options.flag_color,
/* frozen = */ false,
/* locked = */ false));
let mut settings = try!(load_settings("Cargo.toml"));
println!("Loaded override settings: {:#?}", settings);
try!(validate_settings(&mut settings));
let mut planner = try!(BuildPlanner::new(settings.clone(), cargo_config));
if let Some(host) = options.flag_host {
try!(planner.set_registry_from_url(host));
}
|
let planned_build = try!(planner.plan_build());
let mut bazel_renderer = BazelRenderer::new();
let render_details = RenderDetails {
path_prefix: "./".to_owned(),
};
let bazel_file_outputs = match settings.genmode {
GenMode::Vendored => try!(bazel_renderer.render_planned_build(&render_details, &planned_build)),
GenMode::Remote => try!(bazel_renderer.render_remote_planned_build(&render_details, &planned_build)),
/* exhaustive, we control the definition */
};
let dry_run = options.flag_dryrun.unwrap_or(false);
for FileOutputs { path, contents } in bazel_file_outputs {
if!dry_run {
try!(write_to_file_loudly(&path, &contents));
} else {
println!("{}:\n{}", path, contents);
}
}
Ok(())
}
/** Verifies that the provided settings make sense. */
fn validate_settings(settings: &mut RazeSettings) -> CargoResult<()> {
if!settings.workspace_path.starts_with("//") {
return Err(CargoError::from("raze.workspace_path must start with \"//\". Paths into local repositories (such as @local//path) are currently unsupported."))
}
if settings.workspace_path == "//" {
return Err(CargoError::from("raze.workspace_path must not be '//' (it is currently unsupported). Its probably not what you want anyway, as this would vendor the crates directly into //vendor."));
}
if settings.workspace_path.ends_with("/") {
settings.workspace_path.pop();
}
return Ok(())
}
fn write_to_file_loudly(path: &str, contents: &str) -> CargoResult<()> {
try!(File::create(&path)
.and_then(|mut f| f.write_all(contents.as_bytes()))
.map_err(|_| CargoError::from(format!("failed to create {}", path))));
println!("Generated {} successfully", path);
Ok(())
}
fn load_settings<T: AsRef<Path>>(cargo_toml_path: T) -> Result<RazeSettings, CargoError> {
let path = cargo_toml_path.as_ref();
let mut toml = try!(File::open(path)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("Could not load {:?}", path))
}));
let mut toml_contents = String::new();
try!(toml.read_to_string(&mut toml_contents)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("failed to read {:?}", path))
}));
toml::from_str::<settings::CargoToml>(&toml_contents)
.map_err(|e| {
println!("{:?}", e);
CargoError::from(format!("failed to parse {:?}", path))
})
.map(|toml| toml.raze)
}
|
random_line_split
|
|
fibonacci_word.rs
|
// Implements http://rosettacode.org/wiki/Fibonacci_word
use entropy::shannon_entropy;
mod entropy;
// Returns "amount" fibonacci words as a vector of tuples
// The first value of the tuple is the length of the word
// and the second one its entropy
fn fib_words(amount: usize) -> Vec<(usize, f64)> {
let mut data = Vec::with_capacity(amount);
let mut previous = String::from("1");
|
let mut next = String::from("0");
// The first two words (we need to add them manually because
// they are the base of the sequence)
data.push((previous.len(), shannon_entropy(&previous[..])));
data.push((next.len(), shannon_entropy(&next[..])));
// The rest of the words
for _ in (3..amount + 1) {
let temp = next.clone();
next.push_str(&previous[..]);
previous = temp;
data.push((next.len(), shannon_entropy(&next[..])));
}
data
}
#[cfg(not(test))]
fn main() {
println!("Calculating... This may take a couple of minutes...\n");
let words = fib_words(18);
let mut i = 1;
println!("{:>2}:{:>10} {}", "N", "length", "entropy");
for &(length, entropy) in &words {
println!("{:>2}:{:>10} {:.15}", i, length, entropy);
i += 1;
}
}
#[test]
fn test_fibonacii_words() {
let expected = vec![
(1, 0.000000000000000f64),
(1, 0.000000000000000),
(2, 1.000000000000000),
(3, 0.918295834054490),
(5, 0.970950594454669),
(8, 0.954434002924965),
(13, 0.961236604722876),
(21, 0.958711882977132),
(34, 0.959686893774217),
(55, 0.959316032054378),
(89, 0.959457915838670),
(144, 0.959403754221023),
(233, 0.959424446955987),
(377, 0.959416543740441),
(610, 0.959419562603144),
(987, 0.959418409515224),
(1597, 0.959418849957810),
(2584, 0.959418681724032)];
let epsilon = 0.0000000001f64;
let output = fib_words(18);
for ((output_length, output_entropy), (expected_length, expected_entropy))
in output.into_iter().zip(expected.into_iter()) {
assert!(output_length == expected_length);
assert!((output_entropy - expected_entropy).abs() < epsilon);
}
}
|
random_line_split
|
|
fibonacci_word.rs
|
// Implements http://rosettacode.org/wiki/Fibonacci_word
use entropy::shannon_entropy;
mod entropy;
// Returns "amount" fibonacci words as a vector of tuples
// The first value of the tuple is the length of the word
// and the second one its entropy
fn fib_words(amount: usize) -> Vec<(usize, f64)>
|
#[cfg(not(test))]
fn main() {
println!("Calculating... This may take a couple of minutes...\n");
let words = fib_words(18);
let mut i = 1;
println!("{:>2}:{:>10} {}", "N", "length", "entropy");
for &(length, entropy) in &words {
println!("{:>2}:{:>10} {:.15}", i, length, entropy);
i += 1;
}
}
#[test]
fn test_fibonacii_words() {
let expected = vec![
(1, 0.000000000000000f64),
(1, 0.000000000000000),
(2, 1.000000000000000),
(3, 0.918295834054490),
(5, 0.970950594454669),
(8, 0.954434002924965),
(13, 0.961236604722876),
(21, 0.958711882977132),
(34, 0.959686893774217),
(55, 0.959316032054378),
(89, 0.959457915838670),
(144, 0.959403754221023),
(233, 0.959424446955987),
(377, 0.959416543740441),
(610, 0.959419562603144),
(987, 0.959418409515224),
(1597, 0.959418849957810),
(2584, 0.959418681724032)];
let epsilon = 0.0000000001f64;
let output = fib_words(18);
for ((output_length, output_entropy), (expected_length, expected_entropy))
in output.into_iter().zip(expected.into_iter()) {
assert!(output_length == expected_length);
assert!((output_entropy - expected_entropy).abs() < epsilon);
}
}
|
{
let mut data = Vec::with_capacity(amount);
let mut previous = String::from("1");
let mut next = String::from("0");
// The first two words (we need to add them manually because
// they are the base of the sequence)
data.push((previous.len(), shannon_entropy(&previous[..])));
data.push((next.len(), shannon_entropy(&next[..])));
// The rest of the words
for _ in (3..amount + 1) {
let temp = next.clone();
next.push_str(&previous[..]);
previous = temp;
data.push((next.len(), shannon_entropy(&next[..])));
}
data
}
|
identifier_body
|
fibonacci_word.rs
|
// Implements http://rosettacode.org/wiki/Fibonacci_word
use entropy::shannon_entropy;
mod entropy;
// Returns "amount" fibonacci words as a vector of tuples
// The first value of the tuple is the length of the word
// and the second one its entropy
fn fib_words(amount: usize) -> Vec<(usize, f64)> {
let mut data = Vec::with_capacity(amount);
let mut previous = String::from("1");
let mut next = String::from("0");
// The first two words (we need to add them manually because
// they are the base of the sequence)
data.push((previous.len(), shannon_entropy(&previous[..])));
data.push((next.len(), shannon_entropy(&next[..])));
// The rest of the words
for _ in (3..amount + 1) {
let temp = next.clone();
next.push_str(&previous[..]);
previous = temp;
data.push((next.len(), shannon_entropy(&next[..])));
}
data
}
#[cfg(not(test))]
fn main() {
println!("Calculating... This may take a couple of minutes...\n");
let words = fib_words(18);
let mut i = 1;
println!("{:>2}:{:>10} {}", "N", "length", "entropy");
for &(length, entropy) in &words {
println!("{:>2}:{:>10} {:.15}", i, length, entropy);
i += 1;
}
}
#[test]
fn
|
() {
let expected = vec![
(1, 0.000000000000000f64),
(1, 0.000000000000000),
(2, 1.000000000000000),
(3, 0.918295834054490),
(5, 0.970950594454669),
(8, 0.954434002924965),
(13, 0.961236604722876),
(21, 0.958711882977132),
(34, 0.959686893774217),
(55, 0.959316032054378),
(89, 0.959457915838670),
(144, 0.959403754221023),
(233, 0.959424446955987),
(377, 0.959416543740441),
(610, 0.959419562603144),
(987, 0.959418409515224),
(1597, 0.959418849957810),
(2584, 0.959418681724032)];
let epsilon = 0.0000000001f64;
let output = fib_words(18);
for ((output_length, output_entropy), (expected_length, expected_entropy))
in output.into_iter().zip(expected.into_iter()) {
assert!(output_length == expected_length);
assert!((output_entropy - expected_entropy).abs() < epsilon);
}
}
|
test_fibonacii_words
|
identifier_name
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.