file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
mod.rs | use std::fs;
use std::io;
pub mod cgroup_reader;
pub mod ns_reader;
pub mod process;
fn load_cgroups(procs: Vec<process::Process>) -> Vec<cgroup_reader::Reader> {
let cgroups = Vec::<cgroup_reader::Reader>::new();
for p in procs {
cgroups.push(cgroup_reader::new(p.pid));
}
cgroups
}
pub struct Container {
processes: Vec<process::Process>,
namespaces: Vec<String>,
cgroups: Vec<cgroup_reader::Reader>,
update_intv: i32
}
impl Container {
fn update(&self) {
loop {
for cgroup in self.cgroups {
let kvs = cgroup.read();
for kv in kvs {
let (key, val) = kv;
println!("{} : {}", key, val);
}
}
}
}
}
pub fn | (group: ns_reader::NS_Group) -> Container {
let (namespaces, process) = group;
return Container{
processes: process,
namespaces: namespaces,
cgroups: load_cgroups(process),
update_intv: 1,
}
}
| new | identifier_name |
mod.rs | use std::fs;
use std::io;
pub mod cgroup_reader;
pub mod ns_reader;
pub mod process;
fn load_cgroups(procs: Vec<process::Process>) -> Vec<cgroup_reader::Reader> {
let cgroups = Vec::<cgroup_reader::Reader>::new();
for p in procs {
cgroups.push(cgroup_reader::new(p.pid));
}
cgroups
}
pub struct Container {
processes: Vec<process::Process>,
namespaces: Vec<String>,
cgroups: Vec<cgroup_reader::Reader>,
update_intv: i32
}
impl Container {
fn update(&self) {
loop {
for cgroup in self.cgroups {
let kvs = cgroup.read();
for kv in kvs {
let (key, val) = kv;
println!("{} : {}", key, val);
}
} | pub fn new(group: ns_reader::NS_Group) -> Container {
let (namespaces, process) = group;
return Container{
processes: process,
namespaces: namespaces,
cgroups: load_cgroups(process),
update_intv: 1,
}
} | }
}
}
| random_line_split |
regions-infer-invariance-due-to-decl.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::marker;
struct invariant<'a> {
marker: marker::PhantomData<*mut &'a()>
}
fn to_same_lifetime<'r>(b_isize: invariant<'r>) {
let bj: invariant<'r> = b_isize;
}
fn to_longer_lifetime<'r>(b_isize: invariant<'r>) -> invariant<'static> {
b_isize //~ ERROR mismatched types
}
fn main() | {
} | identifier_body |
|
regions-infer-invariance-due-to-decl.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::marker;
struct invariant<'a> {
marker: marker::PhantomData<*mut &'a()>
}
fn to_same_lifetime<'r>(b_isize: invariant<'r>) {
let bj: invariant<'r> = b_isize;
}
fn to_longer_lifetime<'r>(b_isize: invariant<'r>) -> invariant<'static> {
b_isize //~ ERROR mismatched types
}
fn main() {
} | // file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | random_line_split |
regions-infer-invariance-due-to-decl.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::marker;
struct invariant<'a> {
marker: marker::PhantomData<*mut &'a()>
}
fn to_same_lifetime<'r>(b_isize: invariant<'r>) {
let bj: invariant<'r> = b_isize;
}
fn to_longer_lifetime<'r>(b_isize: invariant<'r>) -> invariant<'static> {
b_isize //~ ERROR mismatched types
}
fn | () {
}
| main | identifier_name |
menubutton.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! A widget that shows a menu when clicked on
use gtk::{mod, ffi};
use gtk::cast::GTK_MENUBUTTON;
use gtk::ArrowType;
/// MenuButton — A widget that shows a menu when clicked on
struct_Widget!(MenuButton)
impl MenuButton {
pub fn new() -> Option<MenuButton> {
let tmp_pointer = unsafe { ffi::gtk_menu_button_new() };
check_pointer!(tmp_pointer, MenuButton)
}
pub fn set_popup<T: gtk::WidgetTrait>(&mut self, popup: &T) -> () {
unsafe {
ffi::gtk_menu_button_set_popup(GTK_MENUBUTTON(self.pointer), popup.get_widget());
}
}
pub fn set_direction(&mut self, direction: ArrowType) -> () {
unsafe {
ffi::gtk_menu_button_set_direction(GTK_MENUBUTTON(self.pointer), direction);
}
}
pub fn get_direction(&self) -> ArrowType {
unsafe {
ffi::gtk_menu_button_get_direction(GTK_MENUBUTTON(self.pointer))
}
}
pub fn set_align_widget<T: gtk::WidgetTrait>(&mut self, align_widget: &T) -> () {
|
impl_drop!(MenuButton)
impl_TraitWidget!(MenuButton)
impl gtk::ContainerTrait for MenuButton {}
impl gtk::ButtonTrait for MenuButton {}
impl gtk::ToggleButtonTrait for MenuButton {}
impl_widget_events!(MenuButton)
| unsafe {
ffi::gtk_menu_button_set_align_widget(GTK_MENUBUTTON(self.pointer), align_widget.get_widget())
}
}
} | identifier_body |
menubutton.rs | // This file is part of rgtk. | // rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! A widget that shows a menu when clicked on
use gtk::{mod, ffi};
use gtk::cast::GTK_MENUBUTTON;
use gtk::ArrowType;
/// MenuButton — A widget that shows a menu when clicked on
struct_Widget!(MenuButton)
impl MenuButton {
pub fn new() -> Option<MenuButton> {
let tmp_pointer = unsafe { ffi::gtk_menu_button_new() };
check_pointer!(tmp_pointer, MenuButton)
}
pub fn set_popup<T: gtk::WidgetTrait>(&mut self, popup: &T) -> () {
unsafe {
ffi::gtk_menu_button_set_popup(GTK_MENUBUTTON(self.pointer), popup.get_widget());
}
}
pub fn set_direction(&mut self, direction: ArrowType) -> () {
unsafe {
ffi::gtk_menu_button_set_direction(GTK_MENUBUTTON(self.pointer), direction);
}
}
pub fn get_direction(&self) -> ArrowType {
unsafe {
ffi::gtk_menu_button_get_direction(GTK_MENUBUTTON(self.pointer))
}
}
pub fn set_align_widget<T: gtk::WidgetTrait>(&mut self, align_widget: &T) -> () {
unsafe {
ffi::gtk_menu_button_set_align_widget(GTK_MENUBUTTON(self.pointer), align_widget.get_widget())
}
}
}
impl_drop!(MenuButton)
impl_TraitWidget!(MenuButton)
impl gtk::ContainerTrait for MenuButton {}
impl gtk::ButtonTrait for MenuButton {}
impl gtk::ToggleButtonTrait for MenuButton {}
impl_widget_events!(MenuButton) | // | random_line_split |
menubutton.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! A widget that shows a menu when clicked on
use gtk::{mod, ffi};
use gtk::cast::GTK_MENUBUTTON;
use gtk::ArrowType;
/// MenuButton — A widget that shows a menu when clicked on
struct_Widget!(MenuButton)
impl MenuButton {
pub fn ne | -> Option<MenuButton> {
let tmp_pointer = unsafe { ffi::gtk_menu_button_new() };
check_pointer!(tmp_pointer, MenuButton)
}
pub fn set_popup<T: gtk::WidgetTrait>(&mut self, popup: &T) -> () {
unsafe {
ffi::gtk_menu_button_set_popup(GTK_MENUBUTTON(self.pointer), popup.get_widget());
}
}
pub fn set_direction(&mut self, direction: ArrowType) -> () {
unsafe {
ffi::gtk_menu_button_set_direction(GTK_MENUBUTTON(self.pointer), direction);
}
}
pub fn get_direction(&self) -> ArrowType {
unsafe {
ffi::gtk_menu_button_get_direction(GTK_MENUBUTTON(self.pointer))
}
}
pub fn set_align_widget<T: gtk::WidgetTrait>(&mut self, align_widget: &T) -> () {
unsafe {
ffi::gtk_menu_button_set_align_widget(GTK_MENUBUTTON(self.pointer), align_widget.get_widget())
}
}
}
impl_drop!(MenuButton)
impl_TraitWidget!(MenuButton)
impl gtk::ContainerTrait for MenuButton {}
impl gtk::ButtonTrait for MenuButton {}
impl gtk::ToggleButtonTrait for MenuButton {}
impl_widget_events!(MenuButton)
| w() | identifier_name |
error.rs | // Copyright (c) 2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | #[derive(Debug, Fail)]
pub enum Error {
#[fail(display = "Invalid bind specification '{}'", _0)]
InvalidBindSpec(String),
#[fail(display = "Invalid topology '{}'. Possible values: standalone, leader", _0)]
InvalidTopology(String),
#[fail(display = "Invalid binding \"{}\", must be of the form <NAME>:<SERVICE_GROUP> where \
<NAME> is a service name and <SERVICE_GROUP> is a valid service group",
_0)]
InvalidBinding(String),
#[fail(display = "{}", _0)]
HabitatCore(hcore::Error),
}
impl From<hcore::Error> for Error {
fn from(err: hcore::Error) -> Error {
Error::HabitatCore(err)
}
} | // See the License for the specific language governing permissions and
// limitations under the License.
use hcore;
| random_line_split |
error.rs | // Copyright (c) 2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use hcore;
#[derive(Debug, Fail)]
pub enum Error {
#[fail(display = "Invalid bind specification '{}'", _0)]
InvalidBindSpec(String),
#[fail(display = "Invalid topology '{}'. Possible values: standalone, leader", _0)]
InvalidTopology(String),
#[fail(display = "Invalid binding \"{}\", must be of the form <NAME>:<SERVICE_GROUP> where \
<NAME> is a service name and <SERVICE_GROUP> is a valid service group",
_0)]
InvalidBinding(String),
#[fail(display = "{}", _0)]
HabitatCore(hcore::Error),
}
impl From<hcore::Error> for Error {
fn | (err: hcore::Error) -> Error {
Error::HabitatCore(err)
}
}
| from | identifier_name |
fold.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed |
use clean::*;
use std::iter::Extendable;
use std::mem::{replace, swap};
pub trait DocFolder {
fn fold_item(&mut self, item: Item) -> Option<Item> {
self.fold_item_recur(item)
}
/// don't override!
fn fold_item_recur(&mut self, item: Item) -> Option<Item> {
let Item { attrs, name, source, visibility, def_id, inner } = item;
let inner = inner;
let inner = match inner {
StructItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.fields);
let num_fields = foo.len();
i.fields.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.fields_stripped |= num_fields!= i.fields.len();
StructItem(i)
},
ModuleItem(i) => {
ModuleItem(self.fold_mod(i))
},
EnumItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.variants);
let num_variants = foo.len();
i.variants.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.variants_stripped |= num_variants!= i.variants.len();
EnumItem(i)
},
TraitItem(mut i) => {
fn vtrm<T: DocFolder>(this: &mut T, trm: TraitMethod) -> Option<TraitMethod> {
match trm {
Required(it) => {
match this.fold_item(it) {
Some(x) => return Some(Required(x)),
None => return None,
}
},
Provided(it) => {
match this.fold_item(it) {
Some(x) => return Some(Provided(x)),
None => return None,
}
},
}
}
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| vtrm(self, x)));
TraitItem(i)
},
ImplItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
ImplItem(i)
},
VariantItem(i) => {
let i2 = i.clone(); // this clone is small
match i.kind {
StructVariant(mut j) => {
let mut foo = Vec::new(); swap(&mut foo, &mut j.fields);
let num_fields = foo.len();
let c = |x| self.fold_item(x);
j.fields.extend(foo.move_iter().filter_map(c));
j.fields_stripped |= num_fields!= j.fields.len();
VariantItem(Variant {kind: StructVariant(j),..i2})
},
_ => VariantItem(i2)
}
},
x => x
};
Some(Item { attrs: attrs, name: name, source: source, inner: inner,
visibility: visibility, def_id: def_id })
}
fn fold_mod(&mut self, m: Module) -> Module {
Module {
is_crate: m.is_crate,
items: m.items.move_iter().filter_map(|i| self.fold_item(i)).collect()
}
}
fn fold_crate(&mut self, mut c: Crate) -> Crate {
c.module = match replace(&mut c.module, None) {
Some(module) => self.fold_item(module), None => None
};
return c;
}
} | // except according to those terms. | random_line_split |
fold.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use clean::*;
use std::iter::Extendable;
use std::mem::{replace, swap};
pub trait DocFolder {
fn fold_item(&mut self, item: Item) -> Option<Item> {
self.fold_item_recur(item)
}
/// don't override!
fn fold_item_recur(&mut self, item: Item) -> Option<Item> {
let Item { attrs, name, source, visibility, def_id, inner } = item;
let inner = inner;
let inner = match inner {
StructItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.fields);
let num_fields = foo.len();
i.fields.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.fields_stripped |= num_fields!= i.fields.len();
StructItem(i)
},
ModuleItem(i) => {
ModuleItem(self.fold_mod(i))
},
EnumItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.variants);
let num_variants = foo.len();
i.variants.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.variants_stripped |= num_variants!= i.variants.len();
EnumItem(i)
},
TraitItem(mut i) => {
fn vtrm<T: DocFolder>(this: &mut T, trm: TraitMethod) -> Option<TraitMethod> {
match trm {
Required(it) => {
match this.fold_item(it) {
Some(x) => return Some(Required(x)),
None => return None,
}
},
Provided(it) => {
match this.fold_item(it) {
Some(x) => return Some(Provided(x)),
None => return None,
}
},
}
}
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| vtrm(self, x)));
TraitItem(i)
},
ImplItem(mut i) => | ,
VariantItem(i) => {
let i2 = i.clone(); // this clone is small
match i.kind {
StructVariant(mut j) => {
let mut foo = Vec::new(); swap(&mut foo, &mut j.fields);
let num_fields = foo.len();
let c = |x| self.fold_item(x);
j.fields.extend(foo.move_iter().filter_map(c));
j.fields_stripped |= num_fields!= j.fields.len();
VariantItem(Variant {kind: StructVariant(j),..i2})
},
_ => VariantItem(i2)
}
},
x => x
};
Some(Item { attrs: attrs, name: name, source: source, inner: inner,
visibility: visibility, def_id: def_id })
}
fn fold_mod(&mut self, m: Module) -> Module {
Module {
is_crate: m.is_crate,
items: m.items.move_iter().filter_map(|i| self.fold_item(i)).collect()
}
}
fn fold_crate(&mut self, mut c: Crate) -> Crate {
c.module = match replace(&mut c.module, None) {
Some(module) => self.fold_item(module), None => None
};
return c;
}
}
| {
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
ImplItem(i)
} | conditional_block |
fold.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use clean::*;
use std::iter::Extendable;
use std::mem::{replace, swap};
pub trait DocFolder {
fn fold_item(&mut self, item: Item) -> Option<Item> {
self.fold_item_recur(item)
}
/// don't override!
fn fold_item_recur(&mut self, item: Item) -> Option<Item> {
let Item { attrs, name, source, visibility, def_id, inner } = item;
let inner = inner;
let inner = match inner {
StructItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.fields);
let num_fields = foo.len();
i.fields.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.fields_stripped |= num_fields!= i.fields.len();
StructItem(i)
},
ModuleItem(i) => {
ModuleItem(self.fold_mod(i))
},
EnumItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.variants);
let num_variants = foo.len();
i.variants.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.variants_stripped |= num_variants!= i.variants.len();
EnumItem(i)
},
TraitItem(mut i) => {
fn vtrm<T: DocFolder>(this: &mut T, trm: TraitMethod) -> Option<TraitMethod> {
match trm {
Required(it) => {
match this.fold_item(it) {
Some(x) => return Some(Required(x)),
None => return None,
}
},
Provided(it) => {
match this.fold_item(it) {
Some(x) => return Some(Provided(x)),
None => return None,
}
},
}
}
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| vtrm(self, x)));
TraitItem(i)
},
ImplItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
ImplItem(i)
},
VariantItem(i) => {
let i2 = i.clone(); // this clone is small
match i.kind {
StructVariant(mut j) => {
let mut foo = Vec::new(); swap(&mut foo, &mut j.fields);
let num_fields = foo.len();
let c = |x| self.fold_item(x);
j.fields.extend(foo.move_iter().filter_map(c));
j.fields_stripped |= num_fields!= j.fields.len();
VariantItem(Variant {kind: StructVariant(j),..i2})
},
_ => VariantItem(i2)
}
},
x => x
};
Some(Item { attrs: attrs, name: name, source: source, inner: inner,
visibility: visibility, def_id: def_id })
}
fn fold_mod(&mut self, m: Module) -> Module |
fn fold_crate(&mut self, mut c: Crate) -> Crate {
c.module = match replace(&mut c.module, None) {
Some(module) => self.fold_item(module), None => None
};
return c;
}
}
| {
Module {
is_crate: m.is_crate,
items: m.items.move_iter().filter_map(|i| self.fold_item(i)).collect()
}
} | identifier_body |
fold.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use clean::*;
use std::iter::Extendable;
use std::mem::{replace, swap};
pub trait DocFolder {
fn fold_item(&mut self, item: Item) -> Option<Item> {
self.fold_item_recur(item)
}
/// don't override!
fn fold_item_recur(&mut self, item: Item) -> Option<Item> {
let Item { attrs, name, source, visibility, def_id, inner } = item;
let inner = inner;
let inner = match inner {
StructItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.fields);
let num_fields = foo.len();
i.fields.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.fields_stripped |= num_fields!= i.fields.len();
StructItem(i)
},
ModuleItem(i) => {
ModuleItem(self.fold_mod(i))
},
EnumItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.variants);
let num_variants = foo.len();
i.variants.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
i.variants_stripped |= num_variants!= i.variants.len();
EnumItem(i)
},
TraitItem(mut i) => {
fn | <T: DocFolder>(this: &mut T, trm: TraitMethod) -> Option<TraitMethod> {
match trm {
Required(it) => {
match this.fold_item(it) {
Some(x) => return Some(Required(x)),
None => return None,
}
},
Provided(it) => {
match this.fold_item(it) {
Some(x) => return Some(Provided(x)),
None => return None,
}
},
}
}
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| vtrm(self, x)));
TraitItem(i)
},
ImplItem(mut i) => {
let mut foo = Vec::new(); swap(&mut foo, &mut i.methods);
i.methods.extend(foo.move_iter().filter_map(|x| self.fold_item(x)));
ImplItem(i)
},
VariantItem(i) => {
let i2 = i.clone(); // this clone is small
match i.kind {
StructVariant(mut j) => {
let mut foo = Vec::new(); swap(&mut foo, &mut j.fields);
let num_fields = foo.len();
let c = |x| self.fold_item(x);
j.fields.extend(foo.move_iter().filter_map(c));
j.fields_stripped |= num_fields!= j.fields.len();
VariantItem(Variant {kind: StructVariant(j),..i2})
},
_ => VariantItem(i2)
}
},
x => x
};
Some(Item { attrs: attrs, name: name, source: source, inner: inner,
visibility: visibility, def_id: def_id })
}
fn fold_mod(&mut self, m: Module) -> Module {
Module {
is_crate: m.is_crate,
items: m.items.move_iter().filter_map(|i| self.fold_item(i)).collect()
}
}
fn fold_crate(&mut self, mut c: Crate) -> Crate {
c.module = match replace(&mut c.module, None) {
Some(module) => self.fold_item(module), None => None
};
return c;
}
}
| vtrm | identifier_name |
issue-17718-static-unsafe-interior.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | // option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::kinds::marker;
use std::cell::UnsafeCell;
struct MyUnsafe<T> {
value: UnsafeCell<T>
}
impl<T> MyUnsafe<T> {
fn forbidden(&self) {}
}
enum UnsafeEnum<T> {
VariantSafe,
VariantUnsafe(UnsafeCell<T>)
}
static STATIC1: UnsafeEnum<int> = VariantSafe;
static STATIC2: UnsafeCell<int> = UnsafeCell { value: 1 };
const CONST: UnsafeCell<int> = UnsafeCell { value: 1 };
static STATIC3: MyUnsafe<int> = MyUnsafe{value: CONST};
static STATIC4: &'static UnsafeCell<int> = &STATIC2;
struct Wrap<T> {
value: T
}
static UNSAFE: UnsafeCell<int> = UnsafeCell{value: 1};
static WRAPPED_UNSAFE: Wrap<&'static UnsafeCell<int>> = Wrap { value: &UNSAFE };
fn main() {
let a = &STATIC1;
STATIC3.forbidden()
} | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | random_line_split |
issue-17718-static-unsafe-interior.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::kinds::marker;
use std::cell::UnsafeCell;
struct MyUnsafe<T> {
value: UnsafeCell<T>
}
impl<T> MyUnsafe<T> {
fn forbidden(&self) {}
}
enum | <T> {
VariantSafe,
VariantUnsafe(UnsafeCell<T>)
}
static STATIC1: UnsafeEnum<int> = VariantSafe;
static STATIC2: UnsafeCell<int> = UnsafeCell { value: 1 };
const CONST: UnsafeCell<int> = UnsafeCell { value: 1 };
static STATIC3: MyUnsafe<int> = MyUnsafe{value: CONST};
static STATIC4: &'static UnsafeCell<int> = &STATIC2;
struct Wrap<T> {
value: T
}
static UNSAFE: UnsafeCell<int> = UnsafeCell{value: 1};
static WRAPPED_UNSAFE: Wrap<&'static UnsafeCell<int>> = Wrap { value: &UNSAFE };
fn main() {
let a = &STATIC1;
STATIC3.forbidden()
}
| UnsafeEnum | identifier_name |
main.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use bytecode_source_map::utils::{remap_owned_loc_to_loc, source_map_from_file, OwnedLoc};
use move_bytecode_viewer::{
bytecode_viewer::BytecodeViewer, source_viewer::ModuleViewer,
tui::tui_interface::start_tui_with_interface, viewer::Viewer,
};
use std::{fs, path::Path};
use structopt::StructOpt;
use vm::file_format::CompiledModule;
#[derive(Debug, StructOpt)]
#[structopt(
name = "Move Bytecode Explorer",
about = "Explore Move bytecode and how the source code compiles to it"
)]
struct Args {
/// The path to the module binary
#[structopt(long = "module-path", short = "b")]
pub module_binary_path: String,
/// The path to the source file
#[structopt(long = "source-path", short = "s")]
pub source_file_path: String,
}
pub fn | () {
let args = Args::from_args();
let source_map_extension = "mvsm";
let bytecode_bytes = fs::read(&args.module_binary_path).expect("Unable to read bytecode file");
let compiled_module =
CompiledModule::deserialize(&bytecode_bytes).expect("Module blob can't be deserialized");
let source_map = source_map_from_file::<OwnedLoc>(
&Path::new(&args.module_binary_path).with_extension(source_map_extension),
)
.map(remap_owned_loc_to_loc)
.unwrap();
let source_path = Path::new(&args.source_file_path);
let module_viewer =
ModuleViewer::new(compiled_module.clone(), source_map.clone(), &source_path);
let bytecode_viewer = BytecodeViewer::new(source_map, compiled_module);
let interface = Viewer::new(module_viewer, bytecode_viewer);
start_tui_with_interface(interface).unwrap();
}
| main | identifier_name |
main.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use bytecode_source_map::utils::{remap_owned_loc_to_loc, source_map_from_file, OwnedLoc};
use move_bytecode_viewer::{
bytecode_viewer::BytecodeViewer, source_viewer::ModuleViewer,
tui::tui_interface::start_tui_with_interface, viewer::Viewer,
};
use std::{fs, path::Path};
use structopt::StructOpt;
use vm::file_format::CompiledModule;
#[derive(Debug, StructOpt)]
#[structopt(
name = "Move Bytecode Explorer",
about = "Explore Move bytecode and how the source code compiles to it"
)]
struct Args {
/// The path to the module binary
#[structopt(long = "module-path", short = "b")]
pub module_binary_path: String,
/// The path to the source file
#[structopt(long = "source-path", short = "s")]
pub source_file_path: String,
}
pub fn main() | start_tui_with_interface(interface).unwrap();
}
| {
let args = Args::from_args();
let source_map_extension = "mvsm";
let bytecode_bytes = fs::read(&args.module_binary_path).expect("Unable to read bytecode file");
let compiled_module =
CompiledModule::deserialize(&bytecode_bytes).expect("Module blob can't be deserialized");
let source_map = source_map_from_file::<OwnedLoc>(
&Path::new(&args.module_binary_path).with_extension(source_map_extension),
)
.map(remap_owned_loc_to_loc)
.unwrap();
let source_path = Path::new(&args.source_file_path);
let module_viewer =
ModuleViewer::new(compiled_module.clone(), source_map.clone(), &source_path);
let bytecode_viewer = BytecodeViewer::new(source_map, compiled_module);
let interface = Viewer::new(module_viewer, bytecode_viewer); | identifier_body |
main.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use bytecode_source_map::utils::{remap_owned_loc_to_loc, source_map_from_file, OwnedLoc};
use move_bytecode_viewer::{
bytecode_viewer::BytecodeViewer, source_viewer::ModuleViewer,
tui::tui_interface::start_tui_with_interface, viewer::Viewer,
};
| use structopt::StructOpt;
use vm::file_format::CompiledModule;
#[derive(Debug, StructOpt)]
#[structopt(
name = "Move Bytecode Explorer",
about = "Explore Move bytecode and how the source code compiles to it"
)]
struct Args {
/// The path to the module binary
#[structopt(long = "module-path", short = "b")]
pub module_binary_path: String,
/// The path to the source file
#[structopt(long = "source-path", short = "s")]
pub source_file_path: String,
}
pub fn main() {
let args = Args::from_args();
let source_map_extension = "mvsm";
let bytecode_bytes = fs::read(&args.module_binary_path).expect("Unable to read bytecode file");
let compiled_module =
CompiledModule::deserialize(&bytecode_bytes).expect("Module blob can't be deserialized");
let source_map = source_map_from_file::<OwnedLoc>(
&Path::new(&args.module_binary_path).with_extension(source_map_extension),
)
.map(remap_owned_loc_to_loc)
.unwrap();
let source_path = Path::new(&args.source_file_path);
let module_viewer =
ModuleViewer::new(compiled_module.clone(), source_map.clone(), &source_path);
let bytecode_viewer = BytecodeViewer::new(source_map, compiled_module);
let interface = Viewer::new(module_viewer, bytecode_viewer);
start_tui_with_interface(interface).unwrap();
} | use std::{fs, path::Path}; | random_line_split |
manual_flatten.rs | use super::utils::make_iterator_snippet;
use super::MANUAL_FLATTEN;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{is_lang_ctor, path_to_local_id};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionSome, ResultOk};
use rustc_hir::{Expr, ExprKind, Pat, PatKind, StmtKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::source_map::Span;
/// Check for unnecessary `if let` usage in a for loop where only the `Some` or `Ok` variant of the
/// iterator element is used.
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
pat: &'tcx Pat<'_>,
arg: &'tcx Expr<'_>,
body: &'tcx Expr<'_>,
span: Span,
) {
if let ExprKind::Block(block, _) = body.kind {
// Ensure the `if let` statement is the only expression or statement in the for-loop
let inner_expr = if block.stmts.len() == 1 && block.expr.is_none() {
let match_stmt = &block.stmts[0];
if let StmtKind::Semi(inner_expr) = match_stmt.kind | else {
None
}
} else if block.stmts.is_empty() {
block.expr
} else {
None
};
if_chain! {
if let Some(inner_expr) = inner_expr;
if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: None })
= higher::IfLet::hir(cx, inner_expr);
// Ensure match_expr in `if let` statement is the same as the pat from the for-loop
if let PatKind::Binding(_, pat_hir_id, _, _) = pat.kind;
if path_to_local_id(let_expr, pat_hir_id);
// Ensure the `if let` statement is for the `Some` variant of `Option` or the `Ok` variant of `Result`
if let PatKind::TupleStruct(ref qpath, _, _) = let_pat.kind;
let some_ctor = is_lang_ctor(cx, qpath, OptionSome);
let ok_ctor = is_lang_ctor(cx, qpath, ResultOk);
if some_ctor || ok_ctor;
// Ensure epxr in `if let` is not used afterwards
if!is_local_used(cx, if_then, pat_hir_id);
then {
let if_let_type = if some_ctor { "Some" } else { "Ok" };
// Prepare the error message
let msg = format!("unnecessary `if let` since only the `{}` variant of the iterator element is used", if_let_type);
// Prepare the help message
let mut applicability = Applicability::MaybeIncorrect;
let arg_snippet = make_iterator_snippet(cx, arg, &mut applicability);
let copied = match cx.typeck_results().expr_ty(let_expr).kind() {
ty::Ref(_, inner, _) => match inner.kind() {
ty::Ref(..) => ".copied()",
_ => ""
}
_ => ""
};
span_lint_and_then(
cx,
MANUAL_FLATTEN,
span,
&msg,
|diag| {
let sugg = format!("{}{}.flatten()", arg_snippet, copied);
diag.span_suggestion(
arg.span,
"try",
sugg,
Applicability::MaybeIncorrect,
);
diag.span_help(
inner_expr.span,
"...and remove the `if let` statement in the for loop",
);
}
);
}
}
}
}
| {
Some(inner_expr)
} | conditional_block |
manual_flatten.rs | use super::utils::make_iterator_snippet;
use super::MANUAL_FLATTEN;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{is_lang_ctor, path_to_local_id};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionSome, ResultOk};
use rustc_hir::{Expr, ExprKind, Pat, PatKind, StmtKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::source_map::Span;
/// Check for unnecessary `if let` usage in a for loop where only the `Some` or `Ok` variant of the
/// iterator element is used.
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
pat: &'tcx Pat<'_>,
arg: &'tcx Expr<'_>,
body: &'tcx Expr<'_>,
span: Span,
) {
if let ExprKind::Block(block, _) = body.kind {
// Ensure the `if let` statement is the only expression or statement in the for-loop
let inner_expr = if block.stmts.len() == 1 && block.expr.is_none() {
let match_stmt = &block.stmts[0];
if let StmtKind::Semi(inner_expr) = match_stmt.kind {
Some(inner_expr)
} else {
None
}
} else if block.stmts.is_empty() {
block.expr
} else {
None
};
if_chain! {
if let Some(inner_expr) = inner_expr;
if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: None })
= higher::IfLet::hir(cx, inner_expr);
// Ensure match_expr in `if let` statement is the same as the pat from the for-loop
if let PatKind::Binding(_, pat_hir_id, _, _) = pat.kind;
if path_to_local_id(let_expr, pat_hir_id);
// Ensure the `if let` statement is for the `Some` variant of `Option` or the `Ok` variant of `Result`
if let PatKind::TupleStruct(ref qpath, _, _) = let_pat.kind;
let some_ctor = is_lang_ctor(cx, qpath, OptionSome); | if!is_local_used(cx, if_then, pat_hir_id);
then {
let if_let_type = if some_ctor { "Some" } else { "Ok" };
// Prepare the error message
let msg = format!("unnecessary `if let` since only the `{}` variant of the iterator element is used", if_let_type);
// Prepare the help message
let mut applicability = Applicability::MaybeIncorrect;
let arg_snippet = make_iterator_snippet(cx, arg, &mut applicability);
let copied = match cx.typeck_results().expr_ty(let_expr).kind() {
ty::Ref(_, inner, _) => match inner.kind() {
ty::Ref(..) => ".copied()",
_ => ""
}
_ => ""
};
span_lint_and_then(
cx,
MANUAL_FLATTEN,
span,
&msg,
|diag| {
let sugg = format!("{}{}.flatten()", arg_snippet, copied);
diag.span_suggestion(
arg.span,
"try",
sugg,
Applicability::MaybeIncorrect,
);
diag.span_help(
inner_expr.span,
"...and remove the `if let` statement in the for loop",
);
}
);
}
}
}
} | let ok_ctor = is_lang_ctor(cx, qpath, ResultOk);
if some_ctor || ok_ctor;
// Ensure epxr in `if let` is not used afterwards | random_line_split |
manual_flatten.rs | use super::utils::make_iterator_snippet;
use super::MANUAL_FLATTEN;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{is_lang_ctor, path_to_local_id};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionSome, ResultOk};
use rustc_hir::{Expr, ExprKind, Pat, PatKind, StmtKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::source_map::Span;
/// Check for unnecessary `if let` usage in a for loop where only the `Some` or `Ok` variant of the
/// iterator element is used.
pub(super) fn | <'tcx>(
cx: &LateContext<'tcx>,
pat: &'tcx Pat<'_>,
arg: &'tcx Expr<'_>,
body: &'tcx Expr<'_>,
span: Span,
) {
if let ExprKind::Block(block, _) = body.kind {
// Ensure the `if let` statement is the only expression or statement in the for-loop
let inner_expr = if block.stmts.len() == 1 && block.expr.is_none() {
let match_stmt = &block.stmts[0];
if let StmtKind::Semi(inner_expr) = match_stmt.kind {
Some(inner_expr)
} else {
None
}
} else if block.stmts.is_empty() {
block.expr
} else {
None
};
if_chain! {
if let Some(inner_expr) = inner_expr;
if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: None })
= higher::IfLet::hir(cx, inner_expr);
// Ensure match_expr in `if let` statement is the same as the pat from the for-loop
if let PatKind::Binding(_, pat_hir_id, _, _) = pat.kind;
if path_to_local_id(let_expr, pat_hir_id);
// Ensure the `if let` statement is for the `Some` variant of `Option` or the `Ok` variant of `Result`
if let PatKind::TupleStruct(ref qpath, _, _) = let_pat.kind;
let some_ctor = is_lang_ctor(cx, qpath, OptionSome);
let ok_ctor = is_lang_ctor(cx, qpath, ResultOk);
if some_ctor || ok_ctor;
// Ensure epxr in `if let` is not used afterwards
if!is_local_used(cx, if_then, pat_hir_id);
then {
let if_let_type = if some_ctor { "Some" } else { "Ok" };
// Prepare the error message
let msg = format!("unnecessary `if let` since only the `{}` variant of the iterator element is used", if_let_type);
// Prepare the help message
let mut applicability = Applicability::MaybeIncorrect;
let arg_snippet = make_iterator_snippet(cx, arg, &mut applicability);
let copied = match cx.typeck_results().expr_ty(let_expr).kind() {
ty::Ref(_, inner, _) => match inner.kind() {
ty::Ref(..) => ".copied()",
_ => ""
}
_ => ""
};
span_lint_and_then(
cx,
MANUAL_FLATTEN,
span,
&msg,
|diag| {
let sugg = format!("{}{}.flatten()", arg_snippet, copied);
diag.span_suggestion(
arg.span,
"try",
sugg,
Applicability::MaybeIncorrect,
);
diag.span_help(
inner_expr.span,
"...and remove the `if let` statement in the for loop",
);
}
);
}
}
}
}
| check | identifier_name |
manual_flatten.rs | use super::utils::make_iterator_snippet;
use super::MANUAL_FLATTEN;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{is_lang_ctor, path_to_local_id};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionSome, ResultOk};
use rustc_hir::{Expr, ExprKind, Pat, PatKind, StmtKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::source_map::Span;
/// Check for unnecessary `if let` usage in a for loop where only the `Some` or `Ok` variant of the
/// iterator element is used.
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
pat: &'tcx Pat<'_>,
arg: &'tcx Expr<'_>,
body: &'tcx Expr<'_>,
span: Span,
) | // Ensure match_expr in `if let` statement is the same as the pat from the for-loop
if let PatKind::Binding(_, pat_hir_id, _, _) = pat.kind;
if path_to_local_id(let_expr, pat_hir_id);
// Ensure the `if let` statement is for the `Some` variant of `Option` or the `Ok` variant of `Result`
if let PatKind::TupleStruct(ref qpath, _, _) = let_pat.kind;
let some_ctor = is_lang_ctor(cx, qpath, OptionSome);
let ok_ctor = is_lang_ctor(cx, qpath, ResultOk);
if some_ctor || ok_ctor;
// Ensure epxr in `if let` is not used afterwards
if!is_local_used(cx, if_then, pat_hir_id);
then {
let if_let_type = if some_ctor { "Some" } else { "Ok" };
// Prepare the error message
let msg = format!("unnecessary `if let` since only the `{}` variant of the iterator element is used", if_let_type);
// Prepare the help message
let mut applicability = Applicability::MaybeIncorrect;
let arg_snippet = make_iterator_snippet(cx, arg, &mut applicability);
let copied = match cx.typeck_results().expr_ty(let_expr).kind() {
ty::Ref(_, inner, _) => match inner.kind() {
ty::Ref(..) => ".copied()",
_ => ""
}
_ => ""
};
span_lint_and_then(
cx,
MANUAL_FLATTEN,
span,
&msg,
|diag| {
let sugg = format!("{}{}.flatten()", arg_snippet, copied);
diag.span_suggestion(
arg.span,
"try",
sugg,
Applicability::MaybeIncorrect,
);
diag.span_help(
inner_expr.span,
"...and remove the `if let` statement in the for loop",
);
}
);
}
}
}
}
| {
if let ExprKind::Block(block, _) = body.kind {
// Ensure the `if let` statement is the only expression or statement in the for-loop
let inner_expr = if block.stmts.len() == 1 && block.expr.is_none() {
let match_stmt = &block.stmts[0];
if let StmtKind::Semi(inner_expr) = match_stmt.kind {
Some(inner_expr)
} else {
None
}
} else if block.stmts.is_empty() {
block.expr
} else {
None
};
if_chain! {
if let Some(inner_expr) = inner_expr;
if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: None })
= higher::IfLet::hir(cx, inner_expr); | identifier_body |
repl.rs | extern crate readline;
use std::process;
use storage;
use jobs;
use cluster;
pub fn start() {
loop {
match readline::readline(">>> ") {
Ok(input) => {
let input = input.replace("\n", "");
if input.len() > 0 {
readline::add_history(input.as_ref());
println!("{:?}", input);
if "help" == input {
println!("help comes later....");
}
else if "jobs" == input {
//jobs::list();
}
else if "ping" == input {
cluster::ping(4); | storage.list();
}
else if "exit" == input || "quit" == input {
process::exit(0);
}
}
},
Err(e) => {
println!("{}", e);
//panic!("{}", e);
}
}
}
} | }
else if "storage" == input {
let storage = storage::bootstrap(); | random_line_split |
repl.rs | extern crate readline;
use std::process;
use storage;
use jobs;
use cluster;
pub fn start() |
}
else if "exit" == input || "quit" == input {
process::exit(0);
}
}
},
Err(e) => {
println!("{}", e);
//panic!("{}", e);
}
}
}
}
| {
loop {
match readline::readline(">>> ") {
Ok(input) => {
let input = input.replace("\n", "");
if input.len() > 0 {
readline::add_history(input.as_ref());
println!("{:?}", input);
if "help" == input {
println!("help comes later....");
}
else if "jobs" == input {
//jobs::list();
}
else if "ping" == input {
cluster::ping(4);
}
else if "storage" == input {
let storage = storage::bootstrap();
storage.list(); | identifier_body |
repl.rs | extern crate readline;
use std::process;
use storage;
use jobs;
use cluster;
pub fn start() {
loop {
match readline::readline(">>> ") {
Ok(input) => {
let input = input.replace("\n", "");
if input.len() > 0 {
readline::add_history(input.as_ref());
println!("{:?}", input);
if "help" == input {
println!("help comes later....");
}
else if "jobs" == input {
//jobs::list();
}
else if "ping" == input {
cluster::ping(4);
}
else if "storage" == input {
let storage = storage::bootstrap();
storage.list();
}
else if "exit" == input || "quit" == input {
process::exit(0);
}
}
},
Err(e) => |
}
}
}
| {
println!("{}", e);
//panic!("{}", e);
} | conditional_block |
repl.rs | extern crate readline;
use std::process;
use storage;
use jobs;
use cluster;
pub fn | () {
loop {
match readline::readline(">>> ") {
Ok(input) => {
let input = input.replace("\n", "");
if input.len() > 0 {
readline::add_history(input.as_ref());
println!("{:?}", input);
if "help" == input {
println!("help comes later....");
}
else if "jobs" == input {
//jobs::list();
}
else if "ping" == input {
cluster::ping(4);
}
else if "storage" == input {
let storage = storage::bootstrap();
storage.list();
}
else if "exit" == input || "quit" == input {
process::exit(0);
}
}
},
Err(e) => {
println!("{}", e);
//panic!("{}", e);
}
}
}
}
| start | identifier_name |
ptx.rs | Set};
use std::default::Default;
use std::fs::File;
use getopts::{Options, Matches};
use std::io::{stdin, stdout, BufReader, BufWriter, BufRead, Read, Write};
use regex::Regex;
use std::cmp;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "ptx";
static VERSION: &'static str = "1.0.0";
#[derive(Debug)]
enum OutFormat {
Dumb,
Roff,
Tex,
}
#[derive(Debug)]
struct Config {
format : OutFormat,
gnu_ext : bool,
auto_ref : bool,
input_ref : bool,
right_ref : bool,
ignore_case : bool,
macro_name : String,
trunc_str : String,
context_regex : String,
line_width : usize,
gap_size : usize,
}
impl Default for Config {
fn default() -> Config {
Config {
format : OutFormat::Dumb,
gnu_ext : true,
auto_ref : false,
input_ref : false,
right_ref : false,
ignore_case : false,
macro_name : "xx".to_string(),
trunc_str : "/".to_string(),
context_regex : "\\w+".to_string(),
line_width : 72,
gap_size : 3
}
}
}
fn read_word_filter_file(matches: &Matches, option: &str) -> HashSet<String> {
let filename = matches.opt_str(option).expect("parsing options failed!");
let reader = BufReader::new(crash_if_err!(1, File::open(filename)));
let mut words: HashSet<String> = HashSet::new();
for word in reader.lines() {
words.insert(crash_if_err!(1, word));
}
words
}
#[derive(Debug)]
struct WordFilter {
only_specified: bool,
ignore_specified: bool,
only_set: HashSet<String>,
ignore_set: HashSet<String>,
word_regex: String,
}
impl WordFilter {
fn new(matches: &Matches, config: &Config) -> WordFilter {
let (o, oset): (bool, HashSet<String>) =
if matches.opt_present("o") | else {
(false, HashSet::new())
};
let (i, iset): (bool, HashSet<String>) =
if matches.opt_present("i") {
(true, read_word_filter_file(matches, "i"))
} else {
(false, HashSet::new())
};
if matches.opt_present("b") {
crash!(1, "-b not implemented yet");
}
let reg =
if matches.opt_present("W") {
matches.opt_str("W").expect("parsing options failed!")
} else if config.gnu_ext {
"\\w+".to_string()
} else {
"[^ \t\n]+".to_string()
};
WordFilter {
only_specified: o,
ignore_specified: i,
only_set: oset,
ignore_set: iset,
word_regex: reg
}
}
}
#[derive(Debug, PartialOrd, PartialEq, Eq, Ord)]
struct WordRef {
word: String,
global_line_nr: usize,
local_line_nr: usize,
position: usize,
position_end: usize,
filename: String,
}
fn print_version() {
println!("{} {}", NAME, VERSION);
}
fn print_usage(opts: &Options) {
let brief = "Usage: ptx [OPTION]... [INPUT]... (without -G) or: \
ptx -G [OPTION]... [INPUT [OUTPUT]] \n Output a permuted index, \
including context, of the words in the input files. \n\n Mandatory \
arguments to long options are mandatory for short options too.";
let explaination = "With no FILE, or when FILE is -, read standard input. \
Default is '-F /'.";
println!("{}\n{}", opts.usage(&brief), explaination);
}
fn get_config(matches: &Matches) -> Config {
let mut config: Config = Default::default();
let err_msg = "parsing options failed";
if matches.opt_present("G") {
config.gnu_ext = false;
config.format = OutFormat::Roff;
config.context_regex = "[^ \t\n]+".to_string();
} else {
crash!(1, "GNU extensions not implemented yet");
}
if matches.opt_present("S") {
crash!(1, "-S not implemented yet");
}
config.auto_ref = matches.opt_present("A");
config.input_ref = matches.opt_present("r");
config.right_ref &= matches.opt_present("R");
config.ignore_case = matches.opt_present("f");
if matches.opt_present("M") {
config.macro_name =
matches.opt_str("M").expect(err_msg).to_string();
}
if matches.opt_present("F") {
config.trunc_str =
matches.opt_str("F").expect(err_msg).to_string();
}
if matches.opt_present("w") {
let width_str = matches.opt_str("w").expect(err_msg);
config.line_width = crash_if_err!(
1, usize::from_str_radix(width_str.as_str(), 10));
}
if matches.opt_present("g") {
let gap_str = matches.opt_str("g").expect(err_msg);
config.gap_size = crash_if_err!(
1, usize::from_str_radix(gap_str.as_str(), 10));
}
if matches.opt_present("O") {
config.format = OutFormat::Roff;
}
if matches.opt_present("T") {
config.format = OutFormat::Tex;
}
config
}
fn read_input(input_files: Vec<&str>, config: &Config) ->
HashMap<String, (Vec<String>, usize)> {
let mut file_map : HashMap<String, (Vec<String>, usize)> =
HashMap::new();
let mut files = Vec::new();
if input_files.is_empty() {
files.push("-");
} else {
if config.gnu_ext {
files.push_all(input_files.as_slice());
} else {
files.push(input_files[0]);
}
}
let mut lines_so_far: usize = 0;
for filename in files {
let reader: BufReader<Box<Read>> = BufReader::new(
if filename == "-" {
Box::new(stdin())
} else {
let file = crash_if_err!(1, File::open(filename));
Box::new(file)
});
let lines: Vec<String> = reader.lines().map(|x| crash_if_err!(1, x))
.collect();
let size = lines.len();
file_map.insert(filename.to_string(), (lines, lines_so_far));
lines_so_far += size
}
file_map
}
fn create_word_set(config: &Config, filter: &WordFilter,
file_map: &HashMap<String, (Vec<String>, usize)>)->
BTreeSet<WordRef> {
let reg = Regex::new(filter.word_regex.as_str()).unwrap();
let ref_reg = Regex::new(config.context_regex.as_str()).unwrap();
let mut word_set: BTreeSet<WordRef> = BTreeSet::new();
for (file, lines) in file_map.iter() {
let mut count: usize = 0;
let offs = lines.1;
for line in (lines.0).iter() {
// if -r, exclude reference from word set
let (ref_beg, ref_end) = match ref_reg.find(line) {
Some(x) => x,
None => (0,0)
};
// match words with given regex
for (beg, end) in reg.find_iter(line) {
if config.input_ref && ((beg, end) == (ref_beg, ref_end)) {
continue;
}
let mut word = line.slice_chars(beg, end).to_string();
if filter.only_specified &&
!(filter.only_set.contains(&word)) {
continue;
}
if filter.ignore_specified &&
filter.ignore_set.contains(&word) {
continue;
}
if config.ignore_case {
word = word.to_lowercase();
}
word_set.insert(WordRef{
word: word,
filename: String::from(file.as_str()),
global_line_nr: offs + count,
local_line_nr: count,
position: beg,
position_end: end
});
}
count += 1;
}
}
word_set
}
fn get_reference(config: &Config, word_ref: &WordRef, line: &String) ->
String {
if config.auto_ref {
format!("{}:{}", word_ref.filename, word_ref.local_line_nr + 1)
} else if config.input_ref {
let reg = Regex::new(config.context_regex.as_str()).unwrap();
let (beg, end) = match reg.find(line) {
Some(x) => x,
None => (0,0)
};
format!("{}", line.slice_chars(beg, end))
} else {
String::new()
}
}
fn assert_str_integrity(s: &Vec<char>, beg: usize, end: usize) {
assert!(beg <= end);
assert!(end <= s.len());
}
fn trim_broken_word_left(s: &Vec<char>, beg: usize, end: usize) -> usize {
assert_str_integrity(s, beg, end);
if beg == end || beg == 0 || s[beg].is_whitespace() ||
s[beg-1].is_whitespace() {
return beg;
}
let mut b = beg;
while b < end &&!s[b].is_whitespace() {
b += 1;
}
b
}
fn trim_broken_word_right(s: &Vec<char>, beg: usize, end: usize) -> usize {
assert_str_integrity(s, beg, end);
if beg == end || end == s.len() || s[end-1].is_whitespace() ||
s[end].is_whitespace() {
return end;
}
let mut e = end;
while beg < e &&!s[e-1].is_whitespace() {
e -= 1;
}
e
}
fn trim_idx(s: &Vec<char>, beg: usize, end: usize) -> (usize, usize) {
assert_str_integrity(s, beg, end);
let mut b = beg;
let mut e = end;
while b < e && s[b].is_whitespace() {
b += 1;
}
while b < e && s[e-1].is_whitespace() {
e -= 1;
}
(b,e)
}
fn get_output_chunks(all_before: &String, keyword: &String, all_after: &String,
config: &Config) -> (String, String, String, String) {
assert!(all_before.trim() == all_before.as_str());
assert!(keyword.trim() == keyword.as_str());
assert!(all_after.trim() == all_after.as_str());
let mut head = String::new();
let mut before = String::new();
let mut after = String::new();
let mut tail = String::new();
let half_line_size = cmp::max((config.line_width/2) as isize -
(2*config.trunc_str.len()) as isize, 0) as usize;
let max_after_size = cmp::max(half_line_size as isize -
keyword.len() as isize - 1, 0) as usize;
let max_before_size = half_line_size;
let all_before_vec: Vec<char> = all_before.chars().collect();
let all_after_vec: Vec<char> = all_after.chars().collect();
// get before
let mut bb_tmp =
cmp::max(all_before.len() as isize - max_before_size as isize, 0) as usize;
bb_tmp = trim_broken_word_left(&all_before_vec, bb_tmp, all_before.len());
let (before_beg, before_end) =
trim_idx(&all_before_vec, bb_tmp, all_before.len());
before.push_str(all_before.slice_chars(before_beg, before_end));
assert!(max_before_size >= before.len());
// get after
let mut ae_tmp = cmp::min(max_after_size, all_after.len());
ae_tmp = trim_broken_word_right(&all_after_vec, 0, ae_tmp);
let (after_beg, after_end) = trim_idx(&all_after_vec, 0, ae_tmp);
after.push_str(all_after.slice_chars(after_beg, after_end));
assert!(max_after_size >= after.len());
// get tail
let max_tail_size = max_before_size - before.len();
let (tb, _) = trim_idx(&all_after_vec, after_end, all_after.len());
let mut te_tmp = cmp::min(tb + max_tail_size, all_after.len());
te_tmp = trim_broken_word_right(&all_after_vec, tb, te_tmp);
let (tail_beg, tail_end) = trim_idx(&all_after_vec, tb, te_tmp);
tail.push_str(all_after.slice_chars(tail_beg, tail_end));
// get head
let max_head_size = max_after_size - after.len();
let (_, he) = trim_idx(&all_before_vec, 0, before_beg);
let mut hb_tmp =
cmp::max(he as isize - max_head_size as isize, 0) as usize;
hb_tmp = trim_broken_word_left(&all_before_vec, hb_tmp, he);
let (head_beg, head_end) = trim_idx(&all_before_vec, hb_tmp, he);
head.push_str(all_before.slice_chars(head_beg, head_end));
// put right context truncation string if needed
if after_end!= all_after.len() && tail_beg == tail_end {
after.push_str(config.trunc_str.as_str());
} else if after_end!= all_after.len() && tail_end!= all_after.len() {
tail.push_str(config.trunc_str.as_str());
}
// put left context truncation string if needed
if before_beg!= 0 && head_beg == head_end {
before = format!("{}{}", config.trunc_str, before);
} else if before_beg!= 0 && head_beg!= 0 {
head = format!("{}{}", config.trunc_str, head);
}
// add space before "after" if needed
if after.len() > 0 {
after = format!(" {}", after);
}
(tail, before, after, head)
}
fn tex_mapper(x: char) -> String {
match x {
'\\' => "\\backslash{}".to_string(),
'$' | '%' | '#' | '&' | '_' => format!("\\{}", x),
'}' | '{' => format!("$\\{}$", x),
_ => x.to_string()
}
}
fn adjust_tex_str(context: &str) -> String {
let ws_reg = Regex::new(r"[\t\n\v\f\r ]").unwrap();
let mut fix: String = ws_reg.replace_all(context, " ").trim().to_string();
let mapped_chunks: Vec<String> = fix.chars().map(tex_mapper).collect();
fix = mapped_chunks.connect("");
fix
}
fn format_tex_line(config: &Config, word_ref: &WordRef, line: &String,
reference: &String) -> String {
let mut output = String::new();
output.push_str(&format!("\\{} ", config.macro_name));
let all_before = if config.input_ref {
let before = line.slice_chars(0, word_ref.position);
adjust_tex_str(before.trim().trim_left_matches(reference))
} else {
adjust_tex_str(line.slice_chars(0, word_ref.position))
};
let keyword = adjust_tex_str(
line.slice_chars(word_ref.position, word_ref.position_end));
let all_after = adjust_tex_str(
line.slice_chars(word_ref.position_end, line.len()));
let (tail, before, after, head) =
get_output_chunks(&all_before, &keyword, &all_after, &config);
output.push_str(format!("{5}{0}{6}{5}{1}{6}{5}{2}{6}{5}{3}{6}{5}{4}{6}",
tail, before, keyword, after, head, "{", "}").as_str());
if config.auto_ref || config.input_ref {
output.push_str(
&format!("{}{}{}", "{", adjust_tex_str(&reference), "}"));
}
output
}
fn adjust_roff_str(context: &str) -> String {
let ws_reg = Regex::new(r"[\t\n\v\f\r]").unwrap();
ws_reg.replace_all(context, " ").replace("\"", "\"\"").trim().to_string()
}
fn format_roff_line(config: &Config, word_ref: &WordRef, line: &str,
reference: &str) -> String {
let mut output = String::new();
output.push_str(&format!(".{}", config.macro_name));
let all_before = if config.input_ref {
let before = line.slice_chars(0, word_ref.position);
adjust_roff_str(before.trim().trim_left_matches(reference))
} else {
adjust_roff_str(line.slice_chars(0, word_ref.position))
};
let keyword = adjust_roff_str(
line.slice_chars(word_ref.position, word_ref.position_end));
let all_after = adjust_roff_str(
line.slice_chars(word_ref.position_end, line.len()));
let (tail, before, after, head) =
get_output_chunks(&all_before, &keyword, &all_after, &config);
output.push_str(format!(" \"{}\" \"{}\" \"{}{}\" \"{}\"",
tail, before, keyword, after, head).as_str());
if config.auto_ref || config.input_ref {
output.push_str(&format!(" \"{}\"", adjust_roff_str(&reference)));
}
output
}
fn write_traditional_output(config: &Config,
file_map: &HashMap<String, (Vec<String>,usize)>,
words: &BTreeSet<WordRef>, output_filename: &str) {
let mut writer: BufWriter<Box<Write>> = BufWriter::new(
if output_filename == "-" {
Box::new(stdout())
} else {
let file = crash_if_err!(1, File::create(output_filename));
Box::new(file)
});
for word_ref in words.iter() {
let file_map_value : &(Vec<String>, usize) =
file_map.get(&(word_ref.filename))
.expect("Missing file in file map");
let (ref lines, _) = *(file_map_value);
let reference =
get_reference(config, word_ref, &lines[word_ref.local_line_nr]);
let output_line: String = match config.format {
OutFormat::Tex => format_tex_line(
config, word_ref, &lines[word_ref.local_line_nr], &reference),
OutFormat::Roff => format_roff_line(
config, word_ref, &lines[word_ref.local_line_nr], &reference),
OutFormat::Dumb => crash!(
1, "There is no dumb format with GNU extensions disabled")
};
crash_if_err!(1, writeln!(writer, "{}", output_line));
}
}
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("A", "auto-reference",
"output automatically generated references");
opts.optflag("G", "traditional", "behave more like System V 'ptx'");
opts.optopt("F", "flag-truncation",
"use STRING for flagging line truncations", "STRING");
opts.optopt("M", "macro-name", "macro name to use instead of 'xx'",
"STRING");
opts.optflag("O", "format=roff", "generate output as roff directives");
opts.optflag("R", "right-side-refs",
"put references at right, not counted in -w");
opts.optopt("S", "sentence-regexp", "for end of lines or end of sentences",
"REGEXP");
opts.optflag("T", "format=tex", "generate output as TeX directives");
opts.optopt("W", "word-regexp", "use REGEXP to match each keyword",
"REGEXP");
opts.optopt("b", "break-file", "word break characters in this FILE",
"FILE");
opts.optflag("f", "ignore-case",
"fold lower case to upper case for sorting");
opts.optopt("g", "gap-size", "gap size in columns between output fields",
"NUMBER");
opts.optopt("i", "ignore-file", "read ignore word list from FILE", "FILE");
opts.optopt("o", "only-file", "read only word list from this FILE",
"FILE");
opts.optflag("r", "references", "first field of each line is a reference");
opts.optopt("w", "width", "output width in columns, reference excluded",
"NUMBER");
opts.optflag("", "help", "display this | {
(true, read_word_filter_file(matches, "o"))
} | conditional_block |
ptx.rs | Set};
use std::default::Default;
use std::fs::File;
use getopts::{Options, Matches};
use std::io::{stdin, stdout, BufReader, BufWriter, BufRead, Read, Write};
use regex::Regex;
use std::cmp;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "ptx";
static VERSION: &'static str = "1.0.0";
#[derive(Debug)]
enum | {
Dumb,
Roff,
Tex,
}
#[derive(Debug)]
struct Config {
format : OutFormat,
gnu_ext : bool,
auto_ref : bool,
input_ref : bool,
right_ref : bool,
ignore_case : bool,
macro_name : String,
trunc_str : String,
context_regex : String,
line_width : usize,
gap_size : usize,
}
impl Default for Config {
fn default() -> Config {
Config {
format : OutFormat::Dumb,
gnu_ext : true,
auto_ref : false,
input_ref : false,
right_ref : false,
ignore_case : false,
macro_name : "xx".to_string(),
trunc_str : "/".to_string(),
context_regex : "\\w+".to_string(),
line_width : 72,
gap_size : 3
}
}
}
fn read_word_filter_file(matches: &Matches, option: &str) -> HashSet<String> {
let filename = matches.opt_str(option).expect("parsing options failed!");
let reader = BufReader::new(crash_if_err!(1, File::open(filename)));
let mut words: HashSet<String> = HashSet::new();
for word in reader.lines() {
words.insert(crash_if_err!(1, word));
}
words
}
#[derive(Debug)]
struct WordFilter {
only_specified: bool,
ignore_specified: bool,
only_set: HashSet<String>,
ignore_set: HashSet<String>,
word_regex: String,
}
impl WordFilter {
fn new(matches: &Matches, config: &Config) -> WordFilter {
let (o, oset): (bool, HashSet<String>) =
if matches.opt_present("o") {
(true, read_word_filter_file(matches, "o"))
} else {
(false, HashSet::new())
};
let (i, iset): (bool, HashSet<String>) =
if matches.opt_present("i") {
(true, read_word_filter_file(matches, "i"))
} else {
(false, HashSet::new())
};
if matches.opt_present("b") {
crash!(1, "-b not implemented yet");
}
let reg =
if matches.opt_present("W") {
matches.opt_str("W").expect("parsing options failed!")
} else if config.gnu_ext {
"\\w+".to_string()
} else {
"[^ \t\n]+".to_string()
};
WordFilter {
only_specified: o,
ignore_specified: i,
only_set: oset,
ignore_set: iset,
word_regex: reg
}
}
}
#[derive(Debug, PartialOrd, PartialEq, Eq, Ord)]
struct WordRef {
word: String,
global_line_nr: usize,
local_line_nr: usize,
position: usize,
position_end: usize,
filename: String,
}
fn print_version() {
println!("{} {}", NAME, VERSION);
}
fn print_usage(opts: &Options) {
let brief = "Usage: ptx [OPTION]... [INPUT]... (without -G) or: \
ptx -G [OPTION]... [INPUT [OUTPUT]] \n Output a permuted index, \
including context, of the words in the input files. \n\n Mandatory \
arguments to long options are mandatory for short options too.";
let explaination = "With no FILE, or when FILE is -, read standard input. \
Default is '-F /'.";
println!("{}\n{}", opts.usage(&brief), explaination);
}
fn get_config(matches: &Matches) -> Config {
let mut config: Config = Default::default();
let err_msg = "parsing options failed";
if matches.opt_present("G") {
config.gnu_ext = false;
config.format = OutFormat::Roff;
config.context_regex = "[^ \t\n]+".to_string();
} else {
crash!(1, "GNU extensions not implemented yet");
}
if matches.opt_present("S") {
crash!(1, "-S not implemented yet");
}
config.auto_ref = matches.opt_present("A");
config.input_ref = matches.opt_present("r");
config.right_ref &= matches.opt_present("R");
config.ignore_case = matches.opt_present("f");
if matches.opt_present("M") {
config.macro_name =
matches.opt_str("M").expect(err_msg).to_string();
}
if matches.opt_present("F") {
config.trunc_str =
matches.opt_str("F").expect(err_msg).to_string();
}
if matches.opt_present("w") {
let width_str = matches.opt_str("w").expect(err_msg);
config.line_width = crash_if_err!(
1, usize::from_str_radix(width_str.as_str(), 10));
}
if matches.opt_present("g") {
let gap_str = matches.opt_str("g").expect(err_msg);
config.gap_size = crash_if_err!(
1, usize::from_str_radix(gap_str.as_str(), 10));
}
if matches.opt_present("O") {
config.format = OutFormat::Roff;
}
if matches.opt_present("T") {
config.format = OutFormat::Tex;
}
config
}
fn read_input(input_files: Vec<&str>, config: &Config) ->
HashMap<String, (Vec<String>, usize)> {
let mut file_map : HashMap<String, (Vec<String>, usize)> =
HashMap::new();
let mut files = Vec::new();
if input_files.is_empty() {
files.push("-");
} else {
if config.gnu_ext {
files.push_all(input_files.as_slice());
} else {
files.push(input_files[0]);
}
}
let mut lines_so_far: usize = 0;
for filename in files {
let reader: BufReader<Box<Read>> = BufReader::new(
if filename == "-" {
Box::new(stdin())
} else {
let file = crash_if_err!(1, File::open(filename));
Box::new(file)
});
let lines: Vec<String> = reader.lines().map(|x| crash_if_err!(1, x))
.collect();
let size = lines.len();
file_map.insert(filename.to_string(), (lines, lines_so_far));
lines_so_far += size
}
file_map
}
fn create_word_set(config: &Config, filter: &WordFilter,
file_map: &HashMap<String, (Vec<String>, usize)>)->
BTreeSet<WordRef> {
let reg = Regex::new(filter.word_regex.as_str()).unwrap();
let ref_reg = Regex::new(config.context_regex.as_str()).unwrap();
let mut word_set: BTreeSet<WordRef> = BTreeSet::new();
for (file, lines) in file_map.iter() {
let mut count: usize = 0;
let offs = lines.1;
for line in (lines.0).iter() {
// if -r, exclude reference from word set
let (ref_beg, ref_end) = match ref_reg.find(line) {
Some(x) => x,
None => (0,0)
};
// match words with given regex
for (beg, end) in reg.find_iter(line) {
if config.input_ref && ((beg, end) == (ref_beg, ref_end)) {
continue;
}
let mut word = line.slice_chars(beg, end).to_string();
if filter.only_specified &&
!(filter.only_set.contains(&word)) {
continue;
}
if filter.ignore_specified &&
filter.ignore_set.contains(&word) {
continue;
}
if config.ignore_case {
word = word.to_lowercase();
}
word_set.insert(WordRef{
word: word,
filename: String::from(file.as_str()),
global_line_nr: offs + count,
local_line_nr: count,
position: beg,
position_end: end
});
}
count += 1;
}
}
word_set
}
fn get_reference(config: &Config, word_ref: &WordRef, line: &String) ->
String {
if config.auto_ref {
format!("{}:{}", word_ref.filename, word_ref.local_line_nr + 1)
} else if config.input_ref {
let reg = Regex::new(config.context_regex.as_str()).unwrap();
let (beg, end) = match reg.find(line) {
Some(x) => x,
None => (0,0)
};
format!("{}", line.slice_chars(beg, end))
} else {
String::new()
}
}
fn assert_str_integrity(s: &Vec<char>, beg: usize, end: usize) {
assert!(beg <= end);
assert!(end <= s.len());
}
fn trim_broken_word_left(s: &Vec<char>, beg: usize, end: usize) -> usize {
assert_str_integrity(s, beg, end);
if beg == end || beg == 0 || s[beg].is_whitespace() ||
s[beg-1].is_whitespace() {
return beg;
}
let mut b = beg;
while b < end &&!s[b].is_whitespace() {
b += 1;
}
b
}
fn trim_broken_word_right(s: &Vec<char>, beg: usize, end: usize) -> usize {
assert_str_integrity(s, beg, end);
if beg == end || end == s.len() || s[end-1].is_whitespace() ||
s[end].is_whitespace() {
return end;
}
let mut e = end;
while beg < e &&!s[e-1].is_whitespace() {
e -= 1;
}
e
}
fn trim_idx(s: &Vec<char>, beg: usize, end: usize) -> (usize, usize) {
assert_str_integrity(s, beg, end);
let mut b = beg;
let mut e = end;
while b < e && s[b].is_whitespace() {
b += 1;
}
while b < e && s[e-1].is_whitespace() {
e -= 1;
}
(b,e)
}
fn get_output_chunks(all_before: &String, keyword: &String, all_after: &String,
config: &Config) -> (String, String, String, String) {
assert!(all_before.trim() == all_before.as_str());
assert!(keyword.trim() == keyword.as_str());
assert!(all_after.trim() == all_after.as_str());
let mut head = String::new();
let mut before = String::new();
let mut after = String::new();
let mut tail = String::new();
let half_line_size = cmp::max((config.line_width/2) as isize -
(2*config.trunc_str.len()) as isize, 0) as usize;
let max_after_size = cmp::max(half_line_size as isize -
keyword.len() as isize - 1, 0) as usize;
let max_before_size = half_line_size;
let all_before_vec: Vec<char> = all_before.chars().collect();
let all_after_vec: Vec<char> = all_after.chars().collect();
// get before
let mut bb_tmp =
cmp::max(all_before.len() as isize - max_before_size as isize, 0) as usize;
bb_tmp = trim_broken_word_left(&all_before_vec, bb_tmp, all_before.len());
let (before_beg, before_end) =
trim_idx(&all_before_vec, bb_tmp, all_before.len());
before.push_str(all_before.slice_chars(before_beg, before_end));
assert!(max_before_size >= before.len());
// get after
let mut ae_tmp = cmp::min(max_after_size, all_after.len());
ae_tmp = trim_broken_word_right(&all_after_vec, 0, ae_tmp);
let (after_beg, after_end) = trim_idx(&all_after_vec, 0, ae_tmp);
after.push_str(all_after.slice_chars(after_beg, after_end));
assert!(max_after_size >= after.len());
// get tail
let max_tail_size = max_before_size - before.len();
let (tb, _) = trim_idx(&all_after_vec, after_end, all_after.len());
let mut te_tmp = cmp::min(tb + max_tail_size, all_after.len());
te_tmp = trim_broken_word_right(&all_after_vec, tb, te_tmp);
let (tail_beg, tail_end) = trim_idx(&all_after_vec, tb, te_tmp);
tail.push_str(all_after.slice_chars(tail_beg, tail_end));
// get head
let max_head_size = max_after_size - after.len();
let (_, he) = trim_idx(&all_before_vec, 0, before_beg);
let mut hb_tmp =
cmp::max(he as isize - max_head_size as isize, 0) as usize;
hb_tmp = trim_broken_word_left(&all_before_vec, hb_tmp, he);
let (head_beg, head_end) = trim_idx(&all_before_vec, hb_tmp, he);
head.push_str(all_before.slice_chars(head_beg, head_end));
// put right context truncation string if needed
if after_end!= all_after.len() && tail_beg == tail_end {
after.push_str(config.trunc_str.as_str());
} else if after_end!= all_after.len() && tail_end!= all_after.len() {
tail.push_str(config.trunc_str.as_str());
}
// put left context truncation string if needed
if before_beg!= 0 && head_beg == head_end {
before = format!("{}{}", config.trunc_str, before);
} else if before_beg!= 0 && head_beg!= 0 {
head = format!("{}{}", config.trunc_str, head);
}
// add space before "after" if needed
if after.len() > 0 {
after = format!(" {}", after);
}
(tail, before, after, head)
}
fn tex_mapper(x: char) -> String {
match x {
'\\' => "\\backslash{}".to_string(),
'$' | '%' | '#' | '&' | '_' => format!("\\{}", x),
'}' | '{' => format!("$\\{}$", x),
_ => x.to_string()
}
}
fn adjust_tex_str(context: &str) -> String {
let ws_reg = Regex::new(r"[\t\n\v\f\r ]").unwrap();
let mut fix: String = ws_reg.replace_all(context, " ").trim().to_string();
let mapped_chunks: Vec<String> = fix.chars().map(tex_mapper).collect();
fix = mapped_chunks.connect("");
fix
}
fn format_tex_line(config: &Config, word_ref: &WordRef, line: &String,
reference: &String) -> String {
let mut output = String::new();
output.push_str(&format!("\\{} ", config.macro_name));
let all_before = if config.input_ref {
let before = line.slice_chars(0, word_ref.position);
adjust_tex_str(before.trim().trim_left_matches(reference))
} else {
adjust_tex_str(line.slice_chars(0, word_ref.position))
};
let keyword = adjust_tex_str(
line.slice_chars(word_ref.position, word_ref.position_end));
let all_after = adjust_tex_str(
line.slice_chars(word_ref.position_end, line.len()));
let (tail, before, after, head) =
get_output_chunks(&all_before, &keyword, &all_after, &config);
output.push_str(format!("{5}{0}{6}{5}{1}{6}{5}{2}{6}{5}{3}{6}{5}{4}{6}",
tail, before, keyword, after, head, "{", "}").as_str());
if config.auto_ref || config.input_ref {
output.push_str(
&format!("{}{}{}", "{", adjust_tex_str(&reference), "}"));
}
output
}
fn adjust_roff_str(context: &str) -> String {
let ws_reg = Regex::new(r"[\t\n\v\f\r]").unwrap();
ws_reg.replace_all(context, " ").replace("\"", "\"\"").trim().to_string()
}
fn format_roff_line(config: &Config, word_ref: &WordRef, line: &str,
reference: &str) -> String {
let mut output = String::new();
output.push_str(&format!(".{}", config.macro_name));
let all_before = if config.input_ref {
let before = line.slice_chars(0, word_ref.position);
adjust_roff_str(before.trim().trim_left_matches(reference))
} else {
adjust_roff_str(line.slice_chars(0, word_ref.position))
};
let keyword = adjust_roff_str(
line.slice_chars(word_ref.position, word_ref.position_end));
let all_after = adjust_roff_str(
line.slice_chars(word_ref.position_end, line.len()));
let (tail, before, after, head) =
get_output_chunks(&all_before, &keyword, &all_after, &config);
output.push_str(format!(" \"{}\" \"{}\" \"{}{}\" \"{}\"",
tail, before, keyword, after, head).as_str());
if config.auto_ref || config.input_ref {
output.push_str(&format!(" \"{}\"", adjust_roff_str(&reference)));
}
output
}
fn write_traditional_output(config: &Config,
file_map: &HashMap<String, (Vec<String>,usize)>,
words: &BTreeSet<WordRef>, output_filename: &str) {
let mut writer: BufWriter<Box<Write>> = BufWriter::new(
if output_filename == "-" {
Box::new(stdout())
} else {
let file = crash_if_err!(1, File::create(output_filename));
Box::new(file)
});
for word_ref in words.iter() {
let file_map_value : &(Vec<String>, usize) =
file_map.get(&(word_ref.filename))
.expect("Missing file in file map");
let (ref lines, _) = *(file_map_value);
let reference =
get_reference(config, word_ref, &lines[word_ref.local_line_nr]);
let output_line: String = match config.format {
OutFormat::Tex => format_tex_line(
config, word_ref, &lines[word_ref.local_line_nr], &reference),
OutFormat::Roff => format_roff_line(
config, word_ref, &lines[word_ref.local_line_nr], &reference),
OutFormat::Dumb => crash!(
1, "There is no dumb format with GNU extensions disabled")
};
crash_if_err!(1, writeln!(writer, "{}", output_line));
}
}
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("A", "auto-reference",
"output automatically generated references");
opts.optflag("G", "traditional", "behave more like System V 'ptx'");
opts.optopt("F", "flag-truncation",
"use STRING for flagging line truncations", "STRING");
opts.optopt("M", "macro-name", "macro name to use instead of 'xx'",
"STRING");
opts.optflag("O", "format=roff", "generate output as roff directives");
opts.optflag("R", "right-side-refs",
"put references at right, not counted in -w");
opts.optopt("S", "sentence-regexp", "for end of lines or end of sentences",
"REGEXP");
opts.optflag("T", "format=tex", "generate output as TeX directives");
opts.optopt("W", "word-regexp", "use REGEXP to match each keyword",
"REGEXP");
opts.optopt("b", "break-file", "word break characters in this FILE",
"FILE");
opts.optflag("f", "ignore-case",
"fold lower case to upper case for sorting");
opts.optopt("g", "gap-size", "gap size in columns between output fields",
"NUMBER");
opts.optopt("i", "ignore-file", "read ignore word list from FILE", "FILE");
opts.optopt("o", "only-file", "read only word list from this FILE",
"FILE");
opts.optflag("r", "references", "first field of each line is a reference");
opts.optopt("w", "width", "output width in columns, reference excluded",
"NUMBER");
opts.optflag("", "help", "display this | OutFormat | identifier_name |
ptx.rs | Set};
use std::default::Default;
use std::fs::File;
use getopts::{Options, Matches};
use std::io::{stdin, stdout, BufReader, BufWriter, BufRead, Read, Write};
use regex::Regex;
use std::cmp;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "ptx";
static VERSION: &'static str = "1.0.0";
#[derive(Debug)]
enum OutFormat {
Dumb,
Roff,
Tex,
}
#[derive(Debug)]
struct Config {
format : OutFormat,
gnu_ext : bool,
auto_ref : bool,
input_ref : bool,
right_ref : bool,
ignore_case : bool,
macro_name : String,
trunc_str : String,
context_regex : String,
line_width : usize,
gap_size : usize,
}
impl Default for Config {
fn default() -> Config {
Config {
format : OutFormat::Dumb,
gnu_ext : true,
auto_ref : false,
input_ref : false,
right_ref : false,
ignore_case : false,
macro_name : "xx".to_string(),
trunc_str : "/".to_string(),
context_regex : "\\w+".to_string(),
line_width : 72,
gap_size : 3
}
}
}
fn read_word_filter_file(matches: &Matches, option: &str) -> HashSet<String> |
#[derive(Debug)]
struct WordFilter {
only_specified: bool,
ignore_specified: bool,
only_set: HashSet<String>,
ignore_set: HashSet<String>,
word_regex: String,
}
impl WordFilter {
fn new(matches: &Matches, config: &Config) -> WordFilter {
let (o, oset): (bool, HashSet<String>) =
if matches.opt_present("o") {
(true, read_word_filter_file(matches, "o"))
} else {
(false, HashSet::new())
};
let (i, iset): (bool, HashSet<String>) =
if matches.opt_present("i") {
(true, read_word_filter_file(matches, "i"))
} else {
(false, HashSet::new())
};
if matches.opt_present("b") {
crash!(1, "-b not implemented yet");
}
let reg =
if matches.opt_present("W") {
matches.opt_str("W").expect("parsing options failed!")
} else if config.gnu_ext {
"\\w+".to_string()
} else {
"[^ \t\n]+".to_string()
};
WordFilter {
only_specified: o,
ignore_specified: i,
only_set: oset,
ignore_set: iset,
word_regex: reg
}
}
}
#[derive(Debug, PartialOrd, PartialEq, Eq, Ord)]
struct WordRef {
word: String,
global_line_nr: usize,
local_line_nr: usize,
position: usize,
position_end: usize,
filename: String,
}
fn print_version() {
println!("{} {}", NAME, VERSION);
}
fn print_usage(opts: &Options) {
let brief = "Usage: ptx [OPTION]... [INPUT]... (without -G) or: \
ptx -G [OPTION]... [INPUT [OUTPUT]] \n Output a permuted index, \
including context, of the words in the input files. \n\n Mandatory \
arguments to long options are mandatory for short options too.";
let explaination = "With no FILE, or when FILE is -, read standard input. \
Default is '-F /'.";
println!("{}\n{}", opts.usage(&brief), explaination);
}
fn get_config(matches: &Matches) -> Config {
let mut config: Config = Default::default();
let err_msg = "parsing options failed";
if matches.opt_present("G") {
config.gnu_ext = false;
config.format = OutFormat::Roff;
config.context_regex = "[^ \t\n]+".to_string();
} else {
crash!(1, "GNU extensions not implemented yet");
}
if matches.opt_present("S") {
crash!(1, "-S not implemented yet");
}
config.auto_ref = matches.opt_present("A");
config.input_ref = matches.opt_present("r");
config.right_ref &= matches.opt_present("R");
config.ignore_case = matches.opt_present("f");
if matches.opt_present("M") {
config.macro_name =
matches.opt_str("M").expect(err_msg).to_string();
}
if matches.opt_present("F") {
config.trunc_str =
matches.opt_str("F").expect(err_msg).to_string();
}
if matches.opt_present("w") {
let width_str = matches.opt_str("w").expect(err_msg);
config.line_width = crash_if_err!(
1, usize::from_str_radix(width_str.as_str(), 10));
}
if matches.opt_present("g") {
let gap_str = matches.opt_str("g").expect(err_msg);
config.gap_size = crash_if_err!(
1, usize::from_str_radix(gap_str.as_str(), 10));
}
if matches.opt_present("O") {
config.format = OutFormat::Roff;
}
if matches.opt_present("T") {
config.format = OutFormat::Tex;
}
config
}
fn read_input(input_files: Vec<&str>, config: &Config) ->
HashMap<String, (Vec<String>, usize)> {
let mut file_map : HashMap<String, (Vec<String>, usize)> =
HashMap::new();
let mut files = Vec::new();
if input_files.is_empty() {
files.push("-");
} else {
if config.gnu_ext {
files.push_all(input_files.as_slice());
} else {
files.push(input_files[0]);
}
}
let mut lines_so_far: usize = 0;
for filename in files {
let reader: BufReader<Box<Read>> = BufReader::new(
if filename == "-" {
Box::new(stdin())
} else {
let file = crash_if_err!(1, File::open(filename));
Box::new(file)
});
let lines: Vec<String> = reader.lines().map(|x| crash_if_err!(1, x))
.collect();
let size = lines.len();
file_map.insert(filename.to_string(), (lines, lines_so_far));
lines_so_far += size
}
file_map
}
fn create_word_set(config: &Config, filter: &WordFilter,
file_map: &HashMap<String, (Vec<String>, usize)>)->
BTreeSet<WordRef> {
let reg = Regex::new(filter.word_regex.as_str()).unwrap();
let ref_reg = Regex::new(config.context_regex.as_str()).unwrap();
let mut word_set: BTreeSet<WordRef> = BTreeSet::new();
for (file, lines) in file_map.iter() {
let mut count: usize = 0;
let offs = lines.1;
for line in (lines.0).iter() {
// if -r, exclude reference from word set
let (ref_beg, ref_end) = match ref_reg.find(line) {
Some(x) => x,
None => (0,0)
};
// match words with given regex
for (beg, end) in reg.find_iter(line) {
if config.input_ref && ((beg, end) == (ref_beg, ref_end)) {
continue;
}
let mut word = line.slice_chars(beg, end).to_string();
if filter.only_specified &&
!(filter.only_set.contains(&word)) {
continue;
}
if filter.ignore_specified &&
filter.ignore_set.contains(&word) {
continue;
}
if config.ignore_case {
word = word.to_lowercase();
}
word_set.insert(WordRef{
word: word,
filename: String::from(file.as_str()),
global_line_nr: offs + count,
local_line_nr: count,
position: beg,
position_end: end
});
}
count += 1;
}
}
word_set
}
fn get_reference(config: &Config, word_ref: &WordRef, line: &String) ->
String {
if config.auto_ref {
format!("{}:{}", word_ref.filename, word_ref.local_line_nr + 1)
} else if config.input_ref {
let reg = Regex::new(config.context_regex.as_str()).unwrap();
let (beg, end) = match reg.find(line) {
Some(x) => x,
None => (0,0)
};
format!("{}", line.slice_chars(beg, end))
} else {
String::new()
}
}
fn assert_str_integrity(s: &Vec<char>, beg: usize, end: usize) {
assert!(beg <= end);
assert!(end <= s.len());
}
fn trim_broken_word_left(s: &Vec<char>, beg: usize, end: usize) -> usize {
assert_str_integrity(s, beg, end);
if beg == end || beg == 0 || s[beg].is_whitespace() ||
s[beg-1].is_whitespace() {
return beg;
}
let mut b = beg;
while b < end &&!s[b].is_whitespace() {
b += 1;
}
b
}
fn trim_broken_word_right(s: &Vec<char>, beg: usize, end: usize) -> usize {
assert_str_integrity(s, beg, end);
if beg == end || end == s.len() || s[end-1].is_whitespace() ||
s[end].is_whitespace() {
return end;
}
let mut e = end;
while beg < e &&!s[e-1].is_whitespace() {
e -= 1;
}
e
}
fn trim_idx(s: &Vec<char>, beg: usize, end: usize) -> (usize, usize) {
assert_str_integrity(s, beg, end);
let mut b = beg;
let mut e = end;
while b < e && s[b].is_whitespace() {
b += 1;
}
while b < e && s[e-1].is_whitespace() {
e -= 1;
}
(b,e)
}
fn get_output_chunks(all_before: &String, keyword: &String, all_after: &String,
config: &Config) -> (String, String, String, String) {
assert!(all_before.trim() == all_before.as_str());
assert!(keyword.trim() == keyword.as_str());
assert!(all_after.trim() == all_after.as_str());
let mut head = String::new();
let mut before = String::new();
let mut after = String::new();
let mut tail = String::new();
let half_line_size = cmp::max((config.line_width/2) as isize -
(2*config.trunc_str.len()) as isize, 0) as usize;
let max_after_size = cmp::max(half_line_size as isize -
keyword.len() as isize - 1, 0) as usize;
let max_before_size = half_line_size;
let all_before_vec: Vec<char> = all_before.chars().collect();
let all_after_vec: Vec<char> = all_after.chars().collect();
// get before
let mut bb_tmp =
cmp::max(all_before.len() as isize - max_before_size as isize, 0) as usize;
bb_tmp = trim_broken_word_left(&all_before_vec, bb_tmp, all_before.len());
let (before_beg, before_end) =
trim_idx(&all_before_vec, bb_tmp, all_before.len());
before.push_str(all_before.slice_chars(before_beg, before_end));
assert!(max_before_size >= before.len());
// get after
let mut ae_tmp = cmp::min(max_after_size, all_after.len());
ae_tmp = trim_broken_word_right(&all_after_vec, 0, ae_tmp);
let (after_beg, after_end) = trim_idx(&all_after_vec, 0, ae_tmp);
after.push_str(all_after.slice_chars(after_beg, after_end));
assert!(max_after_size >= after.len());
// get tail
let max_tail_size = max_before_size - before.len();
let (tb, _) = trim_idx(&all_after_vec, after_end, all_after.len());
let mut te_tmp = cmp::min(tb + max_tail_size, all_after.len());
te_tmp = trim_broken_word_right(&all_after_vec, tb, te_tmp);
let (tail_beg, tail_end) = trim_idx(&all_after_vec, tb, te_tmp);
tail.push_str(all_after.slice_chars(tail_beg, tail_end));
// get head
let max_head_size = max_after_size - after.len();
let (_, he) = trim_idx(&all_before_vec, 0, before_beg);
let mut hb_tmp =
cmp::max(he as isize - max_head_size as isize, 0) as usize;
hb_tmp = trim_broken_word_left(&all_before_vec, hb_tmp, he);
let (head_beg, head_end) = trim_idx(&all_before_vec, hb_tmp, he);
head.push_str(all_before.slice_chars(head_beg, head_end));
// put right context truncation string if needed
if after_end!= all_after.len() && tail_beg == tail_end {
after.push_str(config.trunc_str.as_str());
} else if after_end!= all_after.len() && tail_end!= all_after.len() {
tail.push_str(config.trunc_str.as_str());
}
// put left context truncation string if needed
if before_beg!= 0 && head_beg == head_end {
before = format!("{}{}", config.trunc_str, before);
} else if before_beg!= 0 && head_beg!= 0 {
head = format!("{}{}", config.trunc_str, head);
}
// add space before "after" if needed
if after.len() > 0 {
after = format!(" {}", after);
}
(tail, before, after, head)
}
fn tex_mapper(x: char) -> String {
match x {
'\\' => "\\backslash{}".to_string(),
'$' | '%' | '#' | '&' | '_' => format!("\\{}", x),
'}' | '{' => format!("$\\{}$", x),
_ => x.to_string()
}
}
fn adjust_tex_str(context: &str) -> String {
let ws_reg = Regex::new(r"[\t\n\v\f\r ]").unwrap();
let mut fix: String = ws_reg.replace_all(context, " ").trim().to_string();
let mapped_chunks: Vec<String> = fix.chars().map(tex_mapper).collect();
fix = mapped_chunks.connect("");
fix
}
fn format_tex_line(config: &Config, word_ref: &WordRef, line: &String,
reference: &String) -> String {
let mut output = String::new();
output.push_str(&format!("\\{} ", config.macro_name));
let all_before = if config.input_ref {
let before = line.slice_chars(0, word_ref.position);
adjust_tex_str(before.trim().trim_left_matches(reference))
} else {
adjust_tex_str(line.slice_chars(0, word_ref.position))
};
let keyword = adjust_tex_str(
line.slice_chars(word_ref.position, word_ref.position_end));
let all_after = adjust_tex_str(
line.slice_chars(word_ref.position_end, line.len()));
let (tail, before, after, head) =
get_output_chunks(&all_before, &keyword, &all_after, &config);
output.push_str(format!("{5}{0}{6}{5}{1}{6}{5}{2}{6}{5}{3}{6}{5}{4}{6}",
tail, before, keyword, after, head, "{", "}").as_str());
if config.auto_ref || config.input_ref {
output.push_str(
&format!("{}{}{}", "{", adjust_tex_str(&reference), "}"));
}
output
}
fn adjust_roff_str(context: &str) -> String {
let ws_reg = Regex::new(r"[\t\n\v\f\r]").unwrap();
ws_reg.replace_all(context, " ").replace("\"", "\"\"").trim().to_string()
}
fn format_roff_line(config: &Config, word_ref: &WordRef, line: &str,
reference: &str) -> String {
let mut output = String::new();
output.push_str(&format!(".{}", config.macro_name));
let all_before = if config.input_ref {
let before = line.slice_chars(0, word_ref.position);
adjust_roff_str(before.trim().trim_left_matches(reference))
} else {
adjust_roff_str(line.slice_chars(0, word_ref.position))
};
let keyword = adjust_roff_str(
line.slice_chars(word_ref.position, word_ref.position_end));
let all_after = adjust_roff_str(
line.slice_chars(word_ref.position_end, line.len()));
let (tail, before, after, head) =
get_output_chunks(&all_before, &keyword, &all_after, &config);
output.push_str(format!(" \"{}\" \"{}\" \"{}{}\" \"{}\"",
tail, before, keyword, after, head).as_str());
if config.auto_ref || config.input_ref {
output.push_str(&format!(" \"{}\"", adjust_roff_str(&reference)));
}
output
}
fn write_traditional_output(config: &Config,
file_map: &HashMap<String, (Vec<String>,usize)>,
words: &BTreeSet<WordRef>, output_filename: &str) {
let mut writer: BufWriter<Box<Write>> = BufWriter::new(
if output_filename == "-" {
Box::new(stdout())
} else {
let file = crash_if_err!(1, File::create(output_filename));
Box::new(file)
});
for word_ref in words.iter() {
let file_map_value : &(Vec<String>, usize) =
file_map.get(&(word_ref.filename))
.expect("Missing file in file map");
let (ref lines, _) = *(file_map_value);
let reference =
get_reference(config, word_ref, &lines[word_ref.local_line_nr]);
let output_line: String = match config.format {
OutFormat::Tex => format_tex_line(
config, word_ref, &lines[word_ref.local_line_nr], &reference),
OutFormat::Roff => format_roff_line(
config, word_ref, &lines[word_ref.local_line_nr], &reference),
OutFormat::Dumb => crash!(
1, "There is no dumb format with GNU extensions disabled")
};
crash_if_err!(1, writeln!(writer, "{}", output_line));
}
}
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("A", "auto-reference",
"output automatically generated references");
opts.optflag("G", "traditional", "behave more like System V 'ptx'");
opts.optopt("F", "flag-truncation",
"use STRING for flagging line truncations", "STRING");
opts.optopt("M", "macro-name", "macro name to use instead of 'xx'",
"STRING");
opts.optflag("O", "format=roff", "generate output as roff directives");
opts.optflag("R", "right-side-refs",
"put references at right, not counted in -w");
opts.optopt("S", "sentence-regexp", "for end of lines or end of sentences",
"REGEXP");
opts.optflag("T", "format=tex", "generate output as TeX directives");
opts.optopt("W", "word-regexp", "use REGEXP to match each keyword",
"REGEXP");
opts.optopt("b", "break-file", "word break characters in this FILE",
"FILE");
opts.optflag("f", "ignore-case",
"fold lower case to upper case for sorting");
opts.optopt("g", "gap-size", "gap size in columns between output fields",
"NUMBER");
opts.optopt("i", "ignore-file", "read ignore word list from FILE", "FILE");
opts.optopt("o", "only-file", "read only word list from this FILE",
"FILE");
opts.optflag("r", "references", "first field of each line is a reference");
opts.optopt("w", "width", "output width in columns, reference excluded",
"NUMBER");
opts.optflag("", "help", "display this | {
let filename = matches.opt_str(option).expect("parsing options failed!");
let reader = BufReader::new(crash_if_err!(1, File::open(filename)));
let mut words: HashSet<String> = HashSet::new();
for word in reader.lines() {
words.insert(crash_if_err!(1, word));
}
words
} | identifier_body |
ptx.rs | TreeSet};
use std::default::Default;
use std::fs::File;
use getopts::{Options, Matches};
use std::io::{stdin, stdout, BufReader, BufWriter, BufRead, Read, Write};
use regex::Regex;
use std::cmp;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "ptx";
static VERSION: &'static str = "1.0.0";
#[derive(Debug)]
enum OutFormat {
Dumb,
Roff,
Tex,
}
#[derive(Debug)]
struct Config {
format : OutFormat,
gnu_ext : bool,
auto_ref : bool,
input_ref : bool,
right_ref : bool,
ignore_case : bool,
macro_name : String,
trunc_str : String,
context_regex : String,
line_width : usize,
gap_size : usize,
}
impl Default for Config {
fn default() -> Config {
Config {
format : OutFormat::Dumb,
gnu_ext : true,
auto_ref : false,
input_ref : false,
right_ref : false,
ignore_case : false,
macro_name : "xx".to_string(),
trunc_str : "/".to_string(),
context_regex : "\\w+".to_string(),
line_width : 72,
gap_size : 3
}
}
}
fn read_word_filter_file(matches: &Matches, option: &str) -> HashSet<String> {
let filename = matches.opt_str(option).expect("parsing options failed!");
let reader = BufReader::new(crash_if_err!(1, File::open(filename)));
let mut words: HashSet<String> = HashSet::new();
for word in reader.lines() {
words.insert(crash_if_err!(1, word));
}
words
}
#[derive(Debug)]
struct WordFilter {
only_specified: bool,
ignore_specified: bool,
only_set: HashSet<String>,
ignore_set: HashSet<String>,
word_regex: String,
}
impl WordFilter {
fn new(matches: &Matches, config: &Config) -> WordFilter {
let (o, oset): (bool, HashSet<String>) =
if matches.opt_present("o") {
(true, read_word_filter_file(matches, "o"))
} else {
(false, HashSet::new())
};
let (i, iset): (bool, HashSet<String>) =
if matches.opt_present("i") {
(true, read_word_filter_file(matches, "i"))
} else {
(false, HashSet::new())
};
if matches.opt_present("b") {
crash!(1, "-b not implemented yet");
}
let reg =
if matches.opt_present("W") {
matches.opt_str("W").expect("parsing options failed!")
} else if config.gnu_ext {
"\\w+".to_string()
} else {
"[^ \t\n]+".to_string()
};
WordFilter {
only_specified: o,
ignore_specified: i,
only_set: oset,
ignore_set: iset,
word_regex: reg
}
}
}
#[derive(Debug, PartialOrd, PartialEq, Eq, Ord)]
struct WordRef {
word: String,
global_line_nr: usize,
local_line_nr: usize,
position: usize,
position_end: usize,
filename: String,
}
fn print_version() {
println!("{} {}", NAME, VERSION);
}
fn print_usage(opts: &Options) {
let brief = "Usage: ptx [OPTION]... [INPUT]... (without -G) or: \
ptx -G [OPTION]... [INPUT [OUTPUT]] \n Output a permuted index, \
including context, of the words in the input files. \n\n Mandatory \
arguments to long options are mandatory for short options too.";
let explaination = "With no FILE, or when FILE is -, read standard input. \
Default is '-F /'.";
println!("{}\n{}", opts.usage(&brief), explaination);
}
fn get_config(matches: &Matches) -> Config {
let mut config: Config = Default::default();
let err_msg = "parsing options failed";
if matches.opt_present("G") {
config.gnu_ext = false;
config.format = OutFormat::Roff;
config.context_regex = "[^ \t\n]+".to_string();
} else { | crash!(1, "-S not implemented yet");
}
config.auto_ref = matches.opt_present("A");
config.input_ref = matches.opt_present("r");
config.right_ref &= matches.opt_present("R");
config.ignore_case = matches.opt_present("f");
if matches.opt_present("M") {
config.macro_name =
matches.opt_str("M").expect(err_msg).to_string();
}
if matches.opt_present("F") {
config.trunc_str =
matches.opt_str("F").expect(err_msg).to_string();
}
if matches.opt_present("w") {
let width_str = matches.opt_str("w").expect(err_msg);
config.line_width = crash_if_err!(
1, usize::from_str_radix(width_str.as_str(), 10));
}
if matches.opt_present("g") {
let gap_str = matches.opt_str("g").expect(err_msg);
config.gap_size = crash_if_err!(
1, usize::from_str_radix(gap_str.as_str(), 10));
}
if matches.opt_present("O") {
config.format = OutFormat::Roff;
}
if matches.opt_present("T") {
config.format = OutFormat::Tex;
}
config
}
fn read_input(input_files: Vec<&str>, config: &Config) ->
HashMap<String, (Vec<String>, usize)> {
let mut file_map : HashMap<String, (Vec<String>, usize)> =
HashMap::new();
let mut files = Vec::new();
if input_files.is_empty() {
files.push("-");
} else {
if config.gnu_ext {
files.push_all(input_files.as_slice());
} else {
files.push(input_files[0]);
}
}
let mut lines_so_far: usize = 0;
for filename in files {
let reader: BufReader<Box<Read>> = BufReader::new(
if filename == "-" {
Box::new(stdin())
} else {
let file = crash_if_err!(1, File::open(filename));
Box::new(file)
});
let lines: Vec<String> = reader.lines().map(|x| crash_if_err!(1, x))
.collect();
let size = lines.len();
file_map.insert(filename.to_string(), (lines, lines_so_far));
lines_so_far += size
}
file_map
}
/// Scans every input line and collects a `WordRef` for each keyword
/// occurrence that survives the only/ignore word filters.
///
/// With `-r` (input_ref) a match that coincides with the line's leading
/// reference field is excluded. The result is a `BTreeSet`, so the
/// occurrences come out in their final sorted output order.
fn create_word_set(config: &Config, filter: &WordFilter,
                   file_map: &HashMap<String, (Vec<String>, usize)>)->
    BTreeSet<WordRef> {
    let reg = Regex::new(filter.word_regex.as_str()).unwrap();
    let ref_reg = Regex::new(config.context_regex.as_str()).unwrap();
    let mut word_set: BTreeSet<WordRef> = BTreeSet::new();
    for (file, lines) in file_map.iter() {
        // Line counter within the current file.
        let mut count: usize = 0;
        // Global line offset of this file (lines of previously read files).
        let offs = lines.1;
        for line in (lines.0).iter() {
            // if -r, exclude reference from word set
            let (ref_beg, ref_end) = match ref_reg.find(line) {
                Some(x) => x,
                None => (0,0)
            };
            // match words with given regex
            for (beg, end) in reg.find_iter(line) {
                if config.input_ref && ((beg, end) == (ref_beg, ref_end)) {
                    continue;
                }
                let mut word = line.slice_chars(beg, end).to_string();
                // -o: keep only listed words (checked before lowercasing).
                if filter.only_specified &&
                    !(filter.only_set.contains(&word)) {
                    continue;
                }
                // -i: drop words on the ignore list.
                if filter.ignore_specified &&
                    filter.ignore_set.contains(&word) {
                    continue;
                }
                if config.ignore_case {
                    word = word.to_lowercase();
                }
                word_set.insert(WordRef{
                    word: word,
                    filename: String::from(file.as_str()),
                    global_line_nr: offs + count,
                    local_line_nr: count,
                    position: beg,
                    position_end: end
                });
            }
            count += 1;
        }
    }
    word_set
}
/// Builds the reference string shown next to an output line.
///
/// With `-A` (auto_ref) this is `file:lineno` (1-based); with `-r`
/// (input_ref) it is the leading reference field extracted from the line
/// itself; otherwise it is empty.
fn get_reference(config: &Config, word_ref: &WordRef, line: &String) ->
    String {
    if config.auto_ref {
        format!("{}:{}", word_ref.filename, word_ref.local_line_nr + 1)
    } else if config.input_ref {
        // NOTE(review): the context regex is recompiled on every call;
        // hoisting the compilation to the caller would avoid repeated work.
        let reg = Regex::new(config.context_regex.as_str()).unwrap();
        let (beg, end) = match reg.find(line) {
            Some(x) => x,
            None => (0, 0)
        };
        // `.to_string()` instead of the redundant `format!("{}", ...)`.
        line.slice_chars(beg, end).to_string()
    } else {
        String::new()
    }
}
/// Panics unless `beg..end` is an ordered range that fits inside `chars`.
fn assert_str_integrity(chars: &Vec<char>, beg: usize, end: usize) {
    assert!(beg <= end);
    assert!(end <= chars.len());
}
/// Returns the first index at or after `beg` that sits on a word boundary.
///
/// If `beg` already does (start of slice, on whitespace, or right after
/// whitespace) it is returned unchanged; otherwise the index is advanced
/// past the rest of the word that the range boundary cut through, up to
/// `end`.
fn trim_broken_word_left(s: &Vec<char>, beg: usize, end: usize) -> usize {
    assert_str_integrity(s, beg, end);
    let on_boundary = beg == end || beg == 0 || s[beg].is_whitespace()
        || s[beg - 1].is_whitespace();
    if on_boundary {
        return beg;
    }
    // Skip the tail of the broken word.
    match s[beg..end].iter().position(|c| c.is_whitespace()) {
        Some(offset) => beg + offset,
        None => end,
    }
}
/// Returns the last index at or before `end` that sits on a word boundary.
///
/// If `end` already does (end of slice, right after whitespace, or on
/// whitespace) it is returned unchanged; otherwise the index retreats over
/// the head of the word that the range boundary cut through, down to `beg`.
fn trim_broken_word_right(s: &Vec<char>, beg: usize, end: usize) -> usize {
    assert_str_integrity(s, beg, end);
    let on_boundary = beg == end || end == s.len()
        || s[end - 1].is_whitespace() || s[end].is_whitespace();
    if on_boundary {
        return end;
    }
    // Back up over the head of the broken word.
    match s[beg..end].iter().rposition(|c| c.is_whitespace()) {
        Some(offset) => beg + offset + 1,
        None => beg,
    }
}
/// Shrinks the range `beg..end` so it carries no leading or trailing
/// whitespace, returning the tightened `(begin, end)` pair.
fn trim_idx(s: &Vec<char>, beg: usize, end: usize) -> (usize, usize) {
    assert_str_integrity(s, beg, end);
    // Step over leading whitespace.
    let lead = s[beg..end].iter().take_while(|c| c.is_whitespace()).count();
    let b = beg + lead;
    // Cut back to just past the last non-whitespace character.
    let e = match s[b..end].iter().rposition(|c| !c.is_whitespace()) {
        Some(offset) => b + offset + 1,
        None => b,
    };
    (b, e)
}
/// Splits the context around one keyword into the four chunks of a ptx
/// output line, returned as `(tail, before, after, head)`.
///
/// `before`/`after` are the immediate left/right context, each budgeted to
/// half the output line (minus room for the keyword and truncation
/// strings); `tail`/`head` consume whatever budget the opposite side left
/// over. Chunks never cut a word in half, and the configured truncation
/// string marks context that had to be dropped.
///
/// All three inputs must already be trimmed of surrounding whitespace
/// (asserted below).
///
/// NOTE(review): byte lengths (`len()`) are mixed with char indices
/// (`slice_chars` and the `Vec<char>` trim helpers), so the budgets are
/// only exact for ASCII input — confirm against GNU ptx behavior.
fn get_output_chunks(all_before: &String, keyword: &String, all_after: &String,
                     config: &Config) -> (String, String, String, String) {
    assert!(all_before.trim() == all_before.as_str());
    assert!(keyword.trim() == keyword.as_str());
    assert!(all_after.trim() == all_after.as_str());
    let mut head = String::new();
    let mut before = String::new();
    let mut after = String::new();
    let mut tail = String::new();
    // Budget per side; saturates at 0 via the isize round-trip.
    let half_line_size = cmp::max((config.line_width/2) as isize -
        (2*config.trunc_str.len()) as isize, 0) as usize;
    let max_after_size = cmp::max(half_line_size as isize -
        keyword.len() as isize - 1, 0) as usize;
    let max_before_size = half_line_size;
    let all_before_vec: Vec<char> = all_before.chars().collect();
    let all_after_vec: Vec<char> = all_after.chars().collect();
    // get before
    let mut bb_tmp =
        cmp::max(all_before.len() as isize - max_before_size as isize, 0) as usize;
    bb_tmp = trim_broken_word_left(&all_before_vec, bb_tmp, all_before.len());
    let (before_beg, before_end) =
        trim_idx(&all_before_vec, bb_tmp, all_before.len());
    before.push_str(all_before.slice_chars(before_beg, before_end));
    assert!(max_before_size >= before.len());
    // get after
    let mut ae_tmp = cmp::min(max_after_size, all_after.len());
    ae_tmp = trim_broken_word_right(&all_after_vec, 0, ae_tmp);
    let (after_beg, after_end) = trim_idx(&all_after_vec, 0, ae_tmp);
    after.push_str(all_after.slice_chars(after_beg, after_end));
    assert!(max_after_size >= after.len());
    // get tail: right context continues with the budget `before` left unused
    let max_tail_size = max_before_size - before.len();
    let (tb, _) = trim_idx(&all_after_vec, after_end, all_after.len());
    let mut te_tmp = cmp::min(tb + max_tail_size, all_after.len());
    te_tmp = trim_broken_word_right(&all_after_vec, tb, te_tmp);
    let (tail_beg, tail_end) = trim_idx(&all_after_vec, tb, te_tmp);
    tail.push_str(all_after.slice_chars(tail_beg, tail_end));
    // get head: left context continues with the budget `after` left unused
    let max_head_size = max_after_size - after.len();
    let (_, he) = trim_idx(&all_before_vec, 0, before_beg);
    let mut hb_tmp =
        cmp::max(he as isize - max_head_size as isize, 0) as usize;
    hb_tmp = trim_broken_word_left(&all_before_vec, hb_tmp, he);
    let (head_beg, head_end) = trim_idx(&all_before_vec, hb_tmp, he);
    head.push_str(all_before.slice_chars(head_beg, head_end));
    // put right context truncation string if needed
    if after_end != all_after.len() && tail_beg == tail_end {
        after.push_str(config.trunc_str.as_str());
    } else if after_end != all_after.len() && tail_end != all_after.len() {
        tail.push_str(config.trunc_str.as_str());
    }
    // put left context truncation string if needed
    if before_beg != 0 && head_beg == head_end {
        before = format!("{}{}", config.trunc_str, before);
    } else if before_beg != 0 && head_beg != 0 {
        head = format!("{}{}", config.trunc_str, head);
    }
    // add space before "after" if needed
    if after.len() > 0 {
        after = format!(" {}", after);
    }
    (tail, before, after, head)
}
/// Escapes a single character so it is safe to emit inside TeX source.
fn tex_mapper(x: char) -> String {
    if x == '\\' {
        // Backslash needs the named control sequence.
        "\\backslash{}".to_string()
    } else if "$%#&_".contains(x) {
        // Plain special characters are escaped with a leading backslash.
        format!("\\{}", x)
    } else if x == '{' || x == '}' {
        // Braces must be escaped inside math mode.
        format!("$\\{}$", x)
    } else {
        x.to_string()
    }
}
/// Normalizes a piece of text for TeX output: replaces each whitespace
/// character with a space, trims the ends, and escapes every character
/// through `tex_mapper`.
fn adjust_tex_str(context: &str) -> String {
    let ws_reg = Regex::new(r"[\t\n\v\f\r ]").unwrap();
    let mut fix: String = ws_reg.replace_all(context, " ").trim().to_string();
    // Escape character by character and glue the pieces back together.
    let mapped_chunks: Vec<String> = fix.chars().map(tex_mapper).collect();
    fix = mapped_chunks.connect("");
    fix
}
/// Renders one keyword occurrence as a TeX macro invocation:
/// `\xx {tail}{before}{keyword}{after}{head}` (using the configured macro
/// name), followed by `{reference}` when references are enabled.
fn format_tex_line(config: &Config, word_ref: &WordRef, line: &String,
                   reference: &String) -> String {
    let mut output = String::new();
    output.push_str(&format!("\\{} ", config.macro_name));
    let all_before = if config.input_ref {
        // Strip the leading reference field so it is not repeated in the
        // left context.
        let before = line.slice_chars(0, word_ref.position);
        adjust_tex_str(before.trim().trim_left_matches(reference))
    } else {
        adjust_tex_str(line.slice_chars(0, word_ref.position))
    };
    let keyword = adjust_tex_str(
        line.slice_chars(word_ref.position, word_ref.position_end));
    let all_after = adjust_tex_str(
        line.slice_chars(word_ref.position_end, line.len()));
    let (tail, before, after, head) =
        get_output_chunks(&all_before, &keyword, &all_after, &config);
    // Positional args 5/6 supply the literal braces around each chunk.
    output.push_str(format!("{5}{0}{6}{5}{1}{6}{5}{2}{6}{5}{3}{6}{5}{4}{6}",
                            tail, before, keyword, after, head, "{", "}").as_str());
    if config.auto_ref || config.input_ref {
        output.push_str(
            &format!("{}{}{}", "{", adjust_tex_str(&reference), "}"));
    }
    output
}
/// Normalizes a piece of text for roff output: replaces control
/// whitespace with spaces, doubles embedded double quotes (roff's quote
/// escape), and trims the ends.
fn adjust_roff_str(context: &str) -> String {
    let ws_reg = Regex::new(r"[\t\n\v\f\r]").unwrap();
    ws_reg.replace_all(context, " ").replace("\"", "\"\"").trim().to_string()
}
/// Renders one keyword occurrence as a roff macro call:
/// `.xx "tail" "before" "keyword after" "head"` (using the configured
/// macro name), plus a trailing `"reference"` when references are enabled.
fn format_roff_line(config: &Config, word_ref: &WordRef, line: &str,
                    reference: &str) -> String {
    let mut output = String::new();
    output.push_str(&format!(".{}", config.macro_name));
    let all_before = if config.input_ref {
        // Strip the leading reference field from the left context.
        let before = line.slice_chars(0, word_ref.position)
---
        adjust_roff_str(before.trim().trim_left_matches(reference))
    } else {
        adjust_roff_str(line.slice_chars(0, word_ref.position))
    };
    let keyword = adjust_roff_str(
        line.slice_chars(word_ref.position, word_ref.position_end));
    let all_after = adjust_roff_str(
        line.slice_chars(word_ref.position_end, line.len()));
    let (tail, before, after, head) =
        get_output_chunks(&all_before, &keyword, &all_after, &config);
    output.push_str(format!(" \"{}\" \"{}\" \"{}{}\" \"{}\"",
                            tail, before, keyword, after, head).as_str());
    if config.auto_ref || config.input_ref {
        output.push_str(&format!(" \"{}\"", adjust_roff_str(&reference)));
    }
    output
}
/// Writes every collected word occurrence to `output_filename` ("-" for
/// stdout) in the configured output format (TeX or roff).
///
/// Exits the process with status 1 on any I/O failure, or when the plain
/// "dumb" format is requested, which is unsupported without GNU
/// extensions.
fn write_traditional_output(config: &Config,
                            file_map: &HashMap<String, (Vec<String>,usize)>,
                            words: &BTreeSet<WordRef>, output_filename: &str) {
    let mut writer: BufWriter<Box<Write>> = BufWriter::new(
        if output_filename == "-" {
            Box::new(stdout())
        } else {
            let file = crash_if_err!(1, File::create(output_filename));
            Box::new(file)
        });
    for word_ref in words.iter() {
        let file_map_value : &(Vec<String>, usize) =
            file_map.get(&(word_ref.filename))
                .expect("Missing file in file map");
        let (ref lines, _) = *(file_map_value);
        // Recompute this occurrence's reference from its source line.
        let reference =
            get_reference(config, word_ref, &lines[word_ref.local_line_nr]);
        let output_line: String = match config.format {
            OutFormat::Tex => format_tex_line(
                config, word_ref, &lines[word_ref.local_line_nr], &reference),
            OutFormat::Roff => format_roff_line(
                config, word_ref, &lines[word_ref.local_line_nr], &reference),
            OutFormat::Dumb => crash!(
                1, "There is no dumb format with GNU extensions disabled")
        };
        crash_if_err!(1, writeln!(writer, "{}", output_line));
    }
}
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("A", "auto-reference",
"output automatically generated references");
opts.optflag("G", "traditional", "behave more like System V 'ptx'");
opts.optopt("F", "flag-truncation",
"use STRING for flagging line truncations", "STRING");
opts.optopt("M", "macro-name", "macro name to use instead of 'xx'",
"STRING");
opts.optflag("O", "format=roff", "generate output as roff directives");
opts.optflag("R", "right-side-refs",
"put references at right, not counted in -w");
opts.optopt("S", "sentence-regexp", "for end of lines or end of sentences",
"REGEXP");
opts.optflag("T", "format=tex", "generate output as TeX directives");
opts.optopt("W", "word-regexp", "use REGEXP to match each keyword",
"REGEXP");
opts.optopt("b", "break-file", "word break characters in this FILE",
"FILE");
opts.optflag("f", "ignore-case",
"fold lower case to upper case for sorting");
opts.optopt("g", "gap-size", "gap size in columns between output fields",
"NUMBER");
opts.optopt("i", "ignore-file", "read ignore word list from FILE", "FILE");
opts.optopt("o", "only-file", "read only word list from this FILE",
"FILE");
opts.optflag("r", "references", "first field of each line is a reference");
opts.optopt("w", "width", "output width in columns, reference excluded",
"NUMBER");
opts.optflag("", "help", "display this help and | crash!(1, "GNU extensions not implemented yet");
}
if matches.opt_present("S") { | random_line_split |
parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The context within which CSS code is parsed.
#![deny(missing_docs)]
use cssparser::{Parser, SourcePosition, UnicodeRange};
use error_reporting::ParseErrorReporter;
#[cfg(feature = "gecko")]
use gecko_bindings::sugar::refptr::{GeckoArcPrincipal, GeckoArcURI};
use servo_url::ServoUrl;
use style_traits::OneOrMoreCommaSeparated;
use stylesheets::{MemoryHoleReporter, Origin};
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(not(feature = "gecko"))]
pub struct ParserContextExtraData;
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(feature = "gecko")]
pub struct ParserContextExtraData {
/// The base URI.
pub base: Option<GeckoArcURI>,
/// The referrer URI.
pub referrer: Option<GeckoArcURI>,
/// The principal that loaded this stylesheet.
pub principal: Option<GeckoArcPrincipal>,
}
#[cfg(not(feature = "gecko"))]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData
}
}
#[cfg(feature = "gecko")]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData { base: None, referrer: None, principal: None }
}
}
#[cfg(feature = "gecko")]
impl ParserContextExtraData {
    /// Construct from a GeckoParserExtraData
    ///
    /// GeckoParserExtraData must live longer than this call
    pub unsafe fn new(data: *const ::gecko_bindings::structs::GeckoParserExtraData) -> Self {
        // the to_safe calls are safe since we trust that we have references to
        // real Gecko refptrs. The dereferencing of data is safe because this function
        // is expected to be called with a `data` living longer than this function.
        // NOTE(review): the inner `unsafe` block is redundant — the body of an
        // `unsafe fn` is already an unsafe context.
        unsafe { ParserContextExtraData {
            base: Some((*data).mBaseURI.to_safe()),
            referrer: Some((*data).mReferrer.to_safe()),
            principal: Some((*data).mPrincipal.to_safe()),
        }}
    }
}
/// The data that the parser needs from outside in order to parse a stylesheet.
pub struct ParserContext<'a> {
/// The `Origin` of the stylesheet, whether it's a user, author or
/// user-agent stylesheet.
pub stylesheet_origin: Origin,
/// The base url we're parsing this stylesheet as.
pub base_url: &'a ServoUrl,
/// An error reporter to report syntax errors.
pub error_reporter: Box<ParseErrorReporter + Send>,
/// Implementation-dependent extra data.
pub extra_data: ParserContextExtraData,
}
impl<'a> ParserContext<'a> {
    /// Create a `ParserContext` with extra data.
    pub fn new_with_extra_data(stylesheet_origin: Origin,
                               base_url: &'a ServoUrl,
                               error_reporter: Box<ParseErrorReporter + Send>,
                               extra_data: ParserContextExtraData)
                               -> ParserContext<'a> {
        ParserContext {
            stylesheet_origin: stylesheet_origin,
            base_url: base_url,
            error_reporter: error_reporter,
            extra_data: extra_data,
        }
    }

    /// Create a parser context with the default extra data.
    pub fn new(stylesheet_origin: Origin,
               base_url: &'a ServoUrl,
               error_reporter: Box<ParseErrorReporter + Send>)
               -> ParserContext<'a> {
        let extra_data = ParserContextExtraData::default();
        Self::new_with_extra_data(stylesheet_origin, base_url, error_reporter, extra_data)
    }

    /// Create a parser context for on-the-fly parsing in CSSOM.
    ///
    /// Uses the `User` origin and a reporter that silently discards all
    /// errors (`MemoryHoleReporter`).
    pub fn new_for_cssom(base_url: &'a ServoUrl) -> ParserContext<'a> {
        Self::new(Origin::User, base_url, Box::new(MemoryHoleReporter))
    }
}
/// Reports a CSS parse error through the context's error reporter.
///
/// Defaults to a no-op; set a `RUST_LOG=style::errors` environment
/// variable to log CSS parse errors to stderr.
pub fn log_css_error(input: &mut Parser, position: SourcePosition, message: &str, parsercontext: &ParserContext) {
    parsercontext.error_reporter.report_error(input, position, message, parsercontext.base_url);
}
// XXXManishearth Replace all specified value parse impls with impls of this
// trait. This will make it easy to write more generic values in the future.
/// A trait to abstract parsing of a specified value given a `ParserContext` and
/// CSS input.
pub trait Parse : Sized {
/// Parse a value of this type.
///
/// Returns an error on failure.
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()>;
}
impl<T> Parse for Vec<T> where T: Parse + OneOrMoreCommaSeparated {
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()> |
}
/// Parse a non-empty space-separated or comma-separated list of objects parsed by parse_one
pub fn parse_space_or_comma_separated<F, T>(input: &mut Parser, mut parse_one: F)
        -> Result<Vec<T>, ()>
        where F: FnMut(&mut Parser) -> Result<T, ()> {
    // The leading item is mandatory; its error propagates to the caller.
    let mut values = vec![parse_one(input)?];
    loop {
        // Separating commas are optional, so plain spaces also delimit
        // entries; `try` rewinds the parser if no comma is present.
        let _ = input.try(|i| i.expect_comma());
        match input.try(|i| parse_one(i)) {
            Ok(value) => values.push(value),
            Err(_) => break,
        }
    }
    Ok(values)
}
impl Parse for UnicodeRange {
    // Delegates entirely to cssparser's own `UnicodeRange` parser; the
    // style `ParserContext` is not needed here.
    fn parse(_context: &ParserContext, input: &mut Parser) -> Result<Self, ()> {
        UnicodeRange::parse(input)
    }
}
| {
input.parse_comma_separated(|input| T::parse(context, input))
} | identifier_body |
parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The context within which CSS code is parsed.
#![deny(missing_docs)]
use cssparser::{Parser, SourcePosition, UnicodeRange};
use error_reporting::ParseErrorReporter;
#[cfg(feature = "gecko")]
use gecko_bindings::sugar::refptr::{GeckoArcPrincipal, GeckoArcURI};
use servo_url::ServoUrl;
use style_traits::OneOrMoreCommaSeparated;
use stylesheets::{MemoryHoleReporter, Origin};
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(not(feature = "gecko"))]
pub struct ParserContextExtraData;
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(feature = "gecko")]
pub struct ParserContextExtraData {
/// The base URI.
pub base: Option<GeckoArcURI>,
/// The referrer URI.
pub referrer: Option<GeckoArcURI>,
/// The principal that loaded this stylesheet.
pub principal: Option<GeckoArcPrincipal>,
}
#[cfg(not(feature = "gecko"))]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData
}
}
#[cfg(feature = "gecko")]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData { base: None, referrer: None, principal: None }
}
}
#[cfg(feature = "gecko")]
impl ParserContextExtraData {
/// Construct from a GeckoParserExtraData
///
/// GeckoParserExtraData must live longer than this call
pub unsafe fn new(data: *const ::gecko_bindings::structs::GeckoParserExtraData) -> Self {
// the to_safe calls are safe since we trust that we have references to
// real Gecko refptrs. The dereferencing of data is safe because this function
// is expected to be called with a `data` living longer than this function.
unsafe { ParserContextExtraData {
base: Some((*data).mBaseURI.to_safe()),
referrer: Some((*data).mReferrer.to_safe()),
principal: Some((*data).mPrincipal.to_safe()),
}}
}
}
/// The data that the parser needs from outside in order to parse a stylesheet.
pub struct ParserContext<'a> {
/// The `Origin` of the stylesheet, whether it's a user, author or
/// user-agent stylesheet.
pub stylesheet_origin: Origin,
/// The base url we're parsing this stylesheet as.
pub base_url: &'a ServoUrl,
/// An error reporter to report syntax errors.
pub error_reporter: Box<ParseErrorReporter + Send>,
/// Implementation-dependent extra data.
pub extra_data: ParserContextExtraData,
}
impl<'a> ParserContext<'a> {
/// Create a `ParserContext` with extra data.
pub fn new_with_extra_data(stylesheet_origin: Origin,
base_url: &'a ServoUrl,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData)
-> ParserContext<'a> {
ParserContext {
stylesheet_origin: stylesheet_origin,
base_url: base_url,
error_reporter: error_reporter,
extra_data: extra_data,
}
}
/// Create a parser context with the default extra data.
pub fn new(stylesheet_origin: Origin,
base_url: &'a ServoUrl,
error_reporter: Box<ParseErrorReporter + Send>)
-> ParserContext<'a> {
let extra_data = ParserContextExtraData::default();
Self::new_with_extra_data(stylesheet_origin, base_url, error_reporter, extra_data)
}
/// Create a parser context for on-the-fly parsing in CSSOM
pub fn new_for_cssom(base_url: &'a ServoUrl) -> ParserContext<'a> {
Self::new(Origin::User, base_url, Box::new(MemoryHoleReporter))
}
}
/// Defaults to a no-op.
/// Set a `RUST_LOG=style::errors` environment variable
/// to log CSS parse errors to stderr.
pub fn log_css_error(input: &mut Parser, position: SourcePosition, message: &str, parsercontext: &ParserContext) {
let servo_url = parsercontext.base_url;
parsercontext.error_reporter.report_error(input, position, message, servo_url);
}
// XXXManishearth Replace all specified value parse impls with impls of this
// trait. This will make it easy to write more generic values in the future.
/// A trait to abstract parsing of a specified value given a `ParserContext` and
/// CSS input.
pub trait Parse : Sized {
/// Parse a value of this type.
///
/// Returns an error on failure.
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()>;
}
impl<T> Parse for Vec<T> where T: Parse + OneOrMoreCommaSeparated {
    // `OneOrMoreCommaSeparated` marks value types written as a
    // comma-separated list; parsing delegates to cssparser's
    // `parse_comma_separated`, parsing each item with `T::parse`.
    fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()> {
        input.parse_comma_separated(|input| T::parse(context, input))
    }
}
/// Parse a non-empty space-separated or comma-separated list of objects parsed by parse_one
pub fn parse_space_or_comma_separated<F, T>(input: &mut Parser, mut parse_one: F)
-> Result<Vec<T>, ()>
where F: FnMut(&mut Parser) -> Result<T, ()> {
let first = parse_one(input)?;
let mut vec = vec![first];
loop {
let _ = input.try(|i| i.expect_comma());
if let Ok(val) = input.try(|i| parse_one(i)) | else {
break
}
}
Ok(vec)
}
impl Parse for UnicodeRange {
fn parse(_context: &ParserContext, input: &mut Parser) -> Result<Self, ()> {
UnicodeRange::parse(input)
}
}
| {
vec.push(val)
} | conditional_block |
parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The context within which CSS code is parsed.
#![deny(missing_docs)]
use cssparser::{Parser, SourcePosition, UnicodeRange};
use error_reporting::ParseErrorReporter;
#[cfg(feature = "gecko")]
use gecko_bindings::sugar::refptr::{GeckoArcPrincipal, GeckoArcURI};
use servo_url::ServoUrl;
use style_traits::OneOrMoreCommaSeparated;
use stylesheets::{MemoryHoleReporter, Origin};
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(not(feature = "gecko"))]
pub struct ParserContextExtraData;
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(feature = "gecko")]
pub struct ParserContextExtraData {
/// The base URI.
pub base: Option<GeckoArcURI>,
/// The referrer URI.
pub referrer: Option<GeckoArcURI>,
/// The principal that loaded this stylesheet.
pub principal: Option<GeckoArcPrincipal>,
}
#[cfg(not(feature = "gecko"))]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData
}
}
#[cfg(feature = "gecko")]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData { base: None, referrer: None, principal: None }
} | impl ParserContextExtraData {
/// Construct from a GeckoParserExtraData
///
/// GeckoParserExtraData must live longer than this call
pub unsafe fn new(data: *const ::gecko_bindings::structs::GeckoParserExtraData) -> Self {
// the to_safe calls are safe since we trust that we have references to
// real Gecko refptrs. The dereferencing of data is safe because this function
// is expected to be called with a `data` living longer than this function.
unsafe { ParserContextExtraData {
base: Some((*data).mBaseURI.to_safe()),
referrer: Some((*data).mReferrer.to_safe()),
principal: Some((*data).mPrincipal.to_safe()),
}}
}
}
/// The data that the parser needs from outside in order to parse a stylesheet.
pub struct ParserContext<'a> {
/// The `Origin` of the stylesheet, whether it's a user, author or
/// user-agent stylesheet.
pub stylesheet_origin: Origin,
/// The base url we're parsing this stylesheet as.
pub base_url: &'a ServoUrl,
/// An error reporter to report syntax errors.
pub error_reporter: Box<ParseErrorReporter + Send>,
/// Implementation-dependent extra data.
pub extra_data: ParserContextExtraData,
}
impl<'a> ParserContext<'a> {
/// Create a `ParserContext` with extra data.
pub fn new_with_extra_data(stylesheet_origin: Origin,
base_url: &'a ServoUrl,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData)
-> ParserContext<'a> {
ParserContext {
stylesheet_origin: stylesheet_origin,
base_url: base_url,
error_reporter: error_reporter,
extra_data: extra_data,
}
}
/// Create a parser context with the default extra data.
pub fn new(stylesheet_origin: Origin,
base_url: &'a ServoUrl,
error_reporter: Box<ParseErrorReporter + Send>)
-> ParserContext<'a> {
let extra_data = ParserContextExtraData::default();
Self::new_with_extra_data(stylesheet_origin, base_url, error_reporter, extra_data)
}
/// Create a parser context for on-the-fly parsing in CSSOM
pub fn new_for_cssom(base_url: &'a ServoUrl) -> ParserContext<'a> {
Self::new(Origin::User, base_url, Box::new(MemoryHoleReporter))
}
}
/// Defaults to a no-op.
/// Set a `RUST_LOG=style::errors` environment variable
/// to log CSS parse errors to stderr.
pub fn log_css_error(input: &mut Parser, position: SourcePosition, message: &str, parsercontext: &ParserContext) {
let servo_url = parsercontext.base_url;
parsercontext.error_reporter.report_error(input, position, message, servo_url);
}
// XXXManishearth Replace all specified value parse impls with impls of this
// trait. This will make it easy to write more generic values in the future.
/// A trait to abstract parsing of a specified value given a `ParserContext` and
/// CSS input.
pub trait Parse : Sized {
/// Parse a value of this type.
///
/// Returns an error on failure.
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()>;
}
impl<T> Parse for Vec<T> where T: Parse + OneOrMoreCommaSeparated {
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(|input| T::parse(context, input))
}
}
/// Parse a non-empty space-separated or comma-separated list of objects parsed by parse_one
pub fn parse_space_or_comma_separated<F, T>(input: &mut Parser, mut parse_one: F)
        -> Result<Vec<T>, ()>
        where F: FnMut(&mut Parser) -> Result<T, ()> {
    // The first item is mandatory; its error propagates to the caller.
    let first = parse_one(input)?;
    let mut vec = vec![first];
    loop {
        // Commas between items are optional, so space-separated lists
        // parse too; `try` rewinds the parser when no comma is present.
        let _ = input.try(|i| i.expect_comma());
        if let Ok(val) = input.try(|i| parse_one(i)) {
            vec.push(val)
        } else {
            break
        }
    }
    Ok(vec)
}
impl Parse for UnicodeRange {
fn parse(_context: &ParserContext, input: &mut Parser) -> Result<Self, ()> {
UnicodeRange::parse(input)
}
} | }
#[cfg(feature = "gecko")] | random_line_split |
parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The context within which CSS code is parsed.
#![deny(missing_docs)]
use cssparser::{Parser, SourcePosition, UnicodeRange};
use error_reporting::ParseErrorReporter;
#[cfg(feature = "gecko")]
use gecko_bindings::sugar::refptr::{GeckoArcPrincipal, GeckoArcURI};
use servo_url::ServoUrl;
use style_traits::OneOrMoreCommaSeparated;
use stylesheets::{MemoryHoleReporter, Origin};
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(not(feature = "gecko"))]
pub struct ParserContextExtraData;
/// Extra data that the style backend may need to parse stylesheets.
#[cfg(feature = "gecko")]
pub struct | {
/// The base URI.
pub base: Option<GeckoArcURI>,
/// The referrer URI.
pub referrer: Option<GeckoArcURI>,
/// The principal that loaded this stylesheet.
pub principal: Option<GeckoArcPrincipal>,
}
#[cfg(not(feature = "gecko"))]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData
}
}
#[cfg(feature = "gecko")]
impl Default for ParserContextExtraData {
fn default() -> Self {
ParserContextExtraData { base: None, referrer: None, principal: None }
}
}
#[cfg(feature = "gecko")]
impl ParserContextExtraData {
/// Construct from a GeckoParserExtraData
///
/// GeckoParserExtraData must live longer than this call
pub unsafe fn new(data: *const ::gecko_bindings::structs::GeckoParserExtraData) -> Self {
// the to_safe calls are safe since we trust that we have references to
// real Gecko refptrs. The dereferencing of data is safe because this function
// is expected to be called with a `data` living longer than this function.
unsafe { ParserContextExtraData {
base: Some((*data).mBaseURI.to_safe()),
referrer: Some((*data).mReferrer.to_safe()),
principal: Some((*data).mPrincipal.to_safe()),
}}
}
}
/// The data that the parser needs from outside in order to parse a stylesheet.
pub struct ParserContext<'a> {
/// The `Origin` of the stylesheet, whether it's a user, author or
/// user-agent stylesheet.
pub stylesheet_origin: Origin,
/// The base url we're parsing this stylesheet as.
pub base_url: &'a ServoUrl,
/// An error reporter to report syntax errors.
pub error_reporter: Box<ParseErrorReporter + Send>,
/// Implementation-dependent extra data.
pub extra_data: ParserContextExtraData,
}
impl<'a> ParserContext<'a> {
/// Create a `ParserContext` with extra data.
pub fn new_with_extra_data(stylesheet_origin: Origin,
base_url: &'a ServoUrl,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData)
-> ParserContext<'a> {
ParserContext {
stylesheet_origin: stylesheet_origin,
base_url: base_url,
error_reporter: error_reporter,
extra_data: extra_data,
}
}
/// Create a parser context with the default extra data.
pub fn new(stylesheet_origin: Origin,
base_url: &'a ServoUrl,
error_reporter: Box<ParseErrorReporter + Send>)
-> ParserContext<'a> {
let extra_data = ParserContextExtraData::default();
Self::new_with_extra_data(stylesheet_origin, base_url, error_reporter, extra_data)
}
/// Create a parser context for on-the-fly parsing in CSSOM
pub fn new_for_cssom(base_url: &'a ServoUrl) -> ParserContext<'a> {
Self::new(Origin::User, base_url, Box::new(MemoryHoleReporter))
}
}
/// Defaults to a no-op.
/// Set a `RUST_LOG=style::errors` environment variable
/// to log CSS parse errors to stderr.
pub fn log_css_error(input: &mut Parser, position: SourcePosition, message: &str, parsercontext: &ParserContext) {
let servo_url = parsercontext.base_url;
parsercontext.error_reporter.report_error(input, position, message, servo_url);
}
// XXXManishearth Replace all specified value parse impls with impls of this
// trait. This will make it easy to write more generic values in the future.
/// A trait to abstract parsing of a specified value given a `ParserContext` and
/// CSS input.
pub trait Parse : Sized {
/// Parse a value of this type.
///
/// Returns an error on failure.
fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()>;
}
impl<T> Parse for Vec<T> where T: Parse + OneOrMoreCommaSeparated {
    // `OneOrMoreCommaSeparated` marks value types written as a
    // comma-separated list; parsing delegates to cssparser's
    // `parse_comma_separated`, parsing each item with `T::parse`.
    fn parse(context: &ParserContext, input: &mut Parser) -> Result<Self, ()> {
        input.parse_comma_separated(|input| T::parse(context, input))
    }
}
/// Parse a non-empty space-separated or comma-separated list of objects parsed by parse_one
pub fn parse_space_or_comma_separated<F, T>(input: &mut Parser, mut parse_one: F)
-> Result<Vec<T>, ()>
where F: FnMut(&mut Parser) -> Result<T, ()> {
let first = parse_one(input)?;
let mut vec = vec![first];
loop {
let _ = input.try(|i| i.expect_comma());
if let Ok(val) = input.try(|i| parse_one(i)) {
vec.push(val)
} else {
break
}
}
Ok(vec)
}
impl Parse for UnicodeRange {
fn parse(_context: &ParserContext, input: &mut Parser) -> Result<Self, ()> {
UnicodeRange::parse(input)
}
}
| ParserContextExtraData | identifier_name |
compress.rs | //! An example of offloading work to a thread pool instead of doing work on the
//! main event loop.
//!
//! In this example the server will act as a form of echo server except that
//! it'll echo back gzip-compressed data. Each connected client will have the
//! data written streamed back as the compressed version is available, and all
//! compressing will occur on a thread pool rather than the main event loop.
//!
//! You can preview this example with in one terminal:
//!
//! cargo run --example compress
//!
//! and in another terminal;
//!
//! echo test | cargo run --example connect 127.0.0.1:8080 | gunzip
//!
//! The latter command will need to be tweaked for non-unix-like shells, but
//! you can also redirect the stdout of the `connect` program to a file
//! and then decompress that.
extern crate futures;
extern crate futures_cpupool;
extern crate flate2;
extern crate tokio;
extern crate tokio_io;
use std::io;
use std::env;
use std::net::SocketAddr;
use futures::{Future, Stream, Poll};
use futures::future::Executor;
use futures_cpupool::CpuPool;
use tokio::net::{TcpListener, TcpStream};
use tokio::reactor::Core;
use tokio_io::{AsyncRead, AsyncWrite};
use flate2::write::GzEncoder;
fn main() {
// As with many other examples, parse our CLI arguments and prepare the
// reactor.
let addr = env::args().nth(1).unwrap_or("127.0.0.1:8080".to_string());
let addr = addr.parse::<SocketAddr>().unwrap();
let mut core = Core::new().unwrap();
let handle = core.handle();
let socket = TcpListener::bind(&addr, &handle).unwrap();
println!("Listening on: {}", addr);
// This is where we're going to offload our computationally heavy work
// (compressing) to. Here we just use a convenience constructor to create a
// pool of threads equal to the number of CPUs we have.
let pool = CpuPool::new_num_cpus();
// The compress logic will happen in the function below, but everything's
// still a future! Each client is spawned to concurrently get processed.
let server = socket.incoming().for_each(move |(socket, addr)| {
pool.execute(compress(socket, &pool).then(move |result| {
match result {
Ok((r, w)) => println!("{}: compressed {} bytes to {}", addr, r, w),
Err(e) => println!("{}: failed when compressing: {}", addr, e),
}
Ok(())
})).unwrap(); |
/// The main workhorse of this example. This'll compress all data read from
/// `socket` on the `pool` provided, writing it back out to `socket` as it's
/// available.
///
/// Returns a future resolving to `(bytes_in, bytes_out)` — the
/// uncompressed number of bytes read and the compressed number of bytes
/// written.
fn compress(socket: TcpStream, pool: &CpuPool)
    -> Box<Future<Item = (u64, u64), Error = io::Error> + Send>
{
    use tokio_io::io;
    // The general interface that `CpuPool` provides is that we'll *spawn a
    // future* onto it. All execution of the future will occur on the `CpuPool`
    // and we'll get back a handle representing the completed value of the
    // future. In essence it's our job here to create a future that represents
    // compressing `socket`, and then we'll simply spawn it at the very end.
    //
    // Here we exploit the fact that `TcpStream` itself is `Send` in this
    // function as well. That is, we can read/write the TCP stream on any
    // thread, and we'll get notifications about it being ready from the reactor
    // thread.
    //
    // Otherwise this is the same as the echo server except that after splitting
    // we apply some encoding to one side, followed by a `shutdown` when we're
    // done to ensure that all gz footers are written.
    let (read, write) = socket.split();
    // Wrap the writer so we can observe how many compressed bytes go out.
    let write = Count { io: write, amt: 0 };
    let write = GzEncoder::new(write, flate2::Compression::Best);
    let process = io::copy(read, write).and_then(|(amt, _read, write)| {
        // `shutdown` finishes the gzip stream (footer included) before we
        // report both byte counters.
        io::shutdown(write).map(move |io| (amt, io.get_ref().amt))
    });
    // Spawn the future so is executes entirely on the thread pool here
    Box::new(pool.spawn(process))
}
struct Count<T> {
io: T,
amt: u64,
}
impl<T: io::Write> io::Write for Count<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let n = self.io.write(buf)?;
self.amt += n as u64;
Ok(n)
}
fn flush(&mut self) -> io::Result<()> {
self.io.flush()
}
}
impl<T: AsyncWrite> AsyncWrite for Count<T> {
fn shutdown(&mut self) -> Poll<(), io::Error> {
self.io.shutdown()
}
} | Ok(())
});
core.run(server).unwrap();
} | random_line_split |
compress.rs | //! An example of offloading work to a thread pool instead of doing work on the
//! main event loop.
//!
//! In this example the server will act as a form of echo server except that
//! it'll echo back gzip-compressed data. Each connected client will have the
//! data written streamed back as the compressed version is available, and all
//! compressing will occur on a thread pool rather than the main event loop.
//!
//! You can preview this example with in one terminal:
//!
//! cargo run --example compress
//!
//! and in another terminal;
//!
//! echo test | cargo run --example connect 127.0.0.1:8080 | gunzip
//!
//! The latter command will need to be tweaked for non-unix-like shells, but
//! you can also redirect the stdout of the `connect` program to a file
//! and then decompress that.
extern crate futures;
extern crate futures_cpupool;
extern crate flate2;
extern crate tokio;
extern crate tokio_io;
use std::io;
use std::env;
use std::net::SocketAddr;
use futures::{Future, Stream, Poll};
use futures::future::Executor;
use futures_cpupool::CpuPool;
use tokio::net::{TcpListener, TcpStream};
use tokio::reactor::Core;
use tokio_io::{AsyncRead, AsyncWrite};
use flate2::write::GzEncoder;
fn main() {
// As with many other examples, parse our CLI arguments and prepare the
// reactor.
let addr = env::args().nth(1).unwrap_or("127.0.0.1:8080".to_string());
let addr = addr.parse::<SocketAddr>().unwrap();
let mut core = Core::new().unwrap();
let handle = core.handle();
let socket = TcpListener::bind(&addr, &handle).unwrap();
println!("Listening on: {}", addr);
// This is where we're going to offload our computationally heavy work
// (compressing) to. Here we just use a convenience constructor to create a
// pool of threads equal to the number of CPUs we have.
let pool = CpuPool::new_num_cpus();
// The compress logic will happen in the function below, but everything's
// still a future! Each client is spawned to concurrently get processed.
let server = socket.incoming().for_each(move |(socket, addr)| {
pool.execute(compress(socket, &pool).then(move |result| {
match result {
Ok((r, w)) => println!("{}: compressed {} bytes to {}", addr, r, w),
Err(e) => println!("{}: failed when compressing: {}", addr, e),
}
Ok(())
})).unwrap();
Ok(())
});
core.run(server).unwrap();
}
/// The main workhorse of this example. This'll compress all data read from
/// `socket` on the `pool` provided, writing it back out to `socket` as it's
/// available.
fn | (socket: TcpStream, pool: &CpuPool)
-> Box<Future<Item = (u64, u64), Error = io::Error> + Send>
{
use tokio_io::io;
// The general interface that `CpuPool` provides is that we'll *spawn a
// future* onto it. All execution of the future will occur on the `CpuPool`
// and we'll get back a handle representing the completed value of the
// future. In essence it's our job here to create a future that represents
// compressing `socket`, and then we'll simply spawn it at the very end.
//
// Here we exploit the fact that `TcpStream` itself is `Send` in this
// function as well. That is, we can read/write the TCP stream on any
// thread, and we'll get notifications about it being ready from the reactor
// thread.
//
// Otherwise this is the same as the echo server except that after splitting
// we apply some encoding to one side, followed by a `shutdown` when we're
// done to ensure that all gz footers are written.
let (read, write) = socket.split();
let write = Count { io: write, amt: 0 };
let write = GzEncoder::new(write, flate2::Compression::Best);
let process = io::copy(read, write).and_then(|(amt, _read, write)| {
io::shutdown(write).map(move |io| (amt, io.get_ref().amt))
});
// Spawn the future so is executes entirely on the thread pool here
Box::new(pool.spawn(process))
}
struct Count<T> {
io: T,
amt: u64,
}
impl<T: io::Write> io::Write for Count<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let n = self.io.write(buf)?;
self.amt += n as u64;
Ok(n)
}
fn flush(&mut self) -> io::Result<()> {
self.io.flush()
}
}
impl<T: AsyncWrite> AsyncWrite for Count<T> {
fn shutdown(&mut self) -> Poll<(), io::Error> {
self.io.shutdown()
}
}
| compress | identifier_name |
compress.rs | //! An example of offloading work to a thread pool instead of doing work on the
//! main event loop.
//!
//! In this example the server will act as a form of echo server except that
//! it'll echo back gzip-compressed data. Each connected client will have the
//! data written streamed back as the compressed version is available, and all
//! compressing will occur on a thread pool rather than the main event loop.
//!
//! You can preview this example with in one terminal:
//!
//! cargo run --example compress
//!
//! and in another terminal;
//!
//! echo test | cargo run --example connect 127.0.0.1:8080 | gunzip
//!
//! The latter command will need to be tweaked for non-unix-like shells, but
//! you can also redirect the stdout of the `connect` program to a file
//! and then decompress that.
extern crate futures;
extern crate futures_cpupool;
extern crate flate2;
extern crate tokio;
extern crate tokio_io;
use std::io;
use std::env;
use std::net::SocketAddr;
use futures::{Future, Stream, Poll};
use futures::future::Executor;
use futures_cpupool::CpuPool;
use tokio::net::{TcpListener, TcpStream};
use tokio::reactor::Core;
use tokio_io::{AsyncRead, AsyncWrite};
use flate2::write::GzEncoder;
fn main() {
// As with many other examples, parse our CLI arguments and prepare the
// reactor.
let addr = env::args().nth(1).unwrap_or("127.0.0.1:8080".to_string());
let addr = addr.parse::<SocketAddr>().unwrap();
let mut core = Core::new().unwrap();
let handle = core.handle();
let socket = TcpListener::bind(&addr, &handle).unwrap();
println!("Listening on: {}", addr);
// This is where we're going to offload our computationally heavy work
// (compressing) to. Here we just use a convenience constructor to create a
// pool of threads equal to the number of CPUs we have.
let pool = CpuPool::new_num_cpus();
// The compress logic will happen in the function below, but everything's
// still a future! Each client is spawned to concurrently get processed.
let server = socket.incoming().for_each(move |(socket, addr)| {
pool.execute(compress(socket, &pool).then(move |result| {
match result {
Ok((r, w)) => println!("{}: compressed {} bytes to {}", addr, r, w),
Err(e) => println!("{}: failed when compressing: {}", addr, e),
}
Ok(())
})).unwrap();
Ok(())
});
core.run(server).unwrap();
}
/// The main workhorse of this example. This'll compress all data read from
/// `socket` on the `pool` provided, writing it back out to `socket` as it's
/// available.
fn compress(socket: TcpStream, pool: &CpuPool)
-> Box<Future<Item = (u64, u64), Error = io::Error> + Send>
{
use tokio_io::io;
// The general interface that `CpuPool` provides is that we'll *spawn a
// future* onto it. All execution of the future will occur on the `CpuPool`
// and we'll get back a handle representing the completed value of the
// future. In essence it's our job here to create a future that represents
// compressing `socket`, and then we'll simply spawn it at the very end.
//
// Here we exploit the fact that `TcpStream` itself is `Send` in this
// function as well. That is, we can read/write the TCP stream on any
// thread, and we'll get notifications about it being ready from the reactor
// thread.
//
// Otherwise this is the same as the echo server except that after splitting
// we apply some encoding to one side, followed by a `shutdown` when we're
// done to ensure that all gz footers are written.
let (read, write) = socket.split();
let write = Count { io: write, amt: 0 };
let write = GzEncoder::new(write, flate2::Compression::Best);
let process = io::copy(read, write).and_then(|(amt, _read, write)| {
io::shutdown(write).map(move |io| (amt, io.get_ref().amt))
});
// Spawn the future so is executes entirely on the thread pool here
Box::new(pool.spawn(process))
}
struct Count<T> {
io: T,
amt: u64,
}
impl<T: io::Write> io::Write for Count<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let n = self.io.write(buf)?;
self.amt += n as u64;
Ok(n)
}
fn flush(&mut self) -> io::Result<()> |
}
impl<T: AsyncWrite> AsyncWrite for Count<T> {
fn shutdown(&mut self) -> Poll<(), io::Error> {
self.io.shutdown()
}
}
| {
self.io.flush()
} | identifier_body |
dir_info.rs | use std::env;
use std::path::Path;
struct DirInfo {
size: u64,
depth: u32
}
const EMPTY: DirInfo = DirInfo {size: 0, depth: 0};
fn main() {
let arg = env::args_os().nth(1).expect("Please, provide a file as argument");
let path = Path::new(&arg);
if path.is_dir() {
let info = dir_info(path);
println!("size:\t{} bytes\ndepth:\t{} ", info.size, info.depth - 1);
} else {
println!("{} is not a directory", arg.to_str().expect("Fatal"));
}
}
fn update_info(info: DirInfo, path: &Path) -> DirInfo {
let child_info = dir_entry_info(path);
let new_size = info.size + child_info.size;
let new_depth = std::cmp::max(info.depth, 1 + child_info.depth);
DirInfo {size: new_size, depth: new_depth}
}
fn dir_entry_info(path: &Path) -> DirInfo {
if path.is_file() {
file_info(path)
} else if path.is_dir() {
dir_info(path)
} else {
EMPTY
}
}
fn file_info(path: &Path) -> DirInfo {
let metadata = path.metadata().ok().expect("Cannot get file metadata");
DirInfo {size: metadata.len(), depth: 0}
}
fn dir_info(path: &Path) -> DirInfo | {
match std::fs::read_dir(path) {
Err(_) => EMPTY,
Ok(entries) =>
entries.fold(EMPTY,
|info, entry| update_info(info, &entry.unwrap().path()))
}
} | identifier_body |
|
dir_info.rs | use std::env;
use std::path::Path;
struct DirInfo {
size: u64,
depth: u32
}
const EMPTY: DirInfo = DirInfo {size: 0, depth: 0};
fn main() {
let arg = env::args_os().nth(1).expect("Please, provide a file as argument");
let path = Path::new(&arg);
if path.is_dir() {
let info = dir_info(path);
println!("size:\t{} bytes\ndepth:\t{} ", info.size, info.depth - 1);
} else {
println!("{} is not a directory", arg.to_str().expect("Fatal"));
}
}
fn update_info(info: DirInfo, path: &Path) -> DirInfo {
let child_info = dir_entry_info(path);
let new_size = info.size + child_info.size;
let new_depth = std::cmp::max(info.depth, 1 + child_info.depth);
DirInfo {size: new_size, depth: new_depth}
}
fn | (path: &Path) -> DirInfo {
if path.is_file() {
file_info(path)
} else if path.is_dir() {
dir_info(path)
} else {
EMPTY
}
}
fn file_info(path: &Path) -> DirInfo {
let metadata = path.metadata().ok().expect("Cannot get file metadata");
DirInfo {size: metadata.len(), depth: 0}
}
fn dir_info(path: &Path) -> DirInfo {
match std::fs::read_dir(path) {
Err(_) => EMPTY,
Ok(entries) =>
entries.fold(EMPTY,
|info, entry| update_info(info, &entry.unwrap().path()))
}
}
| dir_entry_info | identifier_name |
dir_info.rs | use std::env;
use std::path::Path;
struct DirInfo {
size: u64,
depth: u32
}
const EMPTY: DirInfo = DirInfo {size: 0, depth: 0};
fn main() {
let arg = env::args_os().nth(1).expect("Please, provide a file as argument");
let path = Path::new(&arg);
if path.is_dir() {
let info = dir_info(path);
println!("size:\t{} bytes\ndepth:\t{} ", info.size, info.depth - 1);
} else {
println!("{} is not a directory", arg.to_str().expect("Fatal"));
}
}
fn update_info(info: DirInfo, path: &Path) -> DirInfo {
let child_info = dir_entry_info(path);
let new_size = info.size + child_info.size;
let new_depth = std::cmp::max(info.depth, 1 + child_info.depth);
DirInfo {size: new_size, depth: new_depth}
}
fn dir_entry_info(path: &Path) -> DirInfo {
if path.is_file() | else if path.is_dir() {
dir_info(path)
} else {
EMPTY
}
}
fn file_info(path: &Path) -> DirInfo {
let metadata = path.metadata().ok().expect("Cannot get file metadata");
DirInfo {size: metadata.len(), depth: 0}
}
fn dir_info(path: &Path) -> DirInfo {
match std::fs::read_dir(path) {
Err(_) => EMPTY,
Ok(entries) =>
entries.fold(EMPTY,
|info, entry| update_info(info, &entry.unwrap().path()))
}
}
| {
file_info(path)
} | conditional_block |
dir_info.rs | use std::env;
use std::path::Path;
struct DirInfo {
size: u64,
depth: u32
}
const EMPTY: DirInfo = DirInfo {size: 0, depth: 0};
fn main() {
let arg = env::args_os().nth(1).expect("Please, provide a file as argument");
let path = Path::new(&arg);
if path.is_dir() {
let info = dir_info(path);
println!("size:\t{} bytes\ndepth:\t{} ", info.size, info.depth - 1);
} else {
println!("{} is not a directory", arg.to_str().expect("Fatal"));
}
}
fn update_info(info: DirInfo, path: &Path) -> DirInfo {
let child_info = dir_entry_info(path);
let new_size = info.size + child_info.size;
let new_depth = std::cmp::max(info.depth, 1 + child_info.depth);
DirInfo {size: new_size, depth: new_depth}
}
fn dir_entry_info(path: &Path) -> DirInfo {
if path.is_file() {
file_info(path)
} else if path.is_dir() {
dir_info(path)
} else {
EMPTY
}
}
fn file_info(path: &Path) -> DirInfo {
let metadata = path.metadata().ok().expect("Cannot get file metadata");
DirInfo {size: metadata.len(), depth: 0}
}
fn dir_info(path: &Path) -> DirInfo {
match std::fs::read_dir(path) {
Err(_) => EMPTY,
Ok(entries) => | |info, entry| update_info(info, &entry.unwrap().path()))
}
} | entries.fold(EMPTY, | random_line_split |
shootout-fasta.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/* -*- mode: rust; indent-tabs-mode: nil -*-
* Implementation of 'fasta' benchmark from
* Computer Language Benchmarks Game
* http://shootout.alioth.debian.org/
*/
use std::io;
use std::io::{BufferedWriter, File};
use std::num::min;
use std::os;
static LINE_LENGTH: uint = 60;
static IM: u32 = 139968;
struct MyRandom {
last: u32
}
impl MyRandom {
fn new() -> MyRandom { MyRandom { last: 42 } }
fn normalize(p: f32) -> u32 {(p * IM as f32).floor() as u32}
fn gen(&mut self) -> u32 {
self.last = (self.last * 3877 + 29573) % IM;
self.last
}
}
struct AAGen<'a> {
rng: &'a mut MyRandom,
data: ~[(u32, u8)]
}
impl<'a> AAGen<'a> {
fn new<'b>(rng: &'b mut MyRandom, aa: &[(char, f32)]) -> AAGen<'b> {
let mut cum = 0.;
let data = aa.iter()
.map(|&(ch, p)| { cum += p; (MyRandom::normalize(cum), ch as u8) })
.collect();
AAGen { rng: rng, data: data }
}
}
impl<'a> Iterator<u8> for AAGen<'a> {
fn next(&mut self) -> Option<u8> {
let r = self.rng.gen();
self.data.iter()
.skip_while(|pc| pc.n0() < r)
.map(|&(_, c)| c)
.next()
}
}
fn make_fasta<W: Writer, I: Iterator<u8>>(
wr: &mut W, header: &str, mut it: I, mut n: uint)
{
wr.write(header.as_bytes());
let mut line = [0u8,.. LINE_LENGTH + 1];
while n > 0 {
let nb = min(LINE_LENGTH, n);
for i in range(0, nb) {
line[i] = it.next().unwrap();
}
n -= nb;
line[nb] = '\n' as u8;
wr.write(line.slice_to(nb + 1));
}
}
fn run<W: Writer>(writer: &mut W) | ('t', 0.27), ('B', 0.02), ('D', 0.02),
('H', 0.02), ('K', 0.02), ('M', 0.02),
('N', 0.02), ('R', 0.02), ('S', 0.02),
('V', 0.02), ('W', 0.02), ('Y', 0.02)];
let homosapiens = &[('a', 0.3029549426680),
('c', 0.1979883004921),
('g', 0.1975473066391),
('t', 0.3015094502008)];
make_fasta(writer, ">ONE Homo sapiens alu\n",
alu.as_bytes().iter().cycle().map(|c| *c), n * 2);
make_fasta(writer, ">TWO IUB ambiguity codes\n",
AAGen::new(rng, iub), n * 3);
make_fasta(writer, ">THREE Homo sapiens frequency\n",
AAGen::new(rng, homosapiens), n * 5);
writer.flush();
}
fn main() {
if os::getenv("RUST_BENCH").is_some() {
let mut file = BufferedWriter::new(File::create(&Path::new("./shootout-fasta.data")));
run(&mut file);
} else {
run(&mut BufferedWriter::new(io::stdout()));
}
}
| {
let args = os::args();
let n = if os::getenv("RUST_BENCH").is_some() {
25000000
} else if args.len() <= 1u {
1000
} else {
from_str(args[1]).unwrap()
};
let rng = &mut MyRandom::new();
let alu =
"GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG\
GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA\
CCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAAT\
ACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCA\
GCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGG\
AGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCC\
AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA";
let iub = &[('a', 0.27), ('c', 0.12), ('g', 0.12), | identifier_body |
shootout-fasta.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/* -*- mode: rust; indent-tabs-mode: nil -*-
* Implementation of 'fasta' benchmark from
* Computer Language Benchmarks Game
* http://shootout.alioth.debian.org/
*/
use std::io;
use std::io::{BufferedWriter, File};
use std::num::min;
use std::os;
static LINE_LENGTH: uint = 60;
static IM: u32 = 139968;
struct MyRandom {
last: u32
}
impl MyRandom {
fn new() -> MyRandom { MyRandom { last: 42 } }
fn normalize(p: f32) -> u32 {(p * IM as f32).floor() as u32}
fn gen(&mut self) -> u32 {
self.last = (self.last * 3877 + 29573) % IM;
self.last
}
}
struct AAGen<'a> {
rng: &'a mut MyRandom,
data: ~[(u32, u8)]
}
impl<'a> AAGen<'a> {
fn new<'b>(rng: &'b mut MyRandom, aa: &[(char, f32)]) -> AAGen<'b> {
let mut cum = 0.;
let data = aa.iter()
.map(|&(ch, p)| { cum += p; (MyRandom::normalize(cum), ch as u8) })
.collect();
AAGen { rng: rng, data: data }
}
}
impl<'a> Iterator<u8> for AAGen<'a> {
fn next(&mut self) -> Option<u8> {
let r = self.rng.gen();
self.data.iter()
.skip_while(|pc| pc.n0() < r)
.map(|&(_, c)| c)
.next()
}
}
fn make_fasta<W: Writer, I: Iterator<u8>>(
wr: &mut W, header: &str, mut it: I, mut n: uint)
{
wr.write(header.as_bytes());
let mut line = [0u8,.. LINE_LENGTH + 1];
while n > 0 {
let nb = min(LINE_LENGTH, n);
for i in range(0, nb) {
line[i] = it.next().unwrap();
}
n -= nb;
line[nb] = '\n' as u8;
wr.write(line.slice_to(nb + 1));
}
}
fn run<W: Writer>(writer: &mut W) {
let args = os::args();
let n = if os::getenv("RUST_BENCH").is_some() {
25000000
} else if args.len() <= 1u {
1000
} else {
from_str(args[1]).unwrap()
};
let rng = &mut MyRandom::new();
let alu =
"GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG\
GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA\
CCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAAT\
ACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCA\
GCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGG\
AGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCC\
AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA";
let iub = &[('a', 0.27), ('c', 0.12), ('g', 0.12),
('t', 0.27), ('B', 0.02), ('D', 0.02),
('H', 0.02), ('K', 0.02), ('M', 0.02),
('N', 0.02), ('R', 0.02), ('S', 0.02),
('V', 0.02), ('W', 0.02), ('Y', 0.02)];
let homosapiens = &[('a', 0.3029549426680),
('c', 0.1979883004921),
('g', 0.1975473066391),
('t', 0.3015094502008)];
make_fasta(writer, ">ONE Homo sapiens alu\n",
alu.as_bytes().iter().cycle().map(|c| *c), n * 2);
make_fasta(writer, ">TWO IUB ambiguity codes\n",
AAGen::new(rng, iub), n * 3);
make_fasta(writer, ">THREE Homo sapiens frequency\n", |
writer.flush();
}
fn main() {
if os::getenv("RUST_BENCH").is_some() {
let mut file = BufferedWriter::new(File::create(&Path::new("./shootout-fasta.data")));
run(&mut file);
} else {
run(&mut BufferedWriter::new(io::stdout()));
}
} | AAGen::new(rng, homosapiens), n * 5); | random_line_split |
shootout-fasta.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/* -*- mode: rust; indent-tabs-mode: nil -*-
* Implementation of 'fasta' benchmark from
* Computer Language Benchmarks Game
* http://shootout.alioth.debian.org/
*/
use std::io;
use std::io::{BufferedWriter, File};
use std::num::min;
use std::os;
static LINE_LENGTH: uint = 60;
static IM: u32 = 139968;
struct MyRandom {
last: u32
}
impl MyRandom {
fn new() -> MyRandom { MyRandom { last: 42 } }
fn normalize(p: f32) -> u32 {(p * IM as f32).floor() as u32}
fn gen(&mut self) -> u32 {
self.last = (self.last * 3877 + 29573) % IM;
self.last
}
}
struct AAGen<'a> {
rng: &'a mut MyRandom,
data: ~[(u32, u8)]
}
impl<'a> AAGen<'a> {
fn new<'b>(rng: &'b mut MyRandom, aa: &[(char, f32)]) -> AAGen<'b> {
let mut cum = 0.;
let data = aa.iter()
.map(|&(ch, p)| { cum += p; (MyRandom::normalize(cum), ch as u8) })
.collect();
AAGen { rng: rng, data: data }
}
}
impl<'a> Iterator<u8> for AAGen<'a> {
fn next(&mut self) -> Option<u8> {
let r = self.rng.gen();
self.data.iter()
.skip_while(|pc| pc.n0() < r)
.map(|&(_, c)| c)
.next()
}
}
fn make_fasta<W: Writer, I: Iterator<u8>>(
wr: &mut W, header: &str, mut it: I, mut n: uint)
{
wr.write(header.as_bytes());
let mut line = [0u8,.. LINE_LENGTH + 1];
while n > 0 {
let nb = min(LINE_LENGTH, n);
for i in range(0, nb) {
line[i] = it.next().unwrap();
}
n -= nb;
line[nb] = '\n' as u8;
wr.write(line.slice_to(nb + 1));
}
}
fn run<W: Writer>(writer: &mut W) {
let args = os::args();
let n = if os::getenv("RUST_BENCH").is_some() | else if args.len() <= 1u {
1000
} else {
from_str(args[1]).unwrap()
};
let rng = &mut MyRandom::new();
let alu =
"GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG\
GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA\
CCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAAT\
ACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCA\
GCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGG\
AGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCC\
AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA";
let iub = &[('a', 0.27), ('c', 0.12), ('g', 0.12),
('t', 0.27), ('B', 0.02), ('D', 0.02),
('H', 0.02), ('K', 0.02), ('M', 0.02),
('N', 0.02), ('R', 0.02), ('S', 0.02),
('V', 0.02), ('W', 0.02), ('Y', 0.02)];
let homosapiens = &[('a', 0.3029549426680),
('c', 0.1979883004921),
('g', 0.1975473066391),
('t', 0.3015094502008)];
make_fasta(writer, ">ONE Homo sapiens alu\n",
alu.as_bytes().iter().cycle().map(|c| *c), n * 2);
make_fasta(writer, ">TWO IUB ambiguity codes\n",
AAGen::new(rng, iub), n * 3);
make_fasta(writer, ">THREE Homo sapiens frequency\n",
AAGen::new(rng, homosapiens), n * 5);
writer.flush();
}
fn main() {
if os::getenv("RUST_BENCH").is_some() {
let mut file = BufferedWriter::new(File::create(&Path::new("./shootout-fasta.data")));
run(&mut file);
} else {
run(&mut BufferedWriter::new(io::stdout()));
}
}
| {
25000000
} | conditional_block |
shootout-fasta.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/* -*- mode: rust; indent-tabs-mode: nil -*-
* Implementation of 'fasta' benchmark from
* Computer Language Benchmarks Game
* http://shootout.alioth.debian.org/
*/
use std::io;
use std::io::{BufferedWriter, File};
use std::num::min;
use std::os;
static LINE_LENGTH: uint = 60;
static IM: u32 = 139968;
struct MyRandom {
last: u32
}
impl MyRandom {
fn new() -> MyRandom { MyRandom { last: 42 } }
fn normalize(p: f32) -> u32 {(p * IM as f32).floor() as u32}
fn | (&mut self) -> u32 {
self.last = (self.last * 3877 + 29573) % IM;
self.last
}
}
struct AAGen<'a> {
rng: &'a mut MyRandom,
data: ~[(u32, u8)]
}
impl<'a> AAGen<'a> {
fn new<'b>(rng: &'b mut MyRandom, aa: &[(char, f32)]) -> AAGen<'b> {
let mut cum = 0.;
let data = aa.iter()
.map(|&(ch, p)| { cum += p; (MyRandom::normalize(cum), ch as u8) })
.collect();
AAGen { rng: rng, data: data }
}
}
impl<'a> Iterator<u8> for AAGen<'a> {
fn next(&mut self) -> Option<u8> {
let r = self.rng.gen();
self.data.iter()
.skip_while(|pc| pc.n0() < r)
.map(|&(_, c)| c)
.next()
}
}
fn make_fasta<W: Writer, I: Iterator<u8>>(
wr: &mut W, header: &str, mut it: I, mut n: uint)
{
wr.write(header.as_bytes());
let mut line = [0u8,.. LINE_LENGTH + 1];
while n > 0 {
let nb = min(LINE_LENGTH, n);
for i in range(0, nb) {
line[i] = it.next().unwrap();
}
n -= nb;
line[nb] = '\n' as u8;
wr.write(line.slice_to(nb + 1));
}
}
fn run<W: Writer>(writer: &mut W) {
let args = os::args();
let n = if os::getenv("RUST_BENCH").is_some() {
25000000
} else if args.len() <= 1u {
1000
} else {
from_str(args[1]).unwrap()
};
let rng = &mut MyRandom::new();
let alu =
"GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG\
GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA\
CCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAAT\
ACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCA\
GCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGG\
AGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCC\
AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA";
let iub = &[('a', 0.27), ('c', 0.12), ('g', 0.12),
('t', 0.27), ('B', 0.02), ('D', 0.02),
('H', 0.02), ('K', 0.02), ('M', 0.02),
('N', 0.02), ('R', 0.02), ('S', 0.02),
('V', 0.02), ('W', 0.02), ('Y', 0.02)];
let homosapiens = &[('a', 0.3029549426680),
('c', 0.1979883004921),
('g', 0.1975473066391),
('t', 0.3015094502008)];
make_fasta(writer, ">ONE Homo sapiens alu\n",
alu.as_bytes().iter().cycle().map(|c| *c), n * 2);
make_fasta(writer, ">TWO IUB ambiguity codes\n",
AAGen::new(rng, iub), n * 3);
make_fasta(writer, ">THREE Homo sapiens frequency\n",
AAGen::new(rng, homosapiens), n * 5);
writer.flush();
}
fn main() {
if os::getenv("RUST_BENCH").is_some() {
let mut file = BufferedWriter::new(File::create(&Path::new("./shootout-fasta.data")));
run(&mut file);
} else {
run(&mut BufferedWriter::new(io::stdout()));
}
}
| gen | identifier_name |
player.rs | extern crate serde_redis;
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct Player {
#[serde(skip_serializing)]
pub addr: String,
pub name: Option<String>,
pub state: PlayerState
}
impl Player {
pub fn new(addr: String, name: Option<String>) -> Player {
Player {
addr: addr,
name: name,
state: PlayerState::Watching
}
}
|
pub fn format_hand_key(addr: &str, game_id: &str) -> String {
format!("HAND:{}:{}", addr, game_id)
}
pub fn state_key(&self) -> String {
Player::format_state_key(&self.addr)
}
pub fn hand_key(&self, game_id: &str) -> String {
Player::format_hand_key(&self.addr, game_id)
}
}
// States
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(tag = "type")]
pub enum PlayerState {
Watching,
Playing,
Judging,
TimeOut,
Banned,
}
// Transitions | pub fn format_state_key(addr: &str) -> String {
format!("STATE:{}", addr)
} | random_line_split |
player.rs | extern crate serde_redis;
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct Player {
#[serde(skip_serializing)]
pub addr: String,
pub name: Option<String>,
pub state: PlayerState
}
impl Player {
pub fn new(addr: String, name: Option<String>) -> Player {
Player {
addr: addr,
name: name,
state: PlayerState::Watching
}
}
pub fn format_state_key(addr: &str) -> String {
format!("STATE:{}", addr)
}
pub fn format_hand_key(addr: &str, game_id: &str) -> String {
format!("HAND:{}:{}", addr, game_id)
}
pub fn state_key(&self) -> String {
Player::format_state_key(&self.addr)
}
pub fn | (&self, game_id: &str) -> String {
Player::format_hand_key(&self.addr, game_id)
}
}
// States
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(tag = "type")]
pub enum PlayerState {
Watching,
Playing,
Judging,
TimeOut,
Banned,
}
// Transitions | hand_key | identifier_name |
curve.rs | /// (c) David Alan Gilbert <[email protected]> 2016
/// Licensed under GPLv3, see the LICENSE file for a full copy
// A curve to interpolate between points
// This is currently a Quadratic Bezier; pretty simple.
use point_line::Pointf;
pub struct | {
pub start : Pointf,
pub control : Pointf,
pub end : Pointf,
}
// From https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Quadratic_B.C3.A9zier_curves
fn quad_interp(t: f64, s: f64, c: f64, e: f64) -> f64 {
(1.0-t)*(1.0-t)*s + 2.0*(1.0-t)*t*c + t*t*e
}
// Reworking the above to give c as the answer and specifying a
// t and result
fn find_control(s: f64, m: f64, e: f64, mid_t: f64) -> f64 {
(m-mid_t*mid_t*e-(1.0-mid_t)*(1.0-mid_t)*s) /
(2.0 * (1.0 - mid_t) * mid_t)
}
impl Bezierq {
// Return a point on the curve; t is 0 (start) -> 1 (end)
pub fn interp(&self, t: f64) -> Pointf {
Pointf { x: quad_interp(t, self.start.x, self.control.x, self.end.x),
y: quad_interp(t, self.start.y, self.control.y, self.end.y) }
}
// Return a curve that passes through the given points
// the'mid' point happens at the specified 't' interpolation point
pub fn through(s: Pointf, m: Pointf, e: Pointf, mid_t: f64) -> Bezierq {
Bezierq { start: s, end: e,
control: Pointf {
x: find_control(s.x, m.x, e.x, mid_t),
y: find_control(s.y, m.y, e.y, mid_t) } }
}
}
| Bezierq | identifier_name |
curve.rs | /// (c) David Alan Gilbert <[email protected]> 2016
/// Licensed under GPLv3, see the LICENSE file for a full copy
// A curve to interpolate between points
// This is currently a Quadratic Bezier; pretty simple.
use point_line::Pointf;
pub struct Bezierq {
pub start : Pointf,
pub control : Pointf,
pub end : Pointf,
}
// From https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Quadratic_B.C3.A9zier_curves
fn quad_interp(t: f64, s: f64, c: f64, e: f64) -> f64 {
(1.0-t)*(1.0-t)*s + 2.0*(1.0-t)*t*c + t*t*e
}
// Reworking the above to give c as the answer and specifying a
// t and result
fn find_control(s: f64, m: f64, e: f64, mid_t: f64) -> f64 {
(m-mid_t*mid_t*e-(1.0-mid_t)*(1.0-mid_t)*s) /
(2.0 * (1.0 - mid_t) * mid_t)
}
impl Bezierq {
// Return a point on the curve; t is 0 (start) -> 1 (end)
pub fn interp(&self, t: f64) -> Pointf {
Pointf { x: quad_interp(t, self.start.x, self.control.x, self.end.x),
y: quad_interp(t, self.start.y, self.control.y, self.end.y) }
}
// Return a curve that passes through the given points
// the'mid' point happens at the specified 't' interpolation point
pub fn through(s: Pointf, m: Pointf, e: Pointf, mid_t: f64) -> Bezierq {
Bezierq { start: s, end: e,
control: Pointf { | } | x: find_control(s.x, m.x, e.x, mid_t),
y: find_control(s.y, m.y, e.y, mid_t) } }
} | random_line_split |
gpu.rs | use maplit::hashmap;
use crate::GpuState;
use std::{collections::HashMap, mem};
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenHeightmapsUniforms {
pub position: [i32; 2],
pub origin: [i32; 2],
pub spacing: f32,
pub in_slot: i32,
pub out_slot: i32,
pub level_resolution: i32,
pub face: u32,
}
unsafe impl bytemuck::Zeroable for GenHeightmapsUniforms {}
unsafe impl bytemuck::Pod for GenHeightmapsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenDisplacementsUniforms {
pub node_center: [f64; 3],
pub padding0: f64,
pub origin: [i32; 2],
pub position: [i32; 2],
pub stride: i32,
pub heightmaps_slot: i32,
pub displacements_slot: i32,
pub face: i32,
pub level_resolution: u32,
}
unsafe impl bytemuck::Zeroable for GenDisplacementsUniforms {}
unsafe impl bytemuck::Pod for GenDisplacementsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenNormalsUniforms {
pub heightmaps_origin: [i32; 2],
pub heightmaps_slot: i32,
pub normals_slot: i32,
pub spacing: f32,
pub padding: [f32; 3],
}
unsafe impl bytemuck::Zeroable for GenNormalsUniforms {}
unsafe impl bytemuck::Pod for GenNormalsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenMaterialsUniforms {
pub heightmaps_origin: [i32; 2],
pub parent_origin: [u32; 2],
pub heightmaps_slot: i32,
pub normals_slot: i32,
pub albedo_slot: i32,
pub parent_slot: i32,
pub spacing: f32,
pub padding: i32,
}
unsafe impl bytemuck::Zeroable for GenMaterialsUniforms {}
unsafe impl bytemuck::Pod for GenMaterialsUniforms {}
pub(crate) struct ComputeShader<U> {
shader: rshader::ShaderSet,
bindgroup_pipeline: Option<(wgpu::BindGroup, wgpu::ComputePipeline)>,
uniforms: Option<wgpu::Buffer>,
name: String,
_phantom: std::marker::PhantomData<U>,
}
#[allow(unused)]
impl<U: bytemuck::Pod> ComputeShader<U> {
pub fn new(shader: rshader::ShaderSource, name: String) -> Self {
Self {
shader: rshader::ShaderSet::compute_only(shader).unwrap(),
bindgroup_pipeline: None,
uniforms: None,
name,
_phantom: std::marker::PhantomData,
}
}
pub fn refresh(&mut self) -> bool {
if self.shader.refresh() {
self.bindgroup_pipeline = None;
true
} else {
false
}
}
pub fn run(
&mut self,
device: &wgpu::Device,
encoder: &mut wgpu::CommandEncoder, | dimensions: (u32, u32, u32),
uniforms: &U,
) {
if self.uniforms.is_none() {
self.uniforms = Some(device.create_buffer(&wgpu::BufferDescriptor {
size: mem::size_of::<U>() as u64,
usage: wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::UNIFORM,
mapped_at_creation: false,
label: Some(&format!("buffer.{}.uniforms", self.name)),
}));
}
if self.bindgroup_pipeline.is_none() {
let (bind_group, bind_group_layout) = state.bind_group_for_shader(
device,
&self.shader,
hashmap!["ubo".into() => (false, wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: self.uniforms.as_ref().unwrap(),
offset: 0,
size: None,
}))],
HashMap::new(),
&format!("bindgroup.{}", self.name),
);
self.bindgroup_pipeline = Some((
bind_group,
device.create_compute_pipeline(&wgpu::ComputePipelineDescriptor {
layout: Some(&device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
bind_group_layouts: [&bind_group_layout][..].into(),
push_constant_ranges: &[],
label: Some(&format!("pipeline.{}.layout", self.name)),
})),
module: &device.create_shader_module(&wgpu::ShaderModuleDescriptor {
label: Some(&format!("shader.{}", self.name)),
source: wgpu::ShaderSource::SpirV(self.shader.compute().into()),
flags: wgpu::ShaderFlags::empty(),
}),
entry_point: "main",
label: Some(&format!("pipeline.{}", self.name)),
}),
));
}
let staging = device.create_buffer(&wgpu::BufferDescriptor {
size: mem::size_of::<U>() as u64,
usage: wgpu::BufferUsage::COPY_SRC,
label: Some(&format!("buffer.temporary.{}.upload", self.name)),
mapped_at_creation: true,
});
let mut buffer_view = staging.slice(..).get_mapped_range_mut();
buffer_view[..mem::size_of::<U>()].copy_from_slice(bytemuck::bytes_of(uniforms));
drop(buffer_view);
staging.unmap();
encoder.copy_buffer_to_buffer(
&staging,
0,
self.uniforms.as_ref().unwrap(),
0,
mem::size_of::<U>() as u64,
);
let mut cpass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { label: None });
cpass.set_pipeline(&self.bindgroup_pipeline.as_ref().unwrap().1);
cpass.set_bind_group(0, &self.bindgroup_pipeline.as_ref().unwrap().0, &[]);
cpass.dispatch(dimensions.0, dimensions.1, dimensions.2);
}
} | state: &GpuState, | random_line_split |
gpu.rs | use maplit::hashmap;
use crate::GpuState;
use std::{collections::HashMap, mem};
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenHeightmapsUniforms {
pub position: [i32; 2],
pub origin: [i32; 2],
pub spacing: f32,
pub in_slot: i32,
pub out_slot: i32,
pub level_resolution: i32,
pub face: u32,
}
unsafe impl bytemuck::Zeroable for GenHeightmapsUniforms {}
unsafe impl bytemuck::Pod for GenHeightmapsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenDisplacementsUniforms {
pub node_center: [f64; 3],
pub padding0: f64,
pub origin: [i32; 2],
pub position: [i32; 2],
pub stride: i32,
pub heightmaps_slot: i32,
pub displacements_slot: i32,
pub face: i32,
pub level_resolution: u32,
}
unsafe impl bytemuck::Zeroable for GenDisplacementsUniforms {}
unsafe impl bytemuck::Pod for GenDisplacementsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenNormalsUniforms {
pub heightmaps_origin: [i32; 2],
pub heightmaps_slot: i32,
pub normals_slot: i32,
pub spacing: f32,
pub padding: [f32; 3],
}
unsafe impl bytemuck::Zeroable for GenNormalsUniforms {}
unsafe impl bytemuck::Pod for GenNormalsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenMaterialsUniforms {
pub heightmaps_origin: [i32; 2],
pub parent_origin: [u32; 2],
pub heightmaps_slot: i32,
pub normals_slot: i32,
pub albedo_slot: i32,
pub parent_slot: i32,
pub spacing: f32,
pub padding: i32,
}
unsafe impl bytemuck::Zeroable for GenMaterialsUniforms {}
unsafe impl bytemuck::Pod for GenMaterialsUniforms {}
pub(crate) struct ComputeShader<U> {
shader: rshader::ShaderSet,
bindgroup_pipeline: Option<(wgpu::BindGroup, wgpu::ComputePipeline)>,
uniforms: Option<wgpu::Buffer>,
name: String,
_phantom: std::marker::PhantomData<U>,
}
#[allow(unused)]
impl<U: bytemuck::Pod> ComputeShader<U> {
pub fn new(shader: rshader::ShaderSource, name: String) -> Self {
Self {
shader: rshader::ShaderSet::compute_only(shader).unwrap(),
bindgroup_pipeline: None,
uniforms: None,
name,
_phantom: std::marker::PhantomData,
}
}
pub fn | (&mut self) -> bool {
if self.shader.refresh() {
self.bindgroup_pipeline = None;
true
} else {
false
}
}
pub fn run(
&mut self,
device: &wgpu::Device,
encoder: &mut wgpu::CommandEncoder,
state: &GpuState,
dimensions: (u32, u32, u32),
uniforms: &U,
) {
if self.uniforms.is_none() {
self.uniforms = Some(device.create_buffer(&wgpu::BufferDescriptor {
size: mem::size_of::<U>() as u64,
usage: wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::UNIFORM,
mapped_at_creation: false,
label: Some(&format!("buffer.{}.uniforms", self.name)),
}));
}
if self.bindgroup_pipeline.is_none() {
let (bind_group, bind_group_layout) = state.bind_group_for_shader(
device,
&self.shader,
hashmap!["ubo".into() => (false, wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: self.uniforms.as_ref().unwrap(),
offset: 0,
size: None,
}))],
HashMap::new(),
&format!("bindgroup.{}", self.name),
);
self.bindgroup_pipeline = Some((
bind_group,
device.create_compute_pipeline(&wgpu::ComputePipelineDescriptor {
layout: Some(&device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
bind_group_layouts: [&bind_group_layout][..].into(),
push_constant_ranges: &[],
label: Some(&format!("pipeline.{}.layout", self.name)),
})),
module: &device.create_shader_module(&wgpu::ShaderModuleDescriptor {
label: Some(&format!("shader.{}", self.name)),
source: wgpu::ShaderSource::SpirV(self.shader.compute().into()),
flags: wgpu::ShaderFlags::empty(),
}),
entry_point: "main",
label: Some(&format!("pipeline.{}", self.name)),
}),
));
}
let staging = device.create_buffer(&wgpu::BufferDescriptor {
size: mem::size_of::<U>() as u64,
usage: wgpu::BufferUsage::COPY_SRC,
label: Some(&format!("buffer.temporary.{}.upload", self.name)),
mapped_at_creation: true,
});
let mut buffer_view = staging.slice(..).get_mapped_range_mut();
buffer_view[..mem::size_of::<U>()].copy_from_slice(bytemuck::bytes_of(uniforms));
drop(buffer_view);
staging.unmap();
encoder.copy_buffer_to_buffer(
&staging,
0,
self.uniforms.as_ref().unwrap(),
0,
mem::size_of::<U>() as u64,
);
let mut cpass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { label: None });
cpass.set_pipeline(&self.bindgroup_pipeline.as_ref().unwrap().1);
cpass.set_bind_group(0, &self.bindgroup_pipeline.as_ref().unwrap().0, &[]);
cpass.dispatch(dimensions.0, dimensions.1, dimensions.2);
}
}
| refresh | identifier_name |
gpu.rs | use maplit::hashmap;
use crate::GpuState;
use std::{collections::HashMap, mem};
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenHeightmapsUniforms {
pub position: [i32; 2],
pub origin: [i32; 2],
pub spacing: f32,
pub in_slot: i32,
pub out_slot: i32,
pub level_resolution: i32,
pub face: u32,
}
unsafe impl bytemuck::Zeroable for GenHeightmapsUniforms {}
unsafe impl bytemuck::Pod for GenHeightmapsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenDisplacementsUniforms {
pub node_center: [f64; 3],
pub padding0: f64,
pub origin: [i32; 2],
pub position: [i32; 2],
pub stride: i32,
pub heightmaps_slot: i32,
pub displacements_slot: i32,
pub face: i32,
pub level_resolution: u32,
}
unsafe impl bytemuck::Zeroable for GenDisplacementsUniforms {}
unsafe impl bytemuck::Pod for GenDisplacementsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenNormalsUniforms {
pub heightmaps_origin: [i32; 2],
pub heightmaps_slot: i32,
pub normals_slot: i32,
pub spacing: f32,
pub padding: [f32; 3],
}
unsafe impl bytemuck::Zeroable for GenNormalsUniforms {}
unsafe impl bytemuck::Pod for GenNormalsUniforms {}
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct GenMaterialsUniforms {
pub heightmaps_origin: [i32; 2],
pub parent_origin: [u32; 2],
pub heightmaps_slot: i32,
pub normals_slot: i32,
pub albedo_slot: i32,
pub parent_slot: i32,
pub spacing: f32,
pub padding: i32,
}
unsafe impl bytemuck::Zeroable for GenMaterialsUniforms {}
unsafe impl bytemuck::Pod for GenMaterialsUniforms {}
pub(crate) struct ComputeShader<U> {
shader: rshader::ShaderSet,
bindgroup_pipeline: Option<(wgpu::BindGroup, wgpu::ComputePipeline)>,
uniforms: Option<wgpu::Buffer>,
name: String,
_phantom: std::marker::PhantomData<U>,
}
#[allow(unused)]
impl<U: bytemuck::Pod> ComputeShader<U> {
pub fn new(shader: rshader::ShaderSource, name: String) -> Self {
Self {
shader: rshader::ShaderSet::compute_only(shader).unwrap(),
bindgroup_pipeline: None,
uniforms: None,
name,
_phantom: std::marker::PhantomData,
}
}
pub fn refresh(&mut self) -> bool {
if self.shader.refresh() {
self.bindgroup_pipeline = None;
true
} else |
}
pub fn run(
&mut self,
device: &wgpu::Device,
encoder: &mut wgpu::CommandEncoder,
state: &GpuState,
dimensions: (u32, u32, u32),
uniforms: &U,
) {
if self.uniforms.is_none() {
self.uniforms = Some(device.create_buffer(&wgpu::BufferDescriptor {
size: mem::size_of::<U>() as u64,
usage: wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::UNIFORM,
mapped_at_creation: false,
label: Some(&format!("buffer.{}.uniforms", self.name)),
}));
}
if self.bindgroup_pipeline.is_none() {
let (bind_group, bind_group_layout) = state.bind_group_for_shader(
device,
&self.shader,
hashmap!["ubo".into() => (false, wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: self.uniforms.as_ref().unwrap(),
offset: 0,
size: None,
}))],
HashMap::new(),
&format!("bindgroup.{}", self.name),
);
self.bindgroup_pipeline = Some((
bind_group,
device.create_compute_pipeline(&wgpu::ComputePipelineDescriptor {
layout: Some(&device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
bind_group_layouts: [&bind_group_layout][..].into(),
push_constant_ranges: &[],
label: Some(&format!("pipeline.{}.layout", self.name)),
})),
module: &device.create_shader_module(&wgpu::ShaderModuleDescriptor {
label: Some(&format!("shader.{}", self.name)),
source: wgpu::ShaderSource::SpirV(self.shader.compute().into()),
flags: wgpu::ShaderFlags::empty(),
}),
entry_point: "main",
label: Some(&format!("pipeline.{}", self.name)),
}),
));
}
let staging = device.create_buffer(&wgpu::BufferDescriptor {
size: mem::size_of::<U>() as u64,
usage: wgpu::BufferUsage::COPY_SRC,
label: Some(&format!("buffer.temporary.{}.upload", self.name)),
mapped_at_creation: true,
});
let mut buffer_view = staging.slice(..).get_mapped_range_mut();
buffer_view[..mem::size_of::<U>()].copy_from_slice(bytemuck::bytes_of(uniforms));
drop(buffer_view);
staging.unmap();
encoder.copy_buffer_to_buffer(
&staging,
0,
self.uniforms.as_ref().unwrap(),
0,
mem::size_of::<U>() as u64,
);
let mut cpass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { label: None });
cpass.set_pipeline(&self.bindgroup_pipeline.as_ref().unwrap().1);
cpass.set_bind_group(0, &self.bindgroup_pipeline.as_ref().unwrap().0, &[]);
cpass.dispatch(dimensions.0, dimensions.1, dimensions.2);
}
}
| {
false
} | conditional_block |
mod.rs | //! Data structures related to the `/proc/<pid>/*` files
//!
//! The `Process` struct can load everything about a running process, and
//! provides some aggregate data about them.
mod cmd_line;
mod stat;
use std::fmt;
use crate::linux::{Jiffies, Ratio, PAGESIZE};
use crate::procfs::Result;
pub use self::cmd_line::CmdLine;
pub use self::stat::{Stat, State};
/// Information about a running process
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct Process {
/// The stat info for a process
pub stat: Stat,
/// The command line, as revealed by the /proc fs
pub cmdline: CmdLine,
}
impl Process {
pub fn from_pid<P: fmt::Display + Copy>(p: P) -> Result<Process> {
Ok(Process {
stat: Stat::from_pid(p)?,
cmdline: CmdLine::from_pid(p)?,
})
}
pub fn useful_cmdline(&self) -> String {
let cmd = self.cmdline.display();
if cmd.is_empty() | else {
cmd
}
}
/// What percent this process is using
///
/// First argument should be in bytes.
pub fn percent_ram(&self, of_bytes: usize) -> f64 {
pages_to_bytes(self.stat.rss) as f64 / of_bytes as f64 * 100.0
}
/// Compare this processes cpu utilization since the process representing th start time
///
/// The passed-in `start_process` is the time that we are comparing
/// against: `self` should be newer.
///
/// The `total_cpu` is how many Jiffies have passed on the cpu over the
/// same time period.
///
/// # Panics
/// If the total_cpu on start_process is higher than the total_cpu on current.
pub fn cpu_utilization_since<'a>(
&'a self,
start_process: &'a Process,
total_cpu: Jiffies,
) -> ProcessCpuUsage<'a> {
let (start_ps, end_ps) = (&start_process.stat, &self.stat);
if end_ps.utime < start_ps.utime || end_ps.stime < start_ps.stime {
panic!("End process is before start process (arguments called in wrong order)");
}
let user = 100.0
* (end_ps.utime - start_ps.utime)
.duration()
.ratio(&total_cpu.duration());
let sys = 100.0
* (end_ps.stime - start_ps.stime)
.duration()
.ratio(&total_cpu.duration());
ProcessCpuUsage {
process: &start_process,
upercent: user,
spercent: sys,
total: user + sys,
}
}
}
/// Represent the percent CPU utilization of a specific process over a specific
/// time period
///
/// This is generated by loading `RunningProcs` twice and diffing them.
#[derive(Debug)]
pub struct ProcessCpuUsage<'a> {
/// The process we're reporting on
pub process: &'a Process,
/// Percent time spent in user mode
pub upercent: f64,
/// Percent time spent in system mode
pub spercent: f64,
/// upercent + spercent
pub total: f64,
}
fn pages_to_bytes(pages: u64) -> u64 {
pages * (*PAGESIZE)
}
| {
self.stat.comm.clone()
} | conditional_block |
mod.rs | //! Data structures related to the `/proc/<pid>/*` files
//!
//! The `Process` struct can load everything about a running process, and
//! provides some aggregate data about them.
mod cmd_line;
mod stat;
| use crate::procfs::Result;
pub use self::cmd_line::CmdLine;
pub use self::stat::{Stat, State};
/// Information about a running process
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct Process {
/// The stat info for a process
pub stat: Stat,
/// The command line, as revealed by the /proc fs
pub cmdline: CmdLine,
}
impl Process {
pub fn from_pid<P: fmt::Display + Copy>(p: P) -> Result<Process> {
Ok(Process {
stat: Stat::from_pid(p)?,
cmdline: CmdLine::from_pid(p)?,
})
}
pub fn useful_cmdline(&self) -> String {
let cmd = self.cmdline.display();
if cmd.is_empty() {
self.stat.comm.clone()
} else {
cmd
}
}
/// What percent this process is using
///
/// First argument should be in bytes.
pub fn percent_ram(&self, of_bytes: usize) -> f64 {
pages_to_bytes(self.stat.rss) as f64 / of_bytes as f64 * 100.0
}
/// Compare this processes cpu utilization since the process representing th start time
///
/// The passed-in `start_process` is the time that we are comparing
/// against: `self` should be newer.
///
/// The `total_cpu` is how many Jiffies have passed on the cpu over the
/// same time period.
///
/// # Panics
/// If the total_cpu on start_process is higher than the total_cpu on current.
pub fn cpu_utilization_since<'a>(
&'a self,
start_process: &'a Process,
total_cpu: Jiffies,
) -> ProcessCpuUsage<'a> {
let (start_ps, end_ps) = (&start_process.stat, &self.stat);
if end_ps.utime < start_ps.utime || end_ps.stime < start_ps.stime {
panic!("End process is before start process (arguments called in wrong order)");
}
let user = 100.0
* (end_ps.utime - start_ps.utime)
.duration()
.ratio(&total_cpu.duration());
let sys = 100.0
* (end_ps.stime - start_ps.stime)
.duration()
.ratio(&total_cpu.duration());
ProcessCpuUsage {
process: &start_process,
upercent: user,
spercent: sys,
total: user + sys,
}
}
}
/// Represent the percent CPU utilization of a specific process over a specific
/// time period
///
/// This is generated by loading `RunningProcs` twice and diffing them.
#[derive(Debug)]
pub struct ProcessCpuUsage<'a> {
/// The process we're reporting on
pub process: &'a Process,
/// Percent time spent in user mode
pub upercent: f64,
/// Percent time spent in system mode
pub spercent: f64,
/// upercent + spercent
pub total: f64,
}
fn pages_to_bytes(pages: u64) -> u64 {
pages * (*PAGESIZE)
} | use std::fmt;
use crate::linux::{Jiffies, Ratio, PAGESIZE}; | random_line_split |
mod.rs | //! Data structures related to the `/proc/<pid>/*` files
//!
//! The `Process` struct can load everything about a running process, and
//! provides some aggregate data about them.
mod cmd_line;
mod stat;
use std::fmt;
use crate::linux::{Jiffies, Ratio, PAGESIZE};
use crate::procfs::Result;
pub use self::cmd_line::CmdLine;
pub use self::stat::{Stat, State};
/// Information about a running process
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct Process {
/// The stat info for a process
pub stat: Stat,
/// The command line, as revealed by the /proc fs
pub cmdline: CmdLine,
}
impl Process {
pub fn from_pid<P: fmt::Display + Copy>(p: P) -> Result<Process> {
Ok(Process {
stat: Stat::from_pid(p)?,
cmdline: CmdLine::from_pid(p)?,
})
}
pub fn useful_cmdline(&self) -> String {
let cmd = self.cmdline.display();
if cmd.is_empty() {
self.stat.comm.clone()
} else {
cmd
}
}
/// What percent this process is using
///
/// First argument should be in bytes.
pub fn percent_ram(&self, of_bytes: usize) -> f64 {
pages_to_bytes(self.stat.rss) as f64 / of_bytes as f64 * 100.0
}
/// Compare this processes cpu utilization since the process representing th start time
///
/// The passed-in `start_process` is the time that we are comparing
/// against: `self` should be newer.
///
/// The `total_cpu` is how many Jiffies have passed on the cpu over the
/// same time period.
///
/// # Panics
/// If the total_cpu on start_process is higher than the total_cpu on current.
pub fn | <'a>(
&'a self,
start_process: &'a Process,
total_cpu: Jiffies,
) -> ProcessCpuUsage<'a> {
let (start_ps, end_ps) = (&start_process.stat, &self.stat);
if end_ps.utime < start_ps.utime || end_ps.stime < start_ps.stime {
panic!("End process is before start process (arguments called in wrong order)");
}
let user = 100.0
* (end_ps.utime - start_ps.utime)
.duration()
.ratio(&total_cpu.duration());
let sys = 100.0
* (end_ps.stime - start_ps.stime)
.duration()
.ratio(&total_cpu.duration());
ProcessCpuUsage {
process: &start_process,
upercent: user,
spercent: sys,
total: user + sys,
}
}
}
/// Represent the percent CPU utilization of a specific process over a specific
/// time period
///
/// This is generated by loading `RunningProcs` twice and diffing them.
#[derive(Debug)]
pub struct ProcessCpuUsage<'a> {
/// The process we're reporting on
pub process: &'a Process,
/// Percent time spent in user mode
pub upercent: f64,
/// Percent time spent in system mode
pub spercent: f64,
/// upercent + spercent
pub total: f64,
}
fn pages_to_bytes(pages: u64) -> u64 {
pages * (*PAGESIZE)
}
| cpu_utilization_since | identifier_name |
struct-partial-move-1.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(PartialEq, Show)]
pub struct Partial<T> { x: T, y: T }
#[derive(PartialEq, Show)]
struct S { val: int }
impl S { fn new(v: int) -> S { S { val: v } } }
impl Drop for S { fn drop(&mut self) { } } | // `..p` moves all fields *except* `p.y` in this context.
Partial { y: f(p.y),..p }
}
pub fn main() {
let p = f((S::new(3), S::new(4)), |S { val: z }| S::new(z+1));
assert_eq!(p, Partial { x: S::new(3), y: S::new(5) });
} |
pub fn f<T, F>((b1, b2): (T, T), mut f: F) -> Partial<T> where F: FnMut(T) -> T {
let p = Partial { x: b1, y: b2 };
// Move of `p` is legal even though we are also moving `p.y`; the | random_line_split |
struct-partial-move-1.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(PartialEq, Show)]
pub struct Partial<T> { x: T, y: T }
#[derive(PartialEq, Show)]
struct S { val: int }
impl S { fn | (v: int) -> S { S { val: v } } }
impl Drop for S { fn drop(&mut self) { } }
pub fn f<T, F>((b1, b2): (T, T), mut f: F) -> Partial<T> where F: FnMut(T) -> T {
let p = Partial { x: b1, y: b2 };
// Move of `p` is legal even though we are also moving `p.y`; the
// `..p` moves all fields *except* `p.y` in this context.
Partial { y: f(p.y),..p }
}
pub fn main() {
let p = f((S::new(3), S::new(4)), |S { val: z }| S::new(z+1));
assert_eq!(p, Partial { x: S::new(3), y: S::new(5) });
}
| new | identifier_name |
struct-partial-move-1.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(PartialEq, Show)]
pub struct Partial<T> { x: T, y: T }
#[derive(PartialEq, Show)]
struct S { val: int }
impl S { fn new(v: int) -> S { S { val: v } } }
impl Drop for S { fn drop(&mut self) { } }
pub fn f<T, F>((b1, b2): (T, T), mut f: F) -> Partial<T> where F: FnMut(T) -> T |
pub fn main() {
let p = f((S::new(3), S::new(4)), |S { val: z }| S::new(z+1));
assert_eq!(p, Partial { x: S::new(3), y: S::new(5) });
}
| {
let p = Partial { x: b1, y: b2 };
// Move of `p` is legal even though we are also moving `p.y`; the
// `..p` moves all fields *except* `p.y` in this context.
Partial { y: f(p.y), ..p }
} | identifier_body |
main.rs | extern crate feroxide;
use feroxide::data_atoms::*;
use feroxide::data_molecules::*;
use feroxide::data_sef::*;
use feroxide::data_sep::*;
use feroxide::*;
fn main() | let _name = carbondioxide.name();
//... or the symbol
let symbol = carbondioxide.symbol();
// You can calculate the mass per mole
let mass_per_mole = carbondioxide.mass();
// Multiply that with your amount of moles
let weight = mass_per_mole * 10.0;
// To get your data
println!("10 moles of {} weigh {} gram(s).", symbol, weight);
// Throw a bunch of molecules together in a container with a bit of energy
let mut container = Container {
contents: vec![
ContainerCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
moles: Moles::from(10000000000.0),
},
ContainerCompound {
element: ion_from_molecule!(WATER.clone()),
moles: Moles::from(10000000000.0),
},
ContainerCompound {
element: ion_from_atom!(OXYGEN.clone()),
moles: Moles::from(10000000000.0),
},
],
available_energy: Energy::from(100_000f64), // in Joules
};
// Specify the reaction that will occur
// H₂O + CO₂ ⇌ H₂CO₃
let reaction = ElemReaction {
lhs: ReactionSide {
compounds: vec![
ReactionCompound {
element: ion_from_atom!(OXYGEN.clone()),
amount: 1,
},
ReactionCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
amount: 2,
},
],
},
rhs: ReactionSide {
compounds: vec![ReactionCompound {
element: ion_from_molecule!(carbondioxide.clone()),
amount: 2,
}],
},
is_equilibrium: true,
};
// Make sure the reaction is valid
assert!(reaction.equalise());
assert!(reaction.is_valid());
// Print the reaction in names
println!("{}", reaction.name());
//... or in symbols (the default)
println!("{}", reaction.symbol());
// Print the contents of the container at the start
println!("Contents: {}", container);
// Run the reaction 10 times
for i in 0..10 {
// Run the reaction on the container
container.react(&reaction);
// Show what's left
println!("[{:>2}] Contents: {}", i + 1, container.to_string());
}
// Redox reactions are also possible
let redox = RedoxReaction {
oxidator: ElemReaction::<Ion>::ion_from_string("F2 + 2e <> 2F;1-").unwrap(),
reductor: ElemReaction::<Ion>::ion_from_string("Fe <> Fe;3 + 3e").unwrap(),
};
// Make sure it's valid
assert!(redox.equalise());
assert!(redox.is_valid());
// Print the symbol representation
println!("{}", redox.symbol());
// Print the SEP values
println!("oxidator: {}", get_sep(&redox.oxidator).unwrap());
println!("reductor: {}", get_sep(&redox.reductor).unwrap());
// Print the SEF value
println!(
"SEF(AlCl3) = {} kJ/mol",
get_sef(&ion_from_string!("AlCl3")).unwrap()
);
// Boom
println!("\n\n\n");
let mut water_container =
Container::<Ion>::ion_from_string("2000 H2; + 1000 O2; [10000 J]").unwrap();
println!("pre: {}", water_container);
let redox_boom = get_redox_reaction(&water_container).unwrap();
println!("reaction: {}", redox_boom.elem_reaction().symbol());
for _ in 0..100 {
water_container.react(&redox_boom);
}
println!("post: {}", water_container);
println!("\n\n\n");
// Automatic redox reactions
println!("\n\n\n");
// Get the possible redox reactions from a container
let mut redox_container = Container {
contents: vec![
ContainerCompound {
element: ion_from_string!("Fe"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("O2"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("H2O"),
moles: Moles::from(200.0),
},
],
available_energy: Energy::from(100_000f64),
};
let redox_reaction = get_redox_reaction(&redox_container);
if let Some(redox) = redox_reaction {
println!("\n\n");
println!("Container: {}", redox_container);
println!("\tcan have the following reaction:");
println!("Redox reaction: \n{}", redox.symbol());
println!("Total reaction: {}", redox.elem_reaction().symbol());
for _ in 0..100 {
redox_container.react(&redox);
}
println!("\n");
println!("After 100 times:");
println!("Container: {}", redox_container);
let rust = ElemReaction::<Ion>::ion_from_string("Fe;2+ + 2OH;- > FeO2H2;0").unwrap();
println!("\n");
println!("Container: {}", &redox_container);
println!("\tcan have the following reaction:");
println!("Salt reaction: \n{}", rust.symbol());
let fe2 = ContainerCompound::<Ion>::ion_from_string("Fe;2+").unwrap();
while redox_container.contains(&fe2) {
redox_container.react(&rust);
}
println!("\n");
println!("After all {} is gone:", fe2.symbol());
println!("Container: {}", redox_container);
println!("\n\n\n");
}
}
| {
// You can create digital molecules with ease on two ways:
// ... the easy way
let carbondioxide = Molecule::from_string("CO2").unwrap();
// ... and the fast way
let carbonmonoxide = Molecule {
compounds: vec![
MoleculeCompound {
atom: CARBON,
amount: 1,
},
MoleculeCompound {
atom: OXYGEN,
amount: 1,
},
],
};
// Of which you can generate the name | identifier_body |
main.rs | extern crate feroxide;
use feroxide::data_atoms::*;
use feroxide::data_molecules::*;
use feroxide::data_sef::*;
use feroxide::data_sep::*;
use feroxide::*;
fn | () {
// You can create digital molecules with ease on two ways:
//... the easy way
let carbondioxide = Molecule::from_string("CO2").unwrap();
//... and the fast way
let carbonmonoxide = Molecule {
compounds: vec![
MoleculeCompound {
atom: CARBON,
amount: 1,
},
MoleculeCompound {
atom: OXYGEN,
amount: 1,
},
],
};
// Of which you can generate the name
let _name = carbondioxide.name();
//... or the symbol
let symbol = carbondioxide.symbol();
// You can calculate the mass per mole
let mass_per_mole = carbondioxide.mass();
// Multiply that with your amount of moles
let weight = mass_per_mole * 10.0;
// To get your data
println!("10 moles of {} weigh {} gram(s).", symbol, weight);
// Throw a bunch of molecules together in a container with a bit of energy
let mut container = Container {
contents: vec![
ContainerCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
moles: Moles::from(10000000000.0),
},
ContainerCompound {
element: ion_from_molecule!(WATER.clone()),
moles: Moles::from(10000000000.0),
},
ContainerCompound {
element: ion_from_atom!(OXYGEN.clone()),
moles: Moles::from(10000000000.0),
},
],
available_energy: Energy::from(100_000f64), // in Joules
};
// Specify the reaction that will occur
// H₂O + CO₂ ⇌ H₂CO₃
let reaction = ElemReaction {
lhs: ReactionSide {
compounds: vec![
ReactionCompound {
element: ion_from_atom!(OXYGEN.clone()),
amount: 1,
},
ReactionCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
amount: 2,
},
],
},
rhs: ReactionSide {
compounds: vec![ReactionCompound {
element: ion_from_molecule!(carbondioxide.clone()),
amount: 2,
}],
},
is_equilibrium: true,
};
// Make sure the reaction is valid
assert!(reaction.equalise());
assert!(reaction.is_valid());
// Print the reaction in names
println!("{}", reaction.name());
//... or in symbols (the default)
println!("{}", reaction.symbol());
// Print the contents of the container at the start
println!("Contents: {}", container);
// Run the reaction 10 times
for i in 0..10 {
// Run the reaction on the container
container.react(&reaction);
// Show what's left
println!("[{:>2}] Contents: {}", i + 1, container.to_string());
}
// Redox reactions are also possible
let redox = RedoxReaction {
oxidator: ElemReaction::<Ion>::ion_from_string("F2 + 2e <> 2F;1-").unwrap(),
reductor: ElemReaction::<Ion>::ion_from_string("Fe <> Fe;3 + 3e").unwrap(),
};
// Make sure it's valid
assert!(redox.equalise());
assert!(redox.is_valid());
// Print the symbol representation
println!("{}", redox.symbol());
// Print the SEP values
println!("oxidator: {}", get_sep(&redox.oxidator).unwrap());
println!("reductor: {}", get_sep(&redox.reductor).unwrap());
// Print the SEF value
println!(
"SEF(AlCl3) = {} kJ/mol",
get_sef(&ion_from_string!("AlCl3")).unwrap()
);
// Boom
println!("\n\n\n");
let mut water_container =
Container::<Ion>::ion_from_string("2000 H2; + 1000 O2; [10000 J]").unwrap();
println!("pre: {}", water_container);
let redox_boom = get_redox_reaction(&water_container).unwrap();
println!("reaction: {}", redox_boom.elem_reaction().symbol());
for _ in 0..100 {
water_container.react(&redox_boom);
}
println!("post: {}", water_container);
println!("\n\n\n");
// Automatic redox reactions
println!("\n\n\n");
// Get the possible redox reactions from a container
let mut redox_container = Container {
contents: vec![
ContainerCompound {
element: ion_from_string!("Fe"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("O2"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("H2O"),
moles: Moles::from(200.0),
},
],
available_energy: Energy::from(100_000f64),
};
let redox_reaction = get_redox_reaction(&redox_container);
if let Some(redox) = redox_reaction {
println!("\n\n");
println!("Container: {}", redox_container);
println!("\tcan have the following reaction:");
println!("Redox reaction: \n{}", redox.symbol());
println!("Total reaction: {}", redox.elem_reaction().symbol());
for _ in 0..100 {
redox_container.react(&redox);
}
println!("\n");
println!("After 100 times:");
println!("Container: {}", redox_container);
let rust = ElemReaction::<Ion>::ion_from_string("Fe;2+ + 2OH;- > FeO2H2;0").unwrap();
println!("\n");
println!("Container: {}", &redox_container);
println!("\tcan have the following reaction:");
println!("Salt reaction: \n{}", rust.symbol());
let fe2 = ContainerCompound::<Ion>::ion_from_string("Fe;2+").unwrap();
while redox_container.contains(&fe2) {
redox_container.react(&rust);
}
println!("\n");
println!("After all {} is gone:", fe2.symbol());
println!("Container: {}", redox_container);
println!("\n\n\n");
}
}
| main | identifier_name |
main.rs | extern crate feroxide;
use feroxide::data_atoms::*;
use feroxide::data_molecules::*;
use feroxide::data_sef::*;
use feroxide::data_sep::*;
use feroxide::*;
fn main() {
// You can create digital molecules with ease on two ways:
//... the easy way
let carbondioxide = Molecule::from_string("CO2").unwrap();
//... and the fast way
let carbonmonoxide = Molecule {
compounds: vec![
MoleculeCompound {
atom: CARBON,
amount: 1,
},
MoleculeCompound {
atom: OXYGEN,
amount: 1,
},
],
};
// Of which you can generate the name
let _name = carbondioxide.name();
//... or the symbol
let symbol = carbondioxide.symbol();
// You can calculate the mass per mole
let mass_per_mole = carbondioxide.mass();
// Multiply that with your amount of moles
let weight = mass_per_mole * 10.0;
// To get your data
println!("10 moles of {} weigh {} gram(s).", symbol, weight);
// Throw a bunch of molecules together in a container with a bit of energy
let mut container = Container {
contents: vec![
ContainerCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
moles: Moles::from(10000000000.0),
},
ContainerCompound {
element: ion_from_molecule!(WATER.clone()),
moles: Moles::from(10000000000.0),
},
ContainerCompound {
element: ion_from_atom!(OXYGEN.clone()),
moles: Moles::from(10000000000.0),
},
],
available_energy: Energy::from(100_000f64), // in Joules
};
// Specify the reaction that will occur
// H₂O + CO₂ ⇌ H₂CO₃
let reaction = ElemReaction {
lhs: ReactionSide {
compounds: vec![
ReactionCompound {
element: ion_from_atom!(OXYGEN.clone()),
amount: 1,
},
ReactionCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
amount: 2,
},
],
},
rhs: ReactionSide {
compounds: vec![ReactionCompound {
element: ion_from_molecule!(carbondioxide.clone()),
amount: 2,
}],
},
is_equilibrium: true,
};
// Make sure the reaction is valid
assert!(reaction.equalise());
assert!(reaction.is_valid());
// Print the reaction in names
println!("{}", reaction.name());
//... or in symbols (the default)
println!("{}", reaction.symbol());
// Print the contents of the container at the start
println!("Contents: {}", container);
// Run the reaction 10 times
for i in 0..10 {
// Run the reaction on the container
container.react(&reaction);
// Show what's left
println!("[{:>2}] Contents: {}", i + 1, container.to_string());
}
// Redox reactions are also possible
let redox = RedoxReaction {
oxidator: ElemReaction::<Ion>::ion_from_string("F2 + 2e <> 2F;1-").unwrap(),
reductor: ElemReaction::<Ion>::ion_from_string("Fe <> Fe;3 + 3e").unwrap(),
};
// Make sure it's valid
assert!(redox.equalise());
assert!(redox.is_valid());
// Print the symbol representation
println!("{}", redox.symbol());
// Print the SEP values
println!("oxidator: {}", get_sep(&redox.oxidator).unwrap());
println!("reductor: {}", get_sep(&redox.reductor).unwrap());
// Print the SEF value
println!(
"SEF(AlCl3) = {} kJ/mol",
get_sef(&ion_from_string!("AlCl3")).unwrap()
);
// Boom
println!("\n\n\n");
let mut water_container =
Container::<Ion>::ion_from_string("2000 H2; + 1000 O2; [10000 J]").unwrap();
println!("pre: {}", water_container);
let redox_boom = get_redox_reaction(&water_container).unwrap();
println!("reaction: {}", redox_boom.elem_reaction().symbol());
for _ in 0..100 {
water_container.react(&redox_boom);
}
println!("post: {}", water_container);
println!("\n\n\n");
// Automatic redox reactions
println!("\n\n\n");
// Get the possible redox reactions from a container
let mut redox_container = Container {
contents: vec![
ContainerCompound {
element: ion_from_string!("Fe"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("O2"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("H2O"),
moles: Moles::from(200.0),
},
],
available_energy: Energy::from(100_000f64),
};
let redox_reaction = get_redox_reaction(&redox_container);
if let Some(redox) = redox_reaction {
|
let fe2 = ContainerCompound::<Ion>::ion_from_string("Fe;2+").unwrap();
while redox_container.contains(&fe2) {
redox_container.react(&rust);
}
println!("\n");
println!("After all {} is gone:", fe2.symbol());
println!("Container: {}", redox_container);
println!("\n\n\n");
}
}
| println!("\n\n");
println!("Container: {}", redox_container);
println!("\tcan have the following reaction:");
println!("Redox reaction: \n{}", redox.symbol());
println!("Total reaction: {}", redox.elem_reaction().symbol());
for _ in 0..100 {
redox_container.react(&redox);
}
println!("\n");
println!("After 100 times:");
println!("Container: {}", redox_container);
let rust = ElemReaction::<Ion>::ion_from_string("Fe;2+ + 2OH;- > FeO2H2;0").unwrap();
println!("\n");
println!("Container: {}", &redox_container);
println!("\tcan have the following reaction:");
println!("Salt reaction: \n{}", rust.symbol()); | conditional_block |
main.rs | extern crate feroxide;
use feroxide::data_atoms::*;
use feroxide::data_molecules::*;
use feroxide::data_sef::*;
use feroxide::data_sep::*;
use feroxide::*;
fn main() {
// You can create digital molecules with ease on two ways:
//... the easy way
let carbondioxide = Molecule::from_string("CO2").unwrap();
//... and the fast way
let carbonmonoxide = Molecule {
compounds: vec![
MoleculeCompound {
atom: CARBON,
amount: 1,
},
MoleculeCompound {
atom: OXYGEN,
amount: 1,
},
],
};
// Of which you can generate the name
let _name = carbondioxide.name();
//... or the symbol
let symbol = carbondioxide.symbol();
// You can calculate the mass per mole
let mass_per_mole = carbondioxide.mass();
// Multiply that with your amount of moles
let weight = mass_per_mole * 10.0;
// To get your data
println!("10 moles of {} weigh {} gram(s).", symbol, weight);
// Throw a bunch of molecules together in a container with a bit of energy
let mut container = Container {
contents: vec![
ContainerCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
moles: Moles::from(10000000000.0),
},
ContainerCompound {
element: ion_from_molecule!(WATER.clone()),
moles: Moles::from(10000000000.0),
}, | element: ion_from_atom!(OXYGEN.clone()),
moles: Moles::from(10000000000.0),
},
],
available_energy: Energy::from(100_000f64), // in Joules
};
// Specify the reaction that will occur
// H₂O + CO₂ ⇌ H₂CO₃
let reaction = ElemReaction {
lhs: ReactionSide {
compounds: vec![
ReactionCompound {
element: ion_from_atom!(OXYGEN.clone()),
amount: 1,
},
ReactionCompound {
element: ion_from_molecule!(carbonmonoxide.clone()),
amount: 2,
},
],
},
rhs: ReactionSide {
compounds: vec![ReactionCompound {
element: ion_from_molecule!(carbondioxide.clone()),
amount: 2,
}],
},
is_equilibrium: true,
};
// Make sure the reaction is valid
assert!(reaction.equalise());
assert!(reaction.is_valid());
// Print the reaction in names
println!("{}", reaction.name());
//... or in symbols (the default)
println!("{}", reaction.symbol());
// Print the contents of the container at the start
println!("Contents: {}", container);
// Run the reaction 10 times
for i in 0..10 {
// Run the reaction on the container
container.react(&reaction);
// Show what's left
println!("[{:>2}] Contents: {}", i + 1, container.to_string());
}
// Redox reactions are also possible
let redox = RedoxReaction {
oxidator: ElemReaction::<Ion>::ion_from_string("F2 + 2e <> 2F;1-").unwrap(),
reductor: ElemReaction::<Ion>::ion_from_string("Fe <> Fe;3 + 3e").unwrap(),
};
// Make sure it's valid
assert!(redox.equalise());
assert!(redox.is_valid());
// Print the symbol representation
println!("{}", redox.symbol());
// Print the SEP values
println!("oxidator: {}", get_sep(&redox.oxidator).unwrap());
println!("reductor: {}", get_sep(&redox.reductor).unwrap());
// Print the SEF value
println!(
"SEF(AlCl3) = {} kJ/mol",
get_sef(&ion_from_string!("AlCl3")).unwrap()
);
// Boom
println!("\n\n\n");
let mut water_container =
Container::<Ion>::ion_from_string("2000 H2; + 1000 O2; [10000 J]").unwrap();
println!("pre: {}", water_container);
let redox_boom = get_redox_reaction(&water_container).unwrap();
println!("reaction: {}", redox_boom.elem_reaction().symbol());
for _ in 0..100 {
water_container.react(&redox_boom);
}
println!("post: {}", water_container);
println!("\n\n\n");
// Automatic redox reactions
println!("\n\n\n");
// Get the possible redox reactions from a container
let mut redox_container = Container {
contents: vec![
ContainerCompound {
element: ion_from_string!("Fe"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("O2"),
moles: Moles::from(100.0),
},
ContainerCompound {
element: ion_from_string!("H2O"),
moles: Moles::from(200.0),
},
],
available_energy: Energy::from(100_000f64),
};
let redox_reaction = get_redox_reaction(&redox_container);
if let Some(redox) = redox_reaction {
println!("\n\n");
println!("Container: {}", redox_container);
println!("\tcan have the following reaction:");
println!("Redox reaction: \n{}", redox.symbol());
println!("Total reaction: {}", redox.elem_reaction().symbol());
for _ in 0..100 {
redox_container.react(&redox);
}
println!("\n");
println!("After 100 times:");
println!("Container: {}", redox_container);
let rust = ElemReaction::<Ion>::ion_from_string("Fe;2+ + 2OH;- > FeO2H2;0").unwrap();
println!("\n");
println!("Container: {}", &redox_container);
println!("\tcan have the following reaction:");
println!("Salt reaction: \n{}", rust.symbol());
let fe2 = ContainerCompound::<Ion>::ion_from_string("Fe;2+").unwrap();
while redox_container.contains(&fe2) {
redox_container.react(&rust);
}
println!("\n");
println!("After all {} is gone:", fe2.symbol());
println!("Container: {}", redox_container);
println!("\n\n\n");
}
} | ContainerCompound { | random_line_split |
read_file.rs | /*
* How to read a file.
* Future work: as a variant, we may use the C bindings to call mmap/munmap
*/
use std::io;
use std::result;
/* read the file path by calling the read_whole_file_str function */
fn | (path: ~str) -> ~str {
let res = io::read_whole_file_str(&Path(path));
if result::is_err(&res) {
fail!(~"file_reader error: " + result::get_err(&res));
}
res.get()
}
/* read the file path line by line */
fn read_file_lines(path: ~str) -> ~str {
let res = io::file_reader(&Path(path));
if result::is_err(&res) {
fail!(~"file_reader error: " + result::get_err(&res));
}
let mut content = ~"";
let reader = res.get();
loop {
let line = reader.read_line();
if reader.eof() {
break;
}
// read_line does not return the '\n', so we add it
content = content + line + "\n";
}
content
}
fn main() {
let filename = ~"read_file.rs";
//let content = read_file_whole(copy filename);
let content = read_file_lines(copy filename);
io::println("the content of " + filename + " is [\n" + content + "]");
}
| read_file_whole | identifier_name |
read_file.rs | /*
* How to read a file.
* Future work: as a variant, we may use the C bindings to call mmap/munmap
*/
use std::io;
use std::result;
/* read the file path by calling the read_whole_file_str function */
fn read_file_whole(path: ~str) -> ~str {
let res = io::read_whole_file_str(&Path(path));
if result::is_err(&res) {
fail!(~"file_reader error: " + result::get_err(&res));
}
res.get()
}
/* read the file path line by line */
fn read_file_lines(path: ~str) -> ~str |
fn main() {
let filename = ~"read_file.rs";
//let content = read_file_whole(copy filename);
let content = read_file_lines(copy filename);
io::println("the content of " + filename + " is [\n" + content + "]");
}
| {
let res = io::file_reader(&Path(path));
if result::is_err(&res) {
fail!(~"file_reader error: " + result::get_err(&res));
}
let mut content = ~"";
let reader = res.get();
loop {
let line = reader.read_line();
if reader.eof() {
break;
}
// read_line does not return the '\n', so we add it
content = content + line + "\n";
}
content
} | identifier_body |
read_file.rs | /*
* How to read a file. |
use std::io;
use std::result;
/* read the file path by calling the read_whole_file_str function */
fn read_file_whole(path: ~str) -> ~str {
let res = io::read_whole_file_str(&Path(path));
if result::is_err(&res) {
fail!(~"file_reader error: " + result::get_err(&res));
}
res.get()
}
/* read the file path line by line */
fn read_file_lines(path: ~str) -> ~str {
let res = io::file_reader(&Path(path));
if result::is_err(&res) {
fail!(~"file_reader error: " + result::get_err(&res));
}
let mut content = ~"";
let reader = res.get();
loop {
let line = reader.read_line();
if reader.eof() {
break;
}
// read_line does not return the '\n', so we add it
content = content + line + "\n";
}
content
}
fn main() {
let filename = ~"read_file.rs";
//let content = read_file_whole(copy filename);
let content = read_file_lines(copy filename);
io::println("the content of " + filename + " is [\n" + content + "]");
} | * Future work: as a variant, we may use the C bindings to call mmap/munmap
*/ | random_line_split |
read_file.rs | /*
* How to read a file.
* Future work: as a variant, we may use the C bindings to call mmap/munmap
*/
use std::io;
use std::result;
/* read the file path by calling the read_whole_file_str function */
fn read_file_whole(path: ~str) -> ~str {
let res = io::read_whole_file_str(&Path(path));
if result::is_err(&res) {
fail!(~"file_reader error: " + result::get_err(&res));
}
res.get()
}
/* read the file path line by line */
fn read_file_lines(path: ~str) -> ~str {
let res = io::file_reader(&Path(path));
if result::is_err(&res) |
let mut content = ~"";
let reader = res.get();
loop {
let line = reader.read_line();
if reader.eof() {
break;
}
// read_line does not return the '\n', so we add it
content = content + line + "\n";
}
content
}
fn main() {
let filename = ~"read_file.rs";
//let content = read_file_whole(copy filename);
let content = read_file_lines(copy filename);
io::println("the content of " + filename + " is [\n" + content + "]");
}
| {
fail!(~"file_reader error: " + result::get_err(&res));
} | conditional_block |
lev_distance.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> usize {
if me.is_empty() { return t.chars().count(); }
if t.is_empty() { return me.chars().count(); }
let mut dcol: Vec<_> = (0..t.len() + 1).collect();
let mut t_last = 0;
for (i, sc) in me.chars().enumerate() {
let mut current = i;
dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() {
let next = dcol[j + 1];
if sc == tc | else {
dcol[j + 1] = cmp::min(current, next);
dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
}
current = next;
t_last = j;
}
}
dcol[t_last + 1]
}
#[test]
fn test_lev_distance() {
use std::char::{ from_u32, MAX };
// Test bytelength agnosticity
for c in (0..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[..], &c[..]), 0);
}
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
let c = "Mary häd ä little lämb\n\nLittle lämb\n";
assert_eq!(lev_distance(a, b), 1);
assert_eq!(lev_distance(b, a), 1);
assert_eq!(lev_distance(a, c), 2);
assert_eq!(lev_distance(c, a), 2);
assert_eq!(lev_distance(b, c), 1);
assert_eq!(lev_distance(c, b), 1);
}
| {
dcol[j + 1] = current;
} | conditional_block |
lev_distance.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> usize { |
let mut dcol: Vec<_> = (0..t.len() + 1).collect();
let mut t_last = 0;
for (i, sc) in me.chars().enumerate() {
let mut current = i;
dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() {
let next = dcol[j + 1];
if sc == tc {
dcol[j + 1] = current;
} else {
dcol[j + 1] = cmp::min(current, next);
dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
}
current = next;
t_last = j;
}
}
dcol[t_last + 1]
}
#[test]
fn test_lev_distance() {
use std::char::{ from_u32, MAX };
// Test bytelength agnosticity
for c in (0..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[..], &c[..]), 0);
}
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
let c = "Mary häd ä little lämb\n\nLittle lämb\n";
assert_eq!(lev_distance(a, b), 1);
assert_eq!(lev_distance(b, a), 1);
assert_eq!(lev_distance(a, c), 2);
assert_eq!(lev_distance(c, a), 2);
assert_eq!(lev_distance(b, c), 1);
assert_eq!(lev_distance(c, b), 1);
} | if me.is_empty() { return t.chars().count(); }
if t.is_empty() { return me.chars().count(); } | random_line_split |
lev_distance.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> usize {
if me.is_empty() { return t.chars().count(); }
if t.is_empty() { return me.chars().count(); }
let mut dcol: Vec<_> = (0..t.len() + 1).collect();
let mut t_last = 0;
for (i, sc) in me.chars().enumerate() {
let mut current = i;
dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() {
let next = dcol[j + 1];
if sc == tc {
dcol[j + 1] = current;
} else {
dcol[j + 1] = cmp::min(current, next);
dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
}
current = next;
t_last = j;
}
}
dcol[t_last + 1]
}
#[test]
fn test_lev_distance() | {
use std::char::{ from_u32, MAX };
// Test bytelength agnosticity
for c in (0..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[..], &c[..]), 0);
}
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
let c = "Mary häd ä little lämb\n\nLittle lämb\n";
assert_eq!(lev_distance(a, b), 1);
assert_eq!(lev_distance(b, a), 1);
assert_eq!(lev_distance(a, c), 2);
assert_eq!(lev_distance(c, a), 2);
assert_eq!(lev_distance(b, c), 1);
assert_eq!(lev_distance(c, b), 1);
}
| identifier_body |
|
lev_distance.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> usize {
if me.is_empty() { return t.chars().count(); }
if t.is_empty() { return me.chars().count(); }
let mut dcol: Vec<_> = (0..t.len() + 1).collect();
let mut t_last = 0;
for (i, sc) in me.chars().enumerate() {
let mut current = i;
dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() {
let next = dcol[j + 1];
if sc == tc {
dcol[j + 1] = current;
} else {
dcol[j + 1] = cmp::min(current, next);
dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
}
current = next;
t_last = j;
}
}
dcol[t_last + 1]
}
#[test]
fn | () {
use std::char::{ from_u32, MAX };
// Test bytelength agnosticity
for c in (0..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[..], &c[..]), 0);
}
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
let c = "Mary häd ä little lämb\n\nLittle lämb\n";
assert_eq!(lev_distance(a, b), 1);
assert_eq!(lev_distance(b, a), 1);
assert_eq!(lev_distance(a, c), 2);
assert_eq!(lev_distance(c, a), 2);
assert_eq!(lev_distance(b, c), 1);
assert_eq!(lev_distance(c, b), 1);
}
| test_lev_distance | identifier_name |
modules.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use ethcore::client::BlockChainClient;
use hypervisor::Hypervisor;
use ethsync::{SyncConfig, NetworkConfiguration, NetworkError};
use ethcore::snapshot::SnapshotService;
#[cfg(not(feature="ipc"))]
use self::no_ipc_deps::*;
#[cfg(feature="ipc")]
use self::ipc_deps::*;
use ethcore_logger::Config as LogConfig;
use std::path::Path;
#[cfg(feature="ipc")]
pub mod service_urls {
use std::path::PathBuf;
pub const CLIENT: &'static str = "parity-chain.ipc";
pub const SNAPSHOT: &'static str = "parity-snapshot.ipc";
pub const SYNC: &'static str = "parity-sync.ipc";
pub const SYNC_NOTIFY: &'static str = "parity-sync-notify.ipc";
pub const NETWORK_MANAGER: &'static str = "parity-manage-net.ipc";
pub const SYNC_CONTROL: &'static str = "parity-sync-control.ipc";
#[cfg(feature="stratum")]
pub const STRATUM: &'static str = "parity-stratum.ipc";
#[cfg(feature="stratum")]
pub const MINING_JOB_DISPATCHER: &'static str = "parity-mining-jobs.ipc";
pub fn with_base(data_dir: &str, service_path: &str) -> String {
let mut path = PathBuf::from(data_dir);
path.push(service_path);
format!("ipc://{}", path.to_str().unwrap())
}
}
#[cfg(not(feature="ipc"))]
mod no_ipc_deps {
pub use ethsync::{EthSync, SyncProvider, ManageNetwork};
pub use ethcore::client::ChainNotify;
}
#[cfg(feature="ipc")]
pub type SyncModules = (
GuardedSocket<SyncClient<NanoSocket>>,
GuardedSocket<NetworkManagerClient<NanoSocket>>,
GuardedSocket<ChainNotifyClient<NanoSocket>>
);
#[cfg(not(feature="ipc"))]
pub type SyncModules = (Arc<SyncProvider>, Arc<ManageNetwork>, Arc<ChainNotify>);
#[cfg(feature="ipc")]
mod ipc_deps {
pub use ethsync::remote::{SyncClient, NetworkManagerClient};
pub use ethsync::ServiceConfiguration;
pub use ethcore::client::remote::ChainNotifyClient;
pub use hypervisor::{SYNC_MODULE_ID, BootArgs, HYPERVISOR_IPC_URL};
pub use nanoipc::{GuardedSocket, NanoSocket, generic_client, fast_client};
pub use ipc::IpcSocket;
pub use ipc::binary::serialize;
}
#[cfg(feature="ipc")]
pub fn hypervisor(base_path: &Path) -> Option<Hypervisor> {
Some(Hypervisor
::with_url(&service_urls::with_base(base_path.to_str().unwrap(), HYPERVISOR_IPC_URL))
.io_path(base_path.to_str().unwrap()))
}
#[cfg(not(feature="ipc"))]
pub fn hypervisor(_: &Path) -> Option<Hypervisor> {
None
}
#[cfg(feature="ipc")]
fn | (io_path: &str, sync_cfg: SyncConfig, net_cfg: NetworkConfiguration, log_settings: &LogConfig) -> BootArgs {
let service_config = ServiceConfiguration {
sync: sync_cfg,
net: net_cfg,
io_path: io_path.to_owned(),
};
// initialisation payload is passed via stdin
let service_payload = serialize(&service_config).expect("Any binary-derived struct is serializable by definition");
// client service url and logging settings are passed in command line
let mut cli_args = Vec::new();
cli_args.push("sync".to_owned());
if!log_settings.color { cli_args.push("--no-color".to_owned()); }
if let Some(ref mode) = log_settings.mode {
cli_args.push("-l".to_owned());
cli_args.push(mode.to_owned());
}
if let Some(ref file) = log_settings.file {
cli_args.push("--log-file".to_owned());
cli_args.push(file.to_owned());
}
BootArgs::new().stdin(service_payload).cli(cli_args)
}
#[cfg(feature="ipc")]
pub fn sync
(
hypervisor_ref: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
_client: Arc<BlockChainClient>,
_snapshot_service: Arc<SnapshotService>,
log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let mut hypervisor = hypervisor_ref.take().expect("There should be hypervisor for ipc configuration");
let args = sync_arguments(&hypervisor.io_path, sync_cfg, net_cfg, log_settings);
hypervisor = hypervisor.module(SYNC_MODULE_ID, args);
hypervisor.start();
hypervisor.wait_for_startup();
let sync_client = generic_client::<SyncClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC)).unwrap();
let notify_client = generic_client::<ChainNotifyClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC_NOTIFY)).unwrap();
let manage_client = generic_client::<NetworkManagerClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::NETWORK_MANAGER)).unwrap();
*hypervisor_ref = Some(hypervisor);
Ok((sync_client, manage_client, notify_client))
}
#[cfg(not(feature="ipc"))]
pub fn sync
(
_hypervisor: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
client: Arc<BlockChainClient>,
snapshot_service: Arc<SnapshotService>,
_log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let eth_sync = try!(EthSync::new(sync_cfg, client, snapshot_service, net_cfg));
Ok((eth_sync.clone() as Arc<SyncProvider>, eth_sync.clone() as Arc<ManageNetwork>, eth_sync.clone() as Arc<ChainNotify>))
}
| sync_arguments | identifier_name |
modules.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use ethcore::client::BlockChainClient;
use hypervisor::Hypervisor;
use ethsync::{SyncConfig, NetworkConfiguration, NetworkError};
use ethcore::snapshot::SnapshotService;
#[cfg(not(feature="ipc"))]
use self::no_ipc_deps::*;
#[cfg(feature="ipc")]
use self::ipc_deps::*;
use ethcore_logger::Config as LogConfig;
use std::path::Path;
#[cfg(feature="ipc")]
pub mod service_urls {
use std::path::PathBuf;
pub const CLIENT: &'static str = "parity-chain.ipc";
pub const SNAPSHOT: &'static str = "parity-snapshot.ipc";
pub const SYNC: &'static str = "parity-sync.ipc";
pub const SYNC_NOTIFY: &'static str = "parity-sync-notify.ipc";
pub const NETWORK_MANAGER: &'static str = "parity-manage-net.ipc";
pub const SYNC_CONTROL: &'static str = "parity-sync-control.ipc";
#[cfg(feature="stratum")]
pub const STRATUM: &'static str = "parity-stratum.ipc";
#[cfg(feature="stratum")]
pub const MINING_JOB_DISPATCHER: &'static str = "parity-mining-jobs.ipc";
pub fn with_base(data_dir: &str, service_path: &str) -> String {
let mut path = PathBuf::from(data_dir);
path.push(service_path);
format!("ipc://{}", path.to_str().unwrap())
}
}
#[cfg(not(feature="ipc"))]
mod no_ipc_deps {
pub use ethsync::{EthSync, SyncProvider, ManageNetwork};
pub use ethcore::client::ChainNotify;
}
#[cfg(feature="ipc")]
pub type SyncModules = (
GuardedSocket<SyncClient<NanoSocket>>,
GuardedSocket<NetworkManagerClient<NanoSocket>>,
GuardedSocket<ChainNotifyClient<NanoSocket>>
);
#[cfg(not(feature="ipc"))]
pub type SyncModules = (Arc<SyncProvider>, Arc<ManageNetwork>, Arc<ChainNotify>);
#[cfg(feature="ipc")]
mod ipc_deps {
pub use ethsync::remote::{SyncClient, NetworkManagerClient};
pub use ethsync::ServiceConfiguration;
pub use ethcore::client::remote::ChainNotifyClient;
pub use hypervisor::{SYNC_MODULE_ID, BootArgs, HYPERVISOR_IPC_URL};
pub use nanoipc::{GuardedSocket, NanoSocket, generic_client, fast_client};
pub use ipc::IpcSocket;
pub use ipc::binary::serialize;
}
#[cfg(feature="ipc")]
pub fn hypervisor(base_path: &Path) -> Option<Hypervisor> {
Some(Hypervisor
::with_url(&service_urls::with_base(base_path.to_str().unwrap(), HYPERVISOR_IPC_URL))
.io_path(base_path.to_str().unwrap()))
}
#[cfg(not(feature="ipc"))]
pub fn hypervisor(_: &Path) -> Option<Hypervisor> |
#[cfg(feature="ipc")]
fn sync_arguments(io_path: &str, sync_cfg: SyncConfig, net_cfg: NetworkConfiguration, log_settings: &LogConfig) -> BootArgs {
let service_config = ServiceConfiguration {
sync: sync_cfg,
net: net_cfg,
io_path: io_path.to_owned(),
};
// initialisation payload is passed via stdin
let service_payload = serialize(&service_config).expect("Any binary-derived struct is serializable by definition");
// client service url and logging settings are passed in command line
let mut cli_args = Vec::new();
cli_args.push("sync".to_owned());
if!log_settings.color { cli_args.push("--no-color".to_owned()); }
if let Some(ref mode) = log_settings.mode {
cli_args.push("-l".to_owned());
cli_args.push(mode.to_owned());
}
if let Some(ref file) = log_settings.file {
cli_args.push("--log-file".to_owned());
cli_args.push(file.to_owned());
}
BootArgs::new().stdin(service_payload).cli(cli_args)
}
#[cfg(feature="ipc")]
pub fn sync
(
hypervisor_ref: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
_client: Arc<BlockChainClient>,
_snapshot_service: Arc<SnapshotService>,
log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let mut hypervisor = hypervisor_ref.take().expect("There should be hypervisor for ipc configuration");
let args = sync_arguments(&hypervisor.io_path, sync_cfg, net_cfg, log_settings);
hypervisor = hypervisor.module(SYNC_MODULE_ID, args);
hypervisor.start();
hypervisor.wait_for_startup();
let sync_client = generic_client::<SyncClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC)).unwrap();
let notify_client = generic_client::<ChainNotifyClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC_NOTIFY)).unwrap();
let manage_client = generic_client::<NetworkManagerClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::NETWORK_MANAGER)).unwrap();
*hypervisor_ref = Some(hypervisor);
Ok((sync_client, manage_client, notify_client))
}
#[cfg(not(feature="ipc"))]
pub fn sync
(
_hypervisor: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
client: Arc<BlockChainClient>,
snapshot_service: Arc<SnapshotService>,
_log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let eth_sync = try!(EthSync::new(sync_cfg, client, snapshot_service, net_cfg));
Ok((eth_sync.clone() as Arc<SyncProvider>, eth_sync.clone() as Arc<ManageNetwork>, eth_sync.clone() as Arc<ChainNotify>))
}
| {
None
} | identifier_body |
modules.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use ethcore::client::BlockChainClient;
use hypervisor::Hypervisor;
use ethsync::{SyncConfig, NetworkConfiguration, NetworkError};
use ethcore::snapshot::SnapshotService;
#[cfg(not(feature="ipc"))]
use self::no_ipc_deps::*;
#[cfg(feature="ipc")]
use self::ipc_deps::*;
use ethcore_logger::Config as LogConfig;
use std::path::Path;
#[cfg(feature="ipc")]
pub mod service_urls {
use std::path::PathBuf;
pub const CLIENT: &'static str = "parity-chain.ipc";
pub const SNAPSHOT: &'static str = "parity-snapshot.ipc";
pub const SYNC: &'static str = "parity-sync.ipc";
pub const SYNC_NOTIFY: &'static str = "parity-sync-notify.ipc";
pub const NETWORK_MANAGER: &'static str = "parity-manage-net.ipc";
pub const SYNC_CONTROL: &'static str = "parity-sync-control.ipc";
#[cfg(feature="stratum")]
pub const STRATUM: &'static str = "parity-stratum.ipc";
#[cfg(feature="stratum")]
pub const MINING_JOB_DISPATCHER: &'static str = "parity-mining-jobs.ipc";
pub fn with_base(data_dir: &str, service_path: &str) -> String {
let mut path = PathBuf::from(data_dir);
path.push(service_path);
format!("ipc://{}", path.to_str().unwrap())
}
}
#[cfg(not(feature="ipc"))]
mod no_ipc_deps {
pub use ethsync::{EthSync, SyncProvider, ManageNetwork};
pub use ethcore::client::ChainNotify;
}
#[cfg(feature="ipc")]
pub type SyncModules = (
GuardedSocket<SyncClient<NanoSocket>>,
GuardedSocket<NetworkManagerClient<NanoSocket>>,
GuardedSocket<ChainNotifyClient<NanoSocket>>
);
#[cfg(not(feature="ipc"))]
pub type SyncModules = (Arc<SyncProvider>, Arc<ManageNetwork>, Arc<ChainNotify>);
#[cfg(feature="ipc")]
mod ipc_deps {
pub use ethsync::remote::{SyncClient, NetworkManagerClient};
pub use ethsync::ServiceConfiguration;
pub use ethcore::client::remote::ChainNotifyClient;
pub use hypervisor::{SYNC_MODULE_ID, BootArgs, HYPERVISOR_IPC_URL};
pub use nanoipc::{GuardedSocket, NanoSocket, generic_client, fast_client}; | pub use ipc::IpcSocket;
pub use ipc::binary::serialize;
}
#[cfg(feature="ipc")]
pub fn hypervisor(base_path: &Path) -> Option<Hypervisor> {
Some(Hypervisor
::with_url(&service_urls::with_base(base_path.to_str().unwrap(), HYPERVISOR_IPC_URL))
.io_path(base_path.to_str().unwrap()))
}
#[cfg(not(feature="ipc"))]
pub fn hypervisor(_: &Path) -> Option<Hypervisor> {
None
}
#[cfg(feature="ipc")]
fn sync_arguments(io_path: &str, sync_cfg: SyncConfig, net_cfg: NetworkConfiguration, log_settings: &LogConfig) -> BootArgs {
let service_config = ServiceConfiguration {
sync: sync_cfg,
net: net_cfg,
io_path: io_path.to_owned(),
};
// initialisation payload is passed via stdin
let service_payload = serialize(&service_config).expect("Any binary-derived struct is serializable by definition");
// client service url and logging settings are passed in command line
let mut cli_args = Vec::new();
cli_args.push("sync".to_owned());
if!log_settings.color { cli_args.push("--no-color".to_owned()); }
if let Some(ref mode) = log_settings.mode {
cli_args.push("-l".to_owned());
cli_args.push(mode.to_owned());
}
if let Some(ref file) = log_settings.file {
cli_args.push("--log-file".to_owned());
cli_args.push(file.to_owned());
}
BootArgs::new().stdin(service_payload).cli(cli_args)
}
#[cfg(feature="ipc")]
pub fn sync
(
hypervisor_ref: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
_client: Arc<BlockChainClient>,
_snapshot_service: Arc<SnapshotService>,
log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let mut hypervisor = hypervisor_ref.take().expect("There should be hypervisor for ipc configuration");
let args = sync_arguments(&hypervisor.io_path, sync_cfg, net_cfg, log_settings);
hypervisor = hypervisor.module(SYNC_MODULE_ID, args);
hypervisor.start();
hypervisor.wait_for_startup();
let sync_client = generic_client::<SyncClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC)).unwrap();
let notify_client = generic_client::<ChainNotifyClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC_NOTIFY)).unwrap();
let manage_client = generic_client::<NetworkManagerClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::NETWORK_MANAGER)).unwrap();
*hypervisor_ref = Some(hypervisor);
Ok((sync_client, manage_client, notify_client))
}
#[cfg(not(feature="ipc"))]
pub fn sync
(
_hypervisor: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
client: Arc<BlockChainClient>,
snapshot_service: Arc<SnapshotService>,
_log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let eth_sync = try!(EthSync::new(sync_cfg, client, snapshot_service, net_cfg));
Ok((eth_sync.clone() as Arc<SyncProvider>, eth_sync.clone() as Arc<ManageNetwork>, eth_sync.clone() as Arc<ChainNotify>))
} | random_line_split |
|
modules.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use ethcore::client::BlockChainClient;
use hypervisor::Hypervisor;
use ethsync::{SyncConfig, NetworkConfiguration, NetworkError};
use ethcore::snapshot::SnapshotService;
#[cfg(not(feature="ipc"))]
use self::no_ipc_deps::*;
#[cfg(feature="ipc")]
use self::ipc_deps::*;
use ethcore_logger::Config as LogConfig;
use std::path::Path;
#[cfg(feature="ipc")]
pub mod service_urls {
use std::path::PathBuf;
pub const CLIENT: &'static str = "parity-chain.ipc";
pub const SNAPSHOT: &'static str = "parity-snapshot.ipc";
pub const SYNC: &'static str = "parity-sync.ipc";
pub const SYNC_NOTIFY: &'static str = "parity-sync-notify.ipc";
pub const NETWORK_MANAGER: &'static str = "parity-manage-net.ipc";
pub const SYNC_CONTROL: &'static str = "parity-sync-control.ipc";
#[cfg(feature="stratum")]
pub const STRATUM: &'static str = "parity-stratum.ipc";
#[cfg(feature="stratum")]
pub const MINING_JOB_DISPATCHER: &'static str = "parity-mining-jobs.ipc";
pub fn with_base(data_dir: &str, service_path: &str) -> String {
let mut path = PathBuf::from(data_dir);
path.push(service_path);
format!("ipc://{}", path.to_str().unwrap())
}
}
#[cfg(not(feature="ipc"))]
mod no_ipc_deps {
pub use ethsync::{EthSync, SyncProvider, ManageNetwork};
pub use ethcore::client::ChainNotify;
}
#[cfg(feature="ipc")]
pub type SyncModules = (
GuardedSocket<SyncClient<NanoSocket>>,
GuardedSocket<NetworkManagerClient<NanoSocket>>,
GuardedSocket<ChainNotifyClient<NanoSocket>>
);
#[cfg(not(feature="ipc"))]
pub type SyncModules = (Arc<SyncProvider>, Arc<ManageNetwork>, Arc<ChainNotify>);
#[cfg(feature="ipc")]
mod ipc_deps {
pub use ethsync::remote::{SyncClient, NetworkManagerClient};
pub use ethsync::ServiceConfiguration;
pub use ethcore::client::remote::ChainNotifyClient;
pub use hypervisor::{SYNC_MODULE_ID, BootArgs, HYPERVISOR_IPC_URL};
pub use nanoipc::{GuardedSocket, NanoSocket, generic_client, fast_client};
pub use ipc::IpcSocket;
pub use ipc::binary::serialize;
}
#[cfg(feature="ipc")]
pub fn hypervisor(base_path: &Path) -> Option<Hypervisor> {
Some(Hypervisor
::with_url(&service_urls::with_base(base_path.to_str().unwrap(), HYPERVISOR_IPC_URL))
.io_path(base_path.to_str().unwrap()))
}
#[cfg(not(feature="ipc"))]
pub fn hypervisor(_: &Path) -> Option<Hypervisor> {
None
}
#[cfg(feature="ipc")]
fn sync_arguments(io_path: &str, sync_cfg: SyncConfig, net_cfg: NetworkConfiguration, log_settings: &LogConfig) -> BootArgs {
let service_config = ServiceConfiguration {
sync: sync_cfg,
net: net_cfg,
io_path: io_path.to_owned(),
};
// initialisation payload is passed via stdin
let service_payload = serialize(&service_config).expect("Any binary-derived struct is serializable by definition");
// client service url and logging settings are passed in command line
let mut cli_args = Vec::new();
cli_args.push("sync".to_owned());
if!log_settings.color { cli_args.push("--no-color".to_owned()); }
if let Some(ref mode) = log_settings.mode {
cli_args.push("-l".to_owned());
cli_args.push(mode.to_owned());
}
if let Some(ref file) = log_settings.file |
BootArgs::new().stdin(service_payload).cli(cli_args)
}
#[cfg(feature="ipc")]
pub fn sync
(
hypervisor_ref: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
_client: Arc<BlockChainClient>,
_snapshot_service: Arc<SnapshotService>,
log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let mut hypervisor = hypervisor_ref.take().expect("There should be hypervisor for ipc configuration");
let args = sync_arguments(&hypervisor.io_path, sync_cfg, net_cfg, log_settings);
hypervisor = hypervisor.module(SYNC_MODULE_ID, args);
hypervisor.start();
hypervisor.wait_for_startup();
let sync_client = generic_client::<SyncClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC)).unwrap();
let notify_client = generic_client::<ChainNotifyClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC_NOTIFY)).unwrap();
let manage_client = generic_client::<NetworkManagerClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::NETWORK_MANAGER)).unwrap();
*hypervisor_ref = Some(hypervisor);
Ok((sync_client, manage_client, notify_client))
}
#[cfg(not(feature="ipc"))]
pub fn sync
(
_hypervisor: &mut Option<Hypervisor>,
sync_cfg: SyncConfig,
net_cfg: NetworkConfiguration,
client: Arc<BlockChainClient>,
snapshot_service: Arc<SnapshotService>,
_log_settings: &LogConfig,
)
-> Result<SyncModules, NetworkError>
{
let eth_sync = try!(EthSync::new(sync_cfg, client, snapshot_service, net_cfg));
Ok((eth_sync.clone() as Arc<SyncProvider>, eth_sync.clone() as Arc<ManageNetwork>, eth_sync.clone() as Arc<ChainNotify>))
}
| {
cli_args.push("--log-file".to_owned());
cli_args.push(file.to_owned());
} | conditional_block |
reflector.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Reflector` struct.
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use js::jsapi::{HandleObject, JSContext, JSObject, Heap};
use std::default::Default;
/// Create the reflector for a new DOM object and yield ownership to the
/// reflector.
pub fn reflect_dom_object<T, U>(
obj: Box<T>,
global: &U,
wrap_fn: unsafe fn(*mut JSContext, &GlobalScope, Box<T>) -> DomRoot<T>)
-> DomRoot<T>
where T: DomObject, U: DerivedFrom<GlobalScope>
{
let global_scope = global.upcast();
unsafe {
wrap_fn(global_scope.get_cx(), global_scope, obj)
}
}
/// A struct to store a reference to the reflector of a DOM object.
#[allow(unrooted_must_root)]
#[derive(MallocSizeOf)]
#[must_root]
// If you're renaming or moving this field, update the path in plugins::reflector as well
pub struct Reflector {
#[ignore_malloc_size_of = "defined and measured in rust-mozjs"]
object: Heap<*mut JSObject>,
}
#[allow(unrooted_must_root)]
impl PartialEq for Reflector {
fn eq(&self, other: &Reflector) -> bool {
self.object.get() == other.object.get()
}
}
impl Reflector {
/// Get the reflector.
#[inline]
pub fn get_jsobject(&self) -> HandleObject {
// We're rooted, so it's safe to hand out a handle to object in Heap
unsafe { self.object.handle() }
}
/// Initialize the reflector. (May be called only once.)
pub fn set_jsobject(&mut self, object: *mut JSObject) {
assert!(self.object.get().is_null());
assert!(!object.is_null());
self.object.set(object);
}
/// Return a pointer to the memory location at which the JS reflector
/// object is stored. Used to root the reflector, as
/// required by the JSAPI rooting APIs.
pub fn rootable(&self) -> &Heap<*mut JSObject> {
&self.object
}
/// Create an uninitialized `Reflector`.
pub fn new() -> Reflector {
Reflector {
object: Heap::default(),
}
}
}
/// A trait to provide access to the `Reflector` for a DOM object.
pub trait DomObject:'static {
/// Returns the receiver's reflector.
fn reflector(&self) -> &Reflector;
/// Returns the global scope of the realm that the DomObject was created in.
fn global(&self) -> DomRoot<GlobalScope> where Self: Sized {
GlobalScope::from_reflector(self)
}
} |
impl DomObject for Reflector {
fn reflector(&self) -> &Self {
self
}
}
/// A trait to initialize the `Reflector` for a DOM object.
pub trait MutDomObject: DomObject {
/// Initializes the Reflector
fn init_reflector(&mut self, obj: *mut JSObject);
}
impl MutDomObject for Reflector {
fn init_reflector(&mut self, obj: *mut JSObject) {
self.set_jsobject(obj)
}
} | random_line_split |
|
reflector.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Reflector` struct.
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use js::jsapi::{HandleObject, JSContext, JSObject, Heap};
use std::default::Default;
/// Create the reflector for a new DOM object and yield ownership to the
/// reflector.
pub fn reflect_dom_object<T, U>(
obj: Box<T>,
global: &U,
wrap_fn: unsafe fn(*mut JSContext, &GlobalScope, Box<T>) -> DomRoot<T>)
-> DomRoot<T>
where T: DomObject, U: DerivedFrom<GlobalScope>
{
let global_scope = global.upcast();
unsafe {
wrap_fn(global_scope.get_cx(), global_scope, obj)
}
}
/// A struct to store a reference to the reflector of a DOM object.
#[allow(unrooted_must_root)]
#[derive(MallocSizeOf)]
#[must_root]
// If you're renaming or moving this field, update the path in plugins::reflector as well
pub struct Reflector {
#[ignore_malloc_size_of = "defined and measured in rust-mozjs"]
object: Heap<*mut JSObject>,
}
#[allow(unrooted_must_root)]
impl PartialEq for Reflector {
fn eq(&self, other: &Reflector) -> bool {
self.object.get() == other.object.get()
}
}
impl Reflector {
/// Get the reflector.
#[inline]
pub fn | (&self) -> HandleObject {
// We're rooted, so it's safe to hand out a handle to object in Heap
unsafe { self.object.handle() }
}
/// Initialize the reflector. (May be called only once.)
pub fn set_jsobject(&mut self, object: *mut JSObject) {
assert!(self.object.get().is_null());
assert!(!object.is_null());
self.object.set(object);
}
/// Return a pointer to the memory location at which the JS reflector
/// object is stored. Used to root the reflector, as
/// required by the JSAPI rooting APIs.
pub fn rootable(&self) -> &Heap<*mut JSObject> {
&self.object
}
/// Create an uninitialized `Reflector`.
pub fn new() -> Reflector {
Reflector {
object: Heap::default(),
}
}
}
/// A trait to provide access to the `Reflector` for a DOM object.
pub trait DomObject:'static {
/// Returns the receiver's reflector.
fn reflector(&self) -> &Reflector;
/// Returns the global scope of the realm that the DomObject was created in.
fn global(&self) -> DomRoot<GlobalScope> where Self: Sized {
GlobalScope::from_reflector(self)
}
}
impl DomObject for Reflector {
fn reflector(&self) -> &Self {
self
}
}
/// A trait to initialize the `Reflector` for a DOM object.
pub trait MutDomObject: DomObject {
/// Initializes the Reflector
fn init_reflector(&mut self, obj: *mut JSObject);
}
impl MutDomObject for Reflector {
fn init_reflector(&mut self, obj: *mut JSObject) {
self.set_jsobject(obj)
}
}
| get_jsobject | identifier_name |
reflector.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Reflector` struct.
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use js::jsapi::{HandleObject, JSContext, JSObject, Heap};
use std::default::Default;
/// Create the reflector for a new DOM object and yield ownership to the
/// reflector.
pub fn reflect_dom_object<T, U>(
obj: Box<T>,
global: &U,
wrap_fn: unsafe fn(*mut JSContext, &GlobalScope, Box<T>) -> DomRoot<T>)
-> DomRoot<T>
where T: DomObject, U: DerivedFrom<GlobalScope>
{
let global_scope = global.upcast();
unsafe {
wrap_fn(global_scope.get_cx(), global_scope, obj)
}
}
/// A struct to store a reference to the reflector of a DOM object.
#[allow(unrooted_must_root)]
#[derive(MallocSizeOf)]
#[must_root]
// If you're renaming or moving this field, update the path in plugins::reflector as well
pub struct Reflector {
#[ignore_malloc_size_of = "defined and measured in rust-mozjs"]
object: Heap<*mut JSObject>,
}
#[allow(unrooted_must_root)]
impl PartialEq for Reflector {
fn eq(&self, other: &Reflector) -> bool {
self.object.get() == other.object.get()
}
}
impl Reflector {
/// Get the reflector.
#[inline]
pub fn get_jsobject(&self) -> HandleObject {
// We're rooted, so it's safe to hand out a handle to object in Heap
unsafe { self.object.handle() }
}
/// Initialize the reflector. (May be called only once.)
pub fn set_jsobject(&mut self, object: *mut JSObject) {
assert!(self.object.get().is_null());
assert!(!object.is_null());
self.object.set(object);
}
/// Return a pointer to the memory location at which the JS reflector
/// object is stored. Used to root the reflector, as
/// required by the JSAPI rooting APIs.
pub fn rootable(&self) -> &Heap<*mut JSObject> {
&self.object
}
/// Create an uninitialized `Reflector`.
pub fn new() -> Reflector {
Reflector {
object: Heap::default(),
}
}
}
/// A trait to provide access to the `Reflector` for a DOM object.
pub trait DomObject:'static {
/// Returns the receiver's reflector.
fn reflector(&self) -> &Reflector;
/// Returns the global scope of the realm that the DomObject was created in.
fn global(&self) -> DomRoot<GlobalScope> where Self: Sized {
GlobalScope::from_reflector(self)
}
}
impl DomObject for Reflector {
fn reflector(&self) -> &Self {
self
}
}
/// A trait to initialize the `Reflector` for a DOM object.
pub trait MutDomObject: DomObject {
/// Initializes the Reflector
fn init_reflector(&mut self, obj: *mut JSObject);
}
impl MutDomObject for Reflector {
fn init_reflector(&mut self, obj: *mut JSObject) |
}
| {
self.set_jsobject(obj)
} | identifier_body |
main.rs | use std::env;
struct LwzDict {
words: Vec<String>
}
impl LwzDict {
pub fn | (&mut self, s: &str) {
if s.len() < 1 {
return;
}
let next = match self.find(s) {
Some(c) => {
self.words.push(
format!("{}{}", c, s)
);
s[c.len()..s.len()]
},
None => {
self.words.push(s[0..1].to_string());
s[1..s.len()]
}
};
self.add_chain(&next);
}
fn find(&self, s: &str) -> Option<String> {
for word in self.words {
if(word == s) {
Some(word.clone())
}
}
None
}
}
impl From<String> for LwzDict {
fn from(from: String) -> Self {
}
}
fn main() {
let sentence = env::args().nth(1).unwrap();
}
| add_chain | identifier_name |
main.rs | use std::env;
struct LwzDict {
words: Vec<String>
}
impl LwzDict {
pub fn add_chain(&mut self, s: &str) {
if s.len() < 1 {
return;
}
let next = match self.find(s) {
Some(c) => {
self.words.push(
format!("{}{}", c, s) | s[1..s.len()]
}
};
self.add_chain(&next);
}
fn find(&self, s: &str) -> Option<String> {
for word in self.words {
if(word == s) {
Some(word.clone())
}
}
None
}
}
impl From<String> for LwzDict {
fn from(from: String) -> Self {
}
}
fn main() {
let sentence = env::args().nth(1).unwrap();
} | );
s[c.len()..s.len()]
},
None => {
self.words.push(s[0..1].to_string()); | random_line_split |
main.rs | use std::env;
struct LwzDict {
words: Vec<String>
}
impl LwzDict {
pub fn add_chain(&mut self, s: &str) |
fn find(&self, s: &str) -> Option<String> {
for word in self.words {
if(word == s) {
Some(word.clone())
}
}
None
}
}
impl From<String> for LwzDict {
fn from(from: String) -> Self {
}
}
fn main() {
let sentence = env::args().nth(1).unwrap();
}
| {
if s.len() < 1 {
return;
}
let next = match self.find(s) {
Some(c) => {
self.words.push(
format!("{}{}", c, s)
);
s[c.len()..s.len()]
},
None => {
self.words.push(s[0..1].to_string());
s[1..s.len()]
}
};
self.add_chain(&next);
} | identifier_body |
build.rs | extern crate curl;
extern crate env_logger;
extern crate flate2;
extern crate krpc_build;
extern crate prost_build;
extern crate tar;
use std::env;
use std::io::Cursor;
use std::path::PathBuf;
use curl::easy::Easy; | const VERSION: &'static str = "1.7.1";
fn main() {
env_logger::init();
let target = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set"));
let dir = target.join(format!("kudu-{}", VERSION));
// Download the Kudu source tarball.
if!dir.exists() {
let mut data = Vec::new();
let mut handle = Easy::new();
handle
.url(&format!(
"https://github.com/apache/kudu/archive/{}.tar.gz",
VERSION
)).expect("failed to configure Kudu tarball URL");
handle
.follow_location(true)
.expect("failed to configure follow location");
{
let mut transfer = handle.transfer();
transfer
.write_function(|new_data| {
data.extend_from_slice(new_data);
Ok(new_data.len())
}).expect("failed to write download data");
transfer
.perform()
.expect("failed to download Kudu source tarball");
}
Archive::new(GzDecoder::new(Cursor::new(data)))
.unpack(target)
.expect("failed to unpack Kudu source tarball");
}
prost_build::Config::new()
.service_generator(Box::new(krpc_build::KrpcServiceGenerator))
.compile_protos(
&[
dir.join("src/kudu/client/client.proto"),
dir.join("src/kudu/consensus/metadata.proto"),
dir.join("src/kudu/master/master.proto"),
dir.join("src/kudu/rpc/rpc_header.proto"),
dir.join("src/kudu/tools/tool.proto"),
dir.join("src/kudu/tserver/tserver_service.proto"),
],
&[dir.join("src")],
).unwrap();
} | use flate2::bufread::GzDecoder;
use tar::Archive;
| random_line_split |
build.rs | extern crate curl;
extern crate env_logger;
extern crate flate2;
extern crate krpc_build;
extern crate prost_build;
extern crate tar;
use std::env;
use std::io::Cursor;
use std::path::PathBuf;
use curl::easy::Easy;
use flate2::bufread::GzDecoder;
use tar::Archive;
const VERSION: &'static str = "1.7.1";
fn | () {
env_logger::init();
let target = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set"));
let dir = target.join(format!("kudu-{}", VERSION));
// Download the Kudu source tarball.
if!dir.exists() {
let mut data = Vec::new();
let mut handle = Easy::new();
handle
.url(&format!(
"https://github.com/apache/kudu/archive/{}.tar.gz",
VERSION
)).expect("failed to configure Kudu tarball URL");
handle
.follow_location(true)
.expect("failed to configure follow location");
{
let mut transfer = handle.transfer();
transfer
.write_function(|new_data| {
data.extend_from_slice(new_data);
Ok(new_data.len())
}).expect("failed to write download data");
transfer
.perform()
.expect("failed to download Kudu source tarball");
}
Archive::new(GzDecoder::new(Cursor::new(data)))
.unpack(target)
.expect("failed to unpack Kudu source tarball");
}
prost_build::Config::new()
.service_generator(Box::new(krpc_build::KrpcServiceGenerator))
.compile_protos(
&[
dir.join("src/kudu/client/client.proto"),
dir.join("src/kudu/consensus/metadata.proto"),
dir.join("src/kudu/master/master.proto"),
dir.join("src/kudu/rpc/rpc_header.proto"),
dir.join("src/kudu/tools/tool.proto"),
dir.join("src/kudu/tserver/tserver_service.proto"),
],
&[dir.join("src")],
).unwrap();
}
| main | identifier_name |
build.rs | extern crate curl;
extern crate env_logger;
extern crate flate2;
extern crate krpc_build;
extern crate prost_build;
extern crate tar;
use std::env;
use std::io::Cursor;
use std::path::PathBuf;
use curl::easy::Easy;
use flate2::bufread::GzDecoder;
use tar::Archive;
const VERSION: &'static str = "1.7.1";
fn main() | transfer
.write_function(|new_data| {
data.extend_from_slice(new_data);
Ok(new_data.len())
}).expect("failed to write download data");
transfer
.perform()
.expect("failed to download Kudu source tarball");
}
Archive::new(GzDecoder::new(Cursor::new(data)))
.unpack(target)
.expect("failed to unpack Kudu source tarball");
}
prost_build::Config::new()
.service_generator(Box::new(krpc_build::KrpcServiceGenerator))
.compile_protos(
&[
dir.join("src/kudu/client/client.proto"),
dir.join("src/kudu/consensus/metadata.proto"),
dir.join("src/kudu/master/master.proto"),
dir.join("src/kudu/rpc/rpc_header.proto"),
dir.join("src/kudu/tools/tool.proto"),
dir.join("src/kudu/tserver/tserver_service.proto"),
],
&[dir.join("src")],
).unwrap();
}
| {
env_logger::init();
let target = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set"));
let dir = target.join(format!("kudu-{}", VERSION));
// Download the Kudu source tarball.
if !dir.exists() {
let mut data = Vec::new();
let mut handle = Easy::new();
handle
.url(&format!(
"https://github.com/apache/kudu/archive/{}.tar.gz",
VERSION
)).expect("failed to configure Kudu tarball URL");
handle
.follow_location(true)
.expect("failed to configure follow location");
{
let mut transfer = handle.transfer(); | identifier_body |
build.rs | extern crate curl;
extern crate env_logger;
extern crate flate2;
extern crate krpc_build;
extern crate prost_build;
extern crate tar;
use std::env;
use std::io::Cursor;
use std::path::PathBuf;
use curl::easy::Easy;
use flate2::bufread::GzDecoder;
use tar::Archive;
const VERSION: &'static str = "1.7.1";
fn main() {
env_logger::init();
let target = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set"));
let dir = target.join(format!("kudu-{}", VERSION));
// Download the Kudu source tarball.
if!dir.exists() | .perform()
.expect("failed to download Kudu source tarball");
}
Archive::new(GzDecoder::new(Cursor::new(data)))
.unpack(target)
.expect("failed to unpack Kudu source tarball");
}
prost_build::Config::new()
.service_generator(Box::new(krpc_build::KrpcServiceGenerator))
.compile_protos(
&[
dir.join("src/kudu/client/client.proto"),
dir.join("src/kudu/consensus/metadata.proto"),
dir.join("src/kudu/master/master.proto"),
dir.join("src/kudu/rpc/rpc_header.proto"),
dir.join("src/kudu/tools/tool.proto"),
dir.join("src/kudu/tserver/tserver_service.proto"),
],
&[dir.join("src")],
).unwrap();
}
| {
let mut data = Vec::new();
let mut handle = Easy::new();
handle
.url(&format!(
"https://github.com/apache/kudu/archive/{}.tar.gz",
VERSION
)).expect("failed to configure Kudu tarball URL");
handle
.follow_location(true)
.expect("failed to configure follow location");
{
let mut transfer = handle.transfer();
transfer
.write_function(|new_data| {
data.extend_from_slice(new_data);
Ok(new_data.len())
}).expect("failed to write download data");
transfer | conditional_block |
generate-mod.rs | // Modules generated by transparent proc macros still acts as barriers for names (issue #50504).
// aux-build:generate-mod.rs
extern crate generate_mod;
struct FromOutside;
generate_mod::check!(); //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `Outer` in this scope
#[generate_mod::check_attr] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterAttr` in this scope
struct S;
#[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterDerive` in this scope
//~| WARN this was previously accepted
//~| WARN this was previously accepted
struct Z;
fn inner_block() {
#[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterDerive` in this scope
//~| WARN this was previously accepted
//~| WARN this was previously accepted
struct | ;
}
#[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
struct W;
fn main() {}
| InnerZ | identifier_name |
generate-mod.rs | // Modules generated by transparent proc macros still acts as barriers for names (issue #50504).
// aux-build:generate-mod.rs
| struct FromOutside;
generate_mod::check!(); //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `Outer` in this scope
#[generate_mod::check_attr] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterAttr` in this scope
struct S;
#[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterDerive` in this scope
//~| WARN this was previously accepted
//~| WARN this was previously accepted
struct Z;
fn inner_block() {
#[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterDerive` in this scope
//~| WARN this was previously accepted
//~| WARN this was previously accepted
struct InnerZ;
}
#[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
struct W;
fn main() {} | extern crate generate_mod;
| random_line_split |
21.rs | use std::fs::File;
use std::io::prelude::*;
use std::iter;
mod intcode;
use intcode::*;
#[derive(Debug, Clone)]
enum DroidResult {
Fail(String),
Success(i64)
}
fn get_input() -> std::io::Result<String> {
let mut file = File::open("21.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
fn run_springdroid(state: &mut ProgramState, script: &str) -> DroidResult {
let inputs = script.lines()
.map(|line| line.trim())
.filter(|line| line.len() > 0 && line.chars().next().unwrap()!= '#')
.flat_map(|instruction| instruction.chars().chain(iter::once('\n')))
.map(|c| c as i64);
let (outputs, _) = run_program_with_inputs(state, inputs);
match outputs.last() {
Some(&x) if x > u8::max_value() as i64 => {
DroidResult::Success(x)
},
_ => {
DroidResult::Fail(
outputs.iter()
.map(|&c| c as u8 as char)
.fold(String::new(), |mut acc, c| {
acc.push(c);
acc
})
)
}
}
}
fn main() {
let input = get_input().unwrap();
let program = input.split(',')
.filter_map(|x| x.trim().parse::<i64>().ok())
.collect::<Vec<_>>();
let springscript = "
# There's a hole in ABC
NOT A J
NOT J J
AND B J
AND C J
NOT J J
# and D is not a hole
AND D J
WALK
";
let result = run_springdroid(&mut ProgramState::new(program.clone()), springscript);
match result {
DroidResult::Success(x) => println!("Part 1: {}", x),
DroidResult::Fail(msg) => println!("Failure: {}", msg)
}
let springscript = "
# (
# E and I are not holes
OR E J
AND I J
# )
| # E and F are not holes
OR E T
AND F T
# )
OR T J
OR H J
# (
# There's a hole in ABC
NOT A T
NOT T T
AND B T
AND C T
NOT T T
# and D is not a hole
AND D T
# )
AND T J
RUN
";
let result = run_springdroid(&mut ProgramState::new(program.clone()), springscript);
match result {
DroidResult::Success(x) => println!("Part 2: {}", x),
DroidResult::Fail(msg) => println!("Failure: {}", msg)
}
} | # ( | random_line_split |
21.rs | use std::fs::File;
use std::io::prelude::*;
use std::iter;
mod intcode;
use intcode::*;
#[derive(Debug, Clone)]
enum DroidResult {
Fail(String),
Success(i64)
}
fn | () -> std::io::Result<String> {
let mut file = File::open("21.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
fn run_springdroid(state: &mut ProgramState, script: &str) -> DroidResult {
let inputs = script.lines()
.map(|line| line.trim())
.filter(|line| line.len() > 0 && line.chars().next().unwrap()!= '#')
.flat_map(|instruction| instruction.chars().chain(iter::once('\n')))
.map(|c| c as i64);
let (outputs, _) = run_program_with_inputs(state, inputs);
match outputs.last() {
Some(&x) if x > u8::max_value() as i64 => {
DroidResult::Success(x)
},
_ => {
DroidResult::Fail(
outputs.iter()
.map(|&c| c as u8 as char)
.fold(String::new(), |mut acc, c| {
acc.push(c);
acc
})
)
}
}
}
fn main() {
let input = get_input().unwrap();
let program = input.split(',')
.filter_map(|x| x.trim().parse::<i64>().ok())
.collect::<Vec<_>>();
let springscript = "
# There's a hole in ABC
NOT A J
NOT J J
AND B J
AND C J
NOT J J
# and D is not a hole
AND D J
WALK
";
let result = run_springdroid(&mut ProgramState::new(program.clone()), springscript);
match result {
DroidResult::Success(x) => println!("Part 1: {}", x),
DroidResult::Fail(msg) => println!("Failure: {}", msg)
}
let springscript = "
# (
# E and I are not holes
OR E J
AND I J
# )
# (
# E and F are not holes
OR E T
AND F T
# )
OR T J
OR H J
# (
# There's a hole in ABC
NOT A T
NOT T T
AND B T
AND C T
NOT T T
# and D is not a hole
AND D T
# )
AND T J
RUN
";
let result = run_springdroid(&mut ProgramState::new(program.clone()), springscript);
match result {
DroidResult::Success(x) => println!("Part 2: {}", x),
DroidResult::Fail(msg) => println!("Failure: {}", msg)
}
}
| get_input | identifier_name |
21.rs | use std::fs::File;
use std::io::prelude::*;
use std::iter;
mod intcode;
use intcode::*;
#[derive(Debug, Clone)]
enum DroidResult {
Fail(String),
Success(i64)
}
fn get_input() -> std::io::Result<String> {
let mut file = File::open("21.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
fn run_springdroid(state: &mut ProgramState, script: &str) -> DroidResult {
let inputs = script.lines()
.map(|line| line.trim())
.filter(|line| line.len() > 0 && line.chars().next().unwrap()!= '#')
.flat_map(|instruction| instruction.chars().chain(iter::once('\n')))
.map(|c| c as i64);
let (outputs, _) = run_program_with_inputs(state, inputs);
match outputs.last() {
Some(&x) if x > u8::max_value() as i64 => | ,
_ => {
DroidResult::Fail(
outputs.iter()
.map(|&c| c as u8 as char)
.fold(String::new(), |mut acc, c| {
acc.push(c);
acc
})
)
}
}
}
fn main() {
let input = get_input().unwrap();
let program = input.split(',')
.filter_map(|x| x.trim().parse::<i64>().ok())
.collect::<Vec<_>>();
let springscript = "
# There's a hole in ABC
NOT A J
NOT J J
AND B J
AND C J
NOT J J
# and D is not a hole
AND D J
WALK
";
let result = run_springdroid(&mut ProgramState::new(program.clone()), springscript);
match result {
DroidResult::Success(x) => println!("Part 1: {}", x),
DroidResult::Fail(msg) => println!("Failure: {}", msg)
}
let springscript = "
# (
# E and I are not holes
OR E J
AND I J
# )
# (
# E and F are not holes
OR E T
AND F T
# )
OR T J
OR H J
# (
# There's a hole in ABC
NOT A T
NOT T T
AND B T
AND C T
NOT T T
# and D is not a hole
AND D T
# )
AND T J
RUN
";
let result = run_springdroid(&mut ProgramState::new(program.clone()), springscript);
match result {
DroidResult::Success(x) => println!("Part 2: {}", x),
DroidResult::Fail(msg) => println!("Failure: {}", msg)
}
}
| {
DroidResult::Success(x)
} | conditional_block |
dont_promote_unstable_const_fn.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "humans",
reason = "who ever let humans program computers,
we're apparently really bad at it",
issue = "0")]
#![feature(rustc_const_unstable, const_fn)]
#![feature(staged_api)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature="foo")]
const fn foo() -> u32 { 42 }
fn meh() -> u32 { 42 }
const fn bar() -> u32 { foo() } //~ ERROR `foo` is not yet stable as a const fn
fn a() {
let _: &'static u32 = &foo(); //~ ERROR does not live long enough
}
fn | () {
let _: &'static u32 = &meh(); //~ ERROR does not live long enough
let x: &'static _ = &std::time::Duration::from_millis(42).subsec_millis();
//~^ ERROR does not live long enough
}
| main | identifier_name |
dont_promote_unstable_const_fn.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "humans",
reason = "who ever let humans program computers,
we're apparently really bad at it",
issue = "0")]
#![feature(rustc_const_unstable, const_fn)]
#![feature(staged_api)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature="foo")]
const fn foo() -> u32 { 42 }
fn meh() -> u32 { 42 }
const fn bar() -> u32 { foo() } //~ ERROR `foo` is not yet stable as a const fn
fn a() {
let _: &'static u32 = &foo(); //~ ERROR does not live long enough
}
fn main() {
let _: &'static u32 = &meh(); //~ ERROR does not live long enough | let x: &'static _ = &std::time::Duration::from_millis(42).subsec_millis();
//~^ ERROR does not live long enough
} | random_line_split |
|
dont_promote_unstable_const_fn.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "humans",
reason = "who ever let humans program computers,
we're apparently really bad at it",
issue = "0")]
#![feature(rustc_const_unstable, const_fn)]
#![feature(staged_api)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature="foo")]
const fn foo() -> u32 { 42 }
fn meh() -> u32 { 42 }
const fn bar() -> u32 | //~ ERROR `foo` is not yet stable as a const fn
fn a() {
let _: &'static u32 = &foo(); //~ ERROR does not live long enough
}
fn main() {
let _: &'static u32 = &meh(); //~ ERROR does not live long enough
let x: &'static _ = &std::time::Duration::from_millis(42).subsec_millis();
//~^ ERROR does not live long enough
}
| { foo() } | identifier_body |
mpsc_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between threads, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
#[cfg(stage0)]
use core::prelude::v1::*;
use alloc::boxed::Box;
use core::ptr;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Ordering};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
unsafe impl<T: Send> Send for Queue<T> { }
unsafe impl<T: Send> Sync for Queue<T> { }
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> {
Box::into_raw(box Node {
next: AtomicPtr::new(ptr::null_mut()),
value: v,
})
}
}
impl<T> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, Ordering::AcqRel);
(*prev).next.store(n, Ordering::Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn pop(&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Ordering::Acquire);
if!next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = Box::from_raw(tail);
return Data(ret);
}
if self.head.load(Ordering::Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Queue<T> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while!cur.is_null() {
let next = (*cur).next.load(Ordering::Relaxed);
let _: Box<Node<T>> = Box::from_raw(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use sync::mpsc::channel;
use super::{Queue, Data, Empty, Inconsistent};
use sync::Arc;
use thread;
#[test]
fn test_full() {
let q: Queue<Box<_>> = Queue::new();
q.push(box 1);
q.push(box 2);
}
#[test]
fn test() {
let nthreads = 8;
let nmsgs = 1000;
let q = Queue::new();
match q.pop() {
Empty => |
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in 0..nthreads {
let tx = tx.clone();
let q = q.clone();
thread::spawn(move|| {
for i in 0..nmsgs {
q.push(i);
}
tx.send(()).unwrap();
});
}
let mut i = 0;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) => { i += 1 }
}
}
drop(tx);
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
}
| {} | conditional_block |
mpsc_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between threads, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
#[cfg(stage0)]
use core::prelude::v1::*;
use alloc::boxed::Box;
use core::ptr;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Ordering};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
unsafe impl<T: Send> Send for Queue<T> { }
unsafe impl<T: Send> Sync for Queue<T> { }
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> |
}
impl<T> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, Ordering::AcqRel);
(*prev).next.store(n, Ordering::Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn pop(&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Ordering::Acquire);
if!next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = Box::from_raw(tail);
return Data(ret);
}
if self.head.load(Ordering::Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Queue<T> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while!cur.is_null() {
let next = (*cur).next.load(Ordering::Relaxed);
let _: Box<Node<T>> = Box::from_raw(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use sync::mpsc::channel;
use super::{Queue, Data, Empty, Inconsistent};
use sync::Arc;
use thread;
#[test]
fn test_full() {
let q: Queue<Box<_>> = Queue::new();
q.push(box 1);
q.push(box 2);
}
#[test]
fn test() {
let nthreads = 8;
let nmsgs = 1000;
let q = Queue::new();
match q.pop() {
Empty => {}
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in 0..nthreads {
let tx = tx.clone();
let q = q.clone();
thread::spawn(move|| {
for i in 0..nmsgs {
q.push(i);
}
tx.send(()).unwrap();
});
}
let mut i = 0;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) => { i += 1 }
}
}
drop(tx);
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
}
| {
Box::into_raw(box Node {
next: AtomicPtr::new(ptr::null_mut()),
value: v,
})
} | identifier_body |
mpsc_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between threads, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
#[cfg(stage0)]
use core::prelude::v1::*;
use alloc::boxed::Box;
use core::ptr;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Ordering};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
unsafe impl<T: Send> Send for Queue<T> { }
unsafe impl<T: Send> Sync for Queue<T> { }
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> {
Box::into_raw(box Node {
next: AtomicPtr::new(ptr::null_mut()),
value: v,
})
}
}
impl<T> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, Ordering::AcqRel);
(*prev).next.store(n, Ordering::Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn | (&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Ordering::Acquire);
if!next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = Box::from_raw(tail);
return Data(ret);
}
if self.head.load(Ordering::Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Queue<T> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while!cur.is_null() {
let next = (*cur).next.load(Ordering::Relaxed);
let _: Box<Node<T>> = Box::from_raw(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use sync::mpsc::channel;
use super::{Queue, Data, Empty, Inconsistent};
use sync::Arc;
use thread;
#[test]
fn test_full() {
let q: Queue<Box<_>> = Queue::new();
q.push(box 1);
q.push(box 2);
}
#[test]
fn test() {
let nthreads = 8;
let nmsgs = 1000;
let q = Queue::new();
match q.pop() {
Empty => {}
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in 0..nthreads {
let tx = tx.clone();
let q = q.clone();
thread::spawn(move|| {
for i in 0..nmsgs {
q.push(i);
}
tx.send(()).unwrap();
});
}
let mut i = 0;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) => { i += 1 }
}
}
drop(tx);
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
}
| pop | identifier_name |
mpsc_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between threads, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
#[cfg(stage0)]
use core::prelude::v1::*;
use alloc::boxed::Box;
use core::ptr;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Ordering};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
unsafe impl<T: Send> Send for Queue<T> { }
unsafe impl<T: Send> Sync for Queue<T> { } | Box::into_raw(box Node {
next: AtomicPtr::new(ptr::null_mut()),
value: v,
})
}
}
impl<T> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, Ordering::AcqRel);
(*prev).next.store(n, Ordering::Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn pop(&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Ordering::Acquire);
if!next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = Box::from_raw(tail);
return Data(ret);
}
if self.head.load(Ordering::Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Queue<T> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while!cur.is_null() {
let next = (*cur).next.load(Ordering::Relaxed);
let _: Box<Node<T>> = Box::from_raw(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use sync::mpsc::channel;
use super::{Queue, Data, Empty, Inconsistent};
use sync::Arc;
use thread;
#[test]
fn test_full() {
let q: Queue<Box<_>> = Queue::new();
q.push(box 1);
q.push(box 2);
}
#[test]
fn test() {
let nthreads = 8;
let nmsgs = 1000;
let q = Queue::new();
match q.pop() {
Empty => {}
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in 0..nthreads {
let tx = tx.clone();
let q = q.clone();
thread::spawn(move|| {
for i in 0..nmsgs {
q.push(i);
}
tx.send(()).unwrap();
});
}
let mut i = 0;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) => { i += 1 }
}
}
drop(tx);
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
} |
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> { | random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(box_syntax)]
#![feature(conservative_impl_trait)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(optin_builtin_traits)]
#![feature(plugin)]
#![feature(proc_macro)]
#![feature(slice_patterns)]
#![feature(stmt_expr_attributes)]
#![feature(try_from)]
#![feature(untagged_unions)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(script_plugins)]
extern crate angle;
extern crate app_units;
extern crate atomic_refcell;
extern crate audio_video_metadata;
#[macro_use]
extern crate bitflags;
extern crate bluetooth_traits;
extern crate byteorder;
extern crate canvas_traits;
extern crate caseless;
extern crate cookie as cookie_rs;
extern crate core;
#[macro_use] extern crate cssparser;
#[macro_use] extern crate deny_public_fields;
extern crate devtools_traits;
extern crate dom_struct;
#[macro_use]
extern crate domobject_derive;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate gfx_traits;
extern crate heapsize;
#[macro_use] extern crate heapsize_derive;
extern crate html5ever;
#[macro_use] extern crate html5ever_atoms;
#[macro_use]
extern crate hyper;
extern crate hyper_serde;
extern crate image;
extern crate ipc_channel;
#[macro_use]
extern crate js;
#[macro_use]
extern crate jstraceable_derive;
extern crate libc;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mime;
extern crate mime_guess;
extern crate msg;
extern crate net_traits;
extern crate num_traits;
extern crate offscreen_gl_context;
extern crate open;
extern crate parking_lot;
extern crate phf;
#[macro_use]
extern crate profile_traits;
extern crate range;
extern crate ref_filter_map;
extern crate ref_slice;
extern crate regex;
extern crate rustc_serialize;
extern crate script_layout_interface;
extern crate script_traits;
extern crate selectors;
extern crate serde;
#[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_geometry;
extern crate servo_rand;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate time;
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
extern crate tinyfiledialogs;
extern crate url;
extern crate uuid;
extern crate webrender_traits;
extern crate websocket;
extern crate webvr_traits;
extern crate xml5ever;
mod body;
pub mod clipboard_provider;
mod devtools;
pub mod document_loader;
#[macro_use]
mod dom;
pub mod fetch;
mod layout_image;
pub mod layout_wrapper;
mod mem;
mod microtask;
mod network_listener;
pub mod script_runtime;
#[allow(unsafe_code)]
pub mod script_thread;
mod serviceworker_manager;
mod serviceworkerjob;
mod stylesheet_loader;
mod task_source;
pub mod test;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use dom::bindings::proxyhandler;
use script_traits::SWManagerSenders;
use serviceworker_manager::ServiceWorkerManager;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn | () {}
pub fn init_service_workers(sw_senders: SWManagerSenders) {
// Spawn the service worker manager passing the constellation sender
ServiceWorkerManager::spawn_manager(sw_senders);
}
#[allow(unsafe_code)]
pub fn init() {
unsafe {
proxyhandler::init();
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
}
perform_platform_specific_initialization();
}
| perform_platform_specific_initialization | identifier_name |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(box_syntax)]
#![feature(conservative_impl_trait)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(optin_builtin_traits)]
#![feature(plugin)]
#![feature(proc_macro)]
#![feature(slice_patterns)]
#![feature(stmt_expr_attributes)]
#![feature(try_from)]
#![feature(untagged_unions)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(script_plugins)]
extern crate angle;
extern crate app_units; | #[macro_use]
extern crate bitflags;
extern crate bluetooth_traits;
extern crate byteorder;
extern crate canvas_traits;
extern crate caseless;
extern crate cookie as cookie_rs;
extern crate core;
#[macro_use] extern crate cssparser;
#[macro_use] extern crate deny_public_fields;
extern crate devtools_traits;
extern crate dom_struct;
#[macro_use]
extern crate domobject_derive;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate gfx_traits;
extern crate heapsize;
#[macro_use] extern crate heapsize_derive;
extern crate html5ever;
#[macro_use] extern crate html5ever_atoms;
#[macro_use]
extern crate hyper;
extern crate hyper_serde;
extern crate image;
extern crate ipc_channel;
#[macro_use]
extern crate js;
#[macro_use]
extern crate jstraceable_derive;
extern crate libc;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mime;
extern crate mime_guess;
extern crate msg;
extern crate net_traits;
extern crate num_traits;
extern crate offscreen_gl_context;
extern crate open;
extern crate parking_lot;
extern crate phf;
#[macro_use]
extern crate profile_traits;
extern crate range;
extern crate ref_filter_map;
extern crate ref_slice;
extern crate regex;
extern crate rustc_serialize;
extern crate script_layout_interface;
extern crate script_traits;
extern crate selectors;
extern crate serde;
#[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_geometry;
extern crate servo_rand;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate time;
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
extern crate tinyfiledialogs;
extern crate url;
extern crate uuid;
extern crate webrender_traits;
extern crate websocket;
extern crate webvr_traits;
extern crate xml5ever;
mod body;
pub mod clipboard_provider;
mod devtools;
pub mod document_loader;
#[macro_use]
mod dom;
pub mod fetch;
mod layout_image;
pub mod layout_wrapper;
mod mem;
mod microtask;
mod network_listener;
pub mod script_runtime;
#[allow(unsafe_code)]
pub mod script_thread;
mod serviceworker_manager;
mod serviceworkerjob;
mod stylesheet_loader;
mod task_source;
pub mod test;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use dom::bindings::proxyhandler;
use script_traits::SWManagerSenders;
use serviceworker_manager::ServiceWorkerManager;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
pub fn init_service_workers(sw_senders: SWManagerSenders) {
// Spawn the service worker manager passing the constellation sender
ServiceWorkerManager::spawn_manager(sw_senders);
}
#[allow(unsafe_code)]
pub fn init() {
unsafe {
proxyhandler::init();
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
}
perform_platform_specific_initialization();
} | extern crate atomic_refcell;
extern crate audio_video_metadata; | random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(box_syntax)]
#![feature(conservative_impl_trait)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(optin_builtin_traits)]
#![feature(plugin)]
#![feature(proc_macro)]
#![feature(slice_patterns)]
#![feature(stmt_expr_attributes)]
#![feature(try_from)]
#![feature(untagged_unions)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(script_plugins)]
extern crate angle;
extern crate app_units;
extern crate atomic_refcell;
extern crate audio_video_metadata;
#[macro_use]
extern crate bitflags;
extern crate bluetooth_traits;
extern crate byteorder;
extern crate canvas_traits;
extern crate caseless;
extern crate cookie as cookie_rs;
extern crate core;
#[macro_use] extern crate cssparser;
#[macro_use] extern crate deny_public_fields;
extern crate devtools_traits;
extern crate dom_struct;
#[macro_use]
extern crate domobject_derive;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate gfx_traits;
extern crate heapsize;
#[macro_use] extern crate heapsize_derive;
extern crate html5ever;
#[macro_use] extern crate html5ever_atoms;
#[macro_use]
extern crate hyper;
extern crate hyper_serde;
extern crate image;
extern crate ipc_channel;
#[macro_use]
extern crate js;
#[macro_use]
extern crate jstraceable_derive;
extern crate libc;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mime;
extern crate mime_guess;
extern crate msg;
extern crate net_traits;
extern crate num_traits;
extern crate offscreen_gl_context;
extern crate open;
extern crate parking_lot;
extern crate phf;
#[macro_use]
extern crate profile_traits;
extern crate range;
extern crate ref_filter_map;
extern crate ref_slice;
extern crate regex;
extern crate rustc_serialize;
extern crate script_layout_interface;
extern crate script_traits;
extern crate selectors;
extern crate serde;
#[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_geometry;
extern crate servo_rand;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate time;
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
extern crate tinyfiledialogs;
extern crate url;
extern crate uuid;
extern crate webrender_traits;
extern crate websocket;
extern crate webvr_traits;
extern crate xml5ever;
mod body;
pub mod clipboard_provider;
mod devtools;
pub mod document_loader;
#[macro_use]
mod dom;
pub mod fetch;
mod layout_image;
pub mod layout_wrapper;
mod mem;
mod microtask;
mod network_listener;
pub mod script_runtime;
#[allow(unsafe_code)]
pub mod script_thread;
mod serviceworker_manager;
mod serviceworkerjob;
mod stylesheet_loader;
mod task_source;
pub mod test;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use dom::bindings::proxyhandler;
use script_traits::SWManagerSenders;
use serviceworker_manager::ServiceWorkerManager;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
pub fn init_service_workers(sw_senders: SWManagerSenders) {
// Spawn the service worker manager passing the constellation sender
ServiceWorkerManager::spawn_manager(sw_senders);
}
#[allow(unsafe_code)]
pub fn init() | {
unsafe {
proxyhandler::init();
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
}
perform_platform_specific_initialization();
} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.