file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
macro_import.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Used by `rustc` when loading a crate with exported macros.
use session::Session;
use metadata::creader::CrateReader;
use std::collections::{HashSet, HashMap};
use syntax::ast;
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::attr::AttrMetaMethods;
struct MacroLoader<'a> {
sess: &'a Session,
span_whitelist: HashSet<Span>,
reader: CrateReader<'a>,
macros: Vec<ast::MacroDef>,
}
impl<'a> MacroLoader<'a> {
fn new(sess: &'a Session) -> MacroLoader<'a> {
MacroLoader {
sess: sess,
span_whitelist: HashSet::new(),
reader: CrateReader::new(sess),
macros: vec![],
}
}
}
/// Read exported macros.
pub fn read_macro_defs(sess: &Session, krate: &ast::Crate) -> Vec<ast::MacroDef> {
let mut loader = MacroLoader::new(sess);
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly. Identify these by
// spans, because the crate map isn't set up yet.
for item in &krate.module.items {
if let ast::ItemExternCrate(_) = item.node {
loader.span_whitelist.insert(item.span);
}
}
visit::walk_crate(&mut loader, krate);
loader.macros
}
pub type MacroSelection = HashMap<token::InternedString, Span>;
// note that macros aren't expanded yet, and therefore macros can't add macro imports.
impl<'a, 'v> Visitor<'v> for MacroLoader<'a> {
fn
|
(&mut self, item: &ast::Item) {
// We're only interested in `extern crate`.
match item.node {
ast::ItemExternCrate(_) => {}
_ => {
visit::walk_item(self, item);
return;
}
}
// Parse the attributes relating to macros.
let mut import = Some(HashMap::new()); // None => load all
let mut reexport = HashMap::new();
for attr in &item.attrs {
let mut used = true;
match &attr.name()[..] {
"phase" => {
self.sess.span_err(attr.span, "#[phase] is deprecated");
}
"plugin" => {
self.sess.span_err(attr.span, "#[plugin] on `extern crate` is deprecated");
self.sess.fileline_help(attr.span, &format!("use a crate attribute instead, \
i.e. #![plugin({})]",
item.ident.as_str()));
}
"macro_use" => {
let names = attr.meta_item_list();
if names.is_none() {
// no names => load all
import = None;
}
if let (Some(sel), Some(names)) = (import.as_mut(), names) {
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
sel.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro import");
}
}
}
}
"macro_reexport" => {
let names = match attr.meta_item_list() {
Some(names) => names,
None => {
self.sess.span_err(attr.span, "bad macro reexport");
continue;
}
};
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
reexport.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro reexport");
}
}
}
_ => used = false,
}
if used {
attr::mark_used(attr);
}
}
self.load_macros(item, import, reexport)
}
fn visit_mac(&mut self, _: &ast::Mac) {
// bummer... can't see macro imports inside macros.
// do nothing.
}
}
impl<'a> MacroLoader<'a> {
fn load_macros<'b>(&mut self,
vi: &ast::Item,
import: Option<MacroSelection>,
reexport: MacroSelection) {
if let Some(sel) = import.as_ref() {
if sel.is_empty() && reexport.is_empty() {
return;
}
}
if!self.span_whitelist.contains(&vi.span) {
self.sess.span_err(vi.span, "an `extern crate` loading macros must be at \
the crate root");
return;
}
let macros = self.reader.read_exported_macros(vi);
let mut seen = HashSet::new();
for mut def in macros {
let name = token::get_ident(def.ident);
seen.insert(name.clone());
def.use_locally = match import.as_ref() {
None => true,
Some(sel) => sel.contains_key(&name),
};
def.export = reexport.contains_key(&name);
def.allow_internal_unstable = attr::contains_name(&def.attrs,
"allow_internal_unstable");
debug!("load_macros: loaded: {:?}", def);
self.macros.push(def);
}
if let Some(sel) = import.as_ref() {
for (name, span) in sel.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "imported macro not found");
}
}
}
for (name, span) in reexport.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "reexported macro not found");
}
}
}
}
|
visit_item
|
identifier_name
|
macro_import.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Used by `rustc` when loading a crate with exported macros.
use session::Session;
use metadata::creader::CrateReader;
use std::collections::{HashSet, HashMap};
use syntax::ast;
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::attr::AttrMetaMethods;
struct MacroLoader<'a> {
sess: &'a Session,
span_whitelist: HashSet<Span>,
reader: CrateReader<'a>,
macros: Vec<ast::MacroDef>,
}
impl<'a> MacroLoader<'a> {
fn new(sess: &'a Session) -> MacroLoader<'a> {
MacroLoader {
sess: sess,
span_whitelist: HashSet::new(),
reader: CrateReader::new(sess),
macros: vec![],
}
}
}
/// Read exported macros.
pub fn read_macro_defs(sess: &Session, krate: &ast::Crate) -> Vec<ast::MacroDef> {
let mut loader = MacroLoader::new(sess);
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly. Identify these by
// spans, because the crate map isn't set up yet.
for item in &krate.module.items {
if let ast::ItemExternCrate(_) = item.node {
loader.span_whitelist.insert(item.span);
}
}
visit::walk_crate(&mut loader, krate);
loader.macros
}
pub type MacroSelection = HashMap<token::InternedString, Span>;
// note that macros aren't expanded yet, and therefore macros can't add macro imports.
impl<'a, 'v> Visitor<'v> for MacroLoader<'a> {
fn visit_item(&mut self, item: &ast::Item) {
// We're only interested in `extern crate`.
match item.node {
ast::ItemExternCrate(_) => {}
_ => {
visit::walk_item(self, item);
return;
}
}
// Parse the attributes relating to macros.
let mut import = Some(HashMap::new()); // None => load all
let mut reexport = HashMap::new();
for attr in &item.attrs {
let mut used = true;
match &attr.name()[..] {
"phase" => {
self.sess.span_err(attr.span, "#[phase] is deprecated");
}
"plugin" => {
self.sess.span_err(attr.span, "#[plugin] on `extern crate` is deprecated");
self.sess.fileline_help(attr.span, &format!("use a crate attribute instead, \
i.e. #![plugin({})]",
item.ident.as_str()));
}
"macro_use" => {
let names = attr.meta_item_list();
if names.is_none() {
// no names => load all
import = None;
}
if let (Some(sel), Some(names)) = (import.as_mut(), names) {
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
sel.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro import");
}
}
}
}
"macro_reexport" => {
let names = match attr.meta_item_list() {
Some(names) => names,
None => {
self.sess.span_err(attr.span, "bad macro reexport");
continue;
}
};
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
reexport.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro reexport");
}
}
}
_ => used = false,
}
if used {
attr::mark_used(attr);
}
}
self.load_macros(item, import, reexport)
|
// do nothing.
}
}
impl<'a> MacroLoader<'a> {
fn load_macros<'b>(&mut self,
vi: &ast::Item,
import: Option<MacroSelection>,
reexport: MacroSelection) {
if let Some(sel) = import.as_ref() {
if sel.is_empty() && reexport.is_empty() {
return;
}
}
if!self.span_whitelist.contains(&vi.span) {
self.sess.span_err(vi.span, "an `extern crate` loading macros must be at \
the crate root");
return;
}
let macros = self.reader.read_exported_macros(vi);
let mut seen = HashSet::new();
for mut def in macros {
let name = token::get_ident(def.ident);
seen.insert(name.clone());
def.use_locally = match import.as_ref() {
None => true,
Some(sel) => sel.contains_key(&name),
};
def.export = reexport.contains_key(&name);
def.allow_internal_unstable = attr::contains_name(&def.attrs,
"allow_internal_unstable");
debug!("load_macros: loaded: {:?}", def);
self.macros.push(def);
}
if let Some(sel) = import.as_ref() {
for (name, span) in sel.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "imported macro not found");
}
}
}
for (name, span) in reexport.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "reexported macro not found");
}
}
}
}
|
}
fn visit_mac(&mut self, _: &ast::Mac) {
// bummer... can't see macro imports inside macros.
|
random_line_split
|
macro_import.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Used by `rustc` when loading a crate with exported macros.
use session::Session;
use metadata::creader::CrateReader;
use std::collections::{HashSet, HashMap};
use syntax::ast;
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::attr::AttrMetaMethods;
struct MacroLoader<'a> {
sess: &'a Session,
span_whitelist: HashSet<Span>,
reader: CrateReader<'a>,
macros: Vec<ast::MacroDef>,
}
impl<'a> MacroLoader<'a> {
fn new(sess: &'a Session) -> MacroLoader<'a> {
MacroLoader {
sess: sess,
span_whitelist: HashSet::new(),
reader: CrateReader::new(sess),
macros: vec![],
}
}
}
/// Read exported macros.
pub fn read_macro_defs(sess: &Session, krate: &ast::Crate) -> Vec<ast::MacroDef>
|
pub type MacroSelection = HashMap<token::InternedString, Span>;
// note that macros aren't expanded yet, and therefore macros can't add macro imports.
impl<'a, 'v> Visitor<'v> for MacroLoader<'a> {
fn visit_item(&mut self, item: &ast::Item) {
// We're only interested in `extern crate`.
match item.node {
ast::ItemExternCrate(_) => {}
_ => {
visit::walk_item(self, item);
return;
}
}
// Parse the attributes relating to macros.
let mut import = Some(HashMap::new()); // None => load all
let mut reexport = HashMap::new();
for attr in &item.attrs {
let mut used = true;
match &attr.name()[..] {
"phase" => {
self.sess.span_err(attr.span, "#[phase] is deprecated");
}
"plugin" => {
self.sess.span_err(attr.span, "#[plugin] on `extern crate` is deprecated");
self.sess.fileline_help(attr.span, &format!("use a crate attribute instead, \
i.e. #![plugin({})]",
item.ident.as_str()));
}
"macro_use" => {
let names = attr.meta_item_list();
if names.is_none() {
// no names => load all
import = None;
}
if let (Some(sel), Some(names)) = (import.as_mut(), names) {
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
sel.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro import");
}
}
}
}
"macro_reexport" => {
let names = match attr.meta_item_list() {
Some(names) => names,
None => {
self.sess.span_err(attr.span, "bad macro reexport");
continue;
}
};
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
reexport.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro reexport");
}
}
}
_ => used = false,
}
if used {
attr::mark_used(attr);
}
}
self.load_macros(item, import, reexport)
}
fn visit_mac(&mut self, _: &ast::Mac) {
// bummer... can't see macro imports inside macros.
// do nothing.
}
}
impl<'a> MacroLoader<'a> {
fn load_macros<'b>(&mut self,
vi: &ast::Item,
import: Option<MacroSelection>,
reexport: MacroSelection) {
if let Some(sel) = import.as_ref() {
if sel.is_empty() && reexport.is_empty() {
return;
}
}
if!self.span_whitelist.contains(&vi.span) {
self.sess.span_err(vi.span, "an `extern crate` loading macros must be at \
the crate root");
return;
}
let macros = self.reader.read_exported_macros(vi);
let mut seen = HashSet::new();
for mut def in macros {
let name = token::get_ident(def.ident);
seen.insert(name.clone());
def.use_locally = match import.as_ref() {
None => true,
Some(sel) => sel.contains_key(&name),
};
def.export = reexport.contains_key(&name);
def.allow_internal_unstable = attr::contains_name(&def.attrs,
"allow_internal_unstable");
debug!("load_macros: loaded: {:?}", def);
self.macros.push(def);
}
if let Some(sel) = import.as_ref() {
for (name, span) in sel.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "imported macro not found");
}
}
}
for (name, span) in reexport.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "reexported macro not found");
}
}
}
}
|
{
let mut loader = MacroLoader::new(sess);
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly. Identify these by
// spans, because the crate map isn't set up yet.
for item in &krate.module.items {
if let ast::ItemExternCrate(_) = item.node {
loader.span_whitelist.insert(item.span);
}
}
visit::walk_crate(&mut loader, krate);
loader.macros
}
|
identifier_body
|
macro_import.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Used by `rustc` when loading a crate with exported macros.
use session::Session;
use metadata::creader::CrateReader;
use std::collections::{HashSet, HashMap};
use syntax::ast;
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::attr::AttrMetaMethods;
struct MacroLoader<'a> {
sess: &'a Session,
span_whitelist: HashSet<Span>,
reader: CrateReader<'a>,
macros: Vec<ast::MacroDef>,
}
impl<'a> MacroLoader<'a> {
fn new(sess: &'a Session) -> MacroLoader<'a> {
MacroLoader {
sess: sess,
span_whitelist: HashSet::new(),
reader: CrateReader::new(sess),
macros: vec![],
}
}
}
/// Read exported macros.
pub fn read_macro_defs(sess: &Session, krate: &ast::Crate) -> Vec<ast::MacroDef> {
let mut loader = MacroLoader::new(sess);
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly. Identify these by
// spans, because the crate map isn't set up yet.
for item in &krate.module.items {
if let ast::ItemExternCrate(_) = item.node {
loader.span_whitelist.insert(item.span);
}
}
visit::walk_crate(&mut loader, krate);
loader.macros
}
pub type MacroSelection = HashMap<token::InternedString, Span>;
// note that macros aren't expanded yet, and therefore macros can't add macro imports.
impl<'a, 'v> Visitor<'v> for MacroLoader<'a> {
fn visit_item(&mut self, item: &ast::Item) {
// We're only interested in `extern crate`.
match item.node {
ast::ItemExternCrate(_) => {}
_ => {
visit::walk_item(self, item);
return;
}
}
// Parse the attributes relating to macros.
let mut import = Some(HashMap::new()); // None => load all
let mut reexport = HashMap::new();
for attr in &item.attrs {
let mut used = true;
match &attr.name()[..] {
"phase" => {
self.sess.span_err(attr.span, "#[phase] is deprecated");
}
"plugin" =>
|
"macro_use" => {
let names = attr.meta_item_list();
if names.is_none() {
// no names => load all
import = None;
}
if let (Some(sel), Some(names)) = (import.as_mut(), names) {
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
sel.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro import");
}
}
}
}
"macro_reexport" => {
let names = match attr.meta_item_list() {
Some(names) => names,
None => {
self.sess.span_err(attr.span, "bad macro reexport");
continue;
}
};
for attr in names {
if let ast::MetaWord(ref name) = attr.node {
reexport.insert(name.clone(), attr.span);
} else {
self.sess.span_err(attr.span, "bad macro reexport");
}
}
}
_ => used = false,
}
if used {
attr::mark_used(attr);
}
}
self.load_macros(item, import, reexport)
}
fn visit_mac(&mut self, _: &ast::Mac) {
// bummer... can't see macro imports inside macros.
// do nothing.
}
}
impl<'a> MacroLoader<'a> {
fn load_macros<'b>(&mut self,
vi: &ast::Item,
import: Option<MacroSelection>,
reexport: MacroSelection) {
if let Some(sel) = import.as_ref() {
if sel.is_empty() && reexport.is_empty() {
return;
}
}
if!self.span_whitelist.contains(&vi.span) {
self.sess.span_err(vi.span, "an `extern crate` loading macros must be at \
the crate root");
return;
}
let macros = self.reader.read_exported_macros(vi);
let mut seen = HashSet::new();
for mut def in macros {
let name = token::get_ident(def.ident);
seen.insert(name.clone());
def.use_locally = match import.as_ref() {
None => true,
Some(sel) => sel.contains_key(&name),
};
def.export = reexport.contains_key(&name);
def.allow_internal_unstable = attr::contains_name(&def.attrs,
"allow_internal_unstable");
debug!("load_macros: loaded: {:?}", def);
self.macros.push(def);
}
if let Some(sel) = import.as_ref() {
for (name, span) in sel.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "imported macro not found");
}
}
}
for (name, span) in reexport.iter() {
if!seen.contains(name) {
self.sess.span_err(*span, "reexported macro not found");
}
}
}
}
|
{
self.sess.span_err(attr.span, "#[plugin] on `extern crate` is deprecated");
self.sess.fileline_help(attr.span, &format!("use a crate attribute instead, \
i.e. #![plugin({})]",
item.ident.as_str()));
}
|
conditional_block
|
htmldialogelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding::HTMLDialogElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{window_from_node, Node};
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDialogElement {
htmlelement: HTMLElement,
return_value: DomRefCell<DOMString>,
}
impl HTMLDialogElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDialogElement {
HTMLDialogElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
return_value: DomRefCell::new(DOMString::new()),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDialogElement> {
Node::reflect_node(
Box::new(HTMLDialogElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLDialogElementBinding::Wrap,
)
}
}
impl HTMLDialogElementMethods for HTMLDialogElement {
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_getter!(Open, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_setter!(SetOpen, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn
|
(&self) -> DOMString {
let return_value = self.return_value.borrow();
return_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn SetReturnValue(&self, return_value: DOMString) {
*self.return_value.borrow_mut() = return_value;
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-close
fn Close(&self, return_value: Option<DOMString>) {
let element = self.upcast::<Element>();
let target = self.upcast::<EventTarget>();
let win = window_from_node(self);
// Step 1 & 2
if element
.remove_attribute(&ns!(), &local_name!("open"))
.is_none()
{
return;
}
// Step 3
if let Some(new_value) = return_value {
*self.return_value.borrow_mut() = new_value;
}
// TODO: Step 4 implement pending dialog stack removal
// Step 5
win.task_manager()
.dom_manipulation_task_source()
.queue_simple_event(target, atom!("close"), &win);
}
}
|
ReturnValue
|
identifier_name
|
htmldialogelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding::HTMLDialogElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{window_from_node, Node};
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDialogElement {
htmlelement: HTMLElement,
return_value: DomRefCell<DOMString>,
}
impl HTMLDialogElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDialogElement {
HTMLDialogElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
return_value: DomRefCell::new(DOMString::new()),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDialogElement> {
Node::reflect_node(
Box::new(HTMLDialogElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLDialogElementBinding::Wrap,
)
}
}
impl HTMLDialogElementMethods for HTMLDialogElement {
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_getter!(Open, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_setter!(SetOpen, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn ReturnValue(&self) -> DOMString {
let return_value = self.return_value.borrow();
return_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn SetReturnValue(&self, return_value: DOMString) {
*self.return_value.borrow_mut() = return_value;
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-close
fn Close(&self, return_value: Option<DOMString>) {
let element = self.upcast::<Element>();
let target = self.upcast::<EventTarget>();
let win = window_from_node(self);
// Step 1 & 2
if element
.remove_attribute(&ns!(), &local_name!("open"))
.is_none()
|
// Step 3
if let Some(new_value) = return_value {
*self.return_value.borrow_mut() = new_value;
}
// TODO: Step 4 implement pending dialog stack removal
// Step 5
win.task_manager()
.dom_manipulation_task_source()
.queue_simple_event(target, atom!("close"), &win);
}
}
|
{
return;
}
|
conditional_block
|
htmldialogelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding::HTMLDialogElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{window_from_node, Node};
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDialogElement {
htmlelement: HTMLElement,
return_value: DomRefCell<DOMString>,
}
impl HTMLDialogElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
|
HTMLDialogElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
return_value: DomRefCell::new(DOMString::new()),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDialogElement> {
Node::reflect_node(
Box::new(HTMLDialogElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLDialogElementBinding::Wrap,
)
}
}
impl HTMLDialogElementMethods for HTMLDialogElement {
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_getter!(Open, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_setter!(SetOpen, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn ReturnValue(&self) -> DOMString {
let return_value = self.return_value.borrow();
return_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn SetReturnValue(&self, return_value: DOMString) {
*self.return_value.borrow_mut() = return_value;
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-close
fn Close(&self, return_value: Option<DOMString>) {
let element = self.upcast::<Element>();
let target = self.upcast::<EventTarget>();
let win = window_from_node(self);
// Step 1 & 2
if element
.remove_attribute(&ns!(), &local_name!("open"))
.is_none()
{
return;
}
// Step 3
if let Some(new_value) = return_value {
*self.return_value.borrow_mut() = new_value;
}
// TODO: Step 4 implement pending dialog stack removal
// Step 5
win.task_manager()
.dom_manipulation_task_source()
.queue_simple_event(target, atom!("close"), &win);
}
}
|
document: &Document,
) -> HTMLDialogElement {
|
random_line_split
|
htmldialogelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLDialogElementBinding::HTMLDialogElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{window_from_node, Node};
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDialogElement {
htmlelement: HTMLElement,
return_value: DomRefCell<DOMString>,
}
impl HTMLDialogElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDialogElement {
HTMLDialogElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
return_value: DomRefCell::new(DOMString::new()),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDialogElement> {
Node::reflect_node(
Box::new(HTMLDialogElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLDialogElementBinding::Wrap,
)
}
}
impl HTMLDialogElementMethods for HTMLDialogElement {
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_getter!(Open, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_setter!(SetOpen, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn ReturnValue(&self) -> DOMString {
let return_value = self.return_value.borrow();
return_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn SetReturnValue(&self, return_value: DOMString) {
*self.return_value.borrow_mut() = return_value;
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-close
fn Close(&self, return_value: Option<DOMString>)
|
// Step 5
win.task_manager()
.dom_manipulation_task_source()
.queue_simple_event(target, atom!("close"), &win);
}
}
|
{
let element = self.upcast::<Element>();
let target = self.upcast::<EventTarget>();
let win = window_from_node(self);
// Step 1 & 2
if element
.remove_attribute(&ns!(), &local_name!("open"))
.is_none()
{
return;
}
// Step 3
if let Some(new_value) = return_value {
*self.return_value.borrow_mut() = new_value;
}
// TODO: Step 4 implement pending dialog stack removal
|
identifier_body
|
channelmergernode.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::audionode::{AudioNode, MAX_CHANNEL_COUNT};
use crate::dom::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::ChannelMergerNodeBinding::{
self, ChannelMergerOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use dom_struct::dom_struct;
use servo_media::audio::channel_node::ChannelNodeOptions;
use servo_media::audio::node::AudioNodeInit;
#[dom_struct]
pub struct ChannelMergerNode {
node: AudioNode,
}
impl ChannelMergerNode {
#[allow(unrooted_must_root)]
pub fn new_inherited(
_: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<ChannelMergerNode> {
let node_options = options.parent.unwrap_or(
1,
ChannelCountMode::Explicit,
ChannelInterpretation::Speakers,
);
if node_options.count!= 1 || node_options.mode!= ChannelCountMode::Explicit
|
if options.numberOfInputs < 1 || options.numberOfInputs > MAX_CHANNEL_COUNT {
return Err(Error::IndexSize);
}
let node = AudioNode::new_inherited(
AudioNodeInit::ChannelMergerNode(options.into()),
context,
node_options,
options.numberOfInputs, // inputs
1, // outputs
)?;
Ok(ChannelMergerNode { node })
}
#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
let node = ChannelMergerNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object(
Box::new(node),
window,
ChannelMergerNodeBinding::Wrap,
))
}
#[allow(non_snake_case)]
pub fn Constructor(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
ChannelMergerNode::new(window, context, options)
}
}
impl<'a> From<&'a ChannelMergerOptions> for ChannelNodeOptions {
fn from(options: &'a ChannelMergerOptions) -> Self {
Self {
channels: options.numberOfInputs as u8,
}
}
}
|
{
return Err(Error::InvalidState);
}
|
conditional_block
|
channelmergernode.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::audionode::{AudioNode, MAX_CHANNEL_COUNT};
use crate::dom::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::ChannelMergerNodeBinding::{
self, ChannelMergerOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use dom_struct::dom_struct;
use servo_media::audio::channel_node::ChannelNodeOptions;
use servo_media::audio::node::AudioNodeInit;
#[dom_struct]
pub struct ChannelMergerNode {
node: AudioNode,
}
impl ChannelMergerNode {
#[allow(unrooted_must_root)]
pub fn new_inherited(
_: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<ChannelMergerNode> {
let node_options = options.parent.unwrap_or(
1,
|
if node_options.count!= 1 || node_options.mode!= ChannelCountMode::Explicit {
return Err(Error::InvalidState);
}
if options.numberOfInputs < 1 || options.numberOfInputs > MAX_CHANNEL_COUNT {
return Err(Error::IndexSize);
}
let node = AudioNode::new_inherited(
AudioNodeInit::ChannelMergerNode(options.into()),
context,
node_options,
options.numberOfInputs, // inputs
1, // outputs
)?;
Ok(ChannelMergerNode { node })
}
#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
let node = ChannelMergerNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object(
Box::new(node),
window,
ChannelMergerNodeBinding::Wrap,
))
}
#[allow(non_snake_case)]
pub fn Constructor(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
ChannelMergerNode::new(window, context, options)
}
}
impl<'a> From<&'a ChannelMergerOptions> for ChannelNodeOptions {
fn from(options: &'a ChannelMergerOptions) -> Self {
Self {
channels: options.numberOfInputs as u8,
}
}
}
|
ChannelCountMode::Explicit,
ChannelInterpretation::Speakers,
);
|
random_line_split
|
channelmergernode.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::audionode::{AudioNode, MAX_CHANNEL_COUNT};
use crate::dom::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::ChannelMergerNodeBinding::{
self, ChannelMergerOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use dom_struct::dom_struct;
use servo_media::audio::channel_node::ChannelNodeOptions;
use servo_media::audio::node::AudioNodeInit;
#[dom_struct]
pub struct ChannelMergerNode {
node: AudioNode,
}
impl ChannelMergerNode {
#[allow(unrooted_must_root)]
pub fn new_inherited(
_: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<ChannelMergerNode> {
let node_options = options.parent.unwrap_or(
1,
ChannelCountMode::Explicit,
ChannelInterpretation::Speakers,
);
if node_options.count!= 1 || node_options.mode!= ChannelCountMode::Explicit {
return Err(Error::InvalidState);
}
if options.numberOfInputs < 1 || options.numberOfInputs > MAX_CHANNEL_COUNT {
return Err(Error::IndexSize);
}
let node = AudioNode::new_inherited(
AudioNodeInit::ChannelMergerNode(options.into()),
context,
node_options,
options.numberOfInputs, // inputs
1, // outputs
)?;
Ok(ChannelMergerNode { node })
}
#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
let node = ChannelMergerNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object(
Box::new(node),
window,
ChannelMergerNodeBinding::Wrap,
))
}
#[allow(non_snake_case)]
pub fn Constructor(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
ChannelMergerNode::new(window, context, options)
}
}
impl<'a> From<&'a ChannelMergerOptions> for ChannelNodeOptions {
fn
|
(options: &'a ChannelMergerOptions) -> Self {
Self {
channels: options.numberOfInputs as u8,
}
}
}
|
from
|
identifier_name
|
channelmergernode.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::audionode::{AudioNode, MAX_CHANNEL_COUNT};
use crate::dom::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::ChannelMergerNodeBinding::{
self, ChannelMergerOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use dom_struct::dom_struct;
use servo_media::audio::channel_node::ChannelNodeOptions;
use servo_media::audio::node::AudioNodeInit;
#[dom_struct]
pub struct ChannelMergerNode {
node: AudioNode,
}
impl ChannelMergerNode {
#[allow(unrooted_must_root)]
pub fn new_inherited(
_: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<ChannelMergerNode> {
let node_options = options.parent.unwrap_or(
1,
ChannelCountMode::Explicit,
ChannelInterpretation::Speakers,
);
if node_options.count!= 1 || node_options.mode!= ChannelCountMode::Explicit {
return Err(Error::InvalidState);
}
if options.numberOfInputs < 1 || options.numberOfInputs > MAX_CHANNEL_COUNT {
return Err(Error::IndexSize);
}
let node = AudioNode::new_inherited(
AudioNodeInit::ChannelMergerNode(options.into()),
context,
node_options,
options.numberOfInputs, // inputs
1, // outputs
)?;
Ok(ChannelMergerNode { node })
}
#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
let node = ChannelMergerNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object(
Box::new(node),
window,
ChannelMergerNodeBinding::Wrap,
))
}
#[allow(non_snake_case)]
pub fn Constructor(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
ChannelMergerNode::new(window, context, options)
}
}
impl<'a> From<&'a ChannelMergerOptions> for ChannelNodeOptions {
fn from(options: &'a ChannelMergerOptions) -> Self
|
}
|
{
Self {
channels: options.numberOfInputs as u8,
}
}
|
identifier_body
|
sph_shabal.rs
|
extern crate libc;
use std::default::Default;
#[repr(C)]
pub struct sph_shabal_context {
pub buf: [u8; 64],
pub ptr: libc::size_t,
pub a: [u32; 12],
pub b: [u32; 16],
pub c: [u32; 16],
pub w_high: u32,
pub w_low: u32,
}
impl Default for sph_shabal_context {
fn default() -> sph_shabal_context {
sph_shabal_context {
buf: [0u8; 64],
ptr: 0,
a: [0u32; 12],
b: [0u32; 16],
c: [0u32; 16],
w_high: 0,
w_low: 0,
}
}
}
pub fn
|
(input: &[u8]) -> [u8; 32] {
let mut output: [u8; 32] = [0; 32];
unsafe {
let mut shabal_ctx: sph_shabal_context = Default::default();
let shabal_ctx_ptr: *mut libc::c_void = &mut shabal_ctx as *mut _ as *mut libc::c_void;
sph_shabal256_init(shabal_ctx_ptr);
sph_shabal256(shabal_ctx_ptr,
&input[0] as *const _ as *const libc::c_void,
input.len());
// println!("input: {}", input.to_hex());
sph_shabal256_close(shabal_ctx_ptr,
&mut output[0] as *mut _ as *mut libc::c_void);
// println!("output: {}", output.to_hex());
}
return output;
}
extern "C" {
pub fn sph_shabal256_init(cc: *mut libc::c_void);
pub fn sph_shabal256(cc: *mut libc::c_void,
data: *const libc::c_void,
input_size: libc::size_t);
pub fn sph_shabal256_close(cc: *mut libc::c_void, dst: *mut libc::c_void);
}
|
shabal256
|
identifier_name
|
sph_shabal.rs
|
extern crate libc;
use std::default::Default;
#[repr(C)]
pub struct sph_shabal_context {
pub buf: [u8; 64],
pub ptr: libc::size_t,
pub a: [u32; 12],
pub b: [u32; 16],
pub c: [u32; 16],
pub w_high: u32,
pub w_low: u32,
|
impl Default for sph_shabal_context {
fn default() -> sph_shabal_context {
sph_shabal_context {
buf: [0u8; 64],
ptr: 0,
a: [0u32; 12],
b: [0u32; 16],
c: [0u32; 16],
w_high: 0,
w_low: 0,
}
}
}
pub fn shabal256(input: &[u8]) -> [u8; 32] {
let mut output: [u8; 32] = [0; 32];
unsafe {
let mut shabal_ctx: sph_shabal_context = Default::default();
let shabal_ctx_ptr: *mut libc::c_void = &mut shabal_ctx as *mut _ as *mut libc::c_void;
sph_shabal256_init(shabal_ctx_ptr);
sph_shabal256(shabal_ctx_ptr,
&input[0] as *const _ as *const libc::c_void,
input.len());
// println!("input: {}", input.to_hex());
sph_shabal256_close(shabal_ctx_ptr,
&mut output[0] as *mut _ as *mut libc::c_void);
// println!("output: {}", output.to_hex());
}
return output;
}
extern "C" {
pub fn sph_shabal256_init(cc: *mut libc::c_void);
pub fn sph_shabal256(cc: *mut libc::c_void,
data: *const libc::c_void,
input_size: libc::size_t);
pub fn sph_shabal256_close(cc: *mut libc::c_void, dst: *mut libc::c_void);
}
|
}
|
random_line_split
|
mod.rs
|
use super::ObjectSafetyViolation;
use crate::infer::InferCtxt;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{struct_span_err, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::Symbol;
use rustc_span::{MultiSpan, Span};
use std::fmt;
use std::iter;
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn report_extra_impl_obligation(
&self,
error_span: Span,
item_name: Symbol,
_impl_item_def_id: DefId,
trait_item_def_id: DefId,
requirement: &dyn fmt::Display,
) -> DiagnosticBuilder<'tcx> {
let msg = "impl has stricter requirements than trait";
let sp = self.tcx.sess.source_map().guess_head_span(error_span);
let mut err = struct_span_err!(self.tcx.sess, sp, E0276, "{}", msg);
if let Some(trait_item_span) = self.tcx.hir().span_if_local(trait_item_def_id) {
let span = self.tcx.sess.source_map().guess_head_span(trait_item_span);
err.span_label(span, format!("definition of `{}` from trait", item_name));
}
err.span_label(sp, format!("impl has extra requirement {}", requirement));
err
}
}
pub fn report_object_safety_error(
tcx: TyCtxt<'tcx>,
span: Span,
trait_def_id: DefId,
violations: &[ObjectSafetyViolation],
) -> DiagnosticBuilder<'tcx> {
let trait_str = tcx.def_path_str(trait_def_id);
let trait_span = tcx.hir().get_if_local(trait_def_id).and_then(|node| match node {
hir::Node::Item(item) => Some(item.ident.span),
_ => None,
});
let span = tcx.sess.source_map().guess_head_span(span);
let mut err = struct_span_err!(
tcx.sess,
span,
E0038,
"the trait `{}` cannot be made into an object",
trait_str
);
err.span_label(span, format!("`{}` cannot be made into an object", trait_str));
let mut reported_violations = FxHashSet::default();
let mut multi_span = vec![];
let mut messages = vec![];
for violation in violations {
if let ObjectSafetyViolation::SizedSelf(sp) = &violation {
if!sp.is_empty() {
// Do not report `SizedSelf` without spans pointing at `SizedSelf` obligations
// with a `Span`.
reported_violations.insert(ObjectSafetyViolation::SizedSelf(vec![].into()));
}
}
if reported_violations.insert(violation.clone())
|
}
let has_multi_span =!multi_span.is_empty();
let mut note_span = MultiSpan::from_spans(multi_span.clone());
if let (Some(trait_span), true) = (trait_span, has_multi_span) {
note_span
.push_span_label(trait_span, "this trait cannot be made into an object...".to_string());
}
for (span, msg) in iter::zip(multi_span, messages) {
note_span.push_span_label(span, msg);
}
err.span_note(
note_span,
"for a trait to be \"object safe\" it needs to allow building a vtable to allow the call \
to be resolvable dynamically; for more information visit \
<https://doc.rust-lang.org/reference/items/traits.html#object-safety>",
);
err
}
|
{
let spans = violation.spans();
let msg = if trait_span.is_none() || spans.is_empty() {
format!("the trait cannot be made into an object because {}", violation.error_msg())
} else {
format!("...because {}", violation.error_msg())
};
if spans.is_empty() {
err.note(&msg);
} else {
for span in spans {
multi_span.push(span);
messages.push(msg.clone());
}
}
if trait_span.is_some() {
// Only provide the help if its a local trait, otherwise it's not actionable.
violation.solution(&mut err);
}
}
|
conditional_block
|
mod.rs
|
use super::ObjectSafetyViolation;
use crate::infer::InferCtxt;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{struct_span_err, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::Symbol;
use rustc_span::{MultiSpan, Span};
use std::fmt;
use std::iter;
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn report_extra_impl_obligation(
&self,
error_span: Span,
item_name: Symbol,
_impl_item_def_id: DefId,
trait_item_def_id: DefId,
requirement: &dyn fmt::Display,
) -> DiagnosticBuilder<'tcx> {
let msg = "impl has stricter requirements than trait";
let sp = self.tcx.sess.source_map().guess_head_span(error_span);
let mut err = struct_span_err!(self.tcx.sess, sp, E0276, "{}", msg);
if let Some(trait_item_span) = self.tcx.hir().span_if_local(trait_item_def_id) {
let span = self.tcx.sess.source_map().guess_head_span(trait_item_span);
err.span_label(span, format!("definition of `{}` from trait", item_name));
}
err.span_label(sp, format!("impl has extra requirement {}", requirement));
err
}
}
pub fn report_object_safety_error(
tcx: TyCtxt<'tcx>,
span: Span,
trait_def_id: DefId,
violations: &[ObjectSafetyViolation],
) -> DiagnosticBuilder<'tcx>
|
if let ObjectSafetyViolation::SizedSelf(sp) = &violation {
if!sp.is_empty() {
// Do not report `SizedSelf` without spans pointing at `SizedSelf` obligations
// with a `Span`.
reported_violations.insert(ObjectSafetyViolation::SizedSelf(vec![].into()));
}
}
if reported_violations.insert(violation.clone()) {
let spans = violation.spans();
let msg = if trait_span.is_none() || spans.is_empty() {
format!("the trait cannot be made into an object because {}", violation.error_msg())
} else {
format!("...because {}", violation.error_msg())
};
if spans.is_empty() {
err.note(&msg);
} else {
for span in spans {
multi_span.push(span);
messages.push(msg.clone());
}
}
if trait_span.is_some() {
// Only provide the help if its a local trait, otherwise it's not actionable.
violation.solution(&mut err);
}
}
}
let has_multi_span =!multi_span.is_empty();
let mut note_span = MultiSpan::from_spans(multi_span.clone());
if let (Some(trait_span), true) = (trait_span, has_multi_span) {
note_span
.push_span_label(trait_span, "this trait cannot be made into an object...".to_string());
}
for (span, msg) in iter::zip(multi_span, messages) {
note_span.push_span_label(span, msg);
}
err.span_note(
note_span,
"for a trait to be \"object safe\" it needs to allow building a vtable to allow the call \
to be resolvable dynamically; for more information visit \
<https://doc.rust-lang.org/reference/items/traits.html#object-safety>",
);
err
}
|
{
let trait_str = tcx.def_path_str(trait_def_id);
let trait_span = tcx.hir().get_if_local(trait_def_id).and_then(|node| match node {
hir::Node::Item(item) => Some(item.ident.span),
_ => None,
});
let span = tcx.sess.source_map().guess_head_span(span);
let mut err = struct_span_err!(
tcx.sess,
span,
E0038,
"the trait `{}` cannot be made into an object",
trait_str
);
err.span_label(span, format!("`{}` cannot be made into an object", trait_str));
let mut reported_violations = FxHashSet::default();
let mut multi_span = vec![];
let mut messages = vec![];
for violation in violations {
|
identifier_body
|
mod.rs
|
use super::ObjectSafetyViolation;
use crate::infer::InferCtxt;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{struct_span_err, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::Symbol;
use rustc_span::{MultiSpan, Span};
use std::fmt;
use std::iter;
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn report_extra_impl_obligation(
&self,
error_span: Span,
item_name: Symbol,
_impl_item_def_id: DefId,
trait_item_def_id: DefId,
requirement: &dyn fmt::Display,
) -> DiagnosticBuilder<'tcx> {
let msg = "impl has stricter requirements than trait";
let sp = self.tcx.sess.source_map().guess_head_span(error_span);
let mut err = struct_span_err!(self.tcx.sess, sp, E0276, "{}", msg);
if let Some(trait_item_span) = self.tcx.hir().span_if_local(trait_item_def_id) {
let span = self.tcx.sess.source_map().guess_head_span(trait_item_span);
err.span_label(span, format!("definition of `{}` from trait", item_name));
}
err.span_label(sp, format!("impl has extra requirement {}", requirement));
err
}
}
pub fn report_object_safety_error(
tcx: TyCtxt<'tcx>,
span: Span,
trait_def_id: DefId,
violations: &[ObjectSafetyViolation],
) -> DiagnosticBuilder<'tcx> {
let trait_str = tcx.def_path_str(trait_def_id);
let trait_span = tcx.hir().get_if_local(trait_def_id).and_then(|node| match node {
hir::Node::Item(item) => Some(item.ident.span),
_ => None,
});
let span = tcx.sess.source_map().guess_head_span(span);
let mut err = struct_span_err!(
tcx.sess,
span,
E0038,
"the trait `{}` cannot be made into an object",
trait_str
);
err.span_label(span, format!("`{}` cannot be made into an object", trait_str));
let mut reported_violations = FxHashSet::default();
let mut multi_span = vec![];
let mut messages = vec![];
for violation in violations {
if let ObjectSafetyViolation::SizedSelf(sp) = &violation {
if!sp.is_empty() {
// Do not report `SizedSelf` without spans pointing at `SizedSelf` obligations
// with a `Span`.
reported_violations.insert(ObjectSafetyViolation::SizedSelf(vec![].into()));
}
}
if reported_violations.insert(violation.clone()) {
let spans = violation.spans();
let msg = if trait_span.is_none() || spans.is_empty() {
format!("the trait cannot be made into an object because {}", violation.error_msg())
} else {
format!("...because {}", violation.error_msg())
};
if spans.is_empty() {
err.note(&msg);
} else {
for span in spans {
multi_span.push(span);
messages.push(msg.clone());
}
}
if trait_span.is_some() {
// Only provide the help if its a local trait, otherwise it's not actionable.
|
}
}
}
let has_multi_span =!multi_span.is_empty();
let mut note_span = MultiSpan::from_spans(multi_span.clone());
if let (Some(trait_span), true) = (trait_span, has_multi_span) {
note_span
.push_span_label(trait_span, "this trait cannot be made into an object...".to_string());
}
for (span, msg) in iter::zip(multi_span, messages) {
note_span.push_span_label(span, msg);
}
err.span_note(
note_span,
"for a trait to be \"object safe\" it needs to allow building a vtable to allow the call \
to be resolvable dynamically; for more information visit \
<https://doc.rust-lang.org/reference/items/traits.html#object-safety>",
);
err
}
|
violation.solution(&mut err);
|
random_line_split
|
mod.rs
|
use super::ObjectSafetyViolation;
use crate::infer::InferCtxt;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{struct_span_err, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::Symbol;
use rustc_span::{MultiSpan, Span};
use std::fmt;
use std::iter;
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn
|
(
&self,
error_span: Span,
item_name: Symbol,
_impl_item_def_id: DefId,
trait_item_def_id: DefId,
requirement: &dyn fmt::Display,
) -> DiagnosticBuilder<'tcx> {
let msg = "impl has stricter requirements than trait";
let sp = self.tcx.sess.source_map().guess_head_span(error_span);
let mut err = struct_span_err!(self.tcx.sess, sp, E0276, "{}", msg);
if let Some(trait_item_span) = self.tcx.hir().span_if_local(trait_item_def_id) {
let span = self.tcx.sess.source_map().guess_head_span(trait_item_span);
err.span_label(span, format!("definition of `{}` from trait", item_name));
}
err.span_label(sp, format!("impl has extra requirement {}", requirement));
err
}
}
pub fn report_object_safety_error(
tcx: TyCtxt<'tcx>,
span: Span,
trait_def_id: DefId,
violations: &[ObjectSafetyViolation],
) -> DiagnosticBuilder<'tcx> {
let trait_str = tcx.def_path_str(trait_def_id);
let trait_span = tcx.hir().get_if_local(trait_def_id).and_then(|node| match node {
hir::Node::Item(item) => Some(item.ident.span),
_ => None,
});
let span = tcx.sess.source_map().guess_head_span(span);
let mut err = struct_span_err!(
tcx.sess,
span,
E0038,
"the trait `{}` cannot be made into an object",
trait_str
);
err.span_label(span, format!("`{}` cannot be made into an object", trait_str));
let mut reported_violations = FxHashSet::default();
let mut multi_span = vec![];
let mut messages = vec![];
for violation in violations {
if let ObjectSafetyViolation::SizedSelf(sp) = &violation {
if!sp.is_empty() {
// Do not report `SizedSelf` without spans pointing at `SizedSelf` obligations
// with a `Span`.
reported_violations.insert(ObjectSafetyViolation::SizedSelf(vec![].into()));
}
}
if reported_violations.insert(violation.clone()) {
let spans = violation.spans();
let msg = if trait_span.is_none() || spans.is_empty() {
format!("the trait cannot be made into an object because {}", violation.error_msg())
} else {
format!("...because {}", violation.error_msg())
};
if spans.is_empty() {
err.note(&msg);
} else {
for span in spans {
multi_span.push(span);
messages.push(msg.clone());
}
}
if trait_span.is_some() {
// Only provide the help if its a local trait, otherwise it's not actionable.
violation.solution(&mut err);
}
}
}
let has_multi_span =!multi_span.is_empty();
let mut note_span = MultiSpan::from_spans(multi_span.clone());
if let (Some(trait_span), true) = (trait_span, has_multi_span) {
note_span
.push_span_label(trait_span, "this trait cannot be made into an object...".to_string());
}
for (span, msg) in iter::zip(multi_span, messages) {
note_span.push_span_label(span, msg);
}
err.span_note(
note_span,
"for a trait to be \"object safe\" it needs to allow building a vtable to allow the call \
to be resolvable dynamically; for more information visit \
<https://doc.rust-lang.org/reference/items/traits.html#object-safety>",
);
err
}
|
report_extra_impl_obligation
|
identifier_name
|
coord.rs
|
use std::ops::*;
use std::fmt;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct Coord<T>(pub T, pub T);
impl<T> Coord<T> {
pub const fn squared(value: T) -> Self where T: Copy {
Coord(value, value)
}
pub fn sum(self) -> T where T: Add<Output=T> {
let Coord(x, y) = self;
x + y
}
pub fn map<U>(self, mut func: impl FnMut(T) -> U) -> Coord<U> {
let Coord(x, y) = self;
Coord(func(x), func(y))
}
}
impl<T: fmt::Debug> fmt::Debug for Coord<T> {
fn fmt (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({:?}, {:?})", self.0, self.1)
}
}
impl<T: fmt::Display> fmt::Display for Coord<T> {
fn fmt (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({}, {})", self.0, self.1)
}
}
macro_rules! coord_op_impl {
( $(($trait: ident, $func: ident),)* ) => ( $(
// Self
impl<T> $trait<Self> for Coord<T> where T: $trait<Output = T> {
type Output = Self;
#[inline]
fn $func(self, rhs: Self) -> Self {
Coord(
$trait::$func(self.0, rhs.0),
$trait::$func(self.1, rhs.1),
)
}
}
)* )
}
coord_op_impl! {
(Add, add),
(Sub, sub),
(Mul, mul),
(Div, div),
(Rem, rem),
}
impl<T: AddAssign> AddAssign<Self> for Coord<T> {
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
self.1 += rhs.1;
}
}
impl<T: AddAssign + Clone> AddAssign<T> for Coord<T> {
fn add_assign(&mut self, rhs: T) {
self.0 += rhs.clone();
self.1 += rhs;
}
}
impl From<Coord<usize>> for Coord<isize> {
fn from(src: Coord<usize>) -> Self {
Coord(
src.0 as isize,
src.1 as isize,
)
}
}
impl From<Coord<isize>> for Coord<usize> {
fn from(src: Coord<isize>) -> Self {
Coord(
src.0 as usize,
src.1 as usize,
)
}
}
impl<T> From<(T, T)> for Coord<T> {
fn from((x, y): (T, T)) -> Self { Coord(x, y) }
}
impl Coord<isize> {
pub fn abs(self) -> Self { self.map(isize::abs) }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn sanity()
|
}
|
{
let coord = Coord(5,6);
assert_eq!(5, coord.0);
assert_eq!(6, coord.1);
}
|
identifier_body
|
coord.rs
|
use std::ops::*;
use std::fmt;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct Coord<T>(pub T, pub T);
impl<T> Coord<T> {
pub const fn squared(value: T) -> Self where T: Copy {
Coord(value, value)
}
pub fn sum(self) -> T where T: Add<Output=T> {
let Coord(x, y) = self;
x + y
}
pub fn
|
<U>(self, mut func: impl FnMut(T) -> U) -> Coord<U> {
let Coord(x, y) = self;
Coord(func(x), func(y))
}
}
impl<T: fmt::Debug> fmt::Debug for Coord<T> {
fn fmt (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({:?}, {:?})", self.0, self.1)
}
}
impl<T: fmt::Display> fmt::Display for Coord<T> {
fn fmt (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({}, {})", self.0, self.1)
}
}
macro_rules! coord_op_impl {
( $(($trait: ident, $func: ident),)* ) => ( $(
// Self
impl<T> $trait<Self> for Coord<T> where T: $trait<Output = T> {
type Output = Self;
#[inline]
fn $func(self, rhs: Self) -> Self {
Coord(
$trait::$func(self.0, rhs.0),
$trait::$func(self.1, rhs.1),
)
}
}
)* )
}
coord_op_impl! {
(Add, add),
(Sub, sub),
(Mul, mul),
(Div, div),
(Rem, rem),
}
impl<T: AddAssign> AddAssign<Self> for Coord<T> {
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
self.1 += rhs.1;
}
}
impl<T: AddAssign + Clone> AddAssign<T> for Coord<T> {
fn add_assign(&mut self, rhs: T) {
self.0 += rhs.clone();
self.1 += rhs;
}
}
impl From<Coord<usize>> for Coord<isize> {
fn from(src: Coord<usize>) -> Self {
Coord(
src.0 as isize,
src.1 as isize,
)
}
}
impl From<Coord<isize>> for Coord<usize> {
fn from(src: Coord<isize>) -> Self {
Coord(
src.0 as usize,
src.1 as usize,
)
}
}
impl<T> From<(T, T)> for Coord<T> {
fn from((x, y): (T, T)) -> Self { Coord(x, y) }
}
impl Coord<isize> {
pub fn abs(self) -> Self { self.map(isize::abs) }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn sanity() {
let coord = Coord(5,6);
assert_eq!(5, coord.0);
assert_eq!(6, coord.1);
}
}
|
map
|
identifier_name
|
coord.rs
|
use std::ops::*;
use std::fmt;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct Coord<T>(pub T, pub T);
impl<T> Coord<T> {
pub const fn squared(value: T) -> Self where T: Copy {
Coord(value, value)
}
pub fn sum(self) -> T where T: Add<Output=T> {
let Coord(x, y) = self;
x + y
}
pub fn map<U>(self, mut func: impl FnMut(T) -> U) -> Coord<U> {
let Coord(x, y) = self;
Coord(func(x), func(y))
}
}
impl<T: fmt::Debug> fmt::Debug for Coord<T> {
fn fmt (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({:?}, {:?})", self.0, self.1)
}
}
impl<T: fmt::Display> fmt::Display for Coord<T> {
fn fmt (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({}, {})", self.0, self.1)
}
}
macro_rules! coord_op_impl {
( $(($trait: ident, $func: ident),)* ) => ( $(
// Self
impl<T> $trait<Self> for Coord<T> where T: $trait<Output = T> {
type Output = Self;
#[inline]
fn $func(self, rhs: Self) -> Self {
Coord(
$trait::$func(self.0, rhs.0),
$trait::$func(self.1, rhs.1),
)
}
}
)* )
}
coord_op_impl! {
(Add, add),
(Sub, sub),
(Mul, mul),
(Div, div),
(Rem, rem),
}
impl<T: AddAssign> AddAssign<Self> for Coord<T> {
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
self.1 += rhs.1;
}
}
impl<T: AddAssign + Clone> AddAssign<T> for Coord<T> {
fn add_assign(&mut self, rhs: T) {
self.0 += rhs.clone();
self.1 += rhs;
}
}
impl From<Coord<usize>> for Coord<isize> {
fn from(src: Coord<usize>) -> Self {
Coord(
src.0 as isize,
src.1 as isize,
)
}
}
impl From<Coord<isize>> for Coord<usize> {
fn from(src: Coord<isize>) -> Self {
Coord(
src.0 as usize,
src.1 as usize,
)
}
}
impl<T> From<(T, T)> for Coord<T> {
fn from((x, y): (T, T)) -> Self { Coord(x, y) }
}
impl Coord<isize> {
pub fn abs(self) -> Self { self.map(isize::abs) }
}
|
use super::*;
#[test]
fn sanity() {
let coord = Coord(5,6);
assert_eq!(5, coord.0);
assert_eq!(6, coord.1);
}
}
|
#[cfg(test)]
mod tests {
|
random_line_split
|
enum.rs
|
// Task : To explain enum in rust
// Author : Vigneshwer
// Version : 1.0
// Date : 3 Dec 2016
fn main() {
let hulk = Hero::Strong(100);
let fasty = Hero::Fast;
//converting from
let spiderman = Hero::Info {
name: "spiderman".to_owned(),
secret: "peter parker".to_owned(),
};
get_info(spiderman);
get_info(hulk);
get_info(fasty);
}
// declaring the enum
enum Hero {
Fast,
Strong(i32),
Info { name: String, secret: String },
}
// function to perform for each types
fn
|
(h: Hero) {
match h {
Hero::Fast => println!("Fast"),
Hero::Strong(i) => println!("Lifts {} tons", i),
Hero::Info { name, secret } => {
println!(" name is : {0} secret is : {1}", name, secret);
}
}
}
|
get_info
|
identifier_name
|
enum.rs
|
fn main() {
let hulk = Hero::Strong(100);
let fasty = Hero::Fast;
//converting from
let spiderman = Hero::Info {
name: "spiderman".to_owned(),
secret: "peter parker".to_owned(),
};
get_info(spiderman);
get_info(hulk);
get_info(fasty);
}
// declaring the enum
enum Hero {
Fast,
Strong(i32),
Info { name: String, secret: String },
}
// function to perform for each types
fn get_info(h: Hero) {
match h {
Hero::Fast => println!("Fast"),
Hero::Strong(i) => println!("Lifts {} tons", i),
Hero::Info { name, secret } => {
println!(" name is : {0} secret is : {1}", name, secret);
}
}
}
|
// Task : To explain enum in rust
// Author : Vigneshwer
// Version : 1.0
// Date : 3 Dec 2016
|
random_line_split
|
|
enum.rs
|
// Task : To explain enum in rust
// Author : Vigneshwer
// Version : 1.0
// Date : 3 Dec 2016
fn main()
|
// declaring the enum
enum Hero {
Fast,
Strong(i32),
Info { name: String, secret: String },
}
// function to perform for each types
fn get_info(h: Hero) {
match h {
Hero::Fast => println!("Fast"),
Hero::Strong(i) => println!("Lifts {} tons", i),
Hero::Info { name, secret } => {
println!(" name is : {0} secret is : {1}", name, secret);
}
}
}
|
{
let hulk = Hero::Strong(100);
let fasty = Hero::Fast;
//converting from
let spiderman = Hero::Info {
name: "spiderman".to_owned(),
secret: "peter parker".to_owned(),
};
get_info(spiderman);
get_info(hulk);
get_info(fasty);
}
|
identifier_body
|
lib.rs
|
//! This library provides wrappers for LLVM that are memory-safe and follow
//! Rust idioms.
|
//!
//! The original LLVM reference is available [here](http://llvm.org/doxygen/)
//! but take note that this isn't as thorough as this documentation.
extern crate llvm_sys as ffi;
extern crate libc;
extern crate cbox;
#[macro_use]
mod macros;
mod buffer;
mod block;
mod builder;
mod compile;
mod context;
mod engine;
mod module;
mod object;
mod target;
mod ty;
mod value;
mod util;
pub use cbox::{CBox, CSemiBox};
pub use builder::Builder;
pub use block::BasicBlock;
pub use compile::Compile;
pub use context::{Context, GetContext};
pub use engine::{JitEngine, JitOptions, Interpreter, ExecutionEngine, GenericValue, GenericValueCast};
pub use module::{Module, Functions};
pub use object::{ObjectFile, Symbol, Symbols};
pub use target::{TargetData, Target};
pub use ty::{FunctionType, StructType, Type};
pub use value::{Arg, Attribute, Value, Function, Predicate};
pub use util::CastFrom;
|
random_line_split
|
|
example.rs
|
#[derive(Debug, PartialEq, Eq)]
pub struct Palindrome {
pub factors: Vec<(u64, u64)>,
}
impl Palindrome {
/// Create a palindrome with the given factors
pub fn new(mut a: u64, mut b: u64) -> Palindrome {
if a > b {
std::mem::swap(&mut a, &mut b);
}
Palindrome {
factors: vec![(a, b)],
}
}
/// Return the palindrome's value
pub fn value(&self) -> u64 {
// this could in theory panic with a bounds error, but the length of
// self.factors is known to start at 1 and is only ever increased
self.factors[0].0 * self.factors[0].1
}
/// Insert a new set of factors into an existing palindrome
pub fn insert(&mut self, mut a: u64, mut b: u64) {
if a > b {
std::mem::swap(&mut a, &mut b);
}
self.factors.push((a, b));
self.factors.sort_unstable();
self.factors.dedup();
}
}
/// return the (min, max) palindrome pair comprised of the products of numbers in the input range
pub fn palindrome_products(min: u64, max: u64) -> Option<(Palindrome, Palindrome)> {
let mut result = None;
for a in min..=max {
for b in min..=a {
if is_palindrome(a * b) {
|
Some((mut minp, mut maxp)) => {
match (a * b).cmp(&minp.value()) {
std::cmp::Ordering::Greater => {}
std::cmp::Ordering::Less => minp = Palindrome::new(a, b),
std::cmp::Ordering::Equal => minp.insert(a, b),
}
match (a * b).cmp(&maxp.value()) {
std::cmp::Ordering::Less => {}
std::cmp::Ordering::Greater => maxp = Palindrome::new(a, b),
std::cmp::Ordering::Equal => maxp.insert(a, b),
}
Some((minp, maxp))
}
};
}
}
}
result
}
#[inline]
pub fn is_palindrome(n: u64) -> bool {
let s = n.to_string().into_bytes();
s.iter().zip(s.iter().rev()).all(|(a, b)| a == b)
}
|
result = match result {
None => Some((Palindrome::new(a, b), Palindrome::new(a, b))),
|
random_line_split
|
example.rs
|
#[derive(Debug, PartialEq, Eq)]
pub struct
|
{
pub factors: Vec<(u64, u64)>,
}
impl Palindrome {
/// Create a palindrome with the given factors
pub fn new(mut a: u64, mut b: u64) -> Palindrome {
if a > b {
std::mem::swap(&mut a, &mut b);
}
Palindrome {
factors: vec![(a, b)],
}
}
/// Return the palindrome's value
pub fn value(&self) -> u64 {
// this could in theory panic with a bounds error, but the length of
// self.factors is known to start at 1 and is only ever increased
self.factors[0].0 * self.factors[0].1
}
/// Insert a new set of factors into an existing palindrome
pub fn insert(&mut self, mut a: u64, mut b: u64) {
if a > b {
std::mem::swap(&mut a, &mut b);
}
self.factors.push((a, b));
self.factors.sort_unstable();
self.factors.dedup();
}
}
/// return the (min, max) palindrome pair comprised of the products of numbers in the input range
pub fn palindrome_products(min: u64, max: u64) -> Option<(Palindrome, Palindrome)> {
let mut result = None;
for a in min..=max {
for b in min..=a {
if is_palindrome(a * b) {
result = match result {
None => Some((Palindrome::new(a, b), Palindrome::new(a, b))),
Some((mut minp, mut maxp)) => {
match (a * b).cmp(&minp.value()) {
std::cmp::Ordering::Greater => {}
std::cmp::Ordering::Less => minp = Palindrome::new(a, b),
std::cmp::Ordering::Equal => minp.insert(a, b),
}
match (a * b).cmp(&maxp.value()) {
std::cmp::Ordering::Less => {}
std::cmp::Ordering::Greater => maxp = Palindrome::new(a, b),
std::cmp::Ordering::Equal => maxp.insert(a, b),
}
Some((minp, maxp))
}
};
}
}
}
result
}
#[inline]
pub fn is_palindrome(n: u64) -> bool {
let s = n.to_string().into_bytes();
s.iter().zip(s.iter().rev()).all(|(a, b)| a == b)
}
|
Palindrome
|
identifier_name
|
example.rs
|
#[derive(Debug, PartialEq, Eq)]
pub struct Palindrome {
pub factors: Vec<(u64, u64)>,
}
impl Palindrome {
/// Create a palindrome with the given factors
pub fn new(mut a: u64, mut b: u64) -> Palindrome {
if a > b {
std::mem::swap(&mut a, &mut b);
}
Palindrome {
factors: vec![(a, b)],
}
}
/// Return the palindrome's value
pub fn value(&self) -> u64
|
/// Insert a new set of factors into an existing palindrome
pub fn insert(&mut self, mut a: u64, mut b: u64) {
if a > b {
std::mem::swap(&mut a, &mut b);
}
self.factors.push((a, b));
self.factors.sort_unstable();
self.factors.dedup();
}
}
/// return the (min, max) palindrome pair comprised of the products of numbers in the input range
pub fn palindrome_products(min: u64, max: u64) -> Option<(Palindrome, Palindrome)> {
let mut result = None;
for a in min..=max {
for b in min..=a {
if is_palindrome(a * b) {
result = match result {
None => Some((Palindrome::new(a, b), Palindrome::new(a, b))),
Some((mut minp, mut maxp)) => {
match (a * b).cmp(&minp.value()) {
std::cmp::Ordering::Greater => {}
std::cmp::Ordering::Less => minp = Palindrome::new(a, b),
std::cmp::Ordering::Equal => minp.insert(a, b),
}
match (a * b).cmp(&maxp.value()) {
std::cmp::Ordering::Less => {}
std::cmp::Ordering::Greater => maxp = Palindrome::new(a, b),
std::cmp::Ordering::Equal => maxp.insert(a, b),
}
Some((minp, maxp))
}
};
}
}
}
result
}
#[inline]
pub fn is_palindrome(n: u64) -> bool {
let s = n.to_string().into_bytes();
s.iter().zip(s.iter().rev()).all(|(a, b)| a == b)
}
|
{
// this could in theory panic with a bounds error, but the length of
// self.factors is known to start at 1 and is only ever increased
self.factors[0].0 * self.factors[0].1
}
|
identifier_body
|
privacy1.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(globs, lang_items)]
#![no_std] // makes debugging this test *a lot* easier (during resolve)
#[lang="sized"]
pub trait Sized {}
mod bar {
// shouldn't bring in too much
pub use self::glob::*;
// can't publicly re-export private items
pub use self::baz::{foo, bar};
//~^ ERROR: function `bar` is private
pub use self::private::ppriv;
//~^ ERROR: function `ppriv` is private
pub struct A;
impl A {
pub fn foo() {}
fn bar() {}
pub fn foo2(&self) {}
fn bar2(&self)
|
}
trait B {
fn foo() -> Self;
}
impl B for int { fn foo() -> int { 3 } }
pub enum Enum {
Pub
}
mod baz {
pub struct A;
impl A {
pub fn foo() {}
fn bar() {}
pub fn foo2(&self) {}
fn bar2(&self) {}
}
// both of these are re-exported by `bar`, but only one should be
// validly re-exported
pub fn foo() {}
fn bar() {}
}
extern {
fn epriv();
pub fn epub();
}
fn test() {
self::Pub;
unsafe {
epriv();
epub();
}
self::baz::A;
self::baz::A::foo();
self::baz::A::bar(); //~ ERROR: method `bar` is private
self::baz::A.foo2();
self::baz::A.bar2(); //~ ERROR: method `bar2` is private
// this used to cause an ICE in privacy traversal.
super::gpub();
}
mod glob {
pub fn gpub() {}
fn gpriv() {}
}
mod private {
fn ppriv() {}
}
}
pub fn gpub() {}
fn lol() {
bar::A;
bar::A::foo();
bar::A::bar(); //~ ERROR: method `bar` is private
bar::A.foo2();
bar::A.bar2(); //~ ERROR: method `bar2` is private
}
mod foo {
fn test() {
::bar::A::foo();
::bar::A::bar(); //~ ERROR: method `bar` is private
::bar::A.foo2();
::bar::A.bar2(); //~ ERROR: method `bar2` is private
::bar::baz::A::foo(); //~ ERROR: method `foo` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::A::bar(); //~ ERROR: method `bar` is private
::bar::baz::A.foo2(); //~ ERROR: struct `A` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::A.bar2(); //~ ERROR: struct `A` is inaccessible
//~^ ERROR: method `bar2` is private
//~^^ NOTE: module `baz` is private
let _: int =
::bar::B::foo(); //~ ERROR: method `foo` is inaccessible
//~^ NOTE: trait `B` is private
::lol();
::bar::Pub;
unsafe {
::bar::epriv(); //~ ERROR: function `epriv` is private
::bar::epub();
}
::bar::foo();
::bar::bar();
::bar::gpub();
::bar::baz::foo(); //~ ERROR: function `foo` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::bar(); //~ ERROR: function `bar` is private
}
fn test2() {
use bar::baz::{foo, bar};
//~^ ERROR: function `foo` is inaccessible
//~^^ ERROR: function `bar` is private
foo();
bar();
}
fn test3() {
use bar::baz;
//~^ ERROR: module `baz` is private
}
fn test4() {
use bar::{foo, bar};
foo();
bar();
}
fn test5() {
use bar;
bar::foo();
bar::bar();
}
impl ::bar::B for f32 { fn foo() -> f32 { 1.0 } }
//~^ ERROR: trait `B` is private
}
pub mod mytest {
// Even though the inner `A` struct is a publicly exported item (usable from
// external crates through `foo::foo`, it should not be accessible through
// its definition path (which has the private `i` module).
use self::foo::i::A; //~ ERROR: type `A` is inaccessible
//~^ NOTE: module `i` is private
pub mod foo {
pub use self::i::A as foo;
mod i {
pub struct A;
}
}
}
#[start] fn main(_: int, _: *const *const u8) -> int { 3 }
|
{}
|
identifier_body
|
privacy1.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(globs, lang_items)]
#![no_std] // makes debugging this test *a lot* easier (during resolve)
#[lang="sized"]
pub trait Sized {}
mod bar {
// shouldn't bring in too much
pub use self::glob::*;
// can't publicly re-export private items
pub use self::baz::{foo, bar};
//~^ ERROR: function `bar` is private
pub use self::private::ppriv;
//~^ ERROR: function `ppriv` is private
pub struct A;
impl A {
pub fn foo() {}
fn bar() {}
pub fn foo2(&self) {}
fn bar2(&self) {}
}
trait B {
fn foo() -> Self;
}
impl B for int { fn foo() -> int { 3 } }
pub enum Enum {
Pub
}
mod baz {
pub struct A;
impl A {
pub fn
|
() {}
fn bar() {}
pub fn foo2(&self) {}
fn bar2(&self) {}
}
// both of these are re-exported by `bar`, but only one should be
// validly re-exported
pub fn foo() {}
fn bar() {}
}
extern {
fn epriv();
pub fn epub();
}
fn test() {
self::Pub;
unsafe {
epriv();
epub();
}
self::baz::A;
self::baz::A::foo();
self::baz::A::bar(); //~ ERROR: method `bar` is private
self::baz::A.foo2();
self::baz::A.bar2(); //~ ERROR: method `bar2` is private
// this used to cause an ICE in privacy traversal.
super::gpub();
}
mod glob {
pub fn gpub() {}
fn gpriv() {}
}
mod private {
fn ppriv() {}
}
}
pub fn gpub() {}
fn lol() {
bar::A;
bar::A::foo();
bar::A::bar(); //~ ERROR: method `bar` is private
bar::A.foo2();
bar::A.bar2(); //~ ERROR: method `bar2` is private
}
mod foo {
fn test() {
::bar::A::foo();
::bar::A::bar(); //~ ERROR: method `bar` is private
::bar::A.foo2();
::bar::A.bar2(); //~ ERROR: method `bar2` is private
::bar::baz::A::foo(); //~ ERROR: method `foo` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::A::bar(); //~ ERROR: method `bar` is private
::bar::baz::A.foo2(); //~ ERROR: struct `A` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::A.bar2(); //~ ERROR: struct `A` is inaccessible
//~^ ERROR: method `bar2` is private
//~^^ NOTE: module `baz` is private
let _: int =
::bar::B::foo(); //~ ERROR: method `foo` is inaccessible
//~^ NOTE: trait `B` is private
::lol();
::bar::Pub;
unsafe {
::bar::epriv(); //~ ERROR: function `epriv` is private
::bar::epub();
}
::bar::foo();
::bar::bar();
::bar::gpub();
::bar::baz::foo(); //~ ERROR: function `foo` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::bar(); //~ ERROR: function `bar` is private
}
fn test2() {
use bar::baz::{foo, bar};
//~^ ERROR: function `foo` is inaccessible
//~^^ ERROR: function `bar` is private
foo();
bar();
}
fn test3() {
use bar::baz;
//~^ ERROR: module `baz` is private
}
fn test4() {
use bar::{foo, bar};
foo();
bar();
}
fn test5() {
use bar;
bar::foo();
bar::bar();
}
impl ::bar::B for f32 { fn foo() -> f32 { 1.0 } }
//~^ ERROR: trait `B` is private
}
pub mod mytest {
// Even though the inner `A` struct is a publicly exported item (usable from
// external crates through `foo::foo`, it should not be accessible through
// its definition path (which has the private `i` module).
use self::foo::i::A; //~ ERROR: type `A` is inaccessible
//~^ NOTE: module `i` is private
pub mod foo {
pub use self::i::A as foo;
mod i {
pub struct A;
}
}
}
#[start] fn main(_: int, _: *const *const u8) -> int { 3 }
|
foo
|
identifier_name
|
privacy1.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(globs, lang_items)]
#![no_std] // makes debugging this test *a lot* easier (during resolve)
#[lang="sized"]
pub trait Sized {}
mod bar {
// shouldn't bring in too much
pub use self::glob::*;
// can't publicly re-export private items
pub use self::baz::{foo, bar};
//~^ ERROR: function `bar` is private
pub use self::private::ppriv;
//~^ ERROR: function `ppriv` is private
pub struct A;
impl A {
pub fn foo() {}
fn bar() {}
pub fn foo2(&self) {}
fn bar2(&self) {}
}
trait B {
fn foo() -> Self;
}
impl B for int { fn foo() -> int { 3 } }
pub enum Enum {
Pub
}
mod baz {
pub struct A;
impl A {
pub fn foo() {}
fn bar() {}
pub fn foo2(&self) {}
fn bar2(&self) {}
}
// both of these are re-exported by `bar`, but only one should be
// validly re-exported
pub fn foo() {}
fn bar() {}
}
extern {
fn epriv();
pub fn epub();
}
fn test() {
self::Pub;
unsafe {
epriv();
epub();
}
self::baz::A;
self::baz::A::foo();
self::baz::A::bar(); //~ ERROR: method `bar` is private
self::baz::A.foo2();
self::baz::A.bar2(); //~ ERROR: method `bar2` is private
// this used to cause an ICE in privacy traversal.
super::gpub();
}
mod glob {
pub fn gpub() {}
fn gpriv() {}
}
mod private {
fn ppriv() {}
}
}
pub fn gpub() {}
fn lol() {
bar::A;
bar::A::foo();
bar::A::bar(); //~ ERROR: method `bar` is private
bar::A.foo2();
bar::A.bar2(); //~ ERROR: method `bar2` is private
}
mod foo {
fn test() {
::bar::A::foo();
::bar::A::bar(); //~ ERROR: method `bar` is private
::bar::A.foo2();
|
::bar::baz::A.foo2(); //~ ERROR: struct `A` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::A.bar2(); //~ ERROR: struct `A` is inaccessible
//~^ ERROR: method `bar2` is private
//~^^ NOTE: module `baz` is private
let _: int =
::bar::B::foo(); //~ ERROR: method `foo` is inaccessible
//~^ NOTE: trait `B` is private
::lol();
::bar::Pub;
unsafe {
::bar::epriv(); //~ ERROR: function `epriv` is private
::bar::epub();
}
::bar::foo();
::bar::bar();
::bar::gpub();
::bar::baz::foo(); //~ ERROR: function `foo` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::bar(); //~ ERROR: function `bar` is private
}
fn test2() {
use bar::baz::{foo, bar};
//~^ ERROR: function `foo` is inaccessible
//~^^ ERROR: function `bar` is private
foo();
bar();
}
fn test3() {
use bar::baz;
//~^ ERROR: module `baz` is private
}
fn test4() {
use bar::{foo, bar};
foo();
bar();
}
fn test5() {
use bar;
bar::foo();
bar::bar();
}
impl ::bar::B for f32 { fn foo() -> f32 { 1.0 } }
//~^ ERROR: trait `B` is private
}
pub mod mytest {
// Even though the inner `A` struct is a publicly exported item (usable from
// external crates through `foo::foo`, it should not be accessible through
// its definition path (which has the private `i` module).
use self::foo::i::A; //~ ERROR: type `A` is inaccessible
//~^ NOTE: module `i` is private
pub mod foo {
pub use self::i::A as foo;
mod i {
pub struct A;
}
}
}
#[start] fn main(_: int, _: *const *const u8) -> int { 3 }
|
::bar::A.bar2(); //~ ERROR: method `bar2` is private
::bar::baz::A::foo(); //~ ERROR: method `foo` is inaccessible
//~^ NOTE: module `baz` is private
::bar::baz::A::bar(); //~ ERROR: method `bar` is private
|
random_line_split
|
extract.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use cpython::Python;
/// Trait for extracting a Rust object from a Python wrapper class.
///
/// A common pattern when writing bindings for Rust objects is to define
/// a Python wrapper class using the `py_class!` macro, with the underlying
/// Rust object stored as a data field within the Python object.
///
/// When Rust code interacts with a Python wrapper, it may want to work
/// with the underlying Rust object directly. This trait provides a means
/// to do so. Note that the `extract_inner` methods takes `&self`, meaning
/// the inner Rust value cannot be moved out of the wrapper. As a result,
/// the inner value will typically be wrapped in something like an `Arc`.
pub trait ExtractInner {
type Inner;
fn extract_inner(&self, py: Python) -> Self::Inner;
}
/// Similar to `ExtractInner`, but returns a reference to the wrapped
/// Rust object. Types that implement this trait will automatically
/// implement `ExtractInner` if the inner type implements `Clone`.
pub trait ExtractInnerRef {
type Inner;
fn extract_inner_ref<'a>(&'a self, py: Python<'a>) -> &'a Self::Inner;
}
impl<T> ExtractInner for T
where
T: ExtractInnerRef,
T::Inner: Clone +'static,
{
type Inner = <T as ExtractInnerRef>::Inner;
fn
|
(&self, py: Python) -> Self::Inner {
self.extract_inner_ref(py).clone()
}
}
|
extract_inner
|
identifier_name
|
extract.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
|
*/
use cpython::Python;
/// Trait for extracting a Rust object from a Python wrapper class.
///
/// A common pattern when writing bindings for Rust objects is to define
/// a Python wrapper class using the `py_class!` macro, with the underlying
/// Rust object stored as a data field within the Python object.
///
/// When Rust code interacts with a Python wrapper, it may want to work
/// with the underlying Rust object directly. This trait provides a means
/// to do so. Note that the `extract_inner` methods takes `&self`, meaning
/// the inner Rust value cannot be moved out of the wrapper. As a result,
/// the inner value will typically be wrapped in something like an `Arc`.
pub trait ExtractInner {
type Inner;
fn extract_inner(&self, py: Python) -> Self::Inner;
}
/// Similar to `ExtractInner`, but returns a reference to the wrapped
/// Rust object. Types that implement this trait will automatically
/// implement `ExtractInner` if the inner type implements `Clone`.
pub trait ExtractInnerRef {
type Inner;
fn extract_inner_ref<'a>(&'a self, py: Python<'a>) -> &'a Self::Inner;
}
impl<T> ExtractInner for T
where
T: ExtractInnerRef,
T::Inner: Clone +'static,
{
type Inner = <T as ExtractInnerRef>::Inner;
fn extract_inner(&self, py: Python) -> Self::Inner {
self.extract_inner_ref(py).clone()
}
}
|
* GNU General Public License version 2.
|
random_line_split
|
extract.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use cpython::Python;
/// Trait for extracting a Rust object from a Python wrapper class.
///
/// A common pattern when writing bindings for Rust objects is to define
/// a Python wrapper class using the `py_class!` macro, with the underlying
/// Rust object stored as a data field within the Python object.
///
/// When Rust code interacts with a Python wrapper, it may want to work
/// with the underlying Rust object directly. This trait provides a means
/// to do so. Note that the `extract_inner` methods takes `&self`, meaning
/// the inner Rust value cannot be moved out of the wrapper. As a result,
/// the inner value will typically be wrapped in something like an `Arc`.
pub trait ExtractInner {
type Inner;
fn extract_inner(&self, py: Python) -> Self::Inner;
}
/// Similar to `ExtractInner`, but returns a reference to the wrapped
/// Rust object. Types that implement this trait will automatically
/// implement `ExtractInner` if the inner type implements `Clone`.
pub trait ExtractInnerRef {
type Inner;
fn extract_inner_ref<'a>(&'a self, py: Python<'a>) -> &'a Self::Inner;
}
impl<T> ExtractInner for T
where
T: ExtractInnerRef,
T::Inner: Clone +'static,
{
type Inner = <T as ExtractInnerRef>::Inner;
fn extract_inner(&self, py: Python) -> Self::Inner
|
}
|
{
self.extract_inner_ref(py).clone()
}
|
identifier_body
|
cell.rs
|
// Lumol, an extensible molecular simulation engine
// Copyright (C) Lumol's contributors — BSD license
use std::fs::File;
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use log::error;
use super::Output;
use lumol_core::System;
/// The `CellOutput` writes all the components of a cell to a file. The columns
/// in the file contain the following values: `step A B C α β γ`.
pub struct CellOutput {
file: BufWriter<File>,
path: PathBuf,
}
impl CellOutput {
/// Create a new `CellOutput` writing to `filename`. The file is replaced if
/// it already exists.
pub fn new<P: AsRef<Path>>(filename: P) -> Result<CellOutput, io::Error> {
|
mpl Output for CellOutput {
#[allow(clippy::non_ascii_literal)]
fn setup(&mut self, _: &System) {
writeln_or_log!(self, "# Unit cell of the simulation");
writeln_or_log!(self, "# Step A/Å B/Å C/Å α/deg β/deg γ/deg");
}
fn write(&mut self, system: &System) {
writeln_or_log!(self, "{} {} {} {} {} {} {}",
system.step,
system.cell.a(),
system.cell.b(),
system.cell.c(),
system.cell.alpha(),
system.cell.beta(),
system.cell.gamma()
)
}
}
#[cfg(test)]
#[allow(clippy::non_ascii_literal)]
mod tests {
use super::*;
use super::super::tests::test_output;
#[test]
fn cell() {
test_output(
|path| Box::new(CellOutput::new(path).unwrap()),
"# Unit cell of the simulation
# Step A/Å B/Å C/Å α/deg β/deg γ/deg
42 10 10 10 90 90 90
",
);
}
}
|
Ok(CellOutput {
file: BufWriter::new(File::create(filename.as_ref())?),
path: filename.as_ref().to_owned(),
})
}
}
i
|
identifier_body
|
cell.rs
|
// Lumol, an extensible molecular simulation engine
// Copyright (C) Lumol's contributors — BSD license
use std::fs::File;
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use log::error;
use super::Output;
use lumol_core::System;
/// The `CellOutput` writes all the components of a cell to a file. The columns
/// in the file contain the following values: `step A B C α β γ`.
pub struct CellOutput {
file: BufWriter<File>,
path: PathBuf,
}
impl CellOutput {
/// Create a new `CellOutput` writing to `filename`. The file is replaced if
/// it already exists.
pub fn new<P: AsRef<Path>>(filename: P) -> Result<CellOutput, io::Error> {
Ok(CellOutput {
file: BufWriter::new(File::create(filename.as_ref())?),
path: filename.as_ref().to_owned(),
})
}
}
impl Output for CellOutput {
#[allow(clippy::non_ascii_literal)]
fn setup(&mut self, _: &System) {
writeln_or_log!(self, "# Unit cell of the simulation");
writeln_or_log!(self, "# Step A/Å B/Å C/Å α/deg β/deg γ/deg");
}
fn write(&mut self, system: &System) {
writeln_or_log!(self, "{} {} {} {} {} {} {}",
system.step,
system.cell.a(),
system.cell.b(),
system.cell.c(),
system.cell.alpha(),
system.cell.beta(),
system.cell.gamma()
)
}
}
#[cfg(test)]
#[allow(clippy::non_ascii_literal)]
mod tests {
|
fn cell() {
test_output(
|path| Box::new(CellOutput::new(path).unwrap()),
"# Unit cell of the simulation
# Step A/Å B/Å C/Å α/deg β/deg γ/deg
42 10 10 10 90 90 90
",
);
}
}
|
use super::*;
use super::super::tests::test_output;
#[test]
|
random_line_split
|
cell.rs
|
// Lumol, an extensible molecular simulation engine
// Copyright (C) Lumol's contributors — BSD license
use std::fs::File;
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use log::error;
use super::Output;
use lumol_core::System;
/// The `CellOutput` writes all the components of a cell to a file. The columns
/// in the file contain the following values: `step A B C α β γ`.
pub struct CellOutput {
file: BufWriter<File>,
path: PathBuf,
}
impl CellOutput {
/// Create a new `CellOutput` writing to `filename`. The file is replaced if
/// it already exists.
pub fn new<P: AsRef<Path>>(filename: P) -> Result<CellOutput, io::Error> {
Ok(CellOutput {
file: BufWriter::new(File::create(filename.as_ref())?),
path: filename.as_ref().to_owned(),
})
}
}
impl Output for CellOutput {
#[allow(clippy::non_ascii_literal)]
fn setup
|
self, _: &System) {
writeln_or_log!(self, "# Unit cell of the simulation");
writeln_or_log!(self, "# Step A/Å B/Å C/Å α/deg β/deg γ/deg");
}
fn write(&mut self, system: &System) {
writeln_or_log!(self, "{} {} {} {} {} {} {}",
system.step,
system.cell.a(),
system.cell.b(),
system.cell.c(),
system.cell.alpha(),
system.cell.beta(),
system.cell.gamma()
)
}
}
#[cfg(test)]
#[allow(clippy::non_ascii_literal)]
mod tests {
use super::*;
use super::super::tests::test_output;
#[test]
fn cell() {
test_output(
|path| Box::new(CellOutput::new(path).unwrap()),
"# Unit cell of the simulation
# Step A/Å B/Å C/Å α/deg β/deg γ/deg
42 10 10 10 90 90 90
",
);
}
}
|
(&mut
|
identifier_name
|
issue-34053.rs
|
// run-pass
use std::sync::atomic::{AtomicUsize, Ordering};
static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);
struct A(i32);
impl Drop for A {
fn drop(&mut self)
|
}
static FOO: A = A(123);
const BAR: A = A(456);
impl A {
const BAZ: A = A(789);
}
fn main() {
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 0);
assert_eq!(&FOO.0, &123);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 0);
assert_eq!(BAR.0, 456);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 1);
assert_eq!(A::BAZ.0, 789);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 2);
}
|
{
// update global drop count
DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
}
|
identifier_body
|
issue-34053.rs
|
// run-pass
use std::sync::atomic::{AtomicUsize, Ordering};
static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);
struct A(i32);
impl Drop for A {
fn
|
(&mut self) {
// update global drop count
DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
}
}
static FOO: A = A(123);
const BAR: A = A(456);
impl A {
const BAZ: A = A(789);
}
fn main() {
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 0);
assert_eq!(&FOO.0, &123);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 0);
assert_eq!(BAR.0, 456);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 1);
assert_eq!(A::BAZ.0, 789);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 2);
}
|
drop
|
identifier_name
|
issue-34053.rs
|
// run-pass
use std::sync::atomic::{AtomicUsize, Ordering};
|
fn drop(&mut self) {
// update global drop count
DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
}
}
static FOO: A = A(123);
const BAR: A = A(456);
impl A {
const BAZ: A = A(789);
}
fn main() {
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 0);
assert_eq!(&FOO.0, &123);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 0);
assert_eq!(BAR.0, 456);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 1);
assert_eq!(A::BAZ.0, 789);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 2);
}
|
static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);
struct A(i32);
impl Drop for A {
|
random_line_split
|
solution.rs
|
use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
fn is_triangle(mut triangle: Vec<i32>) -> bool
{
triangle.sort();
return triangle[0] + triangle[1] > triangle[2];
}
fn main() {
let args: Vec<String> = env::args().collect();
let f = File::open(&args[1]).expect("Could not open file");
let reader = BufReader::new(f);
let mut horizontal = 0;
let mut all_nums: Vec<i32> = Vec::new();
|
.filter(|s|!s.is_empty())
.map(|s| s.parse().unwrap())
.collect();
for num in &numbers {
all_nums.push(*num);
}
if is_triangle(numbers) {
horizontal += 1;
}
}
let mut vertical = 0;
for i in 0..(all_nums.len() / 9) {
let offset = i * 9;
for j in 0..3 {
let triangle = vec![all_nums[offset + j + 0], all_nums[offset + j + 3], all_nums[offset + j + 6]];
if is_triangle(triangle) {
vertical += 1;
}
}
}
println!("{} triangles possible horizontally", horizontal);
println!("{} triangles possible vertically", vertical);
}
|
for line in reader.lines() {
let numbers: Vec<i32> = line.unwrap()
.split(' ')
.map(|s| s.trim())
|
random_line_split
|
solution.rs
|
use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
fn is_triangle(mut triangle: Vec<i32>) -> bool
{
triangle.sort();
return triangle[0] + triangle[1] > triangle[2];
}
fn
|
() {
let args: Vec<String> = env::args().collect();
let f = File::open(&args[1]).expect("Could not open file");
let reader = BufReader::new(f);
let mut horizontal = 0;
let mut all_nums: Vec<i32> = Vec::new();
for line in reader.lines() {
let numbers: Vec<i32> = line.unwrap()
.split(' ')
.map(|s| s.trim())
.filter(|s|!s.is_empty())
.map(|s| s.parse().unwrap())
.collect();
for num in &numbers {
all_nums.push(*num);
}
if is_triangle(numbers) {
horizontal += 1;
}
}
let mut vertical = 0;
for i in 0..(all_nums.len() / 9) {
let offset = i * 9;
for j in 0..3 {
let triangle = vec![all_nums[offset + j + 0], all_nums[offset + j + 3], all_nums[offset + j + 6]];
if is_triangle(triangle) {
vertical += 1;
}
}
}
println!("{} triangles possible horizontally", horizontal);
println!("{} triangles possible vertically", vertical);
}
|
main
|
identifier_name
|
solution.rs
|
use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
fn is_triangle(mut triangle: Vec<i32>) -> bool
{
triangle.sort();
return triangle[0] + triangle[1] > triangle[2];
}
fn main() {
let args: Vec<String> = env::args().collect();
let f = File::open(&args[1]).expect("Could not open file");
let reader = BufReader::new(f);
let mut horizontal = 0;
let mut all_nums: Vec<i32> = Vec::new();
for line in reader.lines() {
let numbers: Vec<i32> = line.unwrap()
.split(' ')
.map(|s| s.trim())
.filter(|s|!s.is_empty())
.map(|s| s.parse().unwrap())
.collect();
for num in &numbers {
all_nums.push(*num);
}
if is_triangle(numbers) {
horizontal += 1;
}
}
let mut vertical = 0;
for i in 0..(all_nums.len() / 9) {
let offset = i * 9;
for j in 0..3 {
let triangle = vec![all_nums[offset + j + 0], all_nums[offset + j + 3], all_nums[offset + j + 6]];
if is_triangle(triangle)
|
}
}
println!("{} triangles possible horizontally", horizontal);
println!("{} triangles possible vertically", vertical);
}
|
{
vertical += 1;
}
|
conditional_block
|
solution.rs
|
use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
fn is_triangle(mut triangle: Vec<i32>) -> bool
|
fn main() {
let args: Vec<String> = env::args().collect();
let f = File::open(&args[1]).expect("Could not open file");
let reader = BufReader::new(f);
let mut horizontal = 0;
let mut all_nums: Vec<i32> = Vec::new();
for line in reader.lines() {
let numbers: Vec<i32> = line.unwrap()
.split(' ')
.map(|s| s.trim())
.filter(|s|!s.is_empty())
.map(|s| s.parse().unwrap())
.collect();
for num in &numbers {
all_nums.push(*num);
}
if is_triangle(numbers) {
horizontal += 1;
}
}
let mut vertical = 0;
for i in 0..(all_nums.len() / 9) {
let offset = i * 9;
for j in 0..3 {
let triangle = vec![all_nums[offset + j + 0], all_nums[offset + j + 3], all_nums[offset + j + 6]];
if is_triangle(triangle) {
vertical += 1;
}
}
}
println!("{} triangles possible horizontally", horizontal);
println!("{} triangles possible vertically", vertical);
}
|
{
triangle.sort();
return triangle[0] + triangle[1] > triangle[2];
}
|
identifier_body
|
main.rs
|
mod common;
mod color;
mod math;
mod ppm;
mod renderer;
mod shade_record;
mod shape;
mod tracer;
mod view_plane;
mod world;
use std::cell::RefCell;
use std::rc::Rc;
use color::RGBColor;
use renderer::Renderer;
use tracer::{Tracer, MultipleObjects};
use view_plane::ViewPlane;
use world::World;
fn
|
() {
let mut view_plane = ViewPlane::new(300, 300, 1.0, 1.0, RGBColor::new(0.5, 0.5, 0.5));
let world = Rc::new(RefCell::new(World::new()));
let tracer = Box::new(MultipleObjects::new(world.clone())) as Box<Tracer>;
let renderer = Renderer::new(tracer);
world.borrow().build();
renderer.render_world(&mut view_plane);
ppm::write("output", view_plane.width(), view_plane.height(), view_plane.pixels());
}
|
main
|
identifier_name
|
main.rs
|
mod common;
mod color;
mod math;
mod ppm;
mod renderer;
mod shade_record;
mod shape;
mod tracer;
mod view_plane;
mod world;
use std::cell::RefCell;
use std::rc::Rc;
use color::RGBColor;
use renderer::Renderer;
use tracer::{Tracer, MultipleObjects};
use view_plane::ViewPlane;
use world::World;
fn main()
|
{
let mut view_plane = ViewPlane::new(300, 300, 1.0, 1.0, RGBColor::new(0.5 , 0.5, 0.5));
let world = Rc::new(RefCell::new(World::new()));
let tracer = Box::new(MultipleObjects::new(world.clone())) as Box<Tracer>;
let renderer = Renderer::new(tracer);
world.borrow().build();
renderer.render_world(&mut view_plane);
ppm::write("output", view_plane.width(), view_plane.height(), view_plane.pixels());
}
|
identifier_body
|
|
main.rs
|
mod common;
mod color;
mod math;
mod ppm;
mod renderer;
mod shade_record;
mod shape;
mod tracer;
|
use std::rc::Rc;
use color::RGBColor;
use renderer::Renderer;
use tracer::{Tracer, MultipleObjects};
use view_plane::ViewPlane;
use world::World;
fn main() {
let mut view_plane = ViewPlane::new(300, 300, 1.0, 1.0, RGBColor::new(0.5, 0.5, 0.5));
let world = Rc::new(RefCell::new(World::new()));
let tracer = Box::new(MultipleObjects::new(world.clone())) as Box<Tracer>;
let renderer = Renderer::new(tracer);
world.borrow().build();
renderer.render_world(&mut view_plane);
ppm::write("output", view_plane.width(), view_plane.height(), view_plane.pixels());
}
|
mod view_plane;
mod world;
use std::cell::RefCell;
|
random_line_split
|
streaming.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Content Stream Response
use std::io::{self, Read};
use hyper::{header, server, Decoder, Encoder, Next};
use hyper::net::HttpStream;
use hyper::mime::Mime;
use hyper::status::StatusCode;
use handlers::add_security_headers;
|
pub struct StreamingHandler<R: io::Read> {
buffer: [u8; BUFFER_SIZE],
buffer_leftover: usize,
status: StatusCode,
content: io::BufReader<R>,
mimetype: Mime,
safe_to_embed_on: Option<(String, u16)>,
}
impl<R: io::Read> StreamingHandler<R> {
pub fn new(content: R, status: StatusCode, mimetype: Mime, embeddable_on: Option<(String, u16)>) -> Self {
StreamingHandler {
buffer: [0; BUFFER_SIZE],
buffer_leftover: 0,
status: status,
content: io::BufReader::new(content),
mimetype: mimetype,
safe_to_embed_on: embeddable_on,
}
}
pub fn set_initial_content(&mut self, content: &str) {
assert_eq!(self.buffer_leftover, 0);
let bytes = content.as_bytes();
self.buffer_leftover = bytes.len();
self.buffer[0..self.buffer_leftover].copy_from_slice(bytes);
}
}
impl<R: io::Read> server::Handler<HttpStream> for StreamingHandler<R> {
fn on_request(&mut self, _request: server::Request<HttpStream>) -> Next {
Next::write()
}
fn on_request_readable(&mut self, _decoder: &mut Decoder<HttpStream>) -> Next {
Next::write()
}
fn on_response(&mut self, res: &mut server::Response) -> Next {
res.set_status(self.status);
res.headers_mut().set(header::ContentType(self.mimetype.clone()));
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on.clone());
Next::write()
}
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
fn handle_error(e: io::Error) -> Next {
match e.kind() {
::std::io::ErrorKind::WouldBlock => Next::write(),
_ => Next::end(),
}
}
let write_pos = self.buffer_leftover;
match self.content.read(&mut self.buffer[write_pos..]) {
Err(e) => handle_error(e),
Ok(read) => match encoder.write(&self.buffer[..write_pos + read]) {
Err(e) => handle_error(e),
Ok(0) => Next::end(),
Ok(wrote) => {
self.buffer_leftover = write_pos + read - wrote;
if self.buffer_leftover > 0 {
for i in self.buffer_leftover..write_pos + read {
self.buffer.swap(i, i - self.buffer_leftover);
}
}
Next::write()
},
},
}
}
}
|
const BUFFER_SIZE: usize = 1024;
|
random_line_split
|
streaming.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Content Stream Response
use std::io::{self, Read};
use hyper::{header, server, Decoder, Encoder, Next};
use hyper::net::HttpStream;
use hyper::mime::Mime;
use hyper::status::StatusCode;
use handlers::add_security_headers;
const BUFFER_SIZE: usize = 1024;
pub struct StreamingHandler<R: io::Read> {
buffer: [u8; BUFFER_SIZE],
buffer_leftover: usize,
status: StatusCode,
content: io::BufReader<R>,
mimetype: Mime,
safe_to_embed_on: Option<(String, u16)>,
}
impl<R: io::Read> StreamingHandler<R> {
pub fn
|
(content: R, status: StatusCode, mimetype: Mime, embeddable_on: Option<(String, u16)>) -> Self {
StreamingHandler {
buffer: [0; BUFFER_SIZE],
buffer_leftover: 0,
status: status,
content: io::BufReader::new(content),
mimetype: mimetype,
safe_to_embed_on: embeddable_on,
}
}
pub fn set_initial_content(&mut self, content: &str) {
assert_eq!(self.buffer_leftover, 0);
let bytes = content.as_bytes();
self.buffer_leftover = bytes.len();
self.buffer[0..self.buffer_leftover].copy_from_slice(bytes);
}
}
impl<R: io::Read> server::Handler<HttpStream> for StreamingHandler<R> {
fn on_request(&mut self, _request: server::Request<HttpStream>) -> Next {
Next::write()
}
fn on_request_readable(&mut self, _decoder: &mut Decoder<HttpStream>) -> Next {
Next::write()
}
fn on_response(&mut self, res: &mut server::Response) -> Next {
res.set_status(self.status);
res.headers_mut().set(header::ContentType(self.mimetype.clone()));
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on.clone());
Next::write()
}
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
fn handle_error(e: io::Error) -> Next {
match e.kind() {
::std::io::ErrorKind::WouldBlock => Next::write(),
_ => Next::end(),
}
}
let write_pos = self.buffer_leftover;
match self.content.read(&mut self.buffer[write_pos..]) {
Err(e) => handle_error(e),
Ok(read) => match encoder.write(&self.buffer[..write_pos + read]) {
Err(e) => handle_error(e),
Ok(0) => Next::end(),
Ok(wrote) => {
self.buffer_leftover = write_pos + read - wrote;
if self.buffer_leftover > 0 {
for i in self.buffer_leftover..write_pos + read {
self.buffer.swap(i, i - self.buffer_leftover);
}
}
Next::write()
},
},
}
}
}
|
new
|
identifier_name
|
multiple.rs
|
// rustfmt-normalize_comments: true
// rustfmt-wrap_comments: true
// rustfmt-format_strings: true
// Test of lots of random stuff.
// FIXME split this into multiple, self-contained tests.
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
use std::cell::*;
use std::{
self, any, ascii, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow,
borrow, borrow, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, char,
char, char, char, char, char, char, char, char, char,
};
mod doc;
mod other;
// sfdgfffffffffffffffffffffffffffffffffffffffffffffffffffffff
// ffffffffffffffffffffffffffffffffffffffffff
fn foo(a: isize, b: u32 /* blah blah */, c: f64) {}
fn foo() -> Box<Write +'static>
where
'a: 'b,
for<'a> D<'b>: 'a,
{
hello!()
}
fn baz<
'a: 'b, // comment on 'a
T: SomsssssssssssssssssssssssssssssssssssssssssssssssssssssseType, // comment on T
>(
a: A,
b: B, // comment on b
c: C,
) -> Bob {
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
}
#[rustfmt::skip]
fn qux(a: dadsfa, // Comment 1
b: sdfasdfa, // Comment 2
c: dsfdsafa) // Comment 3
{
}
/// Blah blah blah.
impl Bar {
fn foo(
&mut self,
a: sdfsdfcccccccccccccccccccccccccccccccccccccccccccccccccc, // comment on a
b: sdfasdfsdfasfs, // closing comment
) -> isize {
}
/// Blah blah blah.
pub fn f2(self) {
(foo, bar)
}
#[an_attribute]
fn f3(self) -> Dog {}
}
/// The `nodes` and `edges` method each return instantiations of
/// `Cow<[T]>` to leave implementers the freedom to create
/// entirely new vectors or to pass back slices into internally owned
/// vectors.
pub trait GraphWalk<'a, N, E> {
/// Returns all the nodes in this graph.
fn nodes(&'a self) -> Nodes<'a, N>;
/// Returns all of the edges in this graph.
fn edges(&'a self) -> Edges<'a, E>;
/// The source node for `edge`.
fn source(&'a self, edge: &E) -> N;
/// The target node for `edge`.
fn target(&'a self, edge: &E) -> N;
}
/// A Doc comment
#[AnAttribute]
pub struct Foo {
#[rustfmt::skip]
f : SomeType, // Comment beside a field
f: SomeType, // Comment beside a field
// Comment on a field
g: SomeOtherType,
/// A doc comment on a field
h: AThirdType,
}
struct Bar;
// With a where-clause and generics.
pub struct Foo<'a, Y: Baz>
where
X: Whatever,
{
f: SomeType, // Comment beside a field
}
fn foo(ann: &'a (PpAnn + 'a)) {}
fn main() {
for i in 0i32..4 {
println!("{}", i);
}
while true {
hello();
}
let rc = Cell::new(
42usize,
42usize,
Cell::new(
42usize,
remaining_widthremaining_widthremaining_widthremaining_width,
),
42usize,
);
let rc = RefCell::new(42usize, remaining_width, remaining_width); // a comment
let x = "Hello!!!!!!!!! abcd abcd abcd abcd abcd abcd\n abcd abcd abcd abcd abcd abcd abcd \
abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
abcd abcd";
let s = expand(a, b);
}
fn deconstruct() -> (
SocketAddr,
Method,
Headers,
|
HttpVersion,
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
) {
}
fn deconstruct(
foo: Bar,
) -> (
SocketAddr,
Method,
Headers,
RequestUri,
HttpVersion,
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
) {
}
#[rustfmt::skip]
mod a{
fn foo(x: T) {
let x: T = dfasdf;
}
}
|
RequestUri,
|
random_line_split
|
multiple.rs
|
// rustfmt-normalize_comments: true
// rustfmt-wrap_comments: true
// rustfmt-format_strings: true
// Test of lots of random stuff.
// FIXME split this into multiple, self-contained tests.
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
use std::cell::*;
use std::{
self, any, ascii, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow,
borrow, borrow, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, char,
char, char, char, char, char, char, char, char, char,
};
mod doc;
mod other;
// sfdgfffffffffffffffffffffffffffffffffffffffffffffffffffffff
// ffffffffffffffffffffffffffffffffffffffffff
fn foo(a: isize, b: u32 /* blah blah */, c: f64) {}
fn foo() -> Box<Write +'static>
where
'a: 'b,
for<'a> D<'b>: 'a,
{
hello!()
}
fn baz<
'a: 'b, // comment on 'a
T: SomsssssssssssssssssssssssssssssssssssssssssssssssssssssseType, // comment on T
>(
a: A,
b: B, // comment on b
c: C,
) -> Bob {
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
}
#[rustfmt::skip]
fn qux(a: dadsfa, // Comment 1
b: sdfasdfa, // Comment 2
c: dsfdsafa) // Comment 3
{
}
/// Blah blah blah.
impl Bar {
fn foo(
&mut self,
a: sdfsdfcccccccccccccccccccccccccccccccccccccccccccccccccc, // comment on a
b: sdfasdfsdfasfs, // closing comment
) -> isize {
}
/// Blah blah blah.
pub fn f2(self) {
(foo, bar)
}
#[an_attribute]
fn f3(self) -> Dog {}
}
/// The `nodes` and `edges` method each return instantiations of
/// `Cow<[T]>` to leave implementers the freedom to create
/// entirely new vectors or to pass back slices into internally owned
/// vectors.
pub trait GraphWalk<'a, N, E> {
/// Returns all the nodes in this graph.
fn nodes(&'a self) -> Nodes<'a, N>;
/// Returns all of the edges in this graph.
fn edges(&'a self) -> Edges<'a, E>;
/// The source node for `edge`.
fn source(&'a self, edge: &E) -> N;
/// The target node for `edge`.
fn target(&'a self, edge: &E) -> N;
}
/// A Doc comment
#[AnAttribute]
pub struct Foo {
#[rustfmt::skip]
f : SomeType, // Comment beside a field
f: SomeType, // Comment beside a field
// Comment on a field
g: SomeOtherType,
/// A doc comment on a field
h: AThirdType,
}
struct Bar;
// With a where-clause and generics.
pub struct Foo<'a, Y: Baz>
where
X: Whatever,
{
f: SomeType, // Comment beside a field
}
fn foo(ann: &'a (PpAnn + 'a)) {}
fn main() {
for i in 0i32..4 {
println!("{}", i);
}
while true {
hello();
}
let rc = Cell::new(
42usize,
42usize,
Cell::new(
42usize,
remaining_widthremaining_widthremaining_widthremaining_width,
),
42usize,
);
let rc = RefCell::new(42usize, remaining_width, remaining_width); // a comment
let x = "Hello!!!!!!!!! abcd abcd abcd abcd abcd abcd\n abcd abcd abcd abcd abcd abcd abcd \
abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
abcd abcd";
let s = expand(a, b);
}
fn deconstruct() -> (
SocketAddr,
Method,
Headers,
RequestUri,
HttpVersion,
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
) {
}
fn deconstruct(
foo: Bar,
) -> (
SocketAddr,
Method,
Headers,
RequestUri,
HttpVersion,
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
) {
}
#[rustfmt::skip]
mod a{
fn foo(x: T)
|
}
|
{
let x: T = dfasdf;
}
|
identifier_body
|
multiple.rs
|
// rustfmt-normalize_comments: true
// rustfmt-wrap_comments: true
// rustfmt-format_strings: true
// Test of lots of random stuff.
// FIXME split this into multiple, self-contained tests.
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
use std::cell::*;
use std::{
self, any, ascii, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow,
borrow, borrow, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, char,
char, char, char, char, char, char, char, char, char,
};
mod doc;
mod other;
// sfdgfffffffffffffffffffffffffffffffffffffffffffffffffffffff
// ffffffffffffffffffffffffffffffffffffffffff
fn foo(a: isize, b: u32 /* blah blah */, c: f64) {}
fn foo() -> Box<Write +'static>
where
'a: 'b,
for<'a> D<'b>: 'a,
{
hello!()
}
fn baz<
'a: 'b, // comment on 'a
T: SomsssssssssssssssssssssssssssssssssssssssssssssssssssssseType, // comment on T
>(
a: A,
b: B, // comment on b
c: C,
) -> Bob {
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
#[attr1]
extern crate foo;
#[attr2]
#[attr3]
extern crate foo;
}
#[rustfmt::skip]
fn qux(a: dadsfa, // Comment 1
b: sdfasdfa, // Comment 2
c: dsfdsafa) // Comment 3
{
}
/// Blah blah blah.
impl Bar {
fn foo(
&mut self,
a: sdfsdfcccccccccccccccccccccccccccccccccccccccccccccccccc, // comment on a
b: sdfasdfsdfasfs, // closing comment
) -> isize {
}
/// Blah blah blah.
pub fn f2(self) {
(foo, bar)
}
#[an_attribute]
fn
|
(self) -> Dog {}
}
/// The `nodes` and `edges` method each return instantiations of
/// `Cow<[T]>` to leave implementers the freedom to create
/// entirely new vectors or to pass back slices into internally owned
/// vectors.
pub trait GraphWalk<'a, N, E> {
/// Returns all the nodes in this graph.
fn nodes(&'a self) -> Nodes<'a, N>;
/// Returns all of the edges in this graph.
fn edges(&'a self) -> Edges<'a, E>;
/// The source node for `edge`.
fn source(&'a self, edge: &E) -> N;
/// The target node for `edge`.
fn target(&'a self, edge: &E) -> N;
}
/// A Doc comment
#[AnAttribute]
pub struct Foo {
#[rustfmt::skip]
f : SomeType, // Comment beside a field
f: SomeType, // Comment beside a field
// Comment on a field
g: SomeOtherType,
/// A doc comment on a field
h: AThirdType,
}
struct Bar;
// With a where-clause and generics.
pub struct Foo<'a, Y: Baz>
where
X: Whatever,
{
f: SomeType, // Comment beside a field
}
fn foo(ann: &'a (PpAnn + 'a)) {}
fn main() {
for i in 0i32..4 {
println!("{}", i);
}
while true {
hello();
}
let rc = Cell::new(
42usize,
42usize,
Cell::new(
42usize,
remaining_widthremaining_widthremaining_widthremaining_width,
),
42usize,
);
let rc = RefCell::new(42usize, remaining_width, remaining_width); // a comment
let x = "Hello!!!!!!!!! abcd abcd abcd abcd abcd abcd\n abcd abcd abcd abcd abcd abcd abcd \
abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
abcd abcd";
let s = expand(a, b);
}
fn deconstruct() -> (
SocketAddr,
Method,
Headers,
RequestUri,
HttpVersion,
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
) {
}
fn deconstruct(
foo: Bar,
) -> (
SocketAddr,
Method,
Headers,
RequestUri,
HttpVersion,
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
) {
}
#[rustfmt::skip]
mod a{
fn foo(x: T) {
let x: T = dfasdf;
}
}
|
f3
|
identifier_name
|
managed_process.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Assumes Unix
use libc::{self, c_int};
use std::thread;
use std::thread::JoinHandle;
use std::sync::{Arc, Mutex, RwLock};
use std::process::Child;
use std::io::{Error, ErrorKind, Result};
use std::time::{Duration, Instant};
/// Unix exit statuses
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub struct ExitStatus(c_int);
const RESTART_TIME_THRESHOLD: u64 = 5; // seconds
pub struct ManagedProcess {
kill_signal: Arc<Mutex<u32>>,
pid: Arc<Mutex<Option<u32>>>,
thread: JoinHandle<()>,
backoff: Arc<RwLock<Backoff>>,
}
struct Backoff {
restart_count: u64,
restart_threshold: Duration,
start_time: Option<Instant>,
backoff: u64,
}
impl Backoff {
fn new(restart_threshold: Duration) -> Self {
Backoff {
restart_count: 0,
restart_threshold: restart_threshold,
start_time: None,
backoff: 1,
}
}
fn from_secs(restart_threshold_secs: u64) -> Self {
Backoff::new(Duration::from_secs(restart_threshold_secs))
}
fn next_backoff(&mut self) -> Duration {
let end_time = Instant::now();
let duration_to_backoff = if let Some(start_time) = self.start_time {
if (end_time - start_time) < self.restart_threshold {
self.backoff += 1;
// non-linear back off
Duration::from_secs((self.backoff * self.backoff) >> 1)
} else {
self.backoff = 1;
Duration::from_secs(0)
}
} else {
Duration::from_secs(0)
};
self.restart_count += 1;
self.start_time = Some(Instant::now());
duration_to_backoff
}
pub fn get_restart_count(&self) -> u64 {
self.restart_count
}
}
impl ManagedProcess {
/// Create a new ManagedProcess and start it.
///
/// # Examples
///
/// use tunnel_controller::ManagedProcess;
/// use std::process::Command;
///
/// let process = ManagedProcess::start(|| {
/// Command::new("echo")
/// .arg("Hello")
/// .arg("World")
/// .spawn()
/// });
///
pub fn start<F:'static>(spawn: F) -> Result<ManagedProcess>
where F: Fn() -> Result<Child> + Send
{
let pid = Arc::new(Mutex::new(None));
// Uses a u32 Mutex to avoid the compiler complaining that you can use an AtomicBool.
// In this case we want a bool like thing _and_ a lock.
let kill_signal = Arc::new(Mutex::new(0));
let shared_kill_signal = kill_signal.clone();
let backoff = Arc::new(RwLock::new(Backoff::from_secs(RESTART_TIME_THRESHOLD)));
let shared_pid = pid.clone();
let shared_backoff = backoff.clone();
let thread = thread::spawn(move || {
let backoff = shared_backoff;
loop {
let mut child_process;
{
let kill_signal = shared_kill_signal.lock().unwrap();
let mut pid = shared_pid.lock().unwrap();
if *kill_signal == 1 {
*pid = None;
debug!("Received process kill signal");
break;
}
info!("Starting process. Restarted {} times",
checklock!(backoff.read()).get_restart_count());
child_process = spawn().unwrap();
*pid = Some(child_process.id());
}
child_process.wait().unwrap();
let backoff_duration = checklock!(backoff.write()).next_backoff();
thread::sleep(backoff_duration);
}
});
Ok(ManagedProcess {
backoff: backoff,
kill_signal: kill_signal,
pid: pid,
thread: thread,
})
}
#[allow(dead_code)]
pub fn get_restart_count(&self) -> u64 {
checklock!(self.backoff.read()).get_restart_count()
}
/// Get the current process ID or None if no process is running
fn get_pid(&self) -> Option<u32> {
*self.pid.lock().unwrap()
}
/// Shut the ManagedProcess down safely. Equivalent to sending SIGKILL to the
/// running process if it is currently alive
///
/// # Examples
///
/// use tunnel_controller::ManagedProcess;
/// use std::process::Command;
///
/// let process = ManagedProcess::start(|| {
/// Command::new("sleep")
/// .arg("10000")
/// .spawn()
/// });
///
/// process.shutdown().unwrap();
///
pub fn shutdown(self) -> Result<()> {
{
let mut kill_signal = self.kill_signal.lock().unwrap();
*kill_signal = 1;
}
// If there is no assigned pid, the process is not running.
let pid = self.get_pid();
if pid.is_none() {
self.join_thread();
return Ok(());
}
let pid = pid.unwrap() as i32;
// if the process has finished, and therefore had waitpid called,
// and we kill it, then on unix we might ending up killing a
// newer process that happens to have a re-used id
let status_result = try_wait(pid);
let needs_kill = match status_result {
Ok(Some(_)) => {
// Process is already exited
false
}
Ok(None) => {
// Process is still alive
true
}
Err(e) => {
// Something went wrong probably at the OS level, warn and don't
// try and kill the process.
warn!("{}", e);
false
}
};
if needs_kill {
debug!("Sending SIGKILL to pid: {}", pid);
if let Err(e) = unsafe { c_rv(libc::kill(pid, libc::SIGKILL)) } {
warn!("{}", e);
}
}
self.join_thread();
Ok(())
}
/// Wait for the thread to exit
fn join_thread(self) -> () {
self.thread.join().unwrap();
}
}
/// A non-blocking 'wait' for a given process id.
fn try_wait(id: i32) -> Result<Option<ExitStatus>> {
let mut status = 0 as c_int;
match c_rv_retry(|| unsafe { libc::waitpid(id, &mut status, libc::WNOHANG) }) {
Ok(0) => Ok(None),
Ok(n) if n == id => Ok(Some(ExitStatus(status))),
Ok(n) => Err(Error::new(ErrorKind::NotFound, format!("Unknown pid: {}", n))),
Err(e) => Err(Error::new(ErrorKind::Other, format!("Unknown waitpid error: {}", e))),
}
}
/// Check the return value of libc function and turn it into a
/// Result type
fn c_rv(t: c_int) -> Result<c_int> {
if t == -1
|
else {
Ok(t)
}
}
/// Check the return value of a libc function, but, retry the given function if
/// the returned error is EINTR (Interrupted)
fn c_rv_retry<F>(mut f: F) -> Result<c_int>
where F: FnMut() -> c_int
{
loop {
match c_rv(f()) {
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
other => return other,
}
}
}
#[cfg(test)]
mod test {
use super::Backoff;
use std::thread;
use std::time::Duration;
#[test]
fn test_backoff_immediate_if_failed_after_threshold() {
let mut backoff = Backoff::from_secs(2);
assert_eq!(backoff.next_backoff().as_secs(), 0);
// Simulate process running
thread::sleep(Duration::new(4, 0));
assert_eq!(backoff.next_backoff().as_secs(), 0);
}
#[test]
fn test_backoff_wait_if_failed_before_threshold() {
let mut backoff = Backoff::from_secs(1);
assert_eq!(backoff.next_backoff().as_secs(), 0);
assert_eq!(backoff.next_backoff().as_secs(), 2);
assert_eq!(backoff.next_backoff().as_secs(), 4);
assert_eq!(backoff.next_backoff().as_secs(), 8);
assert_eq!(backoff.next_backoff().as_secs(), 12);
assert_eq!(backoff.next_backoff().as_secs(), 18);
assert_eq!(backoff.next_backoff().as_secs(), 24);
}
#[test]
fn test_backoff_reset_if_running_for_more_than_threshold() {
let mut backoff = Backoff::from_secs(1);
assert_eq!(backoff.next_backoff().as_secs(), 0);
assert_eq!(backoff.next_backoff().as_secs(), 2);
assert_eq!(backoff.next_backoff().as_secs(), 4);
assert_eq!(backoff.next_backoff().as_secs(), 8);
// Simulate process running
thread::sleep(Duration::new(3, 0));
assert_eq!(backoff.next_backoff().as_secs(), 0);
}
}
#[test]
fn test_managed_process_restart() {
use std::process::Command;
let process = ManagedProcess::start(|| {
Command::new("sleep")
.arg("0")
.spawn()
})
.unwrap();
// Maybe spin with try_recv and check a duration
// to assert liveness?
let mut spin_count = 0;
while process.get_restart_count() < 2 {
if spin_count > 2 {
panic!("Process has not restarted twice, within the expected amount of time");
} else {
spin_count += 1;
thread::sleep(Duration::new(3, 0));
}
}
process.shutdown().unwrap();
}
#[test]
fn test_managed_process_shutdown() {
use std::process::Command;
// Ideally need a timeout. The test should be, if shutdown doesn't happen immediately,
// something's broken.
let process = ManagedProcess::start(|| {
Command::new("sleep")
.arg("1000")
.spawn()
})
.unwrap();
process.shutdown().unwrap();
}
|
{
Err(Error::last_os_error())
}
|
conditional_block
|
managed_process.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Assumes Unix
use libc::{self, c_int};
use std::thread;
use std::thread::JoinHandle;
use std::sync::{Arc, Mutex, RwLock};
use std::process::Child;
use std::io::{Error, ErrorKind, Result};
use std::time::{Duration, Instant};
/// Unix exit statuses
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub struct ExitStatus(c_int);
const RESTART_TIME_THRESHOLD: u64 = 5; // seconds
pub struct ManagedProcess {
kill_signal: Arc<Mutex<u32>>,
pid: Arc<Mutex<Option<u32>>>,
thread: JoinHandle<()>,
backoff: Arc<RwLock<Backoff>>,
}
struct Backoff {
restart_count: u64,
restart_threshold: Duration,
start_time: Option<Instant>,
backoff: u64,
}
impl Backoff {
fn new(restart_threshold: Duration) -> Self {
Backoff {
restart_count: 0,
restart_threshold: restart_threshold,
start_time: None,
backoff: 1,
}
}
fn
|
(restart_threshold_secs: u64) -> Self {
Backoff::new(Duration::from_secs(restart_threshold_secs))
}
fn next_backoff(&mut self) -> Duration {
let end_time = Instant::now();
let duration_to_backoff = if let Some(start_time) = self.start_time {
if (end_time - start_time) < self.restart_threshold {
self.backoff += 1;
// non-linear back off
Duration::from_secs((self.backoff * self.backoff) >> 1)
} else {
self.backoff = 1;
Duration::from_secs(0)
}
} else {
Duration::from_secs(0)
};
self.restart_count += 1;
self.start_time = Some(Instant::now());
duration_to_backoff
}
pub fn get_restart_count(&self) -> u64 {
self.restart_count
}
}
impl ManagedProcess {
/// Create a new ManagedProcess and start it.
///
/// # Examples
///
/// use tunnel_controller::ManagedProcess;
/// use std::process::Command;
///
/// let process = ManagedProcess::start(|| {
/// Command::new("echo")
/// .arg("Hello")
/// .arg("World")
/// .spawn()
/// });
///
pub fn start<F:'static>(spawn: F) -> Result<ManagedProcess>
where F: Fn() -> Result<Child> + Send
{
let pid = Arc::new(Mutex::new(None));
// Uses a u32 Mutex to avoid the compiler complaining that you can use an AtomicBool.
// In this case we want a bool like thing _and_ a lock.
let kill_signal = Arc::new(Mutex::new(0));
let shared_kill_signal = kill_signal.clone();
let backoff = Arc::new(RwLock::new(Backoff::from_secs(RESTART_TIME_THRESHOLD)));
let shared_pid = pid.clone();
let shared_backoff = backoff.clone();
let thread = thread::spawn(move || {
let backoff = shared_backoff;
loop {
let mut child_process;
{
let kill_signal = shared_kill_signal.lock().unwrap();
let mut pid = shared_pid.lock().unwrap();
if *kill_signal == 1 {
*pid = None;
debug!("Received process kill signal");
break;
}
info!("Starting process. Restarted {} times",
checklock!(backoff.read()).get_restart_count());
child_process = spawn().unwrap();
*pid = Some(child_process.id());
}
child_process.wait().unwrap();
let backoff_duration = checklock!(backoff.write()).next_backoff();
thread::sleep(backoff_duration);
}
});
Ok(ManagedProcess {
backoff: backoff,
kill_signal: kill_signal,
pid: pid,
thread: thread,
})
}
#[allow(dead_code)]
pub fn get_restart_count(&self) -> u64 {
checklock!(self.backoff.read()).get_restart_count()
}
/// Get the current process ID or None if no process is running
fn get_pid(&self) -> Option<u32> {
*self.pid.lock().unwrap()
}
/// Shut the ManagedProcess down safely. Equivalent to sending SIGKILL to the
/// running process if it is currently alive
///
/// # Examples
///
/// use tunnel_controller::ManagedProcess;
/// use std::process::Command;
///
/// let process = ManagedProcess::start(|| {
/// Command::new("sleep")
/// .arg("10000")
/// .spawn()
/// });
///
/// process.shutdown().unwrap();
///
pub fn shutdown(self) -> Result<()> {
{
let mut kill_signal = self.kill_signal.lock().unwrap();
*kill_signal = 1;
}
// If there is no assigned pid, the process is not running.
let pid = self.get_pid();
if pid.is_none() {
self.join_thread();
return Ok(());
}
let pid = pid.unwrap() as i32;
// if the process has finished, and therefore had waitpid called,
// and we kill it, then on unix we might ending up killing a
// newer process that happens to have a re-used id
let status_result = try_wait(pid);
let needs_kill = match status_result {
Ok(Some(_)) => {
// Process is already exited
false
}
Ok(None) => {
// Process is still alive
true
}
Err(e) => {
// Something went wrong probably at the OS level, warn and don't
// try and kill the process.
warn!("{}", e);
false
}
};
if needs_kill {
debug!("Sending SIGKILL to pid: {}", pid);
if let Err(e) = unsafe { c_rv(libc::kill(pid, libc::SIGKILL)) } {
warn!("{}", e);
}
}
self.join_thread();
Ok(())
}
/// Wait for the thread to exit
fn join_thread(self) -> () {
self.thread.join().unwrap();
}
}
/// A non-blocking 'wait' for a given process id.
fn try_wait(id: i32) -> Result<Option<ExitStatus>> {
let mut status = 0 as c_int;
match c_rv_retry(|| unsafe { libc::waitpid(id, &mut status, libc::WNOHANG) }) {
Ok(0) => Ok(None),
Ok(n) if n == id => Ok(Some(ExitStatus(status))),
Ok(n) => Err(Error::new(ErrorKind::NotFound, format!("Unknown pid: {}", n))),
Err(e) => Err(Error::new(ErrorKind::Other, format!("Unknown waitpid error: {}", e))),
}
}
/// Check the return value of libc function and turn it into a
/// Result type
fn c_rv(t: c_int) -> Result<c_int> {
if t == -1 {
Err(Error::last_os_error())
} else {
Ok(t)
}
}
/// Check the return value of a libc function, but, retry the given function if
/// the returned error is EINTR (Interrupted)
fn c_rv_retry<F>(mut f: F) -> Result<c_int>
where F: FnMut() -> c_int
{
loop {
match c_rv(f()) {
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
other => return other,
}
}
}
#[cfg(test)]
mod test {
    use super::Backoff;
    use std::thread;
    use std::time::Duration;
    // A run that lasts longer than the threshold counts as healthy, so the
    // next restart should not be delayed at all.
    #[test]
    fn test_backoff_immediate_if_failed_after_threshold() {
        let mut backoff = Backoff::from_secs(2);
        assert_eq!(backoff.next_backoff().as_secs(), 0);
        // Simulate process running
        thread::sleep(Duration::new(4, 0));
        assert_eq!(backoff.next_backoff().as_secs(), 0);
    }
    // Back-to-back failures (faster than the threshold) escalate the delay
    // quadratically: for the n-th consecutive failure the delay is
    // (n * n) >> 1 seconds, matching next_backoff's formula.
    #[test]
    fn test_backoff_wait_if_failed_before_threshold() {
        let mut backoff = Backoff::from_secs(1);
        assert_eq!(backoff.next_backoff().as_secs(), 0);
        assert_eq!(backoff.next_backoff().as_secs(), 2);
        assert_eq!(backoff.next_backoff().as_secs(), 4);
        assert_eq!(backoff.next_backoff().as_secs(), 8);
        assert_eq!(backoff.next_backoff().as_secs(), 12);
        assert_eq!(backoff.next_backoff().as_secs(), 18);
        assert_eq!(backoff.next_backoff().as_secs(), 24);
    }
    // Once a run outlives the threshold, the failure streak resets and the
    // next restart is immediate again, despite earlier quick failures.
    #[test]
    fn test_backoff_reset_if_running_for_more_than_threshold() {
        let mut backoff = Backoff::from_secs(1);
        assert_eq!(backoff.next_backoff().as_secs(), 0);
        assert_eq!(backoff.next_backoff().as_secs(), 2);
        assert_eq!(backoff.next_backoff().as_secs(), 4);
        assert_eq!(backoff.next_backoff().as_secs(), 8);
        // Simulate process running
        thread::sleep(Duration::new(3, 0));
        assert_eq!(backoff.next_backoff().as_secs(), 0);
    }
}
// `sleep 0` exits immediately, so the supervisor should keep restarting it;
// poll the restart count with a bounded number of sleeps, then shut down.
#[test]
fn test_managed_process_restart() {
    use std::process::Command;
    let process = ManagedProcess::start(|| {
        Command::new("sleep")
            .arg("0")
            .spawn()
    })
    .unwrap();
    // Maybe spin with try_recv and check a duration
    // to assert liveness?
    let mut spin_count = 0;
    while process.get_restart_count() < 2 {
        if spin_count > 2 {
            panic!("Process has not restarted twice, within the expected amount of time");
        } else {
            spin_count += 1;
            thread::sleep(Duration::new(3, 0));
        }
    }
    process.shutdown().unwrap();
}
// Shutting down while the child (`sleep 1000`) is still running should kill
// it and return promptly rather than waiting for the child to finish.
#[test]
fn test_managed_process_shutdown() {
    use std::process::Command;
    // Ideally need a timeout. The test should be, if shutdown doesn't happen immediately,
    // something's broken.
    let process = ManagedProcess::start(|| {
        Command::new("sleep")
            .arg("1000")
            .spawn()
    })
    .unwrap();
    process.shutdown().unwrap();
}
|
from_secs
|
identifier_name
|
managed_process.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Assumes Unix
use libc::{self, c_int};
use std::thread;
use std::thread::JoinHandle;
use std::sync::{Arc, Mutex, RwLock};
use std::process::Child;
use std::io::{Error, ErrorKind, Result};
use std::time::{Duration, Instant};
/// Unix exit statuses
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub struct ExitStatus(c_int);
const RESTART_TIME_THRESHOLD: u64 = 5; // seconds
/// Supervises a child process: a background thread restarts it (with
/// backoff) whenever it exits, until `shutdown` is requested.
pub struct ManagedProcess {
    // 0 = keep running, 1 = stop. A Mutex'd u32 is used (rather than an
    // AtomicBool) because the supervisor checks it under a lock alongside pid.
    kill_signal: Arc<Mutex<u32>>,
    // Pid of the currently running child, or None when no child is alive.
    pid: Arc<Mutex<Option<u32>>>,
    // Handle to the background supervisor thread.
    thread: JoinHandle<()>,
    // Restart counters/delay state shared with the supervisor thread.
    backoff: Arc<RwLock<Backoff>>,
}
/// Tracks restart attempts and computes how long to wait before the next one.
struct Backoff {
    // Total number of times next_backoff() has been called.
    restart_count: u64,
    // Minimum run time; exiting sooner than this counts as a quick failure.
    restart_threshold: Duration,
    // When the last (re)start was recorded; None before the first call.
    start_time: Option<Instant>,
    // Consecutive-failure streak, drives the quadratic delay growth.
    backoff: u64,
}
impl Backoff {
/// Create a `Backoff` that treats any run shorter than `restart_threshold`
/// as a quick failure; counters start at their initial values.
fn new(restart_threshold: Duration) -> Self {
    Backoff {
        restart_count: 0,
        backoff: 1,
        start_time: None,
        restart_threshold: restart_threshold,
    }
}
/// Convenience constructor taking the restart threshold in whole seconds.
fn from_secs(restart_threshold_secs: u64) -> Self {
    let threshold = Duration::from_secs(restart_threshold_secs);
    Backoff::new(threshold)
}
/// Record a (re)start and return how long the caller should sleep first.
///
/// The delay is zero on the very first call and whenever the previous run
/// lasted at least `restart_threshold`; otherwise the failure streak grows
/// and the delay is `(streak * streak) >> 1` seconds.
fn next_backoff(&mut self) -> Duration {
    let now = Instant::now();
    let delay = match self.start_time {
        // Previous run died before the threshold: escalate quadratically.
        Some(started) if now - started < self.restart_threshold => {
            self.backoff += 1;
            // non-linear back off
            Duration::from_secs((self.backoff * self.backoff) >> 1)
        }
        // Previous run survived long enough: reset the streak.
        Some(_) => {
            self.backoff = 1;
            Duration::from_secs(0)
        }
        // First ever start: no delay.
        None => Duration::from_secs(0),
    };
    self.restart_count += 1;
    self.start_time = Some(Instant::now());
    delay
}
/// Number of times `next_backoff` has been invoked, i.e. how many
/// (re)starts have been recorded so far.
pub fn get_restart_count(&self) -> u64 {
    self.restart_count
}
|
/// Create a new ManagedProcess and start it.
///
/// # Examples
///
/// use tunnel_controller::ManagedProcess;
/// use std::process::Command;
///
/// let process = ManagedProcess::start(|| {
/// Command::new("echo")
/// .arg("Hello")
/// .arg("World")
/// .spawn()
/// });
///
pub fn start<F:'static>(spawn: F) -> Result<ManagedProcess>
    where F: Fn() -> Result<Child> + Send
{
    // State shared between this handle and the supervisor thread.
    let pid = Arc::new(Mutex::new(None));
    // Uses a u32 Mutex to avoid the compiler complaining that you can use an AtomicBool.
    // In this case we want a bool like thing _and_ a lock.
    let kill_signal = Arc::new(Mutex::new(0));
    let shared_kill_signal = kill_signal.clone();
    let backoff = Arc::new(RwLock::new(Backoff::from_secs(RESTART_TIME_THRESHOLD)));
    let shared_pid = pid.clone();
    let shared_backoff = backoff.clone();
    // Supervisor loop: spawn the child, record its pid, wait for it to
    // exit, sleep for the computed backoff, repeat — until the kill
    // signal is observed.
    let thread = thread::spawn(move || {
        let backoff = shared_backoff;
        loop {
            let mut child_process;
            {
                // Check the kill flag and (re)spawn while holding both
                // locks, so shutdown() cannot race with a new child pid
                // being recorded.
                let kill_signal = shared_kill_signal.lock().unwrap();
                let mut pid = shared_pid.lock().unwrap();
                if *kill_signal == 1 {
                    *pid = None;
                    debug!("Received process kill signal");
                    break;
                }
                info!("Starting process. Restarted {} times",
                      checklock!(backoff.read()).get_restart_count());
                child_process = spawn().unwrap();
                *pid = Some(child_process.id());
            }
            // Block (locks released) until the child exits.
            child_process.wait().unwrap();
            let backoff_duration = checklock!(backoff.write()).next_backoff();
            thread::sleep(backoff_duration);
        }
    });
    Ok(ManagedProcess {
        backoff: backoff,
        kill_signal: kill_signal,
        pid: pid,
        thread: thread,
    })
}
/// Number of times the supervised process has been (re)started so far.
#[allow(dead_code)]
pub fn get_restart_count(&self) -> u64 {
    checklock!(self.backoff.read()).get_restart_count()
}
/// Get the current process ID or None if no process is running
fn get_pid(&self) -> Option<u32> {
    let guard = self.pid.lock().unwrap();
    *guard
}
/// Shut the ManagedProcess down safely. Equivalent to sending SIGKILL to the
/// running process if it is currently alive
///
/// # Examples
///
/// use tunnel_controller::ManagedProcess;
/// use std::process::Command;
///
/// let process = ManagedProcess::start(|| {
/// Command::new("sleep")
/// .arg("10000")
/// .spawn()
/// });
///
/// process.shutdown().unwrap();
///
pub fn shutdown(self) -> Result<()> {
    // Flag the supervisor loop to stop before it spawns another child.
    {
        let mut kill_signal = self.kill_signal.lock().unwrap();
        *kill_signal = 1;
    }
    // If there is no assigned pid, the process is not running.
    let pid = self.get_pid();
    if pid.is_none() {
        self.join_thread();
        return Ok(());
    }
    let pid = pid.unwrap() as i32;
    // if the process has finished, and therefore had waitpid called,
    // and we kill it, then on unix we might ending up killing a
    // newer process that happens to have a re-used id
    let status_result = try_wait(pid);
    let needs_kill = match status_result {
        Ok(Some(_)) => {
            // Process is already exited
            false
        }
        Ok(None) => {
            // Process is still alive
            true
        }
        Err(e) => {
            // Something went wrong probably at the OS level, warn and don't
            // try and kill the process.
            warn!("{}", e);
            false
        }
    };
    if needs_kill {
        debug!("Sending SIGKILL to pid: {}", pid);
        // Best effort: a kill failure is logged, not propagated.
        if let Err(e) = unsafe { c_rv(libc::kill(pid, libc::SIGKILL)) } {
            warn!("{}", e);
        }
    }
    self.join_thread();
    Ok(())
}
/// Block until the supervisor thread has exited.
///
/// # Panics
/// Panics if the supervisor thread itself panicked.
// The explicit `-> ()` return type was redundant (clippy::unused_unit).
fn join_thread(self) {
    self.thread.join().unwrap();
}
}
/// A non-blocking 'wait' for a given process id.
///
/// Returns `Ok(None)` while the process is still running, `Ok(Some(status))`
/// once it has exited. Any other pid returned by `waitpid` is reported as
/// `NotFound`; a syscall failure maps to `Other`.
fn try_wait(id: i32) -> Result<Option<ExitStatus>> {
    let mut status = 0 as c_int;
    match c_rv_retry(|| unsafe { libc::waitpid(id, &mut status, libc::WNOHANG) }) {
        // Under WNOHANG, waitpid returns 0 while the child has not exited.
        Ok(0) => Ok(None),
        Ok(n) if n == id => Ok(Some(ExitStatus(status))),
        Ok(n) => Err(Error::new(ErrorKind::NotFound, format!("Unknown pid: {}", n))),
        Err(e) => Err(Error::new(ErrorKind::Other, format!("Unknown waitpid error: {}", e))),
    }
}
/// Check the return value of libc function and turn it into a
/// Result type
/// Check the return value of libc function and turn it into a
/// Result type
///
/// `-1` signals failure (the cause is read from `errno` via
/// `Error::last_os_error`); any other value is returned unchanged.
fn c_rv(t: c_int) -> Result<c_int> {
    if t == -1 {
        Err(Error::last_os_error())
    } else {
        Ok(t)
    }
}
/// Check the return value of a libc function, but, retry the given function if
/// the returned error is EINTR (Interrupted)
/// Check the return value of a libc function, but, retry the given function if
/// the returned error is EINTR (Interrupted)
fn c_rv_retry<F>(mut f: F) -> Result<c_int>
    where F: FnMut() -> c_int
{
    loop {
        match c_rv(f()) {
            // EINTR: the call was interrupted by a signal — retry it.
            Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
            // Success or any other error ends the loop.
            other => return other,
        }
    }
}
#[cfg(test)]
mod test {
    use super::Backoff;
    use std::thread;
    use std::time::Duration;
    // A run longer than the threshold counts as healthy, so the next
    // restart should not be delayed.
    #[test]
    fn test_backoff_immediate_if_failed_after_threshold() {
        let mut backoff = Backoff::from_secs(2);
        assert_eq!(backoff.next_backoff().as_secs(), 0);
        // Simulate process running
        thread::sleep(Duration::new(4, 0));
        assert_eq!(backoff.next_backoff().as_secs(), 0);
    }
    // Back-to-back failures escalate quadratically: for the n-th consecutive
    // failure the delay is (n * n) >> 1 seconds.
    #[test]
    fn test_backoff_wait_if_failed_before_threshold() {
        let mut backoff = Backoff::from_secs(1);
        assert_eq!(backoff.next_backoff().as_secs(), 0);
        assert_eq!(backoff.next_backoff().as_secs(), 2);
        assert_eq!(backoff.next_backoff().as_secs(), 4);
        assert_eq!(backoff.next_backoff().as_secs(), 8);
        assert_eq!(backoff.next_backoff().as_secs(), 12);
        assert_eq!(backoff.next_backoff().as_secs(), 18);
        assert_eq!(backoff.next_backoff().as_secs(), 24);
    }
    // Surviving past the threshold resets the streak; the next restart is
    // immediate again despite earlier quick failures.
    #[test]
    fn test_backoff_reset_if_running_for_more_than_threshold() {
        let mut backoff = Backoff::from_secs(1);
        assert_eq!(backoff.next_backoff().as_secs(), 0);
        assert_eq!(backoff.next_backoff().as_secs(), 2);
        assert_eq!(backoff.next_backoff().as_secs(), 4);
        assert_eq!(backoff.next_backoff().as_secs(), 8);
        // Simulate process running
        thread::sleep(Duration::new(3, 0));
        assert_eq!(backoff.next_backoff().as_secs(), 0);
    }
}
// `sleep 0` exits immediately, so the supervisor should keep restarting it;
// poll the restart count with a bounded number of sleeps, then shut down.
#[test]
fn test_managed_process_restart() {
    use std::process::Command;
    let process = ManagedProcess::start(|| {
        Command::new("sleep")
            .arg("0")
            .spawn()
    })
    .unwrap();
    // Maybe spin with try_recv and check a duration
    // to assert liveness?
    let mut spin_count = 0;
    while process.get_restart_count() < 2 {
        if spin_count > 2 {
            panic!("Process has not restarted twice, within the expected amount of time");
        } else {
            spin_count += 1;
            thread::sleep(Duration::new(3, 0));
        }
    }
    process.shutdown().unwrap();
}
// Shutting down while the child (`sleep 1000`) is still running should kill
// it and return promptly rather than waiting for the child to finish.
#[test]
fn test_managed_process_shutdown() {
    use std::process::Command;
    // Ideally need a timeout. The test should be, if shutdown doesn't happen immediately,
    // something's broken.
    let process = ManagedProcess::start(|| {
        Command::new("sleep")
            .arg("1000")
            .spawn()
    })
    .unwrap();
    process.shutdown().unwrap();
}
|
}
impl ManagedProcess {
|
random_line_split
|
regions-close-object-into-object.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait A<T> {}
struct B<'a, T>(&'a A<T>+'a);
trait X {}
impl<'a, T> X for B<'a, T> {}
fn f<'a, T, U>(v: Box<A<T>+'static>) -> Box<X+'static> {
box B(&*v) as Box<X>
}
fn
|
<'a, T:'static>(v: Box<A<T>>) -> Box<X+'static> {
box B(&*v) as Box<X> //~ ERROR cannot infer
}
fn h<'a, T, U>(v: Box<A<U>+'static>) -> Box<X+'static> {
box B(&*v) as Box<X>
}
fn i<'a, T, U>(v: Box<A<U>>) -> Box<X+'static> {
box B(&*v) as Box<X> //~ ERROR cannot infer
}
fn main() {}
|
g
|
identifier_name
|
regions-close-object-into-object.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait A<T> {}
struct B<'a, T>(&'a A<T>+'a);
trait X {}
impl<'a, T> X for B<'a, T> {}
fn f<'a, T, U>(v: Box<A<T>+'static>) -> Box<X+'static> {
box B(&*v) as Box<X>
}
fn g<'a, T:'static>(v: Box<A<T>>) -> Box<X+'static> {
box B(&*v) as Box<X> //~ ERROR cannot infer
}
|
fn i<'a, T, U>(v: Box<A<U>>) -> Box<X+'static> {
box B(&*v) as Box<X> //~ ERROR cannot infer
}
fn main() {}
|
fn h<'a, T, U>(v: Box<A<U>+'static>) -> Box<X+'static> {
box B(&*v) as Box<X>
}
|
random_line_split
|
regions-close-object-into-object.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait A<T> {}
struct B<'a, T>(&'a A<T>+'a);
trait X {}
impl<'a, T> X for B<'a, T> {}
fn f<'a, T, U>(v: Box<A<T>+'static>) -> Box<X+'static> {
box B(&*v) as Box<X>
}
fn g<'a, T:'static>(v: Box<A<T>>) -> Box<X+'static>
|
fn h<'a, T, U>(v: Box<A<U>+'static>) -> Box<X+'static> {
box B(&*v) as Box<X>
}
fn i<'a, T, U>(v: Box<A<U>>) -> Box<X+'static> {
box B(&*v) as Box<X> //~ ERROR cannot infer
}
fn main() {}
|
{
box B(&*v) as Box<X> //~ ERROR cannot infer
}
|
identifier_body
|
mod.rs
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
pub mod device;
pub mod event_handler;
pub mod persist;
pub mod test_utils;
mod utils;
use vm_memory::GuestMemoryError;
pub use self::device::Balloon;
pub use self::device::BalloonConfig;
pub use self::device::BalloonStats;
pub use self::event_handler::*;
/// Device ID used in MMIO device identification.
/// Because Balloon is unique per-vm, this ID can be hardcoded.
pub const BALLOON_DEV_ID: &str = "balloon";
pub const CONFIG_SPACE_SIZE: usize = 8;
pub const QUEUE_SIZE: u16 = 256;
pub const NUM_QUEUES: usize = 3;
pub const QUEUE_SIZES: &[u16] = &[QUEUE_SIZE, QUEUE_SIZE, QUEUE_SIZE];
// Number of 4K pages in a MiB.
pub const MIB_TO_4K_PAGES: u32 = 256;
// The maximum number of pages that can be received in a single descriptor.
pub const MAX_PAGES_IN_DESC: usize = 256;
// The maximum number of pages that can be compacted into ranges during process_inflate().
// Needs to be a multiple of MAX_PAGES_IN_DESC.
pub const MAX_PAGE_COMPACT_BUFFER: usize = 2048;
// The addresses given by the driver are divided by 4096.
pub const VIRTIO_BALLOON_PFN_SHIFT: u32 = 12;
// The index of the inflate queue from Balloon device queues/queues_evts vector.
pub const INFLATE_INDEX: usize = 0;
// The index of the deflate queue from Balloon device queues/queues_evts vector.
pub const DEFLATE_INDEX: usize = 1;
// The index of the stats queue from Balloon device queues/queues_evts vector.
pub const STATS_INDEX: usize = 2;
// The feature bitmap for virtio balloon.
const VIRTIO_BALLOON_F_STATS_VQ: u32 = 1; // Enable statistics.
const VIRTIO_BALLOON_F_DEFLATE_ON_OOM: u32 = 2; // Deflate balloon on OOM.
// The statistics tags.
const VIRTIO_BALLOON_S_SWAP_IN: u16 = 0;
const VIRTIO_BALLOON_S_SWAP_OUT: u16 = 1;
const VIRTIO_BALLOON_S_MAJFLT: u16 = 2;
const VIRTIO_BALLOON_S_MINFLT: u16 = 3;
const VIRTIO_BALLOON_S_MEMFREE: u16 = 4;
const VIRTIO_BALLOON_S_MEMTOT: u16 = 5;
const VIRTIO_BALLOON_S_AVAIL: u16 = 6;
const VIRTIO_BALLOON_S_CACHES: u16 = 7;
const VIRTIO_BALLOON_S_HTLB_PGALLOC: u16 = 8;
const VIRTIO_BALLOON_S_HTLB_PGFAIL: u16 = 9;
/// Errors the balloon device can produce at runtime.
#[derive(Debug)]
pub enum Error {
    /// Activation error.
    Activate(super::ActivateError),
    /// No balloon device found.
    DeviceNotFound,
    /// Device not activated yet.
    DeviceNotActive,
    /// EventFd error.
    EventFd(std::io::Error),
    /// Guest gave us bad memory addresses.
    GuestMemory(GuestMemoryError),
    /// Received error while sending an interrupt.
    InterruptError(std::io::Error),
    /// Guest gave us a malformed descriptor.
    MalformedDescriptor,
    /// Guest gave us a malformed payload.
    MalformedPayload,
    /// Error restoring the balloon device queues.
    QueueRestoreError,
    /// Received stats query when stats are disabled.
    StatisticsDisabled,
    /// Statistics cannot be enabled/disabled after activation.
    StatisticsStateChange,
    /// Amount of pages requested cannot fit in `u32`.
    TooManyPagesRequested,
    /// Error while processing the virt queues.
    Queue(super::QueueError),
    /// Error removing a memory region at inflate time.
    RemoveMemoryRegion(RemoveRegionError),
    /// Error creating the statistics timer.
    Timer(std::io::Error),
}
#[derive(Debug)]
pub enum
|
{
AddressTranslation,
MalformedRange,
MadviseFail(std::io::Error),
MmapFail(std::io::Error),
RegionNotFound,
}
pub type Result<T> = std::result::Result<T, Error>;
|
RemoveRegionError
|
identifier_name
|
mod.rs
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
pub mod device;
pub mod event_handler;
pub mod persist;
pub mod test_utils;
mod utils;
use vm_memory::GuestMemoryError;
pub use self::device::Balloon;
pub use self::device::BalloonConfig;
pub use self::device::BalloonStats;
pub use self::event_handler::*;
/// Device ID used in MMIO device identification.
/// Because Balloon is unique per-vm, this ID can be hardcoded.
pub const BALLOON_DEV_ID: &str = "balloon";
pub const CONFIG_SPACE_SIZE: usize = 8;
pub const QUEUE_SIZE: u16 = 256;
pub const NUM_QUEUES: usize = 3;
pub const QUEUE_SIZES: &[u16] = &[QUEUE_SIZE, QUEUE_SIZE, QUEUE_SIZE];
// Number of 4K pages in a MiB.
pub const MIB_TO_4K_PAGES: u32 = 256;
// The maximum number of pages that can be received in a single descriptor.
pub const MAX_PAGES_IN_DESC: usize = 256;
// The maximum number of pages that can be compacted into ranges during process_inflate().
// Needs to be a multiple of MAX_PAGES_IN_DESC.
pub const MAX_PAGE_COMPACT_BUFFER: usize = 2048;
// The addresses given by the driver are divided by 4096.
pub const VIRTIO_BALLOON_PFN_SHIFT: u32 = 12;
// The index of the inflate queue from Balloon device queues/queues_evts vector.
pub const INFLATE_INDEX: usize = 0;
// The index of the deflate queue from Balloon device queues/queues_evts vector.
pub const DEFLATE_INDEX: usize = 1;
// The index of the stats queue from Balloon device queues/queues_evts vector.
pub const STATS_INDEX: usize = 2;
// The feature bitmap for virtio balloon.
const VIRTIO_BALLOON_F_STATS_VQ: u32 = 1; // Enable statistics.
const VIRTIO_BALLOON_F_DEFLATE_ON_OOM: u32 = 2; // Deflate balloon on OOM.
|
const VIRTIO_BALLOON_S_MINFLT: u16 = 3;
const VIRTIO_BALLOON_S_MEMFREE: u16 = 4;
const VIRTIO_BALLOON_S_MEMTOT: u16 = 5;
const VIRTIO_BALLOON_S_AVAIL: u16 = 6;
const VIRTIO_BALLOON_S_CACHES: u16 = 7;
const VIRTIO_BALLOON_S_HTLB_PGALLOC: u16 = 8;
const VIRTIO_BALLOON_S_HTLB_PGFAIL: u16 = 9;
/// Errors the balloon device can produce at runtime.
#[derive(Debug)]
pub enum Error {
    /// Activation error.
    Activate(super::ActivateError),
    /// No balloon device found.
    DeviceNotFound,
    /// Device not activated yet.
    DeviceNotActive,
    /// EventFd error.
    EventFd(std::io::Error),
    /// Guest gave us bad memory addresses.
    GuestMemory(GuestMemoryError),
    /// Received error while sending an interrupt.
    InterruptError(std::io::Error),
    /// Guest gave us a malformed descriptor.
    MalformedDescriptor,
    /// Guest gave us a malformed payload.
    MalformedPayload,
    /// Error restoring the balloon device queues.
    QueueRestoreError,
    /// Received stats query when stats are disabled.
    StatisticsDisabled,
    /// Statistics cannot be enabled/disabled after activation.
    StatisticsStateChange,
    /// Amount of pages requested cannot fit in `u32`.
    TooManyPagesRequested,
    /// Error while processing the virt queues.
    Queue(super::QueueError),
    /// Error removing a memory region at inflate time.
    RemoveMemoryRegion(RemoveRegionError),
    /// Error creating the statistics timer.
    Timer(std::io::Error),
}
/// Failure modes when removing a guest memory range during balloon
/// inflation (see `Error::RemoveMemoryRegion`).
#[derive(Debug)]
pub enum RemoveRegionError {
    /// The guest address could not be translated to a host address.
    AddressTranslation,
    /// The requested range is invalid.
    MalformedRange,
    /// The `madvise` call failed.
    MadviseFail(std::io::Error),
    /// The `mmap` call failed.
    MmapFail(std::io::Error),
    /// No guest memory region contains the given address.
    RegionNotFound,
}
pub type Result<T> = std::result::Result<T, Error>;
|
// The statistics tags.
const VIRTIO_BALLOON_S_SWAP_IN: u16 = 0;
const VIRTIO_BALLOON_S_SWAP_OUT: u16 = 1;
const VIRTIO_BALLOON_S_MAJFLT: u16 = 2;
|
random_line_split
|
renderer.rs
|
use crate::papers::{DocumentSpec, Workspace};
use crate::prelude::*;
use futures::{compat::*, StreamExt};
use slog::{debug, error};
use std::process::Command;
use tokio::{fs::File, io::AsyncWrite};
use tokio_process::CommandExt;
/// The name of the downloaded template inside our Tera instance.
const TEMPLATE_NAME: &str = "template";
pub struct Renderer {
/// The manifest for the document to render.
document_spec: DocumentSpec,
/// The path to the rendered document.
output_path: std::path::PathBuf,
/// The path to the downloaded template.
template_path: std::path::PathBuf,
/// The templating engine.
tera: tera::Tera,
/// See the docs for [`Workspace`](crate::papers::Workspace).
workspace: Workspace,
}
impl Renderer {
pub fn new(config: Arc<Config>, document_spec: DocumentSpec) -> Result<Self, failure::Error> {
let workspace = Workspace::new(config.logger.clone(), config)?;
let output_path = workspace
.temp_dir_path()
.join(&document_spec.output_filename);
let template_path = workspace
.temp_dir_path()
.join(document_spec.output_filename.replace("pdf", "tex"));
Ok(Renderer {
tera: crate::utils::templating::make_tera(),
workspace,
document_spec,
output_path,
template_path,
})
}
/// Render the template with the document's variables and return the
/// resulting LaTeX source as a string, without running LaTeX.
pub async fn preview(&mut self) -> Result<String, failure::Error> {
    self.download_and_register_template().await?;
    self.tera
        .render(TEMPLATE_NAME, &self.document_spec.variables())
        .map_err(|err| format_err!("Rendering error: {}", err))
}
/// This function does the whole generation process from a
/// [`DocumentSpec`](crate::papers::DocumentSpec).
///
/// This method takes ownership because it is meant to be used to create futures to be
/// spawned in the background.
pub async fn render(mut self) -> Result<(), ()> {
debug!(
self.workspace.logger(),
"Generating PDF with document spec: {:?}.", self.document_spec
);
match self.render_inner().await {
// it worked, move on
Ok(()) => (),
// it failed -> report it
Err(err) => {
self.report_failure(err).await.ok();
}
}
self.workspace
.upload_workspace()
.await
.map_err(|err| {
error!(
self.workspace.logger(),
"Error uploading workspace.tar: {:?}.", err
)
})
.ok();
Ok(())
}
async fn render_inner(&mut self) -> Result<(), failure::Error> {
// First download the template and populate it
self.download_and_register_template().await?;
self.render_template().await?;
// Download the assets and save them in the temporary directory
self.download_assets().await?;
// Then run latex
self.run_latex().await?;
// Upload the resulting PDF and construct a presigned URL to it
let presigned_url = self
.workspace
.upload_to_s3(self.output_path.to_owned())
.await?;
// Report to the callback URL
let callback_url = self.document_spec.callback_url();
self
.workspace
.report_success(presigned_url, &callback_url)
.await?;
Ok(())
}
/// Path where the populated LaTeX source is written inside the workspace.
fn template_path(&self) -> &std::path::Path {
    &self.template_path
}
/// Downloads all assets from the document spec in the workspace in parallel. It fails if any of
/// those cannot be downloaded.
async fn download_assets(&self) -> Result<Vec<std::path::PathBuf>, failure::Error> {
    debug!(
        &self.workspace.logger(),
        "Downloading assets: {:?}.",
        self.document_spec.asset_urls().collect::<Vec<_>>()
    );
    // Push every download at once; FuturesUnordered polls them concurrently.
    let mut futures = futures::stream::FuturesUnordered::new();
    for uri in self.document_spec.asset_urls() {
        futures.push(self.workspace.download_file(uri));
    }
    let futures: Vec<Result<_, _>> = futures.collect().await;
    // Collecting Vec<Result<_>> into Result<Vec<_>> short-circuits on the
    // first failed download.
    futures.into_iter().collect()
}
/// Download and register the template in the Renderer's Tera instance.
async fn download_and_register_template(&mut self) -> Result<(), failure::Error> {
let file_path = self
.workspace
.download_file(&self.document_spec.template_url.0)
.await?;
self.tera
.add_template_file(&file_path, Some("template"))
.map_err(|err| format_err!("failed to add template: {:?}", err))?;
debug!(
self.workspace.logger(),
"Successfully downloaded the template."
);
Ok(())
}
/// Render the registered template with the document's variables and write
/// the resulting LaTeX source to `template_path` in the workspace.
async fn render_template(&self) -> Result<(), failure::Error> {
    let rendered_template = self
        .tera
        .render(TEMPLATE_NAME, &self.document_spec.variables())
        .map_err(|err| format_err!("Rendering error: {}.", err))?;
    debug!(
        self.workspace.logger(),
        "Writing template to {:?}.",
        &self.template_path()
    );
    let mut file = File::create(self.template_path().to_owned())
        .compat()
        .await?;
    // poll_fn bridges the futures-0.1 style poll_write into this async fn.
    futures01::future::poll_fn(|| file.poll_write(rendered_template.as_bytes()))
        .compat()
        .await
        .context("Could not write latex file.")?;
    debug!(
        self.workspace.logger(),
        "Template successfully written to {:?}.",
        &self.template_path()
    );
    Ok(())
}
async fn run_latex(&self) -> Result<(), failure::Error> {
debug!(
&self.workspace.logger(),
"Value of template_path: {:?}.",
self.template_path()
);
debug!(
&self.workspace.logger(),
"Rendered template exists: {:?}.",
self.template_path().exists()
);
debug!(&self.workspace.logger(), "Spawning latex.");
let latex_out = Command::new("xelatex")
.current_dir(&self.workspace.temp_dir_path())
.arg("-interaction=nonstopmode")
.arg("-file-line-error")
.arg("-shell-restricted")
.arg(self.template_path())
.output_async()
.compat()
.await
.context("Error generating PDF")?;
let stdout = String::from_utf8(latex_out.stdout)?;
if!latex_out.status.success()
|
debug!(&self.workspace.logger(), "LaTeX succeeded. Stdout:\n{}", stdout);
Ok(())
}
/// Report failure and move on.
///
/// Logs the rendering error and attempts to notify the document's callback
/// URL; a failure to deliver the report is itself only logged, never
/// propagated (hence the infallible-looking `Result<(), ()>`).
async fn report_failure(&self, error: failure::Error) -> Result<(), ()> {
    error!(
        self.workspace.logger(),
        "Error rendering document: {:?}.", error,
    );
    let callback_url = self.document_spec.callback_url();
    match self.workspace.report_failure(error, callback_url).await {
        Ok(()) => (),
        Err(err) => error!(
            self.workspace.logger(),
            "Error reporting failure to callback_url: {:?}.", err
        ),
    }
    Ok(())
}
}
|
{
return Err(format_err!("LaTeX failed. Stdout:\n{}", stdout));
}
|
conditional_block
|
renderer.rs
|
use crate::papers::{DocumentSpec, Workspace};
use crate::prelude::*;
use futures::{compat::*, StreamExt};
use slog::{debug, error};
use std::process::Command;
use tokio::{fs::File, io::AsyncWrite};
use tokio_process::CommandExt;
/// The name of the downloaded template inside our Tera instance.
const TEMPLATE_NAME: &str = "template";
pub struct Renderer {
/// The manifest for the document to render.
document_spec: DocumentSpec,
/// The path to the rendered document.
output_path: std::path::PathBuf,
/// The path to the downloaded template.
template_path: std::path::PathBuf,
/// The templating engine.
tera: tera::Tera,
/// See the docs for [`Workspace`](crate::papers::Workspace).
workspace: Workspace,
}
impl Renderer {
pub fn new(config: Arc<Config>, document_spec: DocumentSpec) -> Result<Self, failure::Error> {
let workspace = Workspace::new(config.logger.clone(), config)?;
let output_path = workspace
.temp_dir_path()
.join(&document_spec.output_filename);
let template_path = workspace
.temp_dir_path()
.join(document_spec.output_filename.replace("pdf", "tex"));
Ok(Renderer {
tera: crate::utils::templating::make_tera(),
workspace,
document_spec,
output_path,
template_path,
})
}
pub async fn preview(&mut self) -> Result<String, failure::Error> {
self.download_and_register_template().await?;
self.tera
.render(TEMPLATE_NAME, &self.document_spec.variables())
.map_err(|err| format_err!("Rendering error: {}", err))
}
/// This function does the whole generation process from a
/// [`DocumentSpec`](crate::papers::DocumentSpec).
///
/// This method takes ownership because it is meant to be used to create futures to be
/// spawned in the background.
pub async fn render(mut self) -> Result<(), ()> {
debug!(
self.workspace.logger(),
"Generating PDF with document spec: {:?}.", self.document_spec
);
match self.render_inner().await {
// it worked, move on
Ok(()) => (),
// it failed -> report it
Err(err) => {
self.report_failure(err).await.ok();
}
}
self.workspace
.upload_workspace()
.await
.map_err(|err| {
error!(
self.workspace.logger(),
"Error uploading workspace.tar: {:?}.", err
)
})
.ok();
Ok(())
}
async fn render_inner(&mut self) -> Result<(), failure::Error> {
// First download the template and populate it
self.download_and_register_template().await?;
self.render_template().await?;
// Download the assets and save them in the temporary directory
self.download_assets().await?;
// Then run latex
self.run_latex().await?;
// Upload the resulting PDF and construct a presigned URL to it
let presigned_url = self
.workspace
.upload_to_s3(self.output_path.to_owned())
.await?;
// Report to the callback URL
let callback_url = self.document_spec.callback_url();
self
.workspace
.report_success(presigned_url, &callback_url)
.await?;
Ok(())
}
fn template_path(&self) -> &std::path::Path {
&self.template_path
}
/// Downloads all assets from the document spec in the workspace in parallel. It fails if any of
/// those cannot be downloaded.
async fn download_assets(&self) -> Result<Vec<std::path::PathBuf>, failure::Error> {
debug!(
&self.workspace.logger(),
"Downloading assets: {:?}.",
self.document_spec.asset_urls().collect::<Vec<_>>()
);
let mut futures = futures::stream::FuturesUnordered::new();
for uri in self.document_spec.asset_urls() {
futures.push(self.workspace.download_file(uri));
}
let futures: Vec<Result<_, _>> = futures.collect().await;
futures.into_iter().collect()
}
/// Download and register the template in the Renderer's Tera instance.
async fn download_and_register_template(&mut self) -> Result<(), failure::Error> {
let file_path = self
.workspace
.download_file(&self.document_spec.template_url.0)
.await?;
self.tera
.add_template_file(&file_path, Some("template"))
.map_err(|err| format_err!("failed to add template: {:?}", err))?;
debug!(
self.workspace.logger(),
"Successfully downloaded the template."
);
Ok(())
}
async fn
|
(&self) -> Result<(), failure::Error> {
let rendered_template = self
.tera
.render(TEMPLATE_NAME, &self.document_spec.variables())
.map_err(|err| format_err!("Rendering error: {}.", err))?;
debug!(
self.workspace.logger(),
"Writing template to {:?}.",
&self.template_path()
);
let mut file = File::create(self.template_path().to_owned())
.compat()
.await?;
futures01::future::poll_fn(|| file.poll_write(rendered_template.as_bytes()))
.compat()
.await
.context("Could not write latex file.")?;
debug!(
self.workspace.logger(),
"Template successfully written to {:?}.",
&self.template_path()
);
Ok(())
}
/// Run `xelatex` over the rendered template inside the workspace directory.
///
/// # Errors
/// Fails if the process cannot be spawned, its stdout is not valid UTF-8,
/// or LaTeX exits with a non-zero status (stdout is included in the error).
async fn run_latex(&self) -> Result<(), failure::Error> {
    debug!(
        &self.workspace.logger(),
        "Value of template_path: {:?}.",
        self.template_path()
    );
    debug!(
        &self.workspace.logger(),
        "Rendered template exists: {:?}.",
        self.template_path().exists()
    );
    debug!(&self.workspace.logger(), "Spawning latex.");
    // -shell-restricted limits shell escapes; nonstopmode keeps xelatex from
    // waiting on interactive input when the document has errors.
    let latex_out = Command::new("xelatex")
        .current_dir(&self.workspace.temp_dir_path())
        .arg("-interaction=nonstopmode")
        .arg("-file-line-error")
        .arg("-shell-restricted")
        .arg(self.template_path())
        .output_async()
        .compat()
        .await
        .context("Error generating PDF")?;
    let stdout = String::from_utf8(latex_out.stdout)?;
    if!latex_out.status.success() {
        return Err(format_err!("LaTeX failed. Stdout:\n{}", stdout));
    }
    debug!(&self.workspace.logger(), "LaTeX succeeded. Stdout:\n{}", stdout);
    Ok(())
}
/// Report failure and move on.
async fn report_failure(&self, error: failure::Error) -> Result<(), ()> {
error!(
self.workspace.logger(),
"Error rendering document: {:?}.", error,
);
let callback_url = self.document_spec.callback_url();
match self.workspace.report_failure(error, callback_url).await {
Ok(()) => (),
Err(err) => error!(
self.workspace.logger(),
"Error reporting failure to callback_url: {:?}.", err
),
}
Ok(())
}
}
|
render_template
|
identifier_name
|
renderer.rs
|
use crate::papers::{DocumentSpec, Workspace};
use crate::prelude::*;
use futures::{compat::*, StreamExt};
use slog::{debug, error};
use std::process::Command;
use tokio::{fs::File, io::AsyncWrite};
use tokio_process::CommandExt;
/// The name of the downloaded template inside our Tera instance.
const TEMPLATE_NAME: &str = "template";
pub struct Renderer {
/// The manifest for the document to render.
document_spec: DocumentSpec,
/// The path to the rendered document.
output_path: std::path::PathBuf,
/// The path to the downloaded template.
template_path: std::path::PathBuf,
/// The templating engine.
tera: tera::Tera,
/// See the docs for [`Workspace`](crate::papers::Workspace).
workspace: Workspace,
}
impl Renderer {
pub fn new(config: Arc<Config>, document_spec: DocumentSpec) -> Result<Self, failure::Error> {
let workspace = Workspace::new(config.logger.clone(), config)?;
let output_path = workspace
.temp_dir_path()
.join(&document_spec.output_filename);
let template_path = workspace
.temp_dir_path()
.join(document_spec.output_filename.replace("pdf", "tex"));
Ok(Renderer {
tera: crate::utils::templating::make_tera(),
workspace,
document_spec,
output_path,
template_path,
})
}
pub async fn preview(&mut self) -> Result<String, failure::Error> {
self.download_and_register_template().await?;
self.tera
.render(TEMPLATE_NAME, &self.document_spec.variables())
.map_err(|err| format_err!("Rendering error: {}", err))
}
/// This function does the whole generation process from a
/// [`DocumentSpec`](crate::papers::DocumentSpec).
///
/// This method takes ownership because it is meant to be used to create futures to be
/// spawned in the background.
pub async fn render(mut self) -> Result<(), ()> {
debug!(
self.workspace.logger(),
"Generating PDF with document spec: {:?}.", self.document_spec
);
match self.render_inner().await {
// it worked, move on
Ok(()) => (),
// it failed -> report it
Err(err) => {
self.report_failure(err).await.ok();
}
}
self.workspace
.upload_workspace()
.await
.map_err(|err| {
error!(
self.workspace.logger(),
"Error uploading workspace.tar: {:?}.", err
)
})
.ok();
Ok(())
}
async fn render_inner(&mut self) -> Result<(), failure::Error> {
// First download the template and populate it
self.download_and_register_template().await?;
self.render_template().await?;
// Download the assets and save them in the temporary directory
self.download_assets().await?;
// Then run latex
self.run_latex().await?;
// Upload the resulting PDF and construct a presigned URL to it
let presigned_url = self
.workspace
.upload_to_s3(self.output_path.to_owned())
.await?;
// Report to the callback URL
let callback_url = self.document_spec.callback_url();
self
.workspace
.report_success(presigned_url, &callback_url)
.await?;
Ok(())
}
fn template_path(&self) -> &std::path::Path {
&self.template_path
}
/// Downloads all assets from the document spec in the workspace in parallel. It fails if any of
/// those cannot be downloaded.
async fn download_assets(&self) -> Result<Vec<std::path::PathBuf>, failure::Error> {
debug!(
&self.workspace.logger(),
"Downloading assets: {:?}.",
self.document_spec.asset_urls().collect::<Vec<_>>()
);
let mut futures = futures::stream::FuturesUnordered::new();
for uri in self.document_spec.asset_urls() {
futures.push(self.workspace.download_file(uri));
}
let futures: Vec<Result<_, _>> = futures.collect().await;
futures.into_iter().collect()
}
/// Download and register the template in the Renderer's Tera instance.
async fn download_and_register_template(&mut self) -> Result<(), failure::Error> {
let file_path = self
.workspace
.download_file(&self.document_spec.template_url.0)
.await?;
self.tera
.add_template_file(&file_path, Some("template"))
.map_err(|err| format_err!("failed to add template: {:?}", err))?;
debug!(
self.workspace.logger(),
"Successfully downloaded the template."
);
Ok(())
}
async fn render_template(&self) -> Result<(), failure::Error>
|
debug!(
self.workspace.logger(),
"Template successfully written to {:?}.",
&self.template_path()
);
Ok(())
}
async fn run_latex(&self) -> Result<(), failure::Error> {
debug!(
&self.workspace.logger(),
"Value of template_path: {:?}.",
self.template_path()
);
debug!(
&self.workspace.logger(),
"Rendered template exists: {:?}.",
self.template_path().exists()
);
debug!(&self.workspace.logger(), "Spawning latex.");
let latex_out = Command::new("xelatex")
.current_dir(&self.workspace.temp_dir_path())
.arg("-interaction=nonstopmode")
.arg("-file-line-error")
.arg("-shell-restricted")
.arg(self.template_path())
.output_async()
.compat()
.await
.context("Error generating PDF")?;
let stdout = String::from_utf8(latex_out.stdout)?;
if!latex_out.status.success() {
return Err(format_err!("LaTeX failed. Stdout:\n{}", stdout));
}
debug!(&self.workspace.logger(), "LaTeX succeeded. Stdout:\n{}", stdout);
Ok(())
}
/// Report failure and move on.
async fn report_failure(&self, error: failure::Error) -> Result<(), ()> {
error!(
self.workspace.logger(),
"Error rendering document: {:?}.", error,
);
let callback_url = self.document_spec.callback_url();
match self.workspace.report_failure(error, callback_url).await {
Ok(()) => (),
Err(err) => error!(
self.workspace.logger(),
"Error reporting failure to callback_url: {:?}.", err
),
}
Ok(())
}
}
|
{
let rendered_template = self
.tera
.render(TEMPLATE_NAME, &self.document_spec.variables())
.map_err(|err| format_err!("Rendering error: {}.", err))?;
debug!(
self.workspace.logger(),
"Writing template to {:?}.",
&self.template_path()
);
let mut file = File::create(self.template_path().to_owned())
.compat()
.await?;
futures01::future::poll_fn(|| file.poll_write(rendered_template.as_bytes()))
.compat()
.await
.context("Could not write latex file.")?;
|
identifier_body
|
renderer.rs
|
use crate::papers::{DocumentSpec, Workspace};
use crate::prelude::*;
use futures::{compat::*, StreamExt};
use slog::{debug, error};
use std::process::Command;
use tokio::{fs::File, io::AsyncWrite};
use tokio_process::CommandExt;
/// The name of the downloaded template inside our Tera instance.
const TEMPLATE_NAME: &str = "template";
pub struct Renderer {
/// The manifest for the document to render.
document_spec: DocumentSpec,
/// The path to the rendered document.
output_path: std::path::PathBuf,
/// The path to the downloaded template.
template_path: std::path::PathBuf,
/// The templating engine.
tera: tera::Tera,
/// See the docs for [`Workspace`](crate::papers::Workspace).
workspace: Workspace,
}
impl Renderer {
pub fn new(config: Arc<Config>, document_spec: DocumentSpec) -> Result<Self, failure::Error> {
let workspace = Workspace::new(config.logger.clone(), config)?;
let output_path = workspace
.temp_dir_path()
.join(&document_spec.output_filename);
let template_path = workspace
.temp_dir_path()
.join(document_spec.output_filename.replace("pdf", "tex"));
Ok(Renderer {
tera: crate::utils::templating::make_tera(),
workspace,
document_spec,
output_path,
template_path,
})
}
pub async fn preview(&mut self) -> Result<String, failure::Error> {
self.download_and_register_template().await?;
self.tera
.render(TEMPLATE_NAME, &self.document_spec.variables())
.map_err(|err| format_err!("Rendering error: {}", err))
}
/// This function does the whole generation process from a
/// [`DocumentSpec`](crate::papers::DocumentSpec).
///
/// This method takes ownership because it is meant to be used to create futures to be
/// spawned in the background.
pub async fn render(mut self) -> Result<(), ()> {
debug!(
self.workspace.logger(),
"Generating PDF with document spec: {:?}.", self.document_spec
);
match self.render_inner().await {
// it worked, move on
Ok(()) => (),
// it failed -> report it
Err(err) => {
self.report_failure(err).await.ok();
}
}
self.workspace
.upload_workspace()
.await
.map_err(|err| {
error!(
self.workspace.logger(),
"Error uploading workspace.tar: {:?}.", err
)
})
.ok();
Ok(())
}
async fn render_inner(&mut self) -> Result<(), failure::Error> {
// First download the template and populate it
self.download_and_register_template().await?;
self.render_template().await?;
// Download the assets and save them in the temporary directory
self.download_assets().await?;
// Then run latex
self.run_latex().await?;
// Upload the resulting PDF and construct a presigned URL to it
let presigned_url = self
.workspace
.upload_to_s3(self.output_path.to_owned())
.await?;
// Report to the callback URL
let callback_url = self.document_spec.callback_url();
self
.workspace
.report_success(presigned_url, &callback_url)
.await?;
Ok(())
}
fn template_path(&self) -> &std::path::Path {
&self.template_path
}
/// Downloads all assets from the document spec in the workspace in parallel. It fails if any of
/// those cannot be downloaded.
async fn download_assets(&self) -> Result<Vec<std::path::PathBuf>, failure::Error> {
debug!(
&self.workspace.logger(),
"Downloading assets: {:?}.",
self.document_spec.asset_urls().collect::<Vec<_>>()
);
let mut futures = futures::stream::FuturesUnordered::new();
for uri in self.document_spec.asset_urls() {
futures.push(self.workspace.download_file(uri));
}
let futures: Vec<Result<_, _>> = futures.collect().await;
futures.into_iter().collect()
}
/// Download and register the template in the Renderer's Tera instance.
async fn download_and_register_template(&mut self) -> Result<(), failure::Error> {
let file_path = self
.workspace
.download_file(&self.document_spec.template_url.0)
.await?;
self.tera
.add_template_file(&file_path, Some("template"))
.map_err(|err| format_err!("failed to add template: {:?}", err))?;
debug!(
self.workspace.logger(),
"Successfully downloaded the template."
);
Ok(())
}
async fn render_template(&self) -> Result<(), failure::Error> {
let rendered_template = self
.tera
.render(TEMPLATE_NAME, &self.document_spec.variables())
.map_err(|err| format_err!("Rendering error: {}.", err))?;
debug!(
self.workspace.logger(),
"Writing template to {:?}.",
&self.template_path()
);
let mut file = File::create(self.template_path().to_owned())
.compat()
.await?;
futures01::future::poll_fn(|| file.poll_write(rendered_template.as_bytes()))
.compat()
.await
.context("Could not write latex file.")?;
debug!(
self.workspace.logger(),
"Template successfully written to {:?}.",
&self.template_path()
);
Ok(())
}
|
"Value of template_path: {:?}.",
self.template_path()
);
debug!(
&self.workspace.logger(),
"Rendered template exists: {:?}.",
self.template_path().exists()
);
debug!(&self.workspace.logger(), "Spawning latex.");
let latex_out = Command::new("xelatex")
.current_dir(&self.workspace.temp_dir_path())
.arg("-interaction=nonstopmode")
.arg("-file-line-error")
.arg("-shell-restricted")
.arg(self.template_path())
.output_async()
.compat()
.await
.context("Error generating PDF")?;
let stdout = String::from_utf8(latex_out.stdout)?;
if!latex_out.status.success() {
return Err(format_err!("LaTeX failed. Stdout:\n{}", stdout));
}
debug!(&self.workspace.logger(), "LaTeX succeeded. Stdout:\n{}", stdout);
Ok(())
}
/// Report failure and move on.
async fn report_failure(&self, error: failure::Error) -> Result<(), ()> {
error!(
self.workspace.logger(),
"Error rendering document: {:?}.", error,
);
let callback_url = self.document_spec.callback_url();
match self.workspace.report_failure(error, callback_url).await {
Ok(()) => (),
Err(err) => error!(
self.workspace.logger(),
"Error reporting failure to callback_url: {:?}.", err
),
}
Ok(())
}
}
|
async fn run_latex(&self) -> Result<(), failure::Error> {
debug!(
&self.workspace.logger(),
|
random_line_split
|
main.rs
|
#![feature(slicing_syntax)]
#![feature(box_syntax)]
extern crate getopts;
#[macro_use]
extern crate linked_list;
use std::io;
use std::mem;
use std::iter::repeat;
use lib::{
SEle,
Env,
LamOrFn,
ScmAlert,
ScmAlertMode,
scheme_alert,
scheme_stdlib};
use lib::scm_macro::PrecompileEnv;
mod input;
mod lib;
mod parse;
fn to_strings(slc: &[&str]) -> Vec<String>
|
/// How many more closing parens there are than opening parens
fn close_paren_surplus(ts: &[parse::Token]) -> i32 {
use parse::Token::{OpenParen, CloseParen};
ts.iter().fold(0i32, |acc, t| match *t {
OpenParen => acc-1,
CloseParen => acc+1,
_ => acc })
}
fn interactive_shell(env: &mut Env, macro_env: &mut PrecompileEnv) {
print!("scheme-rs interactive shell. Copyright (C) 2014 Johan Johansson\n\
This program is free software released under the GPLv3 license.\n\n\
>> ");
let mut tokenizer = parse::Tokenizer::new();
let mut token_buf = Vec::new();
let mut rparen_surplus = 0;
let mut line_n = 1;
for mut line in io::stdin().lock().lines().map(|l|
tokenizer.tokenize_source(l.unwrap().as_slice()))
{
let tokens = if let Some((_, tokens)) = line.pop() {
tokens
} else { continue };
rparen_surplus += close_paren_surplus(tokens.as_slice());
if rparen_surplus < 0 {
token_buf.push((line_n, tokens));
print!(">> {}", repeat(' ').take((rparen_surplus * -2) as usize)
.collect::<String>());
line_n += 1;
} else if rparen_surplus > 0 {
scheme_alert(ScmAlert::Unexp(")"), &ScmAlertMode::Warn);
token_buf.clear();
line_n = 1;
rparen_surplus = 0;
print!(">> ");
} else {
token_buf.push((line_n, tokens));
{
let parsed_and_expanded = parse::parse_token_lines(
mem::replace(&mut token_buf, Vec::with_capacity(3)))
.into_iter().map(|e| macro_env.expand(e)).collect();
print!("{}\n>> ", env.eval_sequence(parsed_and_expanded));
}
line_n = 1;
rparen_surplus = 0;
}
}
}
fn main(){
let (internal_std_procs, internal_std_vars) = scheme_stdlib::standard_library();
let mut vars = Vec::with_capacity(internal_std_vars.len());
for (name, val) in internal_std_vars.into_iter() {
vars.push((name.to_string(), Some(val)));
}
for (name, func) in internal_std_procs.into_iter() {
vars.push((name.to_string(), Some(SEle::SProc(box LamOrFn::Fn(func)))));
}
let mut tokenizer = parse::Tokenizer::new();
let mut macro_env = PrecompileEnv::new();
let mut env = Env::new(vars);
let stdlib_src = input::read_stdlib();
let tokenized = tokenizer.tokenize_source(stdlib_src.as_slice());
let parsed = parse::parse_token_lines(tokenized);
env.eval_sequence(parsed.into_iter().map(|e| macro_env.expand(e)).collect());
if let Some(input) = input::get_input() {
let begin_wrapped = format!("(begin {})", input);
let tokenized = tokenizer.tokenize_source(begin_wrapped.as_slice());
// debug
// for &(n, ref t) in tokenized.iter() {
// println!("{}: {:?}", n, t);
// }
let mut parsed = parse::parse_token_lines(tokenized);
if parsed.len()!= 1 {
panic!("Parsed source is invalid")
}
env.eval(macro_env.expand(parsed.pop_head().unwrap()));
} else {
env.make_lenient();
interactive_shell(&mut env, &mut macro_env);
}
}
|
{
slc.iter().map(|s| s.to_string()).collect()
}
|
identifier_body
|
main.rs
|
#![feature(slicing_syntax)]
#![feature(box_syntax)]
extern crate getopts;
#[macro_use]
extern crate linked_list;
use std::io;
use std::mem;
use std::iter::repeat;
use lib::{
SEle,
Env,
LamOrFn,
ScmAlert,
ScmAlertMode,
scheme_alert,
scheme_stdlib};
use lib::scm_macro::PrecompileEnv;
mod input;
mod lib;
mod parse;
fn to_strings(slc: &[&str]) -> Vec<String> {
slc.iter().map(|s| s.to_string()).collect()
}
/// How many more closing parens there are than opening parens
fn
|
(ts: &[parse::Token]) -> i32 {
use parse::Token::{OpenParen, CloseParen};
ts.iter().fold(0i32, |acc, t| match *t {
OpenParen => acc-1,
CloseParen => acc+1,
_ => acc })
}
fn interactive_shell(env: &mut Env, macro_env: &mut PrecompileEnv) {
print!("scheme-rs interactive shell. Copyright (C) 2014 Johan Johansson\n\
This program is free software released under the GPLv3 license.\n\n\
>> ");
let mut tokenizer = parse::Tokenizer::new();
let mut token_buf = Vec::new();
let mut rparen_surplus = 0;
let mut line_n = 1;
for mut line in io::stdin().lock().lines().map(|l|
tokenizer.tokenize_source(l.unwrap().as_slice()))
{
let tokens = if let Some((_, tokens)) = line.pop() {
tokens
} else { continue };
rparen_surplus += close_paren_surplus(tokens.as_slice());
if rparen_surplus < 0 {
token_buf.push((line_n, tokens));
print!(">> {}", repeat(' ').take((rparen_surplus * -2) as usize)
.collect::<String>());
line_n += 1;
} else if rparen_surplus > 0 {
scheme_alert(ScmAlert::Unexp(")"), &ScmAlertMode::Warn);
token_buf.clear();
line_n = 1;
rparen_surplus = 0;
print!(">> ");
} else {
token_buf.push((line_n, tokens));
{
let parsed_and_expanded = parse::parse_token_lines(
mem::replace(&mut token_buf, Vec::with_capacity(3)))
.into_iter().map(|e| macro_env.expand(e)).collect();
print!("{}\n>> ", env.eval_sequence(parsed_and_expanded));
}
line_n = 1;
rparen_surplus = 0;
}
}
}
fn main(){
let (internal_std_procs, internal_std_vars) = scheme_stdlib::standard_library();
let mut vars = Vec::with_capacity(internal_std_vars.len());
for (name, val) in internal_std_vars.into_iter() {
vars.push((name.to_string(), Some(val)));
}
for (name, func) in internal_std_procs.into_iter() {
vars.push((name.to_string(), Some(SEle::SProc(box LamOrFn::Fn(func)))));
}
let mut tokenizer = parse::Tokenizer::new();
let mut macro_env = PrecompileEnv::new();
let mut env = Env::new(vars);
let stdlib_src = input::read_stdlib();
let tokenized = tokenizer.tokenize_source(stdlib_src.as_slice());
let parsed = parse::parse_token_lines(tokenized);
env.eval_sequence(parsed.into_iter().map(|e| macro_env.expand(e)).collect());
if let Some(input) = input::get_input() {
let begin_wrapped = format!("(begin {})", input);
let tokenized = tokenizer.tokenize_source(begin_wrapped.as_slice());
// debug
// for &(n, ref t) in tokenized.iter() {
// println!("{}: {:?}", n, t);
// }
let mut parsed = parse::parse_token_lines(tokenized);
if parsed.len()!= 1 {
panic!("Parsed source is invalid")
}
env.eval(macro_env.expand(parsed.pop_head().unwrap()));
} else {
env.make_lenient();
interactive_shell(&mut env, &mut macro_env);
}
}
|
close_paren_surplus
|
identifier_name
|
main.rs
|
#![feature(slicing_syntax)]
#![feature(box_syntax)]
extern crate getopts;
#[macro_use]
extern crate linked_list;
use std::io;
use std::mem;
use std::iter::repeat;
use lib::{
SEle,
Env,
LamOrFn,
ScmAlert,
ScmAlertMode,
scheme_alert,
scheme_stdlib};
use lib::scm_macro::PrecompileEnv;
mod input;
mod lib;
mod parse;
fn to_strings(slc: &[&str]) -> Vec<String> {
|
/// How many more closing parens there are than opening parens
fn close_paren_surplus(ts: &[parse::Token]) -> i32 {
use parse::Token::{OpenParen, CloseParen};
ts.iter().fold(0i32, |acc, t| match *t {
OpenParen => acc-1,
CloseParen => acc+1,
_ => acc })
}
fn interactive_shell(env: &mut Env, macro_env: &mut PrecompileEnv) {
print!("scheme-rs interactive shell. Copyright (C) 2014 Johan Johansson\n\
This program is free software released under the GPLv3 license.\n\n\
>> ");
let mut tokenizer = parse::Tokenizer::new();
let mut token_buf = Vec::new();
let mut rparen_surplus = 0;
let mut line_n = 1;
for mut line in io::stdin().lock().lines().map(|l|
tokenizer.tokenize_source(l.unwrap().as_slice()))
{
let tokens = if let Some((_, tokens)) = line.pop() {
tokens
} else { continue };
rparen_surplus += close_paren_surplus(tokens.as_slice());
if rparen_surplus < 0 {
token_buf.push((line_n, tokens));
print!(">> {}", repeat(' ').take((rparen_surplus * -2) as usize)
.collect::<String>());
line_n += 1;
} else if rparen_surplus > 0 {
scheme_alert(ScmAlert::Unexp(")"), &ScmAlertMode::Warn);
token_buf.clear();
line_n = 1;
rparen_surplus = 0;
print!(">> ");
} else {
token_buf.push((line_n, tokens));
{
let parsed_and_expanded = parse::parse_token_lines(
mem::replace(&mut token_buf, Vec::with_capacity(3)))
.into_iter().map(|e| macro_env.expand(e)).collect();
print!("{}\n>> ", env.eval_sequence(parsed_and_expanded));
}
line_n = 1;
rparen_surplus = 0;
}
}
}
fn main(){
let (internal_std_procs, internal_std_vars) = scheme_stdlib::standard_library();
let mut vars = Vec::with_capacity(internal_std_vars.len());
for (name, val) in internal_std_vars.into_iter() {
vars.push((name.to_string(), Some(val)));
}
for (name, func) in internal_std_procs.into_iter() {
vars.push((name.to_string(), Some(SEle::SProc(box LamOrFn::Fn(func)))));
}
let mut tokenizer = parse::Tokenizer::new();
let mut macro_env = PrecompileEnv::new();
let mut env = Env::new(vars);
let stdlib_src = input::read_stdlib();
let tokenized = tokenizer.tokenize_source(stdlib_src.as_slice());
let parsed = parse::parse_token_lines(tokenized);
env.eval_sequence(parsed.into_iter().map(|e| macro_env.expand(e)).collect());
if let Some(input) = input::get_input() {
let begin_wrapped = format!("(begin {})", input);
let tokenized = tokenizer.tokenize_source(begin_wrapped.as_slice());
// debug
// for &(n, ref t) in tokenized.iter() {
// println!("{}: {:?}", n, t);
// }
let mut parsed = parse::parse_token_lines(tokenized);
if parsed.len()!= 1 {
panic!("Parsed source is invalid")
}
env.eval(macro_env.expand(parsed.pop_head().unwrap()));
} else {
env.make_lenient();
interactive_shell(&mut env, &mut macro_env);
}
}
|
slc.iter().map(|s| s.to_string()).collect()
}
|
random_line_split
|
main.rs
|
#![feature(slicing_syntax)]
#![feature(box_syntax)]
extern crate getopts;
#[macro_use]
extern crate linked_list;
use std::io;
use std::mem;
use std::iter::repeat;
use lib::{
SEle,
Env,
LamOrFn,
ScmAlert,
ScmAlertMode,
scheme_alert,
scheme_stdlib};
use lib::scm_macro::PrecompileEnv;
mod input;
mod lib;
mod parse;
fn to_strings(slc: &[&str]) -> Vec<String> {
slc.iter().map(|s| s.to_string()).collect()
}
/// How many more closing parens there are than opening parens
fn close_paren_surplus(ts: &[parse::Token]) -> i32 {
use parse::Token::{OpenParen, CloseParen};
ts.iter().fold(0i32, |acc, t| match *t {
OpenParen => acc-1,
CloseParen => acc+1,
_ => acc })
}
fn interactive_shell(env: &mut Env, macro_env: &mut PrecompileEnv) {
print!("scheme-rs interactive shell. Copyright (C) 2014 Johan Johansson\n\
This program is free software released under the GPLv3 license.\n\n\
>> ");
let mut tokenizer = parse::Tokenizer::new();
let mut token_buf = Vec::new();
let mut rparen_surplus = 0;
let mut line_n = 1;
for mut line in io::stdin().lock().lines().map(|l|
tokenizer.tokenize_source(l.unwrap().as_slice()))
{
let tokens = if let Some((_, tokens)) = line.pop() {
tokens
} else { continue };
rparen_surplus += close_paren_surplus(tokens.as_slice());
if rparen_surplus < 0 {
token_buf.push((line_n, tokens));
print!(">> {}", repeat(' ').take((rparen_surplus * -2) as usize)
.collect::<String>());
line_n += 1;
} else if rparen_surplus > 0
|
else {
token_buf.push((line_n, tokens));
{
let parsed_and_expanded = parse::parse_token_lines(
mem::replace(&mut token_buf, Vec::with_capacity(3)))
.into_iter().map(|e| macro_env.expand(e)).collect();
print!("{}\n>> ", env.eval_sequence(parsed_and_expanded));
}
line_n = 1;
rparen_surplus = 0;
}
}
}
fn main(){
let (internal_std_procs, internal_std_vars) = scheme_stdlib::standard_library();
let mut vars = Vec::with_capacity(internal_std_vars.len());
for (name, val) in internal_std_vars.into_iter() {
vars.push((name.to_string(), Some(val)));
}
for (name, func) in internal_std_procs.into_iter() {
vars.push((name.to_string(), Some(SEle::SProc(box LamOrFn::Fn(func)))));
}
let mut tokenizer = parse::Tokenizer::new();
let mut macro_env = PrecompileEnv::new();
let mut env = Env::new(vars);
let stdlib_src = input::read_stdlib();
let tokenized = tokenizer.tokenize_source(stdlib_src.as_slice());
let parsed = parse::parse_token_lines(tokenized);
env.eval_sequence(parsed.into_iter().map(|e| macro_env.expand(e)).collect());
if let Some(input) = input::get_input() {
let begin_wrapped = format!("(begin {})", input);
let tokenized = tokenizer.tokenize_source(begin_wrapped.as_slice());
// debug
// for &(n, ref t) in tokenized.iter() {
// println!("{}: {:?}", n, t);
// }
let mut parsed = parse::parse_token_lines(tokenized);
if parsed.len()!= 1 {
panic!("Parsed source is invalid")
}
env.eval(macro_env.expand(parsed.pop_head().unwrap()));
} else {
env.make_lenient();
interactive_shell(&mut env, &mut macro_env);
}
}
|
{
scheme_alert(ScmAlert::Unexp(")"), &ScmAlertMode::Warn);
token_buf.clear();
line_n = 1;
rparen_surplus = 0;
print!(">> ");
}
|
conditional_block
|
documenttype.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DocumentTypeBinding;
use dom::bindings::codegen::Bindings::DocumentTypeBinding::DocumentTypeMethods;
use dom::bindings::codegen::InheritTypes::{DocumentTypeDerived, NodeCast};
use dom::bindings::js::{JSRef, Temporary};
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::node::{Node, NodeHelpers, NodeTypeId};
use util::str::DOMString;
use std::borrow::ToOwned;
/// The `DOCTYPE` tag.
#[dom_struct]
pub struct DocumentType {
node: Node,
name: DOMString,
public_id: DOMString,
system_id: DOMString,
}
impl DocumentTypeDerived for EventTarget {
fn is_documenttype(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::DocumentType)
}
}
impl DocumentType {
fn new_inherited(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> DocumentType {
DocumentType {
node: Node::new_inherited(NodeTypeId::DocumentType, document),
name: name,
public_id: public_id.unwrap_or("".to_owned()),
system_id: system_id.unwrap_or("".to_owned())
}
}
#[allow(unrooted_must_root)]
pub fn new(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> Temporary<DocumentType> {
let documenttype = DocumentType::new_inherited(name,
public_id,
system_id,
document);
Node::reflect_node(box documenttype, document, DocumentTypeBinding::Wrap)
}
#[inline]
pub fn name<'a>(&'a self) -> &'a DOMString {
&self.name
}
#[inline]
pub fn public_id<'a>(&'a self) -> &'a DOMString {
&self.public_id
}
#[inline]
pub fn system_id<'a>(&'a self) -> &'a DOMString {
&self.system_id
}
}
impl<'a> DocumentTypeMethods for JSRef<'a, DocumentType> {
fn Name(self) -> DOMString {
self.name.clone()
}
fn PublicId(self) -> DOMString
|
fn SystemId(self) -> DOMString {
self.system_id.clone()
}
// http://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(self) {
let node: JSRef<Node> = NodeCast::from_ref(self);
node.remove_self();
}
}
|
{
self.public_id.clone()
}
|
identifier_body
|
documenttype.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DocumentTypeBinding;
use dom::bindings::codegen::Bindings::DocumentTypeBinding::DocumentTypeMethods;
use dom::bindings::codegen::InheritTypes::{DocumentTypeDerived, NodeCast};
use dom::bindings::js::{JSRef, Temporary};
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::node::{Node, NodeHelpers, NodeTypeId};
use util::str::DOMString;
use std::borrow::ToOwned;
/// The `DOCTYPE` tag.
#[dom_struct]
pub struct DocumentType {
node: Node,
name: DOMString,
public_id: DOMString,
system_id: DOMString,
}
impl DocumentTypeDerived for EventTarget {
fn
|
(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::DocumentType)
}
}
impl DocumentType {
fn new_inherited(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> DocumentType {
DocumentType {
node: Node::new_inherited(NodeTypeId::DocumentType, document),
name: name,
public_id: public_id.unwrap_or("".to_owned()),
system_id: system_id.unwrap_or("".to_owned())
}
}
#[allow(unrooted_must_root)]
pub fn new(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> Temporary<DocumentType> {
let documenttype = DocumentType::new_inherited(name,
public_id,
system_id,
document);
Node::reflect_node(box documenttype, document, DocumentTypeBinding::Wrap)
}
#[inline]
pub fn name<'a>(&'a self) -> &'a DOMString {
&self.name
}
#[inline]
pub fn public_id<'a>(&'a self) -> &'a DOMString {
&self.public_id
}
#[inline]
pub fn system_id<'a>(&'a self) -> &'a DOMString {
&self.system_id
}
}
impl<'a> DocumentTypeMethods for JSRef<'a, DocumentType> {
fn Name(self) -> DOMString {
self.name.clone()
}
fn PublicId(self) -> DOMString {
self.public_id.clone()
}
fn SystemId(self) -> DOMString {
self.system_id.clone()
}
// http://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(self) {
let node: JSRef<Node> = NodeCast::from_ref(self);
node.remove_self();
}
}
|
is_documenttype
|
identifier_name
|
documenttype.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DocumentTypeBinding;
use dom::bindings::codegen::Bindings::DocumentTypeBinding::DocumentTypeMethods;
use dom::bindings::codegen::InheritTypes::{DocumentTypeDerived, NodeCast};
use dom::bindings::js::{JSRef, Temporary};
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::node::{Node, NodeHelpers, NodeTypeId};
use util::str::DOMString;
use std::borrow::ToOwned;
/// The `DOCTYPE` tag.
#[dom_struct]
pub struct DocumentType {
node: Node,
name: DOMString,
public_id: DOMString,
system_id: DOMString,
}
impl DocumentTypeDerived for EventTarget {
fn is_documenttype(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::DocumentType)
}
}
impl DocumentType {
fn new_inherited(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> DocumentType {
DocumentType {
node: Node::new_inherited(NodeTypeId::DocumentType, document),
name: name,
public_id: public_id.unwrap_or("".to_owned()),
system_id: system_id.unwrap_or("".to_owned())
}
}
#[allow(unrooted_must_root)]
pub fn new(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> Temporary<DocumentType> {
let documenttype = DocumentType::new_inherited(name,
public_id,
system_id,
document);
Node::reflect_node(box documenttype, document, DocumentTypeBinding::Wrap)
}
#[inline]
pub fn name<'a>(&'a self) -> &'a DOMString {
&self.name
}
#[inline]
pub fn public_id<'a>(&'a self) -> &'a DOMString {
&self.public_id
}
#[inline]
pub fn system_id<'a>(&'a self) -> &'a DOMString {
&self.system_id
}
}
impl<'a> DocumentTypeMethods for JSRef<'a, DocumentType> {
fn Name(self) -> DOMString {
|
fn PublicId(self) -> DOMString {
self.public_id.clone()
}
fn SystemId(self) -> DOMString {
self.system_id.clone()
}
// http://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(self) {
let node: JSRef<Node> = NodeCast::from_ref(self);
node.remove_self();
}
}
|
self.name.clone()
}
|
random_line_split
|
worker.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::WorkerBinding;
use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods;
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::error::{Fallible, Syntax};
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::{EventTarget, EventTargetHelpers, WorkerTypeId};
use dom::messageevent::MessageEvent;
use script_task::{ScriptChan, DOMMessage};
use servo_util::str::DOMString;
use js::glue::JS_STRUCTURED_CLONE_VERSION;
use js::jsapi::{JSContext, JS_AddObjectRoot, JS_RemoveObjectRoot};
use js::jsapi::{JS_ReadStructuredClone, JS_WriteStructuredClone};
use js::jsval::{JSVal, UndefinedValue};
use url::UrlParser;
use libc::{c_void, size_t};
use std::cell::Cell;
use std::ptr;
pub struct TrustedWorkerAddress(pub *const c_void);
#[deriving(Encodable)]
#[must_root]
pub struct Worker {
eventtarget: EventTarget,
refcount: Cell<uint>,
global: GlobalField,
/// Sender to the Receiver associated with the DedicatedWorkerGlobalScope
/// this Worker created.
sender: ScriptChan,
}
impl Worker {
pub fn new_inherited(global: &GlobalRef, sender: ScriptChan) -> Worker {
Worker {
eventtarget: EventTarget::new_inherited(WorkerTypeId),
refcount: Cell::new(0),
global: GlobalField::from_rooted(global),
sender: sender,
}
}
pub fn new(global: &GlobalRef, sender: ScriptChan) -> Temporary<Worker> {
reflect_dom_object(box Worker::new_inherited(global, sender),
global,
WorkerBinding::Wrap)
}
// http://www.whatwg.org/html/#dom-worker
pub fn Constructor(global: &GlobalRef, scriptURL: DOMString) -> Fallible<Temporary<Worker>> {
// Step 2-4.
let worker_url = match UrlParser::new().base_url(&global.get_url())
.parse(scriptURL.as_slice()) {
Ok(url) => url,
Err(_) => return Err(Syntax),
};
let resource_task = global.resource_task();
let (receiver, sender) = ScriptChan::new();
let worker = Worker::new(global, sender.clone()).root();
let worker_ref = worker.addref();
DedicatedWorkerGlobalScope::run_worker_scope(
worker_url, worker_ref, resource_task, global.script_chan().clone(),
sender, receiver);
Ok(Temporary::from_rooted(*worker))
}
pub fn handle_message(address: TrustedWorkerAddress,
data: *mut u64, nbytes: size_t) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
let global = worker.global.root();
let mut message = UndefinedValue();
unsafe {
assert!(JS_ReadStructuredClone(
global.root_ref().get_cx(), data as *const u64, nbytes,
JS_STRUCTURED_CLONE_VERSION, &mut message,
ptr::null(), ptr::null_mut())!= 0);
}
let target: JSRef<EventTarget> = EventTargetCast::from_ref(*worker);
MessageEvent::dispatch_jsval(target, &global.root_ref(), message);
}
}
impl Worker {
// Creates a trusted address to the object, and roots it. Always pair this with a release()
pub fn addref(&self) -> TrustedWorkerAddress
|
pub fn release(&self) {
let refcount = self.refcount.get();
assert!(refcount > 0)
self.refcount.set(refcount - 1);
if refcount == 1 {
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_RemoveObjectRoot(cx, self.reflector().rootable());
}
}
}
pub fn handle_release(address: TrustedWorkerAddress) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
worker.release();
}
}
impl<'a> WorkerMethods for JSRef<'a, Worker> {
fn PostMessage(self, cx: *mut JSContext, message: JSVal) {
let mut data = ptr::null_mut();
let mut nbytes = 0;
unsafe {
assert!(JS_WriteStructuredClone(cx, message, &mut data, &mut nbytes,
ptr::null(), ptr::null_mut())!= 0);
}
self.addref();
let ScriptChan(ref sender) = self.sender;
sender.send(DOMMessage(data, nbytes));
}
fn GetOnmessage(self) -> Option<EventHandlerNonNull> {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.get_event_handler_common("message")
}
fn SetOnmessage(self, listener: Option<EventHandlerNonNull>) {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.set_event_handler_common("message", listener)
}
}
impl Reflectable for Worker {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.eventtarget.reflector()
}
}
|
{
let refcount = self.refcount.get();
if refcount == 0 {
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_AddObjectRoot(cx, self.reflector().rootable());
}
}
self.refcount.set(refcount + 1);
TrustedWorkerAddress(self as *const Worker as *const c_void)
}
|
identifier_body
|
worker.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::WorkerBinding;
use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods;
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::error::{Fallible, Syntax};
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::{EventTarget, EventTargetHelpers, WorkerTypeId};
use dom::messageevent::MessageEvent;
use script_task::{ScriptChan, DOMMessage};
use servo_util::str::DOMString;
use js::glue::JS_STRUCTURED_CLONE_VERSION;
use js::jsapi::{JSContext, JS_AddObjectRoot, JS_RemoveObjectRoot};
use js::jsapi::{JS_ReadStructuredClone, JS_WriteStructuredClone};
use js::jsval::{JSVal, UndefinedValue};
use url::UrlParser;
use libc::{c_void, size_t};
use std::cell::Cell;
use std::ptr;
pub struct TrustedWorkerAddress(pub *const c_void);
#[deriving(Encodable)]
#[must_root]
pub struct Worker {
eventtarget: EventTarget,
refcount: Cell<uint>,
global: GlobalField,
/// Sender to the Receiver associated with the DedicatedWorkerGlobalScope
/// this Worker created.
sender: ScriptChan,
}
impl Worker {
pub fn new_inherited(global: &GlobalRef, sender: ScriptChan) -> Worker {
Worker {
eventtarget: EventTarget::new_inherited(WorkerTypeId),
refcount: Cell::new(0),
global: GlobalField::from_rooted(global),
sender: sender,
}
}
pub fn new(global: &GlobalRef, sender: ScriptChan) -> Temporary<Worker> {
reflect_dom_object(box Worker::new_inherited(global, sender),
global,
WorkerBinding::Wrap)
}
// http://www.whatwg.org/html/#dom-worker
pub fn Constructor(global: &GlobalRef, scriptURL: DOMString) -> Fallible<Temporary<Worker>> {
// Step 2-4.
let worker_url = match UrlParser::new().base_url(&global.get_url())
.parse(scriptURL.as_slice()) {
Ok(url) => url,
Err(_) => return Err(Syntax),
};
let resource_task = global.resource_task();
let (receiver, sender) = ScriptChan::new();
let worker = Worker::new(global, sender.clone()).root();
let worker_ref = worker.addref();
DedicatedWorkerGlobalScope::run_worker_scope(
worker_url, worker_ref, resource_task, global.script_chan().clone(),
sender, receiver);
Ok(Temporary::from_rooted(*worker))
}
pub fn handle_message(address: TrustedWorkerAddress,
data: *mut u64, nbytes: size_t) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
let global = worker.global.root();
let mut message = UndefinedValue();
unsafe {
assert!(JS_ReadStructuredClone(
global.root_ref().get_cx(), data as *const u64, nbytes,
JS_STRUCTURED_CLONE_VERSION, &mut message,
ptr::null(), ptr::null_mut())!= 0);
}
let target: JSRef<EventTarget> = EventTargetCast::from_ref(*worker);
MessageEvent::dispatch_jsval(target, &global.root_ref(), message);
}
}
impl Worker {
// Creates a trusted address to the object, and roots it. Always pair this with a release()
pub fn addref(&self) -> TrustedWorkerAddress {
let refcount = self.refcount.get();
if refcount == 0 {
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_AddObjectRoot(cx, self.reflector().rootable());
}
}
self.refcount.set(refcount + 1);
TrustedWorkerAddress(self as *const Worker as *const c_void)
}
pub fn release(&self) {
let refcount = self.refcount.get();
assert!(refcount > 0)
self.refcount.set(refcount - 1);
if refcount == 1 {
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_RemoveObjectRoot(cx, self.reflector().rootable());
}
}
}
pub fn handle_release(address: TrustedWorkerAddress) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
worker.release();
}
}
impl<'a> WorkerMethods for JSRef<'a, Worker> {
fn PostMessage(self, cx: *mut JSContext, message: JSVal) {
let mut data = ptr::null_mut();
let mut nbytes = 0;
unsafe {
assert!(JS_WriteStructuredClone(cx, message, &mut data, &mut nbytes,
ptr::null(), ptr::null_mut())!= 0);
}
self.addref();
let ScriptChan(ref sender) = self.sender;
sender.send(DOMMessage(data, nbytes));
}
fn GetOnmessage(self) -> Option<EventHandlerNonNull> {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.get_event_handler_common("message")
|
}
fn SetOnmessage(self, listener: Option<EventHandlerNonNull>) {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.set_event_handler_common("message", listener)
}
}
impl Reflectable for Worker {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.eventtarget.reflector()
}
}
|
random_line_split
|
|
worker.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::WorkerBinding;
use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods;
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::error::{Fallible, Syntax};
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::{EventTarget, EventTargetHelpers, WorkerTypeId};
use dom::messageevent::MessageEvent;
use script_task::{ScriptChan, DOMMessage};
use servo_util::str::DOMString;
use js::glue::JS_STRUCTURED_CLONE_VERSION;
use js::jsapi::{JSContext, JS_AddObjectRoot, JS_RemoveObjectRoot};
use js::jsapi::{JS_ReadStructuredClone, JS_WriteStructuredClone};
use js::jsval::{JSVal, UndefinedValue};
use url::UrlParser;
use libc::{c_void, size_t};
use std::cell::Cell;
use std::ptr;
pub struct TrustedWorkerAddress(pub *const c_void);
#[deriving(Encodable)]
#[must_root]
pub struct Worker {
eventtarget: EventTarget,
refcount: Cell<uint>,
global: GlobalField,
/// Sender to the Receiver associated with the DedicatedWorkerGlobalScope
/// this Worker created.
sender: ScriptChan,
}
impl Worker {
pub fn new_inherited(global: &GlobalRef, sender: ScriptChan) -> Worker {
Worker {
eventtarget: EventTarget::new_inherited(WorkerTypeId),
refcount: Cell::new(0),
global: GlobalField::from_rooted(global),
sender: sender,
}
}
pub fn new(global: &GlobalRef, sender: ScriptChan) -> Temporary<Worker> {
reflect_dom_object(box Worker::new_inherited(global, sender),
global,
WorkerBinding::Wrap)
}
// http://www.whatwg.org/html/#dom-worker
pub fn Constructor(global: &GlobalRef, scriptURL: DOMString) -> Fallible<Temporary<Worker>> {
// Step 2-4.
let worker_url = match UrlParser::new().base_url(&global.get_url())
.parse(scriptURL.as_slice()) {
Ok(url) => url,
Err(_) => return Err(Syntax),
};
let resource_task = global.resource_task();
let (receiver, sender) = ScriptChan::new();
let worker = Worker::new(global, sender.clone()).root();
let worker_ref = worker.addref();
DedicatedWorkerGlobalScope::run_worker_scope(
worker_url, worker_ref, resource_task, global.script_chan().clone(),
sender, receiver);
Ok(Temporary::from_rooted(*worker))
}
pub fn handle_message(address: TrustedWorkerAddress,
data: *mut u64, nbytes: size_t) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
let global = worker.global.root();
let mut message = UndefinedValue();
unsafe {
assert!(JS_ReadStructuredClone(
global.root_ref().get_cx(), data as *const u64, nbytes,
JS_STRUCTURED_CLONE_VERSION, &mut message,
ptr::null(), ptr::null_mut())!= 0);
}
let target: JSRef<EventTarget> = EventTargetCast::from_ref(*worker);
MessageEvent::dispatch_jsval(target, &global.root_ref(), message);
}
}
impl Worker {
// Creates a trusted address to the object, and roots it. Always pair this with a release()
pub fn addref(&self) -> TrustedWorkerAddress {
let refcount = self.refcount.get();
if refcount == 0 {
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_AddObjectRoot(cx, self.reflector().rootable());
}
}
self.refcount.set(refcount + 1);
TrustedWorkerAddress(self as *const Worker as *const c_void)
}
pub fn release(&self) {
let refcount = self.refcount.get();
assert!(refcount > 0)
self.refcount.set(refcount - 1);
if refcount == 1 {
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_RemoveObjectRoot(cx, self.reflector().rootable());
}
}
}
pub fn handle_release(address: TrustedWorkerAddress) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
worker.release();
}
}
impl<'a> WorkerMethods for JSRef<'a, Worker> {
fn PostMessage(self, cx: *mut JSContext, message: JSVal) {
let mut data = ptr::null_mut();
let mut nbytes = 0;
unsafe {
assert!(JS_WriteStructuredClone(cx, message, &mut data, &mut nbytes,
ptr::null(), ptr::null_mut())!= 0);
}
self.addref();
let ScriptChan(ref sender) = self.sender;
sender.send(DOMMessage(data, nbytes));
}
fn GetOnmessage(self) -> Option<EventHandlerNonNull> {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.get_event_handler_common("message")
}
fn SetOnmessage(self, listener: Option<EventHandlerNonNull>) {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.set_event_handler_common("message", listener)
}
}
impl Reflectable for Worker {
fn
|
<'a>(&'a self) -> &'a Reflector {
self.eventtarget.reflector()
}
}
|
reflector
|
identifier_name
|
worker.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::WorkerBinding;
use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods;
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::error::{Fallible, Syntax};
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::{EventTarget, EventTargetHelpers, WorkerTypeId};
use dom::messageevent::MessageEvent;
use script_task::{ScriptChan, DOMMessage};
use servo_util::str::DOMString;
use js::glue::JS_STRUCTURED_CLONE_VERSION;
use js::jsapi::{JSContext, JS_AddObjectRoot, JS_RemoveObjectRoot};
use js::jsapi::{JS_ReadStructuredClone, JS_WriteStructuredClone};
use js::jsval::{JSVal, UndefinedValue};
use url::UrlParser;
use libc::{c_void, size_t};
use std::cell::Cell;
use std::ptr;
pub struct TrustedWorkerAddress(pub *const c_void);
#[deriving(Encodable)]
#[must_root]
pub struct Worker {
eventtarget: EventTarget,
refcount: Cell<uint>,
global: GlobalField,
/// Sender to the Receiver associated with the DedicatedWorkerGlobalScope
/// this Worker created.
sender: ScriptChan,
}
impl Worker {
pub fn new_inherited(global: &GlobalRef, sender: ScriptChan) -> Worker {
Worker {
eventtarget: EventTarget::new_inherited(WorkerTypeId),
refcount: Cell::new(0),
global: GlobalField::from_rooted(global),
sender: sender,
}
}
pub fn new(global: &GlobalRef, sender: ScriptChan) -> Temporary<Worker> {
reflect_dom_object(box Worker::new_inherited(global, sender),
global,
WorkerBinding::Wrap)
}
// http://www.whatwg.org/html/#dom-worker
pub fn Constructor(global: &GlobalRef, scriptURL: DOMString) -> Fallible<Temporary<Worker>> {
// Step 2-4.
let worker_url = match UrlParser::new().base_url(&global.get_url())
.parse(scriptURL.as_slice()) {
Ok(url) => url,
Err(_) => return Err(Syntax),
};
let resource_task = global.resource_task();
let (receiver, sender) = ScriptChan::new();
let worker = Worker::new(global, sender.clone()).root();
let worker_ref = worker.addref();
DedicatedWorkerGlobalScope::run_worker_scope(
worker_url, worker_ref, resource_task, global.script_chan().clone(),
sender, receiver);
Ok(Temporary::from_rooted(*worker))
}
pub fn handle_message(address: TrustedWorkerAddress,
data: *mut u64, nbytes: size_t) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
let global = worker.global.root();
let mut message = UndefinedValue();
unsafe {
assert!(JS_ReadStructuredClone(
global.root_ref().get_cx(), data as *const u64, nbytes,
JS_STRUCTURED_CLONE_VERSION, &mut message,
ptr::null(), ptr::null_mut())!= 0);
}
let target: JSRef<EventTarget> = EventTargetCast::from_ref(*worker);
MessageEvent::dispatch_jsval(target, &global.root_ref(), message);
}
}
impl Worker {
// Creates a trusted address to the object, and roots it. Always pair this with a release()
pub fn addref(&self) -> TrustedWorkerAddress {
let refcount = self.refcount.get();
if refcount == 0
|
self.refcount.set(refcount + 1);
TrustedWorkerAddress(self as *const Worker as *const c_void)
}
pub fn release(&self) {
let refcount = self.refcount.get();
assert!(refcount > 0)
self.refcount.set(refcount - 1);
if refcount == 1 {
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_RemoveObjectRoot(cx, self.reflector().rootable());
}
}
}
pub fn handle_release(address: TrustedWorkerAddress) {
let worker = unsafe { JS::from_trusted_worker_address(address).root() };
worker.release();
}
}
impl<'a> WorkerMethods for JSRef<'a, Worker> {
fn PostMessage(self, cx: *mut JSContext, message: JSVal) {
let mut data = ptr::null_mut();
let mut nbytes = 0;
unsafe {
assert!(JS_WriteStructuredClone(cx, message, &mut data, &mut nbytes,
ptr::null(), ptr::null_mut())!= 0);
}
self.addref();
let ScriptChan(ref sender) = self.sender;
sender.send(DOMMessage(data, nbytes));
}
fn GetOnmessage(self) -> Option<EventHandlerNonNull> {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.get_event_handler_common("message")
}
fn SetOnmessage(self, listener: Option<EventHandlerNonNull>) {
let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);
eventtarget.set_event_handler_common("message", listener)
}
}
impl Reflectable for Worker {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.eventtarget.reflector()
}
}
|
{
let cx = self.global.root().root_ref().get_cx();
unsafe {
JS_AddObjectRoot(cx, self.reflector().rootable());
}
}
|
conditional_block
|
builtin.rs
|
use lexer::Span;
use interpreter::*;
use std::io;
use std::io::prelude::*;
impl<'a> Interpreter<'a> {
pub fn builtin_print(&'a self,
context: *mut InterpreterContext<'a>,
args: &[Box<Expression>],
span: Span)
-> Result<Value> {
if args.len()!= 1 {
return Err(Error {
kind: ErrorKind::InvalidArgCount,
span: span,
});
};
match try!(self.value_from_expression(context, args.get(0).unwrap())) {
Value::String(s) => print!("{}", s),
Value::Integer(i) => print!("{}", i),
Value::Bool(b) => print!("{}", b),
Value::Char(c) => print!("{}", c),
Value::Struct(_, s) => print!("{:?}", s),
Value::Array(_, a) => print!("{:?}", a),
Value::Map(_, _, m) => print!("{:?}", m),
Value::Reference(v) => print!("ref {:?}", v),
Value::MutReference(v) => print!("mutref {:?}", v),
Value::Func(_, f) => print!("{:?}", f),
Value::Nil => print!("nil"),
};
Ok(Value::Nil)
}
pub fn builtin_readln(&self, args: &[Box<Expression>], span: Span) -> Result<Value> {
if args.len()!= 0 {
return Err(Error {
kind: ErrorKind::InvalidArgCount,
span: span,
});
};
let mut line = String::new();
let stdin = io::stdin();
stdin.lock().read_line(&mut line).unwrap();
let size = line.len();
line.remove(size - 1);
if line.ends_with('\r') {
line.remove(size - 2);
}
Ok(Value::String(line))
}
}
|
use parser::ast::*;
|
random_line_split
|
|
builtin.rs
|
use parser::ast::*;
use lexer::Span;
use interpreter::*;
use std::io;
use std::io::prelude::*;
impl<'a> Interpreter<'a> {
pub fn builtin_print(&'a self,
context: *mut InterpreterContext<'a>,
args: &[Box<Expression>],
span: Span)
-> Result<Value>
|
};
Ok(Value::Nil)
}
pub fn builtin_readln(&self, args: &[Box<Expression>], span: Span) -> Result<Value> {
if args.len()!= 0 {
return Err(Error {
kind: ErrorKind::InvalidArgCount,
span: span,
});
};
let mut line = String::new();
let stdin = io::stdin();
stdin.lock().read_line(&mut line).unwrap();
let size = line.len();
line.remove(size - 1);
if line.ends_with('\r') {
line.remove(size - 2);
}
Ok(Value::String(line))
}
}
|
{
if args.len() != 1 {
return Err(Error {
kind: ErrorKind::InvalidArgCount,
span: span,
});
};
match try!(self.value_from_expression(context, args.get(0).unwrap())) {
Value::String(s) => print!("{}", s),
Value::Integer(i) => print!("{}", i),
Value::Bool(b) => print!("{}", b),
Value::Char(c) => print!("{}", c),
Value::Struct(_, s) => print!("{:?}", s),
Value::Array(_, a) => print!("{:?}", a),
Value::Map(_, _, m) => print!("{:?}", m),
Value::Reference(v) => print!("ref {:?}", v),
Value::MutReference(v) => print!("mutref {:?}", v),
Value::Func(_, f) => print!("{:?}", f),
Value::Nil => print!("nil"),
|
identifier_body
|
builtin.rs
|
use parser::ast::*;
use lexer::Span;
use interpreter::*;
use std::io;
use std::io::prelude::*;
impl<'a> Interpreter<'a> {
pub fn
|
(&'a self,
context: *mut InterpreterContext<'a>,
args: &[Box<Expression>],
span: Span)
-> Result<Value> {
if args.len()!= 1 {
return Err(Error {
kind: ErrorKind::InvalidArgCount,
span: span,
});
};
match try!(self.value_from_expression(context, args.get(0).unwrap())) {
Value::String(s) => print!("{}", s),
Value::Integer(i) => print!("{}", i),
Value::Bool(b) => print!("{}", b),
Value::Char(c) => print!("{}", c),
Value::Struct(_, s) => print!("{:?}", s),
Value::Array(_, a) => print!("{:?}", a),
Value::Map(_, _, m) => print!("{:?}", m),
Value::Reference(v) => print!("ref {:?}", v),
Value::MutReference(v) => print!("mutref {:?}", v),
Value::Func(_, f) => print!("{:?}", f),
Value::Nil => print!("nil"),
};
Ok(Value::Nil)
}
pub fn builtin_readln(&self, args: &[Box<Expression>], span: Span) -> Result<Value> {
if args.len()!= 0 {
return Err(Error {
kind: ErrorKind::InvalidArgCount,
span: span,
});
};
let mut line = String::new();
let stdin = io::stdin();
stdin.lock().read_line(&mut line).unwrap();
let size = line.len();
line.remove(size - 1);
if line.ends_with('\r') {
line.remove(size - 2);
}
Ok(Value::String(line))
}
}
|
builtin_print
|
identifier_name
|
name.rs
|
use lazy_static::lazy_static;
use regex::Regex;
use serde::Deserialize;
use std::fs;
#[derive(Debug, Deserialize)]
struct FirstName {
male: Vec<Vec<String>>,
female: Vec<Vec<String>>,
}
#[derive(Debug, Deserialize)]
struct Data {
first_name: FirstName,
last_name: Vec<Vec<String>>,
}
lazy_static! {
static ref DATA: Data = serde_yaml::from_str(&fs::read_to_string("src/data/names.yml").unwrap()).unwrap();
}
#[test]
fn male_first_name() {
let name = gimei::male();
let items = &DATA.first_name.male;
assert!(items.iter().any(|x| x[0] == name.first.kanji));
assert!(items.iter().any(|x| x[1] == name.first.hiragana));
assert!(items.iter().any(|x| x[2] == name.first.katakana));
}
#[test]
fn female_first_name() {
let name = gimei::female();
let items = &DATA.first_name.female;
assert!(items.iter().any(|x| x[0] == name.first.kanji));
assert!(items.iter().any(|x| x[1] == name.first.hiragana));
assert!(items.iter().any(|x| x[2] == name.first.katakana));
}
#[test]
fn
|
() {
let name = gimei::name();
let items = &DATA.last_name;
assert!(items.iter().any(|x| x[0] == name.last.kanji));
assert!(items.iter().any(|x| x[1] == name.last.hiragana));
assert!(items.iter().any(|x| x[2] == name.last.katakana));
}
#[test]
fn to_kanji() {
let name = gimei::name();
let re = Regex::new(r"^\p{Han}+\s\p{Han}+$").unwrap();
assert!(re.is_match(&name.to_kanji()));
}
#[test]
fn to_hiragana() {
let name = gimei::name();
let re = Regex::new(r"^\p{Hiragana}+\s\p{Hiragana}+$").unwrap();
assert!(re.is_match(&name.to_hiragana()));
}
#[test]
fn to_katakana() {
let name = gimei::name();
let re = Regex::new(r"^\p{Katakana}+\s\p{Katakana}+$").unwrap();
assert!(re.is_match(&name.to_katakana()));
}
#[test]
fn is_male() {
let male_name = gimei::male();
assert!(male_name.is_male());
let female_name = gimei::female();
assert!(!female_name.is_male());
}
#[test]
fn is_female() {
let male_name = gimei::male();
assert!(!male_name.is_female());
let female_name = gimei::female();
assert!(female_name.is_female());
}
#[test]
fn display() {
let name = gimei::name();
assert_eq!(format!("{}", name), name.to_kanji());
}
|
last_name
|
identifier_name
|
name.rs
|
use lazy_static::lazy_static;
use regex::Regex;
use serde::Deserialize;
use std::fs;
#[derive(Debug, Deserialize)]
struct FirstName {
male: Vec<Vec<String>>,
female: Vec<Vec<String>>,
}
#[derive(Debug, Deserialize)]
struct Data {
first_name: FirstName,
last_name: Vec<Vec<String>>,
}
lazy_static! {
static ref DATA: Data = serde_yaml::from_str(&fs::read_to_string("src/data/names.yml").unwrap()).unwrap();
}
#[test]
fn male_first_name() {
let name = gimei::male();
let items = &DATA.first_name.male;
assert!(items.iter().any(|x| x[0] == name.first.kanji));
assert!(items.iter().any(|x| x[1] == name.first.hiragana));
|
fn female_first_name() {
let name = gimei::female();
let items = &DATA.first_name.female;
assert!(items.iter().any(|x| x[0] == name.first.kanji));
assert!(items.iter().any(|x| x[1] == name.first.hiragana));
assert!(items.iter().any(|x| x[2] == name.first.katakana));
}
#[test]
fn last_name() {
let name = gimei::name();
let items = &DATA.last_name;
assert!(items.iter().any(|x| x[0] == name.last.kanji));
assert!(items.iter().any(|x| x[1] == name.last.hiragana));
assert!(items.iter().any(|x| x[2] == name.last.katakana));
}
#[test]
fn to_kanji() {
let name = gimei::name();
let re = Regex::new(r"^\p{Han}+\s\p{Han}+$").unwrap();
assert!(re.is_match(&name.to_kanji()));
}
#[test]
fn to_hiragana() {
let name = gimei::name();
let re = Regex::new(r"^\p{Hiragana}+\s\p{Hiragana}+$").unwrap();
assert!(re.is_match(&name.to_hiragana()));
}
#[test]
fn to_katakana() {
let name = gimei::name();
let re = Regex::new(r"^\p{Katakana}+\s\p{Katakana}+$").unwrap();
assert!(re.is_match(&name.to_katakana()));
}
#[test]
fn is_male() {
let male_name = gimei::male();
assert!(male_name.is_male());
let female_name = gimei::female();
assert!(!female_name.is_male());
}
#[test]
fn is_female() {
let male_name = gimei::male();
assert!(!male_name.is_female());
let female_name = gimei::female();
assert!(female_name.is_female());
}
#[test]
fn display() {
let name = gimei::name();
assert_eq!(format!("{}", name), name.to_kanji());
}
|
assert!(items.iter().any(|x| x[2] == name.first.katakana));
}
#[test]
|
random_line_split
|
name.rs
|
use lazy_static::lazy_static;
use regex::Regex;
use serde::Deserialize;
use std::fs;
#[derive(Debug, Deserialize)]
struct FirstName {
male: Vec<Vec<String>>,
female: Vec<Vec<String>>,
}
#[derive(Debug, Deserialize)]
struct Data {
first_name: FirstName,
last_name: Vec<Vec<String>>,
}
lazy_static! {
static ref DATA: Data = serde_yaml::from_str(&fs::read_to_string("src/data/names.yml").unwrap()).unwrap();
}
#[test]
fn male_first_name() {
let name = gimei::male();
let items = &DATA.first_name.male;
assert!(items.iter().any(|x| x[0] == name.first.kanji));
assert!(items.iter().any(|x| x[1] == name.first.hiragana));
assert!(items.iter().any(|x| x[2] == name.first.katakana));
}
#[test]
fn female_first_name() {
let name = gimei::female();
let items = &DATA.first_name.female;
assert!(items.iter().any(|x| x[0] == name.first.kanji));
assert!(items.iter().any(|x| x[1] == name.first.hiragana));
assert!(items.iter().any(|x| x[2] == name.first.katakana));
}
#[test]
fn last_name() {
let name = gimei::name();
let items = &DATA.last_name;
assert!(items.iter().any(|x| x[0] == name.last.kanji));
assert!(items.iter().any(|x| x[1] == name.last.hiragana));
assert!(items.iter().any(|x| x[2] == name.last.katakana));
}
#[test]
fn to_kanji() {
let name = gimei::name();
let re = Regex::new(r"^\p{Han}+\s\p{Han}+$").unwrap();
assert!(re.is_match(&name.to_kanji()));
}
#[test]
fn to_hiragana() {
let name = gimei::name();
let re = Regex::new(r"^\p{Hiragana}+\s\p{Hiragana}+$").unwrap();
assert!(re.is_match(&name.to_hiragana()));
}
#[test]
fn to_katakana()
|
#[test]
fn is_male() {
let male_name = gimei::male();
assert!(male_name.is_male());
let female_name = gimei::female();
assert!(!female_name.is_male());
}
#[test]
fn is_female() {
let male_name = gimei::male();
assert!(!male_name.is_female());
let female_name = gimei::female();
assert!(female_name.is_female());
}
#[test]
fn display() {
let name = gimei::name();
assert_eq!(format!("{}", name), name.to_kanji());
}
|
{
let name = gimei::name();
let re = Regex::new(r"^\p{Katakana}+\s\p{Katakana}+$").unwrap();
assert!(re.is_match(&name.to_katakana()));
}
|
identifier_body
|
for_kv_map.rs
|
use super::FOR_KV_MAP;
use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
use clippy_utils::source::snippet;
use clippy_utils::sugg;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::visitors::LocalUsedVisitor;
use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Pat, PatKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::sym;
/// Checks for the `FOR_KV_MAP` lint.
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
pat: &'tcx Pat<'_>,
arg: &'tcx Expr<'_>,
body: &'tcx Expr<'_>,
expr: &'tcx Expr<'_>,
) {
let pat_span = pat.span;
if let PatKind::Tuple(pat, _) = pat.kind {
if pat.len() == 2 {
let arg_span = arg.span;
let (new_pat_span, kind, ty, mutbl) = match *cx.typeck_results().expr_ty(arg).kind() {
ty::Ref(_, ty, mutbl) => match (&pat[0].kind, &pat[1].kind) {
(key, _) if pat_is_wild(cx, key, body) => (pat[1].span, "value", ty, mutbl),
(_, value) if pat_is_wild(cx, value, body) => (pat[0].span, "key", ty, Mutability::Not),
_ => return,
},
_ => return,
};
let mutbl = match mutbl {
Mutability::Not => "",
Mutability::Mut => "_mut",
};
let arg = match arg.kind {
ExprKind::AddrOf(BorrowKind::Ref, _, expr) => expr,
_ => arg,
};
if is_type_diagnostic_item(cx, ty, sym::hashmap_type) || is_type_diagnostic_item(cx, ty, sym::BTreeMap) {
span_lint_and_then(
cx,
FOR_KV_MAP,
expr.span,
&format!("you seem to want to iterate on a map's {}s", kind),
|diag| {
let map = sugg::Sugg::hir(cx, arg, "map");
multispan_sugg(
diag,
"use the corresponding method",
vec![
(pat_span, snippet(cx, new_pat_span, kind).into_owned()),
(arg_span, format!("{}.{}s{}()", map.maybe_par(), kind, mutbl)),
],
);
},
);
}
}
}
}
/// Returns `true` if the pattern is a `PatWild` or an ident prefixed with `_`.
fn pat_is_wild<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx PatKind<'_>, body: &'tcx Expr<'_>) -> bool
|
{
match *pat {
PatKind::Wild => true,
PatKind::Binding(_, id, ident, None) if ident.as_str().starts_with('_') => {
!LocalUsedVisitor::new(cx, id).check_expr(body)
},
_ => false,
}
}
|
identifier_body
|
|
for_kv_map.rs
|
use super::FOR_KV_MAP;
use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
use clippy_utils::source::snippet;
use clippy_utils::sugg;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::visitors::LocalUsedVisitor;
use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Pat, PatKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::sym;
/// Checks for the `FOR_KV_MAP` lint.
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
pat: &'tcx Pat<'_>,
arg: &'tcx Expr<'_>,
body: &'tcx Expr<'_>,
expr: &'tcx Expr<'_>,
) {
let pat_span = pat.span;
if let PatKind::Tuple(pat, _) = pat.kind {
if pat.len() == 2 {
let arg_span = arg.span;
let (new_pat_span, kind, ty, mutbl) = match *cx.typeck_results().expr_ty(arg).kind() {
ty::Ref(_, ty, mutbl) => match (&pat[0].kind, &pat[1].kind) {
(key, _) if pat_is_wild(cx, key, body) => (pat[1].span, "value", ty, mutbl),
(_, value) if pat_is_wild(cx, value, body) => (pat[0].span, "key", ty, Mutability::Not),
_ => return,
},
_ => return,
};
let mutbl = match mutbl {
Mutability::Not => "",
Mutability::Mut => "_mut",
};
let arg = match arg.kind {
ExprKind::AddrOf(BorrowKind::Ref, _, expr) => expr,
_ => arg,
};
if is_type_diagnostic_item(cx, ty, sym::hashmap_type) || is_type_diagnostic_item(cx, ty, sym::BTreeMap) {
span_lint_and_then(
cx,
FOR_KV_MAP,
expr.span,
&format!("you seem to want to iterate on a map's {}s", kind),
|diag| {
let map = sugg::Sugg::hir(cx, arg, "map");
multispan_sugg(
diag,
"use the corresponding method",
vec![
(pat_span, snippet(cx, new_pat_span, kind).into_owned()),
(arg_span, format!("{}.{}s{}()", map.maybe_par(), kind, mutbl)),
],
);
},
);
}
}
}
}
/// Returns `true` if the pattern is a `PatWild` or an ident prefixed with `_`.
fn
|
<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx PatKind<'_>, body: &'tcx Expr<'_>) -> bool {
match *pat {
PatKind::Wild => true,
PatKind::Binding(_, id, ident, None) if ident.as_str().starts_with('_') => {
!LocalUsedVisitor::new(cx, id).check_expr(body)
},
_ => false,
}
}
|
pat_is_wild
|
identifier_name
|
for_kv_map.rs
|
use super::FOR_KV_MAP;
use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
use clippy_utils::source::snippet;
use clippy_utils::sugg;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::visitors::LocalUsedVisitor;
use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Pat, PatKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::sym;
/// Checks for the `FOR_KV_MAP` lint.
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
pat: &'tcx Pat<'_>,
arg: &'tcx Expr<'_>,
body: &'tcx Expr<'_>,
expr: &'tcx Expr<'_>,
) {
let pat_span = pat.span;
|
let (new_pat_span, kind, ty, mutbl) = match *cx.typeck_results().expr_ty(arg).kind() {
ty::Ref(_, ty, mutbl) => match (&pat[0].kind, &pat[1].kind) {
(key, _) if pat_is_wild(cx, key, body) => (pat[1].span, "value", ty, mutbl),
(_, value) if pat_is_wild(cx, value, body) => (pat[0].span, "key", ty, Mutability::Not),
_ => return,
},
_ => return,
};
let mutbl = match mutbl {
Mutability::Not => "",
Mutability::Mut => "_mut",
};
let arg = match arg.kind {
ExprKind::AddrOf(BorrowKind::Ref, _, expr) => expr,
_ => arg,
};
if is_type_diagnostic_item(cx, ty, sym::hashmap_type) || is_type_diagnostic_item(cx, ty, sym::BTreeMap) {
span_lint_and_then(
cx,
FOR_KV_MAP,
expr.span,
&format!("you seem to want to iterate on a map's {}s", kind),
|diag| {
let map = sugg::Sugg::hir(cx, arg, "map");
multispan_sugg(
diag,
"use the corresponding method",
vec![
(pat_span, snippet(cx, new_pat_span, kind).into_owned()),
(arg_span, format!("{}.{}s{}()", map.maybe_par(), kind, mutbl)),
],
);
},
);
}
}
}
}
/// Returns `true` if the pattern is a `PatWild` or an ident prefixed with `_`.
fn pat_is_wild<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx PatKind<'_>, body: &'tcx Expr<'_>) -> bool {
match *pat {
PatKind::Wild => true,
PatKind::Binding(_, id, ident, None) if ident.as_str().starts_with('_') => {
!LocalUsedVisitor::new(cx, id).check_expr(body)
},
_ => false,
}
}
|
if let PatKind::Tuple(pat, _) = pat.kind {
if pat.len() == 2 {
let arg_span = arg.span;
|
random_line_split
|
borrowck-uniq-via-box.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Rec {
f: ~int,
}
struct Outer {
f: Inner
}
struct Inner {
g: Innermost
}
struct Innermost {
|
fn borrow(_v: &int) {}
fn box_mut(v: @mut ~int) {
borrow(*v); //~ ERROR illegal borrow unless pure
}
fn box_mut_rec(v: @mut Rec) {
borrow(v.f); //~ ERROR illegal borrow unless pure
}
fn box_mut_recs(v: @mut Outer) {
borrow(v.f.g.h); //~ ERROR illegal borrow unless pure
}
fn box_imm(v: @~int) {
borrow(*v); // OK
}
fn box_imm_rec(v: @Rec) {
borrow(v.f); // OK
}
fn box_imm_recs(v: @Outer) {
borrow(v.f.g.h); // OK
}
fn main() {
}
|
h: ~int,
}
|
random_line_split
|
borrowck-uniq-via-box.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Rec {
f: ~int,
}
struct Outer {
f: Inner
}
struct Inner {
g: Innermost
}
struct Innermost {
h: ~int,
}
fn borrow(_v: &int) {}
fn box_mut(v: @mut ~int) {
borrow(*v); //~ ERROR illegal borrow unless pure
}
fn box_mut_rec(v: @mut Rec) {
borrow(v.f); //~ ERROR illegal borrow unless pure
}
fn box_mut_recs(v: @mut Outer) {
borrow(v.f.g.h); //~ ERROR illegal borrow unless pure
}
fn box_imm(v: @~int) {
borrow(*v); // OK
}
fn box_imm_rec(v: @Rec) {
borrow(v.f); // OK
}
fn box_imm_recs(v: @Outer) {
borrow(v.f.g.h); // OK
}
fn main()
|
{
}
|
identifier_body
|
|
borrowck-uniq-via-box.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Rec {
f: ~int,
}
struct Outer {
f: Inner
}
struct Inner {
g: Innermost
}
struct Innermost {
h: ~int,
}
fn borrow(_v: &int) {}
fn box_mut(v: @mut ~int) {
borrow(*v); //~ ERROR illegal borrow unless pure
}
fn box_mut_rec(v: @mut Rec) {
borrow(v.f); //~ ERROR illegal borrow unless pure
}
fn box_mut_recs(v: @mut Outer) {
borrow(v.f.g.h); //~ ERROR illegal borrow unless pure
}
fn box_imm(v: @~int) {
borrow(*v); // OK
}
fn
|
(v: @Rec) {
borrow(v.f); // OK
}
fn box_imm_recs(v: @Outer) {
borrow(v.f.g.h); // OK
}
fn main() {
}
|
box_imm_rec
|
identifier_name
|
build.rs
|
extern crate serde_codegen;
extern crate serde_json;
extern crate glob;
use std::env;
use std::fs;
use std::path::Path;
/// generate an enum of Events
fn main() {
for entry in glob::glob("src/**/*.rs.in").expect("Failed to read glob pattern") {
println!("cargo:rerun-if-changed={}", entry.unwrap().display());
}
let out_dir = env::var_os("OUT_DIR").unwrap();
|
for entry in glob::glob("**/*.rs.in").expect("Failed to read glob pattern") {
match entry {
Ok(src) => {
let mut dst = Path::new(&out_dir).join(&src);
// Change ".rs.in" to ".rs".
dst.set_file_name(src.file_stem().expect("Failed to get file stem"));
dst.set_extension("rs");
// Make sure our target directory exists. We only need
// this if there are extra nested sudirectories under src/.
fs::create_dir_all(dst.parent().unwrap()).unwrap();
// Process our source file.
serde_codegen::expand(&src, &dst).unwrap();
}
Err(e) => {
panic!("Error globbing: {}", e);
}
}
}
}
|
// Switch to our `src` directory so that we have the right base for our
// globs, and so that we won't need to strip `src/` off every path.
env::set_current_dir("src").unwrap();
|
random_line_split
|
build.rs
|
extern crate serde_codegen;
extern crate serde_json;
extern crate glob;
use std::env;
use std::fs;
use std::path::Path;
/// generate an enum of Events
fn main() {
for entry in glob::glob("src/**/*.rs.in").expect("Failed to read glob pattern") {
println!("cargo:rerun-if-changed={}", entry.unwrap().display());
}
let out_dir = env::var_os("OUT_DIR").unwrap();
// Switch to our `src` directory so that we have the right base for our
// globs, and so that we won't need to strip `src/` off every path.
env::set_current_dir("src").unwrap();
for entry in glob::glob("**/*.rs.in").expect("Failed to read glob pattern") {
match entry {
Ok(src) =>
|
Err(e) => {
panic!("Error globbing: {}", e);
}
}
}
}
|
{
let mut dst = Path::new(&out_dir).join(&src);
// Change ".rs.in" to ".rs".
dst.set_file_name(src.file_stem().expect("Failed to get file stem"));
dst.set_extension("rs");
// Make sure our target directory exists. We only need
// this if there are extra nested sudirectories under src/.
fs::create_dir_all(dst.parent().unwrap()).unwrap();
// Process our source file.
serde_codegen::expand(&src, &dst).unwrap();
}
|
conditional_block
|
build.rs
|
extern crate serde_codegen;
extern crate serde_json;
extern crate glob;
use std::env;
use std::fs;
use std::path::Path;
/// generate an enum of Events
fn main()
|
// Make sure our target directory exists. We only need
// this if there are extra nested sudirectories under src/.
fs::create_dir_all(dst.parent().unwrap()).unwrap();
// Process our source file.
serde_codegen::expand(&src, &dst).unwrap();
}
Err(e) => {
panic!("Error globbing: {}", e);
}
}
}
}
|
{
for entry in glob::glob("src/**/*.rs.in").expect("Failed to read glob pattern") {
println!("cargo:rerun-if-changed={}", entry.unwrap().display());
}
let out_dir = env::var_os("OUT_DIR").unwrap();
// Switch to our `src` directory so that we have the right base for our
// globs, and so that we won't need to strip `src/` off every path.
env::set_current_dir("src").unwrap();
for entry in glob::glob("**/*.rs.in").expect("Failed to read glob pattern") {
match entry {
Ok(src) => {
let mut dst = Path::new(&out_dir).join(&src);
// Change ".rs.in" to ".rs".
dst.set_file_name(src.file_stem().expect("Failed to get file stem"));
dst.set_extension("rs");
|
identifier_body
|
build.rs
|
extern crate serde_codegen;
extern crate serde_json;
extern crate glob;
use std::env;
use std::fs;
use std::path::Path;
/// generate an enum of Events
fn
|
() {
for entry in glob::glob("src/**/*.rs.in").expect("Failed to read glob pattern") {
println!("cargo:rerun-if-changed={}", entry.unwrap().display());
}
let out_dir = env::var_os("OUT_DIR").unwrap();
// Switch to our `src` directory so that we have the right base for our
// globs, and so that we won't need to strip `src/` off every path.
env::set_current_dir("src").unwrap();
for entry in glob::glob("**/*.rs.in").expect("Failed to read glob pattern") {
match entry {
Ok(src) => {
let mut dst = Path::new(&out_dir).join(&src);
// Change ".rs.in" to ".rs".
dst.set_file_name(src.file_stem().expect("Failed to get file stem"));
dst.set_extension("rs");
// Make sure our target directory exists. We only need
// this if there are extra nested sudirectories under src/.
fs::create_dir_all(dst.parent().unwrap()).unwrap();
// Process our source file.
serde_codegen::expand(&src, &dst).unwrap();
}
Err(e) => {
panic!("Error globbing: {}", e);
}
}
}
}
|
main
|
identifier_name
|
mod.rs
|
//! Provides functions for maintaining database schema.
//!
//! A database migration always provides procedures to update the schema, as well as to revert
//! itself. Diesel's migrations are versioned, and run in order. Diesel also takes care of tracking
//! which migrations have already been run automatically. Your migrations don't need to be
//! idempotent, as Diesel will ensure no migration is run twice unless it has been reverted.
//!
//! Migrations should be placed in a `/migrations` directory at the root of your project (the same
//! directory as `Cargo.toml`). When any of these functions are run, Diesel will search for the
//! migrations directory in the current directory and its parents, stopping when it finds the
//! directory containing `Cargo.toml`.
//!
//! Individual migrations should be a folder containing exactly two files, `up.sql` and `down.sql`.
//! `up.sql` will be used to run the migration, while `down.sql` will be used for reverting it. The
//! folder itself should have the structure `{version}_{migration_name}`. It is recommended that
//! you use the timestamp of creation for the version.
//!
//! ## Example
//!
//! ```text
//! # Directory Structure
//! - 20151219180527_create_users
//! - up.sql
//! - down.sql
//! - 20160107082941_create_posts
//! - up.sql
//! - down.sql
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/up.sql
//! CREATE TABLE users (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! hair_color VARCHAR
//! );
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/down.sql
//! DROP TABLE users;
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/up.sql
//! CREATE TABLE posts (
//! id SERIAL PRIMARY KEY,
//! user_id INTEGER NOT NULL,
//! title VARCHAR NOT NULL,
//! body TEXT
//! );
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/down.sql
//! DROP TABLE posts;
//! ```
mod migration;
mod migration_error;
#[doc(hidden)]
pub mod schema;
pub use self::migration_error::*;
use std::io::{stdout, Write};
use expression::expression_methods::*;
use persistable::Insertable;
use query_dsl::*;
use self::migration::*;
use self::migration_error::MigrationError::*;
use self::schema::NewMigration;
use self::schema::__diesel_schema_migrations::dsl::*;
use types::{FromSql, VarChar};
use {Connection, QueryResult};
use std::collections::HashSet;
use std::env;
use std::path::{PathBuf, Path};
/// Runs all migrations that have not yet been run. This function will print all progress to
/// stdout. This function will return an `Err` if some error occurs reading the migrations, or if
/// any migration fails to run. Each migration is run in its own transaction, so some migrations
/// may be committed, even if a later migration fails to run.
///
/// It should be noted that this runs all migrations that have not already been run, regardless of
/// whether or not their version is later than the latest run migration. This is generally not a
/// problem, and eases the more common case of two developers generating independent migrations on
/// a branch. Whoever created the second one will eventually need to run the first when both
/// branches are merged.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn run_pending_migrations<Conn>(conn: &Conn) -> Result<(), RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
let migrations_dir = try!(find_migrations_directory());
run_pending_migrations_in_directory(conn, &migrations_dir, &mut stdout())
}
#[doc(hidden)]
pub fn run_pending_migrations_in_directory<Conn>(conn: &Conn, migrations_dir: &Path, output: &mut Write)
-> Result<(), RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
try!(create_schema_migrations_table_if_needed(conn));
let already_run = try!(previously_run_migration_versions(conn));
let all_migrations = try!(migrations_in_directory(migrations_dir));
let pending_migrations = all_migrations.into_iter().filter(|m| {
!already_run.contains(m.version())
});
run_migrations(conn, pending_migrations.collect(), output)
}
/// Reverts the last migration that was run. Returns the version that was reverted. Returns an
/// `Err` if no migrations have ever been run.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn revert_latest_migration<Conn>(conn: &Conn) -> Result<String, RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
try!(create_schema_migrations_table_if_needed(conn));
let latest_migration_version = try!(latest_run_migration_version(conn));
revert_migration_with_version(conn, &latest_migration_version, &mut stdout())
.map(|_| latest_migration_version)
}
#[doc(hidden)]
pub fn revert_migration_with_version<Conn: Connection>(conn: &Conn, ver: &str, output: &mut Write)
-> Result<(), RunMigrationsError>
{
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| revert_migration(conn, m, output))
}
#[doc(hidden)]
pub fn run_migration_with_version<Conn: Connection>(conn: &Conn, ver: &str, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| run_migration(conn, m, output))
}
fn migration_with_version(ver: &str) -> Result<Box<Migration>, MigrationError> {
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let migration = all_migrations.into_iter().find(|m| {
m.version() == ver
});
match migration {
Some(m) => Ok(m),
None => Err(UnknownMigrationVersion(ver.into())),
}
}
#[doc(hidden)]
pub fn create_schema_migrations_table_if_needed<Conn: Connection>(conn: &Conn) -> QueryResult<usize>
|
fn previously_run_migration_versions<Conn>(conn: &Conn) -> QueryResult<HashSet<String>> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
__diesel_schema_migrations.select(version)
.load(conn)
.map(|r| r.collect())
}
fn latest_run_migration_version<Conn>(conn: &Conn) -> QueryResult<String> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
use ::expression::dsl::max;
__diesel_schema_migrations.select(max(version))
.first(conn)
}
fn migrations_in_directory(path: &Path) -> Result<Vec<Box<Migration>>, MigrationError> {
use self::migration::migration_from;
try!(path.read_dir())
.filter_map(|entry| {
let entry = match entry {
Ok(e) => e,
Err(e) => return Some(Err(e.into())),
};
if!entry.file_name().to_string_lossy().starts_with(".") {
Some(migration_from(entry.path()))
} else {
None
}
}).collect()
}
fn run_migrations<Conn: Connection>(conn: &Conn, mut migrations: Vec<Box<Migration>>, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
migrations.sort_by(|a, b| a.version().cmp(b.version()));
for migration in migrations {
try!(run_migration(conn, migration, output));
}
Ok(())
}
fn run_migration<Conn: Connection>(conn: &Conn, migration: Box<Migration>, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
conn.transaction(|| {
try!(writeln!(output, "Running migration {}", migration.version()));
try!(migration.run(conn));
try!(::insert(&NewMigration(migration.version()))
.into(__diesel_schema_migrations)
.execute(conn));
Ok(())
}).map_err(|e| e.into())
}
fn revert_migration<Conn: Connection>(conn: &Conn, migration: Box<Migration>, output: &mut Write)
-> Result<(), RunMigrationsError>
{
try!(conn.transaction(|| {
try!(writeln!(output, "Rolling back migration {}", migration.version()));
try!(migration.revert(conn));
let target = __diesel_schema_migrations.filter(version.eq(migration.version()));
try!(::delete(target).execute(conn));
Ok(())
}));
Ok(())
}
/// Returns the directory containing migrations. Will look at for
/// $PWD/migrations. If it is not found, it will search the parents of the
/// current directory, until it reaches the root directory. Returns
/// `MigrationError::MigrationDirectoryNotFound` if no directory is found.
pub fn find_migrations_directory() -> Result<PathBuf, MigrationError> {
search_for_migrations_directory(&try!(env::current_dir()))
}
fn search_for_migrations_directory(path: &Path) -> Result<PathBuf, MigrationError> {
let migration_path = path.join("migrations");
if migration_path.is_dir() {
Ok(migration_path)
} else {
path.parent().map(search_for_migrations_directory)
.unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use super::search_for_migrations_directory;
use self::tempdir::TempDir;
use std::fs;
#[test]
fn migration_directory_not_found_if_no_migration_dir_exists() {
let dir = TempDir::new("diesel").unwrap();
assert_eq!(Err(MigrationError::MigrationDirectoryNotFound),
search_for_migrations_directory(dir.path()));
}
#[test]
fn migration_directory_defaults_to_pwd_slash_migrations() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&temp_path));
}
#[test]
fn migration_directory_checks_parents() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
let child_path = temp_path.join("child");
fs::create_dir(&child_path).unwrap();
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&child_path));
}
}
|
{
conn.silence_notices(|| {
conn.execute("CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (
version VARCHAR PRIMARY KEY NOT NULL,
run_on TIMESTAMP NOT NULL DEFAULT 'now'
)")
})
}
|
identifier_body
|
mod.rs
|
//! Provides functions for maintaining database schema.
//!
//! A database migration always provides procedures to update the schema, as well as to revert
//! itself. Diesel's migrations are versioned, and run in order. Diesel also takes care of tracking
//! which migrations have already been run automatically. Your migrations don't need to be
//! idempotent, as Diesel will ensure no migration is run twice unless it has been reverted.
//!
//! Migrations should be placed in a `/migrations` directory at the root of your project (the same
//! directory as `Cargo.toml`). When any of these functions are run, Diesel will search for the
//! migrations directory in the current directory and its parents, stopping when it finds the
//! directory containing `Cargo.toml`.
//!
//! Individual migrations should be a folder containing exactly two files, `up.sql` and `down.sql`.
//! `up.sql` will be used to run the migration, while `down.sql` will be used for reverting it. The
//! folder itself should have the structure `{version}_{migration_name}`. It is recommended that
//! you use the timestamp of creation for the version.
//!
//! ## Example
//!
|
//! - 20160107082941_create_posts
//! - up.sql
//! - down.sql
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/up.sql
//! CREATE TABLE users (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! hair_color VARCHAR
//! );
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/down.sql
//! DROP TABLE users;
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/up.sql
//! CREATE TABLE posts (
//! id SERIAL PRIMARY KEY,
//! user_id INTEGER NOT NULL,
//! title VARCHAR NOT NULL,
//! body TEXT
//! );
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/down.sql
//! DROP TABLE posts;
//! ```
mod migration;
mod migration_error;
#[doc(hidden)]
pub mod schema;
pub use self::migration_error::*;
use std::io::{stdout, Write};
use expression::expression_methods::*;
use persistable::Insertable;
use query_dsl::*;
use self::migration::*;
use self::migration_error::MigrationError::*;
use self::schema::NewMigration;
use self::schema::__diesel_schema_migrations::dsl::*;
use types::{FromSql, VarChar};
use {Connection, QueryResult};
use std::collections::HashSet;
use std::env;
use std::path::{PathBuf, Path};
/// Runs all migrations that have not yet been run. This function will print all progress to
/// stdout. This function will return an `Err` if some error occurs reading the migrations, or if
/// any migration fails to run. Each migration is run in its own transaction, so some migrations
/// may be committed, even if a later migration fails to run.
///
/// It should be noted that this runs all migrations that have not already been run, regardless of
/// whether or not their version is later than the latest run migration. This is generally not a
/// problem, and eases the more common case of two developers generating independent migrations on
/// a branch. Whoever created the second one will eventually need to run the first when both
/// branches are merged.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn run_pending_migrations<Conn>(conn: &Conn) -> Result<(), RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
let migrations_dir = try!(find_migrations_directory());
run_pending_migrations_in_directory(conn, &migrations_dir, &mut stdout())
}
#[doc(hidden)]
pub fn run_pending_migrations_in_directory<Conn>(conn: &Conn, migrations_dir: &Path, output: &mut Write)
-> Result<(), RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
try!(create_schema_migrations_table_if_needed(conn));
let already_run = try!(previously_run_migration_versions(conn));
let all_migrations = try!(migrations_in_directory(migrations_dir));
let pending_migrations = all_migrations.into_iter().filter(|m| {
!already_run.contains(m.version())
});
run_migrations(conn, pending_migrations.collect(), output)
}
/// Reverts the last migration that was run. Returns the version that was reverted. Returns an
/// `Err` if no migrations have ever been run.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn revert_latest_migration<Conn>(conn: &Conn) -> Result<String, RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
try!(create_schema_migrations_table_if_needed(conn));
let latest_migration_version = try!(latest_run_migration_version(conn));
revert_migration_with_version(conn, &latest_migration_version, &mut stdout())
.map(|_| latest_migration_version)
}
#[doc(hidden)]
pub fn revert_migration_with_version<Conn: Connection>(conn: &Conn, ver: &str, output: &mut Write)
-> Result<(), RunMigrationsError>
{
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| revert_migration(conn, m, output))
}
#[doc(hidden)]
pub fn run_migration_with_version<Conn: Connection>(conn: &Conn, ver: &str, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| run_migration(conn, m, output))
}
fn migration_with_version(ver: &str) -> Result<Box<Migration>, MigrationError> {
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let migration = all_migrations.into_iter().find(|m| {
m.version() == ver
});
match migration {
Some(m) => Ok(m),
None => Err(UnknownMigrationVersion(ver.into())),
}
}
#[doc(hidden)]
pub fn create_schema_migrations_table_if_needed<Conn: Connection>(conn: &Conn) -> QueryResult<usize> {
conn.silence_notices(|| {
conn.execute("CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (
version VARCHAR PRIMARY KEY NOT NULL,
run_on TIMESTAMP NOT NULL DEFAULT 'now'
)")
})
}
fn previously_run_migration_versions<Conn>(conn: &Conn) -> QueryResult<HashSet<String>> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
__diesel_schema_migrations.select(version)
.load(conn)
.map(|r| r.collect())
}
fn latest_run_migration_version<Conn>(conn: &Conn) -> QueryResult<String> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
use ::expression::dsl::max;
__diesel_schema_migrations.select(max(version))
.first(conn)
}
fn migrations_in_directory(path: &Path) -> Result<Vec<Box<Migration>>, MigrationError> {
use self::migration::migration_from;
try!(path.read_dir())
.filter_map(|entry| {
let entry = match entry {
Ok(e) => e,
Err(e) => return Some(Err(e.into())),
};
if!entry.file_name().to_string_lossy().starts_with(".") {
Some(migration_from(entry.path()))
} else {
None
}
}).collect()
}
fn run_migrations<Conn: Connection>(conn: &Conn, mut migrations: Vec<Box<Migration>>, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
migrations.sort_by(|a, b| a.version().cmp(b.version()));
for migration in migrations {
try!(run_migration(conn, migration, output));
}
Ok(())
}
fn run_migration<Conn: Connection>(conn: &Conn, migration: Box<Migration>, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
conn.transaction(|| {
try!(writeln!(output, "Running migration {}", migration.version()));
try!(migration.run(conn));
try!(::insert(&NewMigration(migration.version()))
.into(__diesel_schema_migrations)
.execute(conn));
Ok(())
}).map_err(|e| e.into())
}
fn revert_migration<Conn: Connection>(conn: &Conn, migration: Box<Migration>, output: &mut Write)
-> Result<(), RunMigrationsError>
{
try!(conn.transaction(|| {
try!(writeln!(output, "Rolling back migration {}", migration.version()));
try!(migration.revert(conn));
let target = __diesel_schema_migrations.filter(version.eq(migration.version()));
try!(::delete(target).execute(conn));
Ok(())
}));
Ok(())
}
/// Returns the directory containing migrations. Will look at for
/// $PWD/migrations. If it is not found, it will search the parents of the
/// current directory, until it reaches the root directory. Returns
/// `MigrationError::MigrationDirectoryNotFound` if no directory is found.
pub fn find_migrations_directory() -> Result<PathBuf, MigrationError> {
search_for_migrations_directory(&try!(env::current_dir()))
}
fn search_for_migrations_directory(path: &Path) -> Result<PathBuf, MigrationError> {
let migration_path = path.join("migrations");
if migration_path.is_dir() {
Ok(migration_path)
} else {
path.parent().map(search_for_migrations_directory)
.unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use super::search_for_migrations_directory;
use self::tempdir::TempDir;
use std::fs;
#[test]
fn migration_directory_not_found_if_no_migration_dir_exists() {
let dir = TempDir::new("diesel").unwrap();
assert_eq!(Err(MigrationError::MigrationDirectoryNotFound),
search_for_migrations_directory(dir.path()));
}
#[test]
fn migration_directory_defaults_to_pwd_slash_migrations() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&temp_path));
}
#[test]
fn migration_directory_checks_parents() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
let child_path = temp_path.join("child");
fs::create_dir(&child_path).unwrap();
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&child_path));
}
}
|
//! ```text
//! # Directory Structure
//! - 20151219180527_create_users
//! - up.sql
//! - down.sql
|
random_line_split
|
mod.rs
|
//! Provides functions for maintaining database schema.
//!
//! A database migration always provides procedures to update the schema, as well as to revert
//! itself. Diesel's migrations are versioned, and run in order. Diesel also takes care of tracking
//! which migrations have already been run automatically. Your migrations don't need to be
//! idempotent, as Diesel will ensure no migration is run twice unless it has been reverted.
//!
//! Migrations should be placed in a `/migrations` directory at the root of your project (the same
//! directory as `Cargo.toml`). When any of these functions are run, Diesel will search for the
//! migrations directory in the current directory and its parents, stopping when it finds the
//! directory containing `Cargo.toml`.
//!
//! Individual migrations should be a folder containing exactly two files, `up.sql` and `down.sql`.
//! `up.sql` will be used to run the migration, while `down.sql` will be used for reverting it. The
//! folder itself should have the structure `{version}_{migration_name}`. It is recommended that
//! you use the timestamp of creation for the version.
//!
//! ## Example
//!
//! ```text
//! # Directory Structure
//! - 20151219180527_create_users
//! - up.sql
//! - down.sql
//! - 20160107082941_create_posts
//! - up.sql
//! - down.sql
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/up.sql
//! CREATE TABLE users (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! hair_color VARCHAR
//! );
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/down.sql
//! DROP TABLE users;
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/up.sql
//! CREATE TABLE posts (
//! id SERIAL PRIMARY KEY,
//! user_id INTEGER NOT NULL,
//! title VARCHAR NOT NULL,
//! body TEXT
//! );
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/down.sql
//! DROP TABLE posts;
//! ```
mod migration;
mod migration_error;
#[doc(hidden)]
pub mod schema;
pub use self::migration_error::*;
use std::io::{stdout, Write};
use expression::expression_methods::*;
use persistable::Insertable;
use query_dsl::*;
use self::migration::*;
use self::migration_error::MigrationError::*;
use self::schema::NewMigration;
use self::schema::__diesel_schema_migrations::dsl::*;
use types::{FromSql, VarChar};
use {Connection, QueryResult};
use std::collections::HashSet;
use std::env;
use std::path::{PathBuf, Path};
/// Runs all migrations that have not yet been run. This function will print all progress to
/// stdout. This function will return an `Err` if some error occurs reading the migrations, or if
/// any migration fails to run. Each migration is run in its own transaction, so some migrations
/// may be committed, even if a later migration fails to run.
///
/// It should be noted that this runs all migrations that have not already been run, regardless of
/// whether or not their version is later than the latest run migration. This is generally not a
/// problem, and eases the more common case of two developers generating independent migrations on
/// a branch. Whoever created the second one will eventually need to run the first when both
/// branches are merged.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn run_pending_migrations<Conn>(conn: &Conn) -> Result<(), RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
let migrations_dir = try!(find_migrations_directory());
run_pending_migrations_in_directory(conn, &migrations_dir, &mut stdout())
}
#[doc(hidden)]
pub fn run_pending_migrations_in_directory<Conn>(conn: &Conn, migrations_dir: &Path, output: &mut Write)
-> Result<(), RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
try!(create_schema_migrations_table_if_needed(conn));
let already_run = try!(previously_run_migration_versions(conn));
let all_migrations = try!(migrations_in_directory(migrations_dir));
let pending_migrations = all_migrations.into_iter().filter(|m| {
!already_run.contains(m.version())
});
run_migrations(conn, pending_migrations.collect(), output)
}
/// Reverts the last migration that was run. Returns the version that was reverted. Returns an
/// `Err` if no migrations have ever been run.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn revert_latest_migration<Conn>(conn: &Conn) -> Result<String, RunMigrationsError> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
try!(create_schema_migrations_table_if_needed(conn));
let latest_migration_version = try!(latest_run_migration_version(conn));
revert_migration_with_version(conn, &latest_migration_version, &mut stdout())
.map(|_| latest_migration_version)
}
#[doc(hidden)]
pub fn revert_migration_with_version<Conn: Connection>(conn: &Conn, ver: &str, output: &mut Write)
-> Result<(), RunMigrationsError>
{
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| revert_migration(conn, m, output))
}
#[doc(hidden)]
pub fn
|
<Conn: Connection>(conn: &Conn, ver: &str, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| run_migration(conn, m, output))
}
fn migration_with_version(ver: &str) -> Result<Box<Migration>, MigrationError> {
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let migration = all_migrations.into_iter().find(|m| {
m.version() == ver
});
match migration {
Some(m) => Ok(m),
None => Err(UnknownMigrationVersion(ver.into())),
}
}
#[doc(hidden)]
pub fn create_schema_migrations_table_if_needed<Conn: Connection>(conn: &Conn) -> QueryResult<usize> {
conn.silence_notices(|| {
conn.execute("CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (
version VARCHAR PRIMARY KEY NOT NULL,
run_on TIMESTAMP NOT NULL DEFAULT 'now'
)")
})
}
fn previously_run_migration_versions<Conn>(conn: &Conn) -> QueryResult<HashSet<String>> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
__diesel_schema_migrations.select(version)
.load(conn)
.map(|r| r.collect())
}
fn latest_run_migration_version<Conn>(conn: &Conn) -> QueryResult<String> where
Conn: Connection,
String: FromSql<VarChar, Conn::Backend>,
{
use ::expression::dsl::max;
__diesel_schema_migrations.select(max(version))
.first(conn)
}
fn migrations_in_directory(path: &Path) -> Result<Vec<Box<Migration>>, MigrationError> {
use self::migration::migration_from;
try!(path.read_dir())
.filter_map(|entry| {
let entry = match entry {
Ok(e) => e,
Err(e) => return Some(Err(e.into())),
};
if!entry.file_name().to_string_lossy().starts_with(".") {
Some(migration_from(entry.path()))
} else {
None
}
}).collect()
}
fn run_migrations<Conn: Connection>(conn: &Conn, mut migrations: Vec<Box<Migration>>, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
migrations.sort_by(|a, b| a.version().cmp(b.version()));
for migration in migrations {
try!(run_migration(conn, migration, output));
}
Ok(())
}
fn run_migration<Conn: Connection>(conn: &Conn, migration: Box<Migration>, output: &mut Write)
-> Result<(), RunMigrationsError> where
for<'a> &'a NewMigration<'a>: Insertable<__diesel_schema_migrations, Conn::Backend>,
{
conn.transaction(|| {
try!(writeln!(output, "Running migration {}", migration.version()));
try!(migration.run(conn));
try!(::insert(&NewMigration(migration.version()))
.into(__diesel_schema_migrations)
.execute(conn));
Ok(())
}).map_err(|e| e.into())
}
fn revert_migration<Conn: Connection>(conn: &Conn, migration: Box<Migration>, output: &mut Write)
-> Result<(), RunMigrationsError>
{
try!(conn.transaction(|| {
try!(writeln!(output, "Rolling back migration {}", migration.version()));
try!(migration.revert(conn));
let target = __diesel_schema_migrations.filter(version.eq(migration.version()));
try!(::delete(target).execute(conn));
Ok(())
}));
Ok(())
}
/// Returns the directory containing migrations. Will look at for
/// $PWD/migrations. If it is not found, it will search the parents of the
/// current directory, until it reaches the root directory. Returns
/// `MigrationError::MigrationDirectoryNotFound` if no directory is found.
pub fn find_migrations_directory() -> Result<PathBuf, MigrationError> {
search_for_migrations_directory(&try!(env::current_dir()))
}
fn search_for_migrations_directory(path: &Path) -> Result<PathBuf, MigrationError> {
let migration_path = path.join("migrations");
if migration_path.is_dir() {
Ok(migration_path)
} else {
path.parent().map(search_for_migrations_directory)
.unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use super::search_for_migrations_directory;
use self::tempdir::TempDir;
use std::fs;
#[test]
fn migration_directory_not_found_if_no_migration_dir_exists() {
let dir = TempDir::new("diesel").unwrap();
assert_eq!(Err(MigrationError::MigrationDirectoryNotFound),
search_for_migrations_directory(dir.path()));
}
#[test]
fn migration_directory_defaults_to_pwd_slash_migrations() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&temp_path));
}
#[test]
fn migration_directory_checks_parents() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
let child_path = temp_path.join("child");
fs::create_dir(&child_path).unwrap();
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&child_path));
}
}
|
run_migration_with_version
|
identifier_name
|
document_condition.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! [@document rules](https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document)
//! initially in CSS Conditional Rules Module Level 3, @document has been postponed to the level 4.
//! We implement the prefixed `@-moz-document`.
use cssparser::{Parser, Token, serialize_string};
#[cfg(feature = "gecko")]
use gecko_bindings::bindings::Gecko_DocumentRule_UseForPresentation;
#[cfg(feature = "gecko")]
use gecko_bindings::structs::URLMatchingFunction as GeckoUrlMatchingFunction;
use media_queries::Device;
#[cfg(feature = "gecko")]
use nsstring::nsCString;
use parser::{Parse, ParserContext};
use std::fmt;
use style_traits::ToCss;
use values::specified::url::SpecifiedUrl;
/// A URL matching function for a `@document` rule's condition.
#[derive(Clone, Debug)]
pub enum UrlMatchingFunction {
/// Exact URL matching function. It evaluates to true whenever the
/// URL of the document being styled is exactly the URL given.
Url(SpecifiedUrl),
/// URL prefix matching function. It evaluates to true whenever the
/// URL of the document being styled has the argument to the
/// function as an initial substring (which is true when the two
/// strings are equal). When the argument is the empty string,
/// it evaluates to true for all documents.
UrlPrefix(String),
/// Domain matching function. It evaluates to true whenever the URL
/// of the document being styled has a host subcomponent and that
/// host subcomponent is exactly the argument to the ‘domain()’
/// function or a final substring of the host component is a
/// period (U+002E) immediately followed by the argument to the
/// ‘domain()’ function.
Domain(String),
/// Regular expression matching function. It evaluates to true
/// whenever the regular expression matches the entirety of the URL
/// of the document being styled.
RegExp(String),
}
macro_rules! parse_quoted_or_unquoted_string {
($input:ident, $url_matching_function:expr) => {
$input.parse_nested_block(|input| {
let start = input.position();
input.parse_entirely(|input| {
match input.next() {
Ok(Token::QuotedString(value)) =>
Ok($url_matching_function(value.into_owned())),
_ => Err(()),
}
}).or_else(|_| {
while let Ok(_) = input.next() {}
Ok($url_matching_function(input.slice_from(start).to_string()))
})
})
}
}
impl UrlMatchingFunction {
/// Parse a URL matching function for a`@document` rule's condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<UrlMatchingFunction, ()> {
if input.try(|input| input.expect_function_matching("url-prefix")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::UrlPrefix)
} else if input.try(|input| input.expect_function_matching("domain")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::Domain)
} else if input.try(|input| input.expect_function_matching("regexp")).is_ok() {
input.parse_nested_block(|input| {
Ok(UrlMatchingFunction::RegExp(input.expect_string()?.into_owned()))
})
} else if let Ok(url) = input.try(|input| SpecifiedUrl::parse(context, input)) {
|
Err(())
}
}
#[cfg(feature = "gecko")]
/// Evaluate a URL matching function.
pub fn evaluate(&self, device: &Device) -> bool {
let func = match *self {
UrlMatchingFunction::Url(_) => GeckoUrlMatchingFunction::eURL,
UrlMatchingFunction::UrlPrefix(_) => GeckoUrlMatchingFunction::eURLPrefix,
UrlMatchingFunction::Domain(_) => GeckoUrlMatchingFunction::eDomain,
UrlMatchingFunction::RegExp(_) => GeckoUrlMatchingFunction::eRegExp,
};
let pattern = nsCString::from(match *self {
UrlMatchingFunction::Url(ref url) => url.as_str(),
UrlMatchingFunction::UrlPrefix(ref pat) |
UrlMatchingFunction::Domain(ref pat) |
UrlMatchingFunction::RegExp(ref pat) => pat,
});
unsafe {
Gecko_DocumentRule_UseForPresentation(&*device.pres_context, &*pattern, func)
}
}
#[cfg(not(feature = "gecko"))]
/// Evaluate a URL matching function.
pub fn evaluate(&self, _: &Device) -> bool {
false
}
}
impl ToCss for UrlMatchingFunction {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write {
match *self {
UrlMatchingFunction::Url(ref url) => {
url.to_css(dest)
},
UrlMatchingFunction::UrlPrefix(ref url_prefix) => {
dest.write_str("url-prefix(")?;
serialize_string(url_prefix, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::Domain(ref domain) => {
dest.write_str("domain(")?;
serialize_string(domain, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::RegExp(ref regex) => {
dest.write_str("regexp(")?;
serialize_string(regex, dest)?;
dest.write_str(")")
},
}
}
}
/// A `@document` rule's condition.
///
/// https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document
///
/// The `@document` rule's condition is written as a comma-separated list of
/// URL matching functions, and the condition evaluates to true whenever any
/// one of those functions evaluates to true.
#[derive(Clone, Debug)]
pub struct DocumentCondition(Vec<UrlMatchingFunction>);
impl DocumentCondition {
/// Parse a document condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<Self, ()> {
input.parse_comma_separated(|input| UrlMatchingFunction::parse(context, input))
.map(DocumentCondition)
}
/// Evaluate a document condition.
pub fn evaluate(&self, device: &Device) -> bool {
self.0.iter().any(|ref url_matching_function|
url_matching_function.evaluate(device)
)
}
}
impl ToCss for DocumentCondition {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write {
let mut iter = self.0.iter();
let first = iter.next()
.expect("Empty DocumentCondition, should contain at least one URL matching function");
first.to_css(dest)?;
for url_matching_function in iter {
dest.write_str(", ")?;
url_matching_function.to_css(dest)?;
}
Ok(())
}
}
|
Ok(UrlMatchingFunction::Url(url))
} else {
|
conditional_block
|
document_condition.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! [@document rules](https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document)
//! initially in CSS Conditional Rules Module Level 3, @document has been postponed to the level 4.
//! We implement the prefixed `@-moz-document`.
use cssparser::{Parser, Token, serialize_string};
#[cfg(feature = "gecko")]
use gecko_bindings::bindings::Gecko_DocumentRule_UseForPresentation;
#[cfg(feature = "gecko")]
use gecko_bindings::structs::URLMatchingFunction as GeckoUrlMatchingFunction;
use media_queries::Device;
#[cfg(feature = "gecko")]
use nsstring::nsCString;
use parser::{Parse, ParserContext};
use std::fmt;
use style_traits::ToCss;
use values::specified::url::SpecifiedUrl;
/// A URL matching function for a `@document` rule's condition.
#[derive(Clone, Debug)]
pub enum UrlMatchingFunction {
/// Exact URL matching function. It evaluates to true whenever the
/// URL of the document being styled is exactly the URL given.
Url(SpecifiedUrl),
/// URL prefix matching function. It evaluates to true whenever the
/// URL of the document being styled has the argument to the
/// function as an initial substring (which is true when the two
/// strings are equal). When the argument is the empty string,
/// it evaluates to true for all documents.
UrlPrefix(String),
/// Domain matching function. It evaluates to true whenever the URL
/// of the document being styled has a host subcomponent and that
/// host subcomponent is exactly the argument to the ‘domain()’
/// function or a final substring of the host component is a
/// period (U+002E) immediately followed by the argument to the
/// ‘domain()’ function.
Domain(String),
/// Regular expression matching function. It evaluates to true
/// whenever the regular expression matches the entirety of the URL
/// of the document being styled.
RegExp(String),
}
macro_rules! parse_quoted_or_unquoted_string {
($input:ident, $url_matching_function:expr) => {
$input.parse_nested_block(|input| {
let start = input.position();
input.parse_entirely(|input| {
match input.next() {
Ok(Token::QuotedString(value)) =>
Ok($url_matching_function(value.into_owned())),
_ => Err(()),
}
}).or_else(|_| {
while let Ok(_) = input.next() {}
Ok($url_matching_function(input.slice_from(start).to_string()))
})
})
}
}
impl UrlMatchingFunction {
/// Parse a URL matching function for a`@document` rule's condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<UrlMatchingFunction, ()> {
if input.try(|input| input.expect_function_matching("url-prefix")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::UrlPrefix)
} else if input.try(|input| input.expect_function_matching("domain")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::Domain)
} else if input.try(|input| input.expect_function_matching("regexp")).is_ok() {
input.parse_nested_block(|input| {
Ok(UrlMatchingFunction::RegExp(input.expect_string()?.into_owned()))
})
} else if let Ok(url) = input.try(|input| SpecifiedUrl::parse(context, input)) {
Ok(UrlMatchingFunction::Url(url))
} else {
Err(())
}
}
#[cfg(feature = "gecko")]
/// Evaluate a URL matching function.
pub fn evaluate(&self, device: &Device) -> bool {
let func = match *self {
UrlMatchingFunction::Url(_) => GeckoUrlMatchingFunction::eURL,
UrlMatchingFunction::UrlPrefix(_) => GeckoUrlMatchingFunction::eURLPrefix,
UrlMatchingFunction::Domain(_) => GeckoUrlMatchingFunction::eDomain,
UrlMatchingFunction::RegExp(_) => GeckoUrlMatchingFunction::eRegExp,
};
let pattern = nsCString::from(match *self {
UrlMatchingFunction::Url(ref url) => url.as_str(),
UrlMatchingFunction::UrlPrefix(ref pat) |
UrlMatchingFunction::Domain(ref pat) |
UrlMatchingFunction::RegExp(ref pat) => pat,
});
unsafe {
Gecko_DocumentRule_UseForPresentation(&*device.pres_context, &*pattern, func)
}
}
#[cfg(not(feature = "gecko"))]
/// Evaluate a URL matching function.
pub fn evaluate(&self, _: &Device) -> bool {
false
}
}
impl ToCss for UrlMatchingFunction {
fn to_css<W
|
f, dest: &mut W) -> fmt::Result
where W: fmt::Write {
match *self {
UrlMatchingFunction::Url(ref url) => {
url.to_css(dest)
},
UrlMatchingFunction::UrlPrefix(ref url_prefix) => {
dest.write_str("url-prefix(")?;
serialize_string(url_prefix, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::Domain(ref domain) => {
dest.write_str("domain(")?;
serialize_string(domain, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::RegExp(ref regex) => {
dest.write_str("regexp(")?;
serialize_string(regex, dest)?;
dest.write_str(")")
},
}
}
}
/// A `@document` rule's condition.
///
/// https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document
///
/// The `@document` rule's condition is written as a comma-separated list of
/// URL matching functions, and the condition evaluates to true whenever any
/// one of those functions evaluates to true.
#[derive(Clone, Debug)]
pub struct DocumentCondition(Vec<UrlMatchingFunction>);
impl DocumentCondition {
/// Parse a document condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<Self, ()> {
input.parse_comma_separated(|input| UrlMatchingFunction::parse(context, input))
.map(DocumentCondition)
}
/// Evaluate a document condition.
pub fn evaluate(&self, device: &Device) -> bool {
self.0.iter().any(|ref url_matching_function|
url_matching_function.evaluate(device)
)
}
}
impl ToCss for DocumentCondition {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write {
let mut iter = self.0.iter();
let first = iter.next()
.expect("Empty DocumentCondition, should contain at least one URL matching function");
first.to_css(dest)?;
for url_matching_function in iter {
dest.write_str(", ")?;
url_matching_function.to_css(dest)?;
}
Ok(())
}
}
|
>(&sel
|
identifier_name
|
document_condition.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! [@document rules](https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document)
//! initially in CSS Conditional Rules Module Level 3, @document has been postponed to the level 4.
//! We implement the prefixed `@-moz-document`.
use cssparser::{Parser, Token, serialize_string};
#[cfg(feature = "gecko")]
use gecko_bindings::bindings::Gecko_DocumentRule_UseForPresentation;
#[cfg(feature = "gecko")]
use gecko_bindings::structs::URLMatchingFunction as GeckoUrlMatchingFunction;
use media_queries::Device;
#[cfg(feature = "gecko")]
use nsstring::nsCString;
use parser::{Parse, ParserContext};
use std::fmt;
use style_traits::ToCss;
use values::specified::url::SpecifiedUrl;
/// A URL matching function for a `@document` rule's condition.
#[derive(Clone, Debug)]
pub enum UrlMatchingFunction {
/// Exact URL matching function. It evaluates to true whenever the
/// URL of the document being styled is exactly the URL given.
Url(SpecifiedUrl),
/// URL prefix matching function. It evaluates to true whenever the
/// URL of the document being styled has the argument to the
/// function as an initial substring (which is true when the two
/// strings are equal). When the argument is the empty string,
/// it evaluates to true for all documents.
UrlPrefix(String),
/// Domain matching function. It evaluates to true whenever the URL
/// of the document being styled has a host subcomponent and that
/// host subcomponent is exactly the argument to the ‘domain()’
/// function or a final substring of the host component is a
/// period (U+002E) immediately followed by the argument to the
/// ‘domain()’ function.
Domain(String),
/// Regular expression matching function. It evaluates to true
/// whenever the regular expression matches the entirety of the URL
/// of the document being styled.
RegExp(String),
}
macro_rules! parse_quoted_or_unquoted_string {
($input:ident, $url_matching_function:expr) => {
$input.parse_nested_block(|input| {
let start = input.position();
input.parse_entirely(|input| {
match input.next() {
Ok(Token::QuotedString(value)) =>
Ok($url_matching_function(value.into_owned())),
_ => Err(()),
}
}).or_else(|_| {
while let Ok(_) = input.next() {}
Ok($url_matching_function(input.slice_from(start).to_string()))
})
})
}
}
impl UrlMatchingFunction {
/// Parse a URL matching function for a`@document` rule's condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<UrlMatchingFunction, ()> {
if input.try(|input| input.expect_function_matching("url-prefix")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::UrlPrefix)
} else if input.try(|input| input.expect_function_matching("domain")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::Domain)
} else if input.try(|input| input.expect_function_matching("regexp")).is_ok() {
input.parse_nested_block(|input| {
Ok(UrlMatchingFunction::RegExp(input.expect_string()?.into_owned()))
})
} else if let Ok(url) = input.try(|input| SpecifiedUrl::parse(context, input)) {
Ok(UrlMatchingFunction::Url(url))
} else {
Err(())
}
}
#[cfg(feature = "gecko")]
/// Evaluate a URL matching function.
pub fn evaluate(&self, device: &Device) -> bool {
|
cfg(not(feature = "gecko"))]
/// Evaluate a URL matching function.
pub fn evaluate(&self, _: &Device) -> bool {
false
}
}
impl ToCss for UrlMatchingFunction {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write {
match *self {
UrlMatchingFunction::Url(ref url) => {
url.to_css(dest)
},
UrlMatchingFunction::UrlPrefix(ref url_prefix) => {
dest.write_str("url-prefix(")?;
serialize_string(url_prefix, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::Domain(ref domain) => {
dest.write_str("domain(")?;
serialize_string(domain, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::RegExp(ref regex) => {
dest.write_str("regexp(")?;
serialize_string(regex, dest)?;
dest.write_str(")")
},
}
}
}
/// A `@document` rule's condition.
///
/// https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document
///
/// The `@document` rule's condition is written as a comma-separated list of
/// URL matching functions, and the condition evaluates to true whenever any
/// one of those functions evaluates to true.
#[derive(Clone, Debug)]
pub struct DocumentCondition(Vec<UrlMatchingFunction>);
impl DocumentCondition {
/// Parse a document condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<Self, ()> {
input.parse_comma_separated(|input| UrlMatchingFunction::parse(context, input))
.map(DocumentCondition)
}
/// Evaluate a document condition.
pub fn evaluate(&self, device: &Device) -> bool {
self.0.iter().any(|ref url_matching_function|
url_matching_function.evaluate(device)
)
}
}
impl ToCss for DocumentCondition {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write {
let mut iter = self.0.iter();
let first = iter.next()
.expect("Empty DocumentCondition, should contain at least one URL matching function");
first.to_css(dest)?;
for url_matching_function in iter {
dest.write_str(", ")?;
url_matching_function.to_css(dest)?;
}
Ok(())
}
}
|
let func = match *self {
UrlMatchingFunction::Url(_) => GeckoUrlMatchingFunction::eURL,
UrlMatchingFunction::UrlPrefix(_) => GeckoUrlMatchingFunction::eURLPrefix,
UrlMatchingFunction::Domain(_) => GeckoUrlMatchingFunction::eDomain,
UrlMatchingFunction::RegExp(_) => GeckoUrlMatchingFunction::eRegExp,
};
let pattern = nsCString::from(match *self {
UrlMatchingFunction::Url(ref url) => url.as_str(),
UrlMatchingFunction::UrlPrefix(ref pat) |
UrlMatchingFunction::Domain(ref pat) |
UrlMatchingFunction::RegExp(ref pat) => pat,
});
unsafe {
Gecko_DocumentRule_UseForPresentation(&*device.pres_context, &*pattern, func)
}
}
#[
|
identifier_body
|
document_condition.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! [@document rules](https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document)
//! initially in CSS Conditional Rules Module Level 3, @document has been postponed to the level 4.
//! We implement the prefixed `@-moz-document`.
use cssparser::{Parser, Token, serialize_string};
#[cfg(feature = "gecko")]
use gecko_bindings::bindings::Gecko_DocumentRule_UseForPresentation;
#[cfg(feature = "gecko")]
use gecko_bindings::structs::URLMatchingFunction as GeckoUrlMatchingFunction;
use media_queries::Device;
#[cfg(feature = "gecko")]
use nsstring::nsCString;
use parser::{Parse, ParserContext};
use std::fmt;
use style_traits::ToCss;
use values::specified::url::SpecifiedUrl;
/// A URL matching function for a `@document` rule's condition.
#[derive(Clone, Debug)]
pub enum UrlMatchingFunction {
/// Exact URL matching function. It evaluates to true whenever the
/// URL of the document being styled is exactly the URL given.
Url(SpecifiedUrl),
/// URL prefix matching function. It evaluates to true whenever the
/// URL of the document being styled has the argument to the
/// function as an initial substring (which is true when the two
/// strings are equal). When the argument is the empty string,
/// it evaluates to true for all documents.
UrlPrefix(String),
/// Domain matching function. It evaluates to true whenever the URL
/// of the document being styled has a host subcomponent and that
/// host subcomponent is exactly the argument to the ‘domain()’
/// function or a final substring of the host component is a
/// period (U+002E) immediately followed by the argument to the
/// ‘domain()’ function.
Domain(String),
/// Regular expression matching function. It evaluates to true
/// whenever the regular expression matches the entirety of the URL
/// of the document being styled.
RegExp(String),
}
macro_rules! parse_quoted_or_unquoted_string {
($input:ident, $url_matching_function:expr) => {
$input.parse_nested_block(|input| {
let start = input.position();
input.parse_entirely(|input| {
match input.next() {
Ok(Token::QuotedString(value)) =>
Ok($url_matching_function(value.into_owned())),
_ => Err(()),
}
}).or_else(|_| {
|
})
}
}
impl UrlMatchingFunction {
/// Parse a URL matching function for a`@document` rule's condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<UrlMatchingFunction, ()> {
if input.try(|input| input.expect_function_matching("url-prefix")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::UrlPrefix)
} else if input.try(|input| input.expect_function_matching("domain")).is_ok() {
parse_quoted_or_unquoted_string!(input, UrlMatchingFunction::Domain)
} else if input.try(|input| input.expect_function_matching("regexp")).is_ok() {
input.parse_nested_block(|input| {
Ok(UrlMatchingFunction::RegExp(input.expect_string()?.into_owned()))
})
} else if let Ok(url) = input.try(|input| SpecifiedUrl::parse(context, input)) {
Ok(UrlMatchingFunction::Url(url))
} else {
Err(())
}
}
#[cfg(feature = "gecko")]
/// Evaluate a URL matching function.
pub fn evaluate(&self, device: &Device) -> bool {
let func = match *self {
UrlMatchingFunction::Url(_) => GeckoUrlMatchingFunction::eURL,
UrlMatchingFunction::UrlPrefix(_) => GeckoUrlMatchingFunction::eURLPrefix,
UrlMatchingFunction::Domain(_) => GeckoUrlMatchingFunction::eDomain,
UrlMatchingFunction::RegExp(_) => GeckoUrlMatchingFunction::eRegExp,
};
let pattern = nsCString::from(match *self {
UrlMatchingFunction::Url(ref url) => url.as_str(),
UrlMatchingFunction::UrlPrefix(ref pat) |
UrlMatchingFunction::Domain(ref pat) |
UrlMatchingFunction::RegExp(ref pat) => pat,
});
unsafe {
Gecko_DocumentRule_UseForPresentation(&*device.pres_context, &*pattern, func)
}
}
#[cfg(not(feature = "gecko"))]
/// Evaluate a URL matching function.
pub fn evaluate(&self, _: &Device) -> bool {
false
}
}
impl ToCss for UrlMatchingFunction {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write {
match *self {
UrlMatchingFunction::Url(ref url) => {
url.to_css(dest)
},
UrlMatchingFunction::UrlPrefix(ref url_prefix) => {
dest.write_str("url-prefix(")?;
serialize_string(url_prefix, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::Domain(ref domain) => {
dest.write_str("domain(")?;
serialize_string(domain, dest)?;
dest.write_str(")")
},
UrlMatchingFunction::RegExp(ref regex) => {
dest.write_str("regexp(")?;
serialize_string(regex, dest)?;
dest.write_str(")")
},
}
}
}
/// A `@document` rule's condition.
///
/// https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document
///
/// The `@document` rule's condition is written as a comma-separated list of
/// URL matching functions, and the condition evaluates to true whenever any
/// one of those functions evaluates to true.
#[derive(Clone, Debug)]
pub struct DocumentCondition(Vec<UrlMatchingFunction>);
impl DocumentCondition {
/// Parse a document condition.
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<Self, ()> {
input.parse_comma_separated(|input| UrlMatchingFunction::parse(context, input))
.map(DocumentCondition)
}
/// Evaluate a document condition.
pub fn evaluate(&self, device: &Device) -> bool {
self.0.iter().any(|ref url_matching_function|
url_matching_function.evaluate(device)
)
}
}
impl ToCss for DocumentCondition {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write {
let mut iter = self.0.iter();
let first = iter.next()
.expect("Empty DocumentCondition, should contain at least one URL matching function");
first.to_css(dest)?;
for url_matching_function in iter {
dest.write_str(", ")?;
url_matching_function.to_css(dest)?;
}
Ok(())
}
}
|
while let Ok(_) = input.next() {}
Ok($url_matching_function(input.slice_from(start).to_string()))
})
|
random_line_split
|
nvic.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Ben Harris <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Interface to Nested Vector Interrupt Controller.
//!
//! NVIC memory location is 0xE000_E000.
// Link: http://infocenter.arm.com/help/topic/com.arm.doc.dui0552a/CIHIGCIF.html
#[inline(always)]
fn get_reg() -> &'static reg::NVIC {
unsafe { &*(0xE000_E000 as *mut reg::NVIC) }
}
/// Enable an interrupt
pub fn
|
(irqn: usize) {
get_reg().iser[irqn / 32].clear_iser(irqn % 32);
}
/// Disable an interrupt.
///
/// Writes the interrupt's bit in the Interrupt Clear-Enable Register
/// (ICER): `irqn / 32` selects the 32-bit register bank, `irqn % 32`
/// the bit within it.
pub fn disable_irq(irqn: usize) {
  get_reg().icer[irqn / 32].clear_icer(irqn % 32);
}
/// Return whether the given interrupt is enabled.
///
/// Reads the interrupt's bit from the Interrupt Set-Enable Register
/// (ISER); each 32-bit bank covers 32 interrupt lines.
pub fn is_enabled(irqn: usize) -> bool {
  get_reg().iser[irqn / 32].iser(irqn % 32)
}
/// Clear the pending flag for the given interrupt.
///
/// Writes the interrupt's bit in the Interrupt Clear-Pending Register
/// (ICPR): `irqn / 32` selects the bank, `irqn % 32` the bit.
pub fn clear_pending(irqn: usize) {
  get_reg().icpr[irqn / 32].clear_icpr(irqn % 32);
}
/// Return whether the given interrupt is pending.
///
/// Reads the interrupt's bit from the Interrupt Set-Pending Register
/// (ISPR).
pub fn is_pending(irqn: usize) -> bool {
  get_reg().ispr[irqn / 32].ispr(irqn % 32)
}
/// Return whether the given interrupt is active.
///
/// Reads the interrupt's bit from the read-only Interrupt Active Bit
/// Register (IABR).
pub fn is_active(irqn: usize) -> bool {
  get_reg().iabr[irqn / 32].iabr(irqn % 32)
}
/// Set the priority for the given interrupt.
///
/// The Interrupt Priority Registers (IPR) pack four 8-bit priority
/// fields per 32-bit register, hence `irqn / 4` selects the register
/// and `irqn % 4` the field within it.
pub fn set_priority(irqn: usize, prio: u8) {
  get_reg().ipr[irqn / 4].set_ipr(irqn % 4, prio as u32);
}
/// Return the priority for the given interrupt.
///
/// Reads the interrupt's 8-bit priority field from the IPR register
/// bank (four fields per 32-bit register).
pub fn get_priority(irqn: usize) -> u8 {
  get_reg().ipr[irqn / 4].ipr(irqn % 4) as u8
}
mod reg {
  use volatile_cell::VolatileCell;
  use core::ops::Drop;

  // NVIC register map; offsets are relative to the 0xE000_E000 base
  // used by get_reg() above. Access semantics (set_to_clear vs ro) are
  // expanded by the ioregs! macro; see the ARM documentation linked at
  // the top of this file for the hardware behavior of each register.
  ioregs!(NVIC = {
    0x100 => reg32 iser[8] { //! Interrupt set enable register
      0..31 => iser[32]: set_to_clear,
    }
    0x180 => reg32 icer[8] { //! Interrupt clear enable register
      0..31 => icer[32]: set_to_clear,
    }
    0x200 => reg32 ispr[8] { //! Interrupt set pending register
      0..31 => ispr[32]: set_to_clear,
    }
    0x280 => reg32 icpr[8] { //! Interrupt clear pending register
      0..31 => icpr[32]: set_to_clear,
    }
    0x300 => reg32 iabr[8] { //! Interrupt active bit register
      0..31 => iabr[32]: ro,
    }
    0x400 => reg32 ipr[8] { //! Interrupt priority register
      0..31 => ipr[4],
    }
    0xF00 => reg32 stir[8] { //! Software triggered interrupt register
      0..8 => stir,
    }
  });
}
|
enable_irq
|
identifier_name
|
nvic.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Ben Harris <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Interface to Nested Vector Interrupt Controller.
//!
//! NVIC memory location is 0xE000_E000.
// Link: http://infocenter.arm.com/help/topic/com.arm.doc.dui0552a/CIHIGCIF.html
/// Return a `'static` reference to the memory-mapped NVIC register block.
///
/// 0xE000_E000 is the fixed NVIC base address documented in the ARM
/// reference linked above, so the cast-and-dereference is valid for the
/// whole program lifetime.
#[inline(always)]
fn get_reg() -> &'static reg::NVIC {
  unsafe { &*(0xE000_E000 as *mut reg::NVIC) }
}
/// Enable an interrupt
pub fn enable_irq(irqn: usize) {
  // Each ISER bank covers 32 interrupt lines.
  let (bank, bit) = (irqn / 32, irqn % 32);
  get_reg().iser[bank].clear_iser(bit);
}
/// Disable an interrupt
pub fn disable_irq(irqn: usize) {
  // Each ICER bank covers 32 interrupt lines.
  let (bank, bit) = (irqn / 32, irqn % 32);
  get_reg().icer[bank].clear_icer(bit);
}
/// Return whether the given interrupt is enabled
pub fn is_enabled(irqn: usize) -> bool {
  // Read the interrupt's enable bit from its ISER bank.
  let (bank, bit) = (irqn / 32, irqn % 32);
  get_reg().iser[bank].iser(bit)
}
/// Clear the pending flag for the given interrupt
pub fn clear_pending(irqn: usize)
|
/// Return whether the given interrupt is pending
pub fn is_pending(irqn: usize) -> bool {
  // Read the interrupt's pending bit from its ISPR bank.
  let (bank, bit) = (irqn / 32, irqn % 32);
  get_reg().ispr[bank].ispr(bit)
}
/// Return whether the given interrupt is active
pub fn is_active(irqn: usize) -> bool {
  // Read the interrupt's active bit from its IABR bank.
  let (bank, bit) = (irqn / 32, irqn % 32);
  get_reg().iabr[bank].iabr(bit)
}
/// Set the priority for the given interrupt
pub fn set_priority(irqn: usize, prio: u8) {
  // IPR packs four 8-bit priority fields into each 32-bit register.
  let (bank, slot) = (irqn / 4, irqn % 4);
  get_reg().ipr[bank].set_ipr(slot, prio as u32);
}
/// Return the priority for the given interrupt
pub fn get_priority(irqn: usize) -> u8 {
  // IPR packs four 8-bit priority fields into each 32-bit register.
  let (bank, slot) = (irqn / 4, irqn % 4);
  get_reg().ipr[bank].ipr(slot) as u8
}
mod reg {
  use volatile_cell::VolatileCell;
  use core::ops::Drop;

  // NVIC register map; offsets are relative to the 0xE000_E000 base
  // used by get_reg() above. Access semantics (set_to_clear vs ro) are
  // expanded by the ioregs! macro; see the ARM documentation linked at
  // the top of this file for the hardware behavior of each register.
  ioregs!(NVIC = {
    0x100 => reg32 iser[8] { //! Interrupt set enable register
      0..31 => iser[32]: set_to_clear,
    }
    0x180 => reg32 icer[8] { //! Interrupt clear enable register
      0..31 => icer[32]: set_to_clear,
    }
    0x200 => reg32 ispr[8] { //! Interrupt set pending register
      0..31 => ispr[32]: set_to_clear,
    }
    0x280 => reg32 icpr[8] { //! Interrupt clear pending register
      0..31 => icpr[32]: set_to_clear,
    }
    0x300 => reg32 iabr[8] { //! Interrupt active bit register
      0..31 => iabr[32]: ro,
    }
    0x400 => reg32 ipr[8] { //! Interrupt priority register
      0..31 => ipr[4],
    }
    0xF00 => reg32 stir[8] { //! Software triggered interrupt register
      0..8 => stir,
    }
  });
}
|
{
get_reg().icpr[irqn / 32].clear_icpr(irqn % 32);
}
|
identifier_body
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.