file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
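Each row splits one source file into three string columns such that `prefix` + `middle` + `suffix`, concatenated in that order, reproduces the original file; `fim_type` records how the split point was chosen (the four classes appearing in the rows below are `random_line_split`, `identifier_name`, `identifier_body`, and `conditional_block`). A minimal sketch of a row and its reconstruction, with struct and field names assumed for illustration only:

```rust
/// One dataset row. The struct and field names here are illustrative
/// assumptions mirroring the column names; they are not part of the dataset.
struct FimRow {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    /// One of the four split strategies, e.g. "random_line_split".
    fim_type: String,
}

/// Recover the original source file: the three pieces partition the file,
/// so concatenating prefix, middle, suffix in that order rebuilds it.
fn reconstruct(row: &FimRow) -> String {
    format!("{}{}{}", row.prefix, row.middle, row.suffix)
}
```

In a fill-in-the-middle setup such rows are typically consumed by showing a model `prefix` and `suffix` and asking it to generate `middle`.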
foreach-put-structured.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn pairs(it: &fn((int, int))) {
let mut i: int = 0;
let mut j: int = 0;
while i < 10 { it((i, j)); i += 1; j += i; }
}
pub fn main() {
let mut i: int = 10;
let mut j: int = 0;
do pairs() |p| {
let (_0, _1) = p;
info!(_0);
info!(_1);
assert_eq!(_0 + 10, i);
i += 1;
j = _1;
};
|
}
|
assert_eq!(j, 45);
|
random_line_split
|
foreach-put-structured.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn pairs(it: &fn((int, int))) {
let mut i: int = 0;
let mut j: int = 0;
while i < 10 { it((i, j)); i += 1; j += i; }
}
pub fn
|
() {
let mut i: int = 10;
let mut j: int = 0;
do pairs() |p| {
let (_0, _1) = p;
info!(_0);
info!(_1);
assert_eq!(_0 + 10, i);
i += 1;
j = _1;
};
assert_eq!(j, 45);
}
|
main
|
identifier_name
|
foreach-put-structured.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn pairs(it: &fn((int, int)))
|
pub fn main() {
let mut i: int = 10;
let mut j: int = 0;
do pairs() |p| {
let (_0, _1) = p;
info!(_0);
info!(_1);
assert_eq!(_0 + 10, i);
i += 1;
j = _1;
};
assert_eq!(j, 45);
}
|
{
let mut i: int = 0;
let mut j: int = 0;
while i < 10 { it((i, j)); i += 1; j += i; }
}
|
identifier_body
|
operator.rs
|
use super::graph::{Graph, IndexType};
use super::EdgeType;
use crate::visit::IntoNodeReferences;
/// \[Generic\] complement of the graph
///
/// Computes the graph complement of the input Graph and stores it
/// in the provided empty output Graph.
///
/// The function does not create self-loops.
///
/// Computes in **O(|V|^2*log(|V|))** time (average).
///
/// Returns the complement.
///
/// # Example
/// ```rust
/// use petgraph::Graph;
/// use petgraph::operator::complement;
/// use petgraph::prelude::*;
///
/// let mut graph: Graph<(),(),Directed> = Graph::new();
/// let a = graph.add_node(()); // node with no weight
/// let b = graph.add_node(());
/// let c = graph.add_node(());
/// let d = graph.add_node(());
///
/// graph.extend_with_edges(&[
/// (a, b),
/// (b, c),
/// (c, d),
/// ]);
/// // a ----> b ----> c ----> d
///
/// graph.extend_with_edges(&[(a, b), (b, c), (c, d)]);
/// let mut output: Graph<(), (), Directed> = Graph::new();
///
/// complement(&graph, &mut output, ());
///
/// let mut expected_res: Graph<(), (), Directed> = Graph::new();
/// let a = expected_res.add_node(());
/// let b = expected_res.add_node(());
/// let c = expected_res.add_node(());
/// let d = expected_res.add_node(());
/// expected_res.extend_with_edges(&[
/// (a, c),
/// (a, d),
/// (b, a),
/// (b, d),
/// (c, a),
/// (c, b),
/// (d, a),
/// (d, b),
/// (d, c),
/// ]);
///
/// for x in graph.node_indices() {
/// for y in graph.node_indices() {
/// assert_eq!(output.contains_edge(x, y), expected_res.contains_edge(x, y));
/// }
/// }
/// ```
pub fn complement<N, E, Ty, Ix>(
input: &Graph<N, E, Ty, Ix>,
output: &mut Graph<N, E, Ty, Ix>,
weight: E,
) where
Ty: EdgeType,
Ix: IndexType,
E: Clone,
N: Clone,
{
for (_node, weight) in input.node_references() {
output.add_node(weight.clone());
}
for x in input.node_indices() {
for y in input.node_indices() {
if x != y && !input.contains_edge(x, y)
|
}
}
}
|
{
output.add_edge(x, y, weight.clone());
}
|
conditional_block
|
operator.rs
|
use super::graph::{Graph, IndexType};
use super::EdgeType;
use crate::visit::IntoNodeReferences;
/// \[Generic\] complement of the graph
///
/// Computes the graph complement of the input Graph and stores it
/// in the provided empty output Graph.
///
/// The function does not create self-loops.
///
/// Computes in **O(|V|^2*log(|V|))** time (average).
///
/// Returns the complement.
///
/// # Example
/// ```rust
/// use petgraph::Graph;
/// use petgraph::operator::complement;
/// use petgraph::prelude::*;
///
/// let mut graph: Graph<(),(),Directed> = Graph::new();
/// let a = graph.add_node(()); // node with no weight
/// let b = graph.add_node(());
/// let c = graph.add_node(());
/// let d = graph.add_node(());
///
/// graph.extend_with_edges(&[
/// (a, b),
/// (b, c),
/// (c, d),
/// ]);
/// // a ----> b ----> c ----> d
///
/// graph.extend_with_edges(&[(a, b), (b, c), (c, d)]);
/// let mut output: Graph<(), (), Directed> = Graph::new();
///
/// complement(&graph, &mut output, ());
///
/// let mut expected_res: Graph<(), (), Directed> = Graph::new();
/// let a = expected_res.add_node(());
/// let b = expected_res.add_node(());
|
/// (a, c),
/// (a, d),
/// (b, a),
/// (b, d),
/// (c, a),
/// (c, b),
/// (d, a),
/// (d, b),
/// (d, c),
/// ]);
///
/// for x in graph.node_indices() {
/// for y in graph.node_indices() {
/// assert_eq!(output.contains_edge(x, y), expected_res.contains_edge(x, y));
/// }
/// }
/// ```
pub fn complement<N, E, Ty, Ix>(
input: &Graph<N, E, Ty, Ix>,
output: &mut Graph<N, E, Ty, Ix>,
weight: E,
) where
Ty: EdgeType,
Ix: IndexType,
E: Clone,
N: Clone,
{
for (_node, weight) in input.node_references() {
output.add_node(weight.clone());
}
for x in input.node_indices() {
for y in input.node_indices() {
if x != y && !input.contains_edge(x, y) {
output.add_edge(x, y, weight.clone());
}
}
}
}
|
/// let c = expected_res.add_node(());
/// let d = expected_res.add_node(());
/// expected_res.extend_with_edges(&[
|
random_line_split
|
operator.rs
|
use super::graph::{Graph, IndexType};
use super::EdgeType;
use crate::visit::IntoNodeReferences;
/// \[Generic\] complement of the graph
///
/// Computes the graph complement of the input Graph and stores it
/// in the provided empty output Graph.
///
/// The function does not create self-loops.
///
/// Computes in **O(|V|^2*log(|V|))** time (average).
///
/// Returns the complement.
///
/// # Example
/// ```rust
/// use petgraph::Graph;
/// use petgraph::operator::complement;
/// use petgraph::prelude::*;
///
/// let mut graph: Graph<(),(),Directed> = Graph::new();
/// let a = graph.add_node(()); // node with no weight
/// let b = graph.add_node(());
/// let c = graph.add_node(());
/// let d = graph.add_node(());
///
/// graph.extend_with_edges(&[
/// (a, b),
/// (b, c),
/// (c, d),
/// ]);
/// // a ----> b ----> c ----> d
///
/// graph.extend_with_edges(&[(a, b), (b, c), (c, d)]);
/// let mut output: Graph<(), (), Directed> = Graph::new();
///
/// complement(&graph, &mut output, ());
///
/// let mut expected_res: Graph<(), (), Directed> = Graph::new();
/// let a = expected_res.add_node(());
/// let b = expected_res.add_node(());
/// let c = expected_res.add_node(());
/// let d = expected_res.add_node(());
/// expected_res.extend_with_edges(&[
/// (a, c),
/// (a, d),
/// (b, a),
/// (b, d),
/// (c, a),
/// (c, b),
/// (d, a),
/// (d, b),
/// (d, c),
/// ]);
///
/// for x in graph.node_indices() {
/// for y in graph.node_indices() {
/// assert_eq!(output.contains_edge(x, y), expected_res.contains_edge(x, y));
/// }
/// }
/// ```
pub fn
|
<N, E, Ty, Ix>(
input: &Graph<N, E, Ty, Ix>,
output: &mut Graph<N, E, Ty, Ix>,
weight: E,
) where
Ty: EdgeType,
Ix: IndexType,
E: Clone,
N: Clone,
{
for (_node, weight) in input.node_references() {
output.add_node(weight.clone());
}
for x in input.node_indices() {
for y in input.node_indices() {
if x != y && !input.contains_edge(x, y) {
output.add_edge(x, y, weight.clone());
}
}
}
}
|
complement
|
identifier_name
|
operator.rs
|
use super::graph::{Graph, IndexType};
use super::EdgeType;
use crate::visit::IntoNodeReferences;
/// \[Generic\] complement of the graph
///
/// Computes the graph complement of the input Graph and stores it
/// in the provided empty output Graph.
///
/// The function does not create self-loops.
///
/// Computes in **O(|V|^2*log(|V|))** time (average).
///
/// Returns the complement.
///
/// # Example
/// ```rust
/// use petgraph::Graph;
/// use petgraph::operator::complement;
/// use petgraph::prelude::*;
///
/// let mut graph: Graph<(),(),Directed> = Graph::new();
/// let a = graph.add_node(()); // node with no weight
/// let b = graph.add_node(());
/// let c = graph.add_node(());
/// let d = graph.add_node(());
///
/// graph.extend_with_edges(&[
/// (a, b),
/// (b, c),
/// (c, d),
/// ]);
/// // a ----> b ----> c ----> d
///
/// graph.extend_with_edges(&[(a, b), (b, c), (c, d)]);
/// let mut output: Graph<(), (), Directed> = Graph::new();
///
/// complement(&graph, &mut output, ());
///
/// let mut expected_res: Graph<(), (), Directed> = Graph::new();
/// let a = expected_res.add_node(());
/// let b = expected_res.add_node(());
/// let c = expected_res.add_node(());
/// let d = expected_res.add_node(());
/// expected_res.extend_with_edges(&[
/// (a, c),
/// (a, d),
/// (b, a),
/// (b, d),
/// (c, a),
/// (c, b),
/// (d, a),
/// (d, b),
/// (d, c),
/// ]);
///
/// for x in graph.node_indices() {
/// for y in graph.node_indices() {
/// assert_eq!(output.contains_edge(x, y), expected_res.contains_edge(x, y));
/// }
/// }
/// ```
pub fn complement<N, E, Ty, Ix>(
input: &Graph<N, E, Ty, Ix>,
output: &mut Graph<N, E, Ty, Ix>,
weight: E,
) where
Ty: EdgeType,
Ix: IndexType,
E: Clone,
N: Clone,
|
{
for (_node, weight) in input.node_references() {
output.add_node(weight.clone());
}
for x in input.node_indices() {
for y in input.node_indices() {
if x != y && !input.contains_edge(x, y) {
output.add_edge(x, y, weight.clone());
}
}
}
}
|
identifier_body
|
|
htmlselectelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers, AttrValue};
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding;
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding::HTMLSelectElementMethods;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::codegen::InheritTypes::{HTMLSelectElementDerived, HTMLFieldSetElementDerived};
use dom::bindings::codegen::UnionTypes::HTMLElementOrLong;
use dom::bindings::codegen::UnionTypes::HTMLOptionElementOrHTMLOptGroupElement;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::AttributeHandlers;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{DisabledStateHelpers, Node, NodeHelpers, NodeTypeId, window_from_node};
use dom::validitystate::ValidityState;
use dom::virtualmethods::VirtualMethods;
use util::str::DOMString;
use string_cache::Atom;
use std::borrow::ToOwned;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct HTMLSelectElement {
htmlelement: HTMLElement
}
impl HTMLSelectElementDerived for EventTarget {
fn is_htmlselectelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)))
}
}
static DEFAULT_SELECT_SIZE: u32 = 0;
impl HTMLSelectElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLSelectElement {
HTMLSelectElement {
htmlelement:
HTMLElement::new_inherited(HTMLElementTypeId::HTMLSelectElement, localName, prefix, document)
}
}
|
Node::reflect_node(box element, document, HTMLSelectElementBinding::Wrap)
}
}
impl<'a> HTMLSelectElementMethods for &'a HTMLSelectElement {
// https://html.spec.whatwg.org/multipage/#dom-cva-validity
fn Validity(self) -> Root<ValidityState> {
let window = window_from_node(self);
ValidityState::new(window.r())
}
// Note: this function currently only exists for test_union.html.
// https://html.spec.whatwg.org/multipage/#dom-select-add
fn Add(self, _element: HTMLOptionElementOrHTMLOptGroupElement, _before: Option<HTMLElementOrLong>) {
}
// https://www.whatwg.org/html/#dom-fe-disabled
make_bool_getter!(Disabled);
// https://www.whatwg.org/html/#dom-fe-disabled
make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-select-multiple
make_bool_getter!(Multiple);
// https://html.spec.whatwg.org/multipage/#dom-select-multiple
make_bool_setter!(SetMultiple, "multiple");
// https://html.spec.whatwg.org/multipage/#dom-fe-name
make_getter!(Name);
// https://html.spec.whatwg.org/multipage/#dom-fe-name
make_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-select-size
make_uint_getter!(Size, "size", DEFAULT_SELECT_SIZE);
// https://html.spec.whatwg.org/multipage/#dom-select-size
make_uint_setter!(SetSize, "size", DEFAULT_SELECT_SIZE);
// https://html.spec.whatwg.org/multipage/#dom-select-type
fn Type(self) -> DOMString {
if self.Multiple() {
"select-multiple".to_owned()
} else {
"select-one".to_owned()
}
}
}
impl<'a> VirtualMethods for &'a HTMLSelectElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn after_set_attr(&self, attr: &Attr) {
if let Some(ref s) = self.super_type() {
s.after_set_attr(attr);
}
match attr.local_name() {
&atom!("disabled") => {
let node = NodeCast::from_ref(*self);
node.set_disabled_state(true);
node.set_enabled_state(false);
},
_ => ()
}
}
fn before_remove_attr(&self, attr: &Attr) {
if let Some(ref s) = self.super_type() {
s.before_remove_attr(attr);
}
match attr.local_name() {
&atom!("disabled") => {
let node = NodeCast::from_ref(*self);
node.set_disabled_state(false);
node.set_enabled_state(true);
node.check_ancestors_disabled_state_for_form_control();
},
_ => ()
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
let node = NodeCast::from_ref(*self);
node.check_ancestors_disabled_state_for_form_control();
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(tree_in_doc);
}
let node = NodeCast::from_ref(*self);
if node.ancestors().any(|ancestor| ancestor.r().is_htmlfieldsetelement()) {
node.check_ancestors_disabled_state_for_form_control();
} else {
node.check_disabled_attribute();
}
}
fn parse_plain_attribute(&self, local_name: &Atom, value: DOMString) -> AttrValue {
match local_name {
&atom!("size") => AttrValue::from_u32(value, DEFAULT_SELECT_SIZE),
_ => self.super_type().unwrap().parse_plain_attribute(local_name, value),
}
}
}
|
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLSelectElement> {
let element = HTMLSelectElement::new_inherited(localName, prefix, document);
|
random_line_split
|
htmlselectelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers, AttrValue};
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding;
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding::HTMLSelectElementMethods;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::codegen::InheritTypes::{HTMLSelectElementDerived, HTMLFieldSetElementDerived};
use dom::bindings::codegen::UnionTypes::HTMLElementOrLong;
use dom::bindings::codegen::UnionTypes::HTMLOptionElementOrHTMLOptGroupElement;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::AttributeHandlers;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{DisabledStateHelpers, Node, NodeHelpers, NodeTypeId, window_from_node};
use dom::validitystate::ValidityState;
use dom::virtualmethods::VirtualMethods;
use util::str::DOMString;
use string_cache::Atom;
use std::borrow::ToOwned;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct HTMLSelectElement {
htmlelement: HTMLElement
}
impl HTMLSelectElementDerived for EventTarget {
fn is_htmlselectelement(&self) -> bool
|
}
static DEFAULT_SELECT_SIZE: u32 = 0;
impl HTMLSelectElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLSelectElement {
HTMLSelectElement {
htmlelement:
HTMLElement::new_inherited(HTMLElementTypeId::HTMLSelectElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLSelectElement> {
let element = HTMLSelectElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLSelectElementBinding::Wrap)
}
}
impl<'a> HTMLSelectElementMethods for &'a HTMLSelectElement {
// https://html.spec.whatwg.org/multipage/#dom-cva-validity
fn Validity(self) -> Root<ValidityState> {
let window = window_from_node(self);
ValidityState::new(window.r())
}
// Note: this function currently only exists for test_union.html.
// https://html.spec.whatwg.org/multipage/#dom-select-add
fn Add(self, _element: HTMLOptionElementOrHTMLOptGroupElement, _before: Option<HTMLElementOrLong>) {
}
// https://www.whatwg.org/html/#dom-fe-disabled
make_bool_getter!(Disabled);
// https://www.whatwg.org/html/#dom-fe-disabled
make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-select-multiple
make_bool_getter!(Multiple);
// https://html.spec.whatwg.org/multipage/#dom-select-multiple
make_bool_setter!(SetMultiple, "multiple");
// https://html.spec.whatwg.org/multipage/#dom-fe-name
make_getter!(Name);
// https://html.spec.whatwg.org/multipage/#dom-fe-name
make_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-select-size
make_uint_getter!(Size, "size", DEFAULT_SELECT_SIZE);
// https://html.spec.whatwg.org/multipage/#dom-select-size
make_uint_setter!(SetSize, "size", DEFAULT_SELECT_SIZE);
// https://html.spec.whatwg.org/multipage/#dom-select-type
fn Type(self) -> DOMString {
if self.Multiple() {
"select-multiple".to_owned()
} else {
"select-one".to_owned()
}
}
}
impl<'a> VirtualMethods for &'a HTMLSelectElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn after_set_attr(&self, attr: &Attr) {
if let Some(ref s) = self.super_type() {
s.after_set_attr(attr);
}
match attr.local_name() {
&atom!("disabled") => {
let node = NodeCast::from_ref(*self);
node.set_disabled_state(true);
node.set_enabled_state(false);
},
_ => ()
}
}
fn before_remove_attr(&self, attr: &Attr) {
if let Some(ref s) = self.super_type() {
s.before_remove_attr(attr);
}
match attr.local_name() {
&atom!("disabled") => {
let node = NodeCast::from_ref(*self);
node.set_disabled_state(false);
node.set_enabled_state(true);
node.check_ancestors_disabled_state_for_form_control();
},
_ => ()
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
let node = NodeCast::from_ref(*self);
node.check_ancestors_disabled_state_for_form_control();
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(tree_in_doc);
}
let node = NodeCast::from_ref(*self);
if node.ancestors().any(|ancestor| ancestor.r().is_htmlfieldsetelement()) {
node.check_ancestors_disabled_state_for_form_control();
} else {
node.check_disabled_attribute();
}
}
fn parse_plain_attribute(&self, local_name: &Atom, value: DOMString) -> AttrValue {
match local_name {
&atom!("size") => AttrValue::from_u32(value, DEFAULT_SELECT_SIZE),
_ => self.super_type().unwrap().parse_plain_attribute(local_name, value),
}
}
}
|
{
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)))
}
|
identifier_body
|
htmlselectelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers, AttrValue};
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding;
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding::HTMLSelectElementMethods;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::codegen::InheritTypes::{HTMLSelectElementDerived, HTMLFieldSetElementDerived};
use dom::bindings::codegen::UnionTypes::HTMLElementOrLong;
use dom::bindings::codegen::UnionTypes::HTMLOptionElementOrHTMLOptGroupElement;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::AttributeHandlers;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{DisabledStateHelpers, Node, NodeHelpers, NodeTypeId, window_from_node};
use dom::validitystate::ValidityState;
use dom::virtualmethods::VirtualMethods;
use util::str::DOMString;
use string_cache::Atom;
use std::borrow::ToOwned;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct HTMLSelectElement {
htmlelement: HTMLElement
}
impl HTMLSelectElementDerived for EventTarget {
fn is_htmlselectelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)))
}
}
static DEFAULT_SELECT_SIZE: u32 = 0;
impl HTMLSelectElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLSelectElement {
HTMLSelectElement {
htmlelement:
HTMLElement::new_inherited(HTMLElementTypeId::HTMLSelectElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLSelectElement> {
let element = HTMLSelectElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLSelectElementBinding::Wrap)
}
}
impl<'a> HTMLSelectElementMethods for &'a HTMLSelectElement {
// https://html.spec.whatwg.org/multipage/#dom-cva-validity
fn Validity(self) -> Root<ValidityState> {
let window = window_from_node(self);
ValidityState::new(window.r())
}
// Note: this function currently only exists for test_union.html.
// https://html.spec.whatwg.org/multipage/#dom-select-add
fn Add(self, _element: HTMLOptionElementOrHTMLOptGroupElement, _before: Option<HTMLElementOrLong>) {
}
// https://www.whatwg.org/html/#dom-fe-disabled
make_bool_getter!(Disabled);
// https://www.whatwg.org/html/#dom-fe-disabled
make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-select-multiple
make_bool_getter!(Multiple);
// https://html.spec.whatwg.org/multipage/#dom-select-multiple
make_bool_setter!(SetMultiple, "multiple");
// https://html.spec.whatwg.org/multipage/#dom-fe-name
make_getter!(Name);
// https://html.spec.whatwg.org/multipage/#dom-fe-name
make_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-select-size
make_uint_getter!(Size, "size", DEFAULT_SELECT_SIZE);
// https://html.spec.whatwg.org/multipage/#dom-select-size
make_uint_setter!(SetSize, "size", DEFAULT_SELECT_SIZE);
// https://html.spec.whatwg.org/multipage/#dom-select-type
fn Type(self) -> DOMString {
if self.Multiple() {
"select-multiple".to_owned()
} else {
"select-one".to_owned()
}
}
}
impl<'a> VirtualMethods for &'a HTMLSelectElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn
|
(&self, attr: &Attr) {
if let Some(ref s) = self.super_type() {
s.after_set_attr(attr);
}
match attr.local_name() {
&atom!("disabled") => {
let node = NodeCast::from_ref(*self);
node.set_disabled_state(true);
node.set_enabled_state(false);
},
_ => ()
}
}
fn before_remove_attr(&self, attr: &Attr) {
if let Some(ref s) = self.super_type() {
s.before_remove_attr(attr);
}
match attr.local_name() {
&atom!("disabled") => {
let node = NodeCast::from_ref(*self);
node.set_disabled_state(false);
node.set_enabled_state(true);
node.check_ancestors_disabled_state_for_form_control();
},
_ => ()
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
let node = NodeCast::from_ref(*self);
node.check_ancestors_disabled_state_for_form_control();
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(tree_in_doc);
}
let node = NodeCast::from_ref(*self);
if node.ancestors().any(|ancestor| ancestor.r().is_htmlfieldsetelement()) {
node.check_ancestors_disabled_state_for_form_control();
} else {
node.check_disabled_attribute();
}
}
fn parse_plain_attribute(&self, local_name: &Atom, value: DOMString) -> AttrValue {
match local_name {
&atom!("size") => AttrValue::from_u32(value, DEFAULT_SELECT_SIZE),
_ => self.super_type().unwrap().parse_plain_attribute(local_name, value),
}
}
}
|
after_set_attr
|
identifier_name
|
mod.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Fetchable Dapps support.
//! Manages downloaded (cached) Dapps and downloads them when necessary.
//! Uses `URLHint` to resolve addresses into Dapps bundle file location.
mod installers;
use std::{fs, env};
use std::path::PathBuf;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use fetch::{Client as FetchClient, Fetch};
use hash_fetch::urlhint::{URLHintContract, URLHint, URLHintResult};
use parity_reactor::Remote;
use hyper;
use hyper::status::StatusCode;
use {SyncStatus, random_filename};
use util::Mutex;
use page::LocalPageEndpoint;
use handlers::{ContentHandler, ContentFetcherHandler};
use endpoint::{Endpoint, EndpointPath, Handler};
use apps::cache::{ContentCache, ContentStatus};
/// Limit of cached dapps/content
const MAX_CACHED_DAPPS: usize = 20;
pub trait Fetcher: Send + Sync + 'static {
fn contains(&self, content_id: &str) -> bool;
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler>;
}
pub struct
|
<F: Fetch = FetchClient, R: URLHint + Send + Sync + 'static = URLHintContract> {
dapps_path: PathBuf,
resolver: R,
cache: Arc<Mutex<ContentCache>>,
sync: Arc<SyncStatus>,
embeddable_on: Option<(String, u16)>,
remote: Remote,
fetch: F,
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Drop for ContentFetcher<F, R> {
fn drop(&mut self) {
// Clear cache path
let _ = fs::remove_dir_all(&self.dapps_path);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> ContentFetcher<F, R> {
pub fn new(resolver: R, sync_status: Arc<SyncStatus>, embeddable_on: Option<(String, u16)>, remote: Remote, fetch: F) -> Self {
let mut dapps_path = env::temp_dir();
dapps_path.push(random_filename());
ContentFetcher {
dapps_path: dapps_path,
resolver: resolver,
sync: sync_status,
cache: Arc::new(Mutex::new(ContentCache::default())),
embeddable_on: embeddable_on,
remote: remote,
fetch: fetch,
}
}
fn still_syncing(address: Option<(String, u16)>) -> Box<Handler> {
Box::new(ContentHandler::error(
StatusCode::ServiceUnavailable,
"Sync In Progress",
"Your node is still syncing. We cannot resolve any content before it's fully synced.",
Some("<a href=\"javascript:window.location.reload()\">Refresh</a>"),
address,
))
}
#[cfg(test)]
fn set_status(&self, content_id: &str, status: ContentStatus) {
self.cache.lock().insert(content_id.to_owned(), status);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
fn contains(&self, content_id: &str) -> bool {
{
let mut cache = self.cache.lock();
// Check if we already have the app
if cache.get(content_id).is_some() {
return true;
}
}
// fallback to resolver
if let Ok(content_id) = content_id.from_hex() {
// else try to resolve the app_id
let has_content = self.resolver.resolve(content_id).is_some();
// if there is content or we are syncing return true
has_content || self.sync.is_major_importing()
} else {
false
}
}
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
let mut cache = self.cache.lock();
let content_id = path.app_id.clone();
let (new_status, handler) = {
let status = cache.get(&content_id);
match status {
// Just serve the content
Some(&mut ContentStatus::Ready(ref endpoint)) => {
(None, endpoint.to_async_handler(path, control))
},
// Content is already being fetched
Some(&mut ContentStatus::Fetching(ref fetch_control)) if !fetch_control.is_deadline_reached() => {
trace!(target: "dapps", "Content fetching in progress. Waiting...");
(None, fetch_control.to_async_handler(path, control))
},
// We need to start fetching the content
_ => {
trace!(target: "dapps", "Content unavailable. Fetching... {:?}", content_id);
let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
let content = self.resolver.resolve(content_hex);
let cache = self.cache.clone();
let id = content_id.clone();
let on_done = move |result: Option<LocalPageEndpoint>| {
let mut cache = cache.lock();
match result {
Some(endpoint) => cache.insert(id.clone(), ContentStatus::Ready(endpoint)),
// In case of error
None => cache.remove(&id),
};
};
match content {
// Don't serve dapps if we are still syncing (but serve content)
Some(URLHintResult::Dapp(_)) if self.sync.is_major_importing() => {
(None, Self::still_syncing(self.embeddable_on.clone()))
},
Some(URLHintResult::Dapp(dapp)) => {
let handler = ContentFetcherHandler::new(
dapp.url(),
path,
control,
installers::Dapp::new(
content_id.clone(),
self.dapps_path.clone(),
Box::new(on_done),
self.embeddable_on.clone(),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
Some(URLHintResult::Content(content)) => {
let handler = ContentFetcherHandler::new(
content.url,
path,
control,
installers::Content::new(
content_id.clone(),
content.mime,
self.dapps_path.clone(),
Box::new(on_done),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
None if self.sync.is_major_importing() => {
(None, Self::still_syncing(self.embeddable_on.clone()))
},
None => {
// This may happen when sync status changes in between
// `contains` and `to_handler`
(None, Box::new(ContentHandler::error(
StatusCode::NotFound,
"Resource Not Found",
"Requested resource was not found.",
None,
self.embeddable_on.clone(),
)) as Box<Handler>)
},
}
},
}
};
if let Some(status) = new_status {
cache.clear_garbage(MAX_CACHED_DAPPS);
cache.insert(content_id, status);
}
handler
}
}
#[cfg(test)]
mod tests {
use std::env;
use std::sync::Arc;
use util::Bytes;
use fetch::{Fetch, Client};
use hash_fetch::urlhint::{URLHint, URLHintResult};
use parity_reactor::Remote;
use apps::cache::ContentStatus;
use endpoint::EndpointInfo;
use page::LocalPageEndpoint;
use super::{ContentFetcher, Fetcher};
struct FakeResolver;
impl URLHint for FakeResolver {
fn resolve(&self, _id: Bytes) -> Option<URLHintResult> {
None
}
}
#[test]
fn should_true_if_contains_the_app() {
// given
let path = env::temp_dir();
let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false), None, Remote::new_sync(), Client::new().unwrap());
let handler = LocalPageEndpoint::new(path, EndpointInfo {
name: "fake".into(),
description: "".into(),
version: "".into(),
author: "".into(),
icon_url: "".into(),
}, Default::default(), None);
// when
fetcher.set_status("test", ContentStatus::Ready(handler));
fetcher.set_status("test2", ContentStatus::Fetching(Default::default()));
// then
assert_eq!(fetcher.contains("test"), true);
assert_eq!(fetcher.contains("test2"), true);
assert_eq!(fetcher.contains("test3"), false);
}
}
|
ContentFetcher
|
identifier_name
|
mod.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Fetchable Dapps support.
//! Manages downloaded (cached) Dapps and downloads them when necessary.
//! Uses `URLHint` to resolve addresses into Dapps bundle file location.
mod installers;
use std::{fs, env};
use std::path::PathBuf;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use fetch::{Client as FetchClient, Fetch};
use hash_fetch::urlhint::{URLHintContract, URLHint, URLHintResult};
use parity_reactor::Remote;
use hyper;
use hyper::status::StatusCode;
use {SyncStatus, random_filename};
use util::Mutex;
use page::LocalPageEndpoint;
use handlers::{ContentHandler, ContentFetcherHandler};
use endpoint::{Endpoint, EndpointPath, Handler};
use apps::cache::{ContentCache, ContentStatus};
/// Limit of cached dapps/content
const MAX_CACHED_DAPPS: usize = 20;
pub trait Fetcher: Send + Sync + 'static {
fn contains(&self, content_id: &str) -> bool;
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler>;
}
pub struct ContentFetcher<F: Fetch = FetchClient, R: URLHint + Send + Sync + 'static = URLHintContract> {
dapps_path: PathBuf,
resolver: R,
cache: Arc<Mutex<ContentCache>>,
sync: Arc<SyncStatus>,
embeddable_on: Option<(String, u16)>,
remote: Remote,
fetch: F,
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Drop for ContentFetcher<F, R> {
fn drop(&mut self) {
// Clear cache path
let _ = fs::remove_dir_all(&self.dapps_path);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> ContentFetcher<F, R> {
pub fn new(resolver: R, sync_status: Arc<SyncStatus>, embeddable_on: Option<(String, u16)>, remote: Remote, fetch: F) -> Self {
let mut dapps_path = env::temp_dir();
dapps_path.push(random_filename());
ContentFetcher {
dapps_path: dapps_path,
resolver: resolver,
sync: sync_status,
cache: Arc::new(Mutex::new(ContentCache::default())),
embeddable_on: embeddable_on,
remote: remote,
fetch: fetch,
}
}
fn still_syncing(address: Option<(String, u16)>) -> Box<Handler> {
Box::new(ContentHandler::error(
StatusCode::ServiceUnavailable,
"Sync In Progress",
"Your node is still syncing. We cannot resolve any content before it's fully synced.",
Some("<a href=\"javascript:window.location.reload()\">Refresh</a>"),
address,
))
}
#[cfg(test)]
fn set_status(&self, content_id: &str, status: ContentStatus) {
self.cache.lock().insert(content_id.to_owned(), status);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
fn contains(&self, content_id: &str) -> bool {
{
let mut cache = self.cache.lock();
// Check if we already have the app
if cache.get(content_id).is_some() {
return true;
}
}
// fallback to resolver
if let Ok(content_id) = content_id.from_hex() {
// else try to resolve the app_id
let has_content = self.resolver.resolve(content_id).is_some();
// if there is content or we are syncing return true
has_content || self.sync.is_major_importing()
} else
|
}
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
let mut cache = self.cache.lock();
let content_id = path.app_id.clone();
let (new_status, handler) = {
let status = cache.get(&content_id);
match status {
// Just serve the content
Some(&mut ContentStatus::Ready(ref endpoint)) => {
(None, endpoint.to_async_handler(path, control))
},
// Content is already being fetched
Some(&mut ContentStatus::Fetching(ref fetch_control)) if !fetch_control.is_deadline_reached() => {
trace!(target: "dapps", "Content fetching in progress. Waiting...");
(None, fetch_control.to_async_handler(path, control))
},
// We need to start fetching the content
_ => {
trace!(target: "dapps", "Content unavailable. Fetching... {:?}", content_id);
let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
let content = self.resolver.resolve(content_hex);
let cache = self.cache.clone();
let id = content_id.clone();
let on_done = move |result: Option<LocalPageEndpoint>| {
let mut cache = cache.lock();
match result {
Some(endpoint) => cache.insert(id.clone(), ContentStatus::Ready(endpoint)),
// In case of error
None => cache.remove(&id),
};
};
match content {
// Don't serve dapps if we are still syncing (but serve content)
Some(URLHintResult::Dapp(_)) if self.sync.is_major_importing() => {
(None, Self::still_syncing(self.embeddable_on.clone()))
},
Some(URLHintResult::Dapp(dapp)) => {
let handler = ContentFetcherHandler::new(
dapp.url(),
path,
control,
installers::Dapp::new(
content_id.clone(),
self.dapps_path.clone(),
Box::new(on_done),
self.embeddable_on.clone(),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
Some(URLHintResult::Content(content)) => {
let handler = ContentFetcherHandler::new(
content.url,
path,
control,
installers::Content::new(
content_id.clone(),
content.mime,
self.dapps_path.clone(),
Box::new(on_done),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
None if self.sync.is_major_importing() => {
(None, Self::still_syncing(self.embeddable_on.clone()))
},
None => {
// This may happen when sync status changes in between
// `contains` and `to_handler`
(None, Box::new(ContentHandler::error(
StatusCode::NotFound,
"Resource Not Found",
"Requested resource was not found.",
None,
self.embeddable_on.clone(),
)) as Box<Handler>)
},
}
},
}
};
if let Some(status) = new_status {
cache.clear_garbage(MAX_CACHED_DAPPS);
cache.insert(content_id, status);
}
handler
}
}
#[cfg(test)]
mod tests {
use std::env;
use std::sync::Arc;
use util::Bytes;
use fetch::{Fetch, Client};
use hash_fetch::urlhint::{URLHint, URLHintResult};
use parity_reactor::Remote;
use apps::cache::ContentStatus;
use endpoint::EndpointInfo;
use page::LocalPageEndpoint;
use super::{ContentFetcher, Fetcher};
struct FakeResolver;
impl URLHint for FakeResolver {
fn resolve(&self, _id: Bytes) -> Option<URLHintResult> {
None
}
}
#[test]
fn should_true_if_contains_the_app() {
// given
let path = env::temp_dir();
let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false), None, Remote::new_sync(), Client::new().unwrap());
let handler = LocalPageEndpoint::new(path, EndpointInfo {
name: "fake".into(),
description: "".into(),
version: "".into(),
author: "".into(),
icon_url: "".into(),
}, Default::default(), None);
// when
fetcher.set_status("test", ContentStatus::Ready(handler));
fetcher.set_status("test2", ContentStatus::Fetching(Default::default()));
// then
assert_eq!(fetcher.contains("test"), true);
assert_eq!(fetcher.contains("test2"), true);
assert_eq!(fetcher.contains("test3"), false);
}
}
|
{
false
}
|
conditional_block
|
mod.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Fetchable Dapps support.
//! Manages downloaded (cached) Dapps and downloads them when necessary.
//! Uses `URLHint` to resolve addresses into Dapps bundle file location.
mod installers;
use std::{fs, env};
use std::path::PathBuf;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use fetch::{Client as FetchClient, Fetch};
use hash_fetch::urlhint::{URLHintContract, URLHint, URLHintResult};
use parity_reactor::Remote;
use hyper;
use hyper::status::StatusCode;
use {SyncStatus, random_filename};
use util::Mutex;
use page::LocalPageEndpoint;
use handlers::{ContentHandler, ContentFetcherHandler};
use endpoint::{Endpoint, EndpointPath, Handler};
use apps::cache::{ContentCache, ContentStatus};
/// Limit of cached dapps/content
const MAX_CACHED_DAPPS: usize = 20;
pub trait Fetcher: Send + Sync + 'static {
fn contains(&self, content_id: &str) -> bool;
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler>;
}
pub struct ContentFetcher<F: Fetch = FetchClient, R: URLHint + Send + Sync + 'static = URLHintContract> {
dapps_path: PathBuf,
resolver: R,
cache: Arc<Mutex<ContentCache>>,
sync: Arc<SyncStatus>,
embeddable_on: Option<(String, u16)>,
remote: Remote,
fetch: F,
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Drop for ContentFetcher<F, R> {
fn drop(&mut self) {
// Clear cache path
let _ = fs::remove_dir_all(&self.dapps_path);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> ContentFetcher<F, R> {
pub fn new(resolver: R, sync_status: Arc<SyncStatus>, embeddable_on: Option<(String, u16)>, remote: Remote, fetch: F) -> Self {
let mut dapps_path = env::temp_dir();
dapps_path.push(random_filename());
ContentFetcher {
dapps_path: dapps_path,
resolver: resolver,
sync: sync_status,
cache: Arc::new(Mutex::new(ContentCache::default())),
embeddable_on: embeddable_on,
remote: remote,
fetch: fetch,
}
}
fn still_syncing(address: Option<(String, u16)>) -> Box<Handler> {
Box::new(ContentHandler::error(
StatusCode::ServiceUnavailable,
"Sync In Progress",
"Your node is still syncing. We cannot resolve any content before it's fully synced.",
Some("<a href=\"javascript:window.location.reload()\">Refresh</a>"),
address,
))
}
#[cfg(test)]
fn set_status(&self, content_id: &str, status: ContentStatus) {
self.cache.lock().insert(content_id.to_owned(), status);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
fn contains(&self, content_id: &str) -> bool {
{
let mut cache = self.cache.lock();
// Check if we already have the app
if cache.get(content_id).is_some() {
return true;
}
}
// fallback to resolver
if let Ok(content_id) = content_id.from_hex() {
// else try to resolve the app_id
let has_content = self.resolver.resolve(content_id).is_some();
// if there is content or we are syncing return true
has_content || self.sync.is_major_importing()
} else {
false
}
}
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
let mut cache = self.cache.lock();
let content_id = path.app_id.clone();
let (new_status, handler) = {
let status = cache.get(&content_id);
match status {
// Just serve the content
Some(&mut ContentStatus::Ready(ref endpoint)) => {
(None, endpoint.to_async_handler(path, control))
},
// Content is already being fetched
Some(&mut ContentStatus::Fetching(ref fetch_control)) if !fetch_control.is_deadline_reached() => {
trace!(target: "dapps", "Content fetching in progress. Waiting...");
(None, fetch_control.to_async_handler(path, control))
},
// We need to start fetching the content
_ => {
trace!(target: "dapps", "Content unavailable. Fetching... {:?}", content_id);
let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
let content = self.resolver.resolve(content_hex);
let cache = self.cache.clone();
let id = content_id.clone();
let on_done = move |result: Option<LocalPageEndpoint>| {
let mut cache = cache.lock();
match result {
Some(endpoint) => cache.insert(id.clone(), ContentStatus::Ready(endpoint)),
// In case of error
None => cache.remove(&id),
};
};
match content {
// Don't serve dapps if we are still syncing (but serve content)
Some(URLHintResult::Dapp(_)) if self.sync.is_major_importing() => {
(None, Self::still_syncing(self.embeddable_on.clone()))
},
Some(URLHintResult::Dapp(dapp)) => {
let handler = ContentFetcherHandler::new(
dapp.url(),
path,
control,
installers::Dapp::new(
content_id.clone(),
self.dapps_path.clone(),
Box::new(on_done),
self.embeddable_on.clone(),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
Some(URLHintResult::Content(content)) => {
let handler = ContentFetcherHandler::new(
content.url,
path,
control,
installers::Content::new(
content_id.clone(),
content.mime,
self.dapps_path.clone(),
Box::new(on_done),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
None if self.sync.is_major_importing() => {
(None, Self::still_syncing(self.embeddable_on.clone()))
},
None => {
// This may happen when sync status changes in between
// `contains` and `to_handler`
(None, Box::new(ContentHandler::error(
StatusCode::NotFound,
"Resource Not Found",
"Requested resource was not found.",
None,
self.embeddable_on.clone(),
)) as Box<Handler>)
},
}
},
}
};
if let Some(status) = new_status {
cache.clear_garbage(MAX_CACHED_DAPPS);
cache.insert(content_id, status);
}
handler
}
}
#[cfg(test)]
mod tests {
use std::env;
use std::sync::Arc;
use util::Bytes;
use fetch::{Fetch, Client};
use hash_fetch::urlhint::{URLHint, URLHintResult};
use parity_reactor::Remote;
use apps::cache::ContentStatus;
use endpoint::EndpointInfo;
use page::LocalPageEndpoint;
use super::{ContentFetcher, Fetcher};
struct FakeResolver;
impl URLHint for FakeResolver {
fn resolve(&self, _id: Bytes) -> Option<URLHintResult>
|
}
#[test]
fn should_true_if_contains_the_app() {
// given
let path = env::temp_dir();
let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false), None, Remote::new_sync(), Client::new().unwrap());
let handler = LocalPageEndpoint::new(path, EndpointInfo {
name: "fake".into(),
description: "".into(),
version: "".into(),
author: "".into(),
icon_url: "".into(),
}, Default::default(), None);
// when
fetcher.set_status("test", ContentStatus::Ready(handler));
fetcher.set_status("test2", ContentStatus::Fetching(Default::default()));
// then
assert_eq!(fetcher.contains("test"), true);
assert_eq!(fetcher.contains("test2"), true);
assert_eq!(fetcher.contains("test3"), false);
}
}
|
{
None
}
|
identifier_body
|
mod.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Fetchable Dapps support.
//! Manages downloaded (cached) Dapps and downloads them when necessary.
//! Uses `URLHint` to resolve addresses into Dapps bundle file location.
mod installers;
use std::{fs, env};
use std::path::PathBuf;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use fetch::{Client as FetchClient, Fetch};
use hash_fetch::urlhint::{URLHintContract, URLHint, URLHintResult};
use parity_reactor::Remote;
use hyper;
use hyper::status::StatusCode;
use {SyncStatus, random_filename};
use util::Mutex;
use page::LocalPageEndpoint;
use handlers::{ContentHandler, ContentFetcherHandler};
use endpoint::{Endpoint, EndpointPath, Handler};
use apps::cache::{ContentCache, ContentStatus};
/// Limit of cached dapps/content
const MAX_CACHED_DAPPS: usize = 20;
pub trait Fetcher: Send + Sync + 'static {
fn contains(&self, content_id: &str) -> bool;
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler>;
}
pub struct ContentFetcher<F: Fetch = FetchClient, R: URLHint + Send + Sync + 'static = URLHintContract> {
dapps_path: PathBuf,
resolver: R,
cache: Arc<Mutex<ContentCache>>,
sync: Arc<SyncStatus>,
embeddable_on: Option<(String, u16)>,
remote: Remote,
fetch: F,
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Drop for ContentFetcher<F, R> {
fn drop(&mut self) {
// Clear cache path
let _ = fs::remove_dir_all(&self.dapps_path);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> ContentFetcher<F, R> {
pub fn new(resolver: R, sync_status: Arc<SyncStatus>, embeddable_on: Option<(String, u16)>, remote: Remote, fetch: F) -> Self {
let mut dapps_path = env::temp_dir();
dapps_path.push(random_filename());
ContentFetcher {
dapps_path: dapps_path,
resolver: resolver,
sync: sync_status,
cache: Arc::new(Mutex::new(ContentCache::default())),
embeddable_on: embeddable_on,
remote: remote,
fetch: fetch,
}
}
fn still_syncing(address: Option<(String, u16)>) -> Box<Handler> {
Box::new(ContentHandler::error(
StatusCode::ServiceUnavailable,
"Sync In Progress",
"Your node is still syncing. We cannot resolve any content before it's fully synced.",
Some("<a href=\"javascript:window.location.reload()\">Refresh</a>"),
address,
))
}
#[cfg(test)]
fn set_status(&self, content_id: &str, status: ContentStatus) {
self.cache.lock().insert(content_id.to_owned(), status);
}
}
impl<R: URLHint + Send + Sync + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
fn contains(&self, content_id: &str) -> bool {
{
let mut cache = self.cache.lock();
// Check if we already have the app
if cache.get(content_id).is_some() {
return true;
}
}
// fallback to resolver
if let Ok(content_id) = content_id.from_hex() {
// else try to resolve the app_id
let has_content = self.resolver.resolve(content_id).is_some();
// if there is content or we are syncing return true
has_content || self.sync.is_major_importing()
} else {
false
}
}
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
let mut cache = self.cache.lock();
let content_id = path.app_id.clone();
let (new_status, handler) = {
let status = cache.get(&content_id);
match status {
// Just serve the content
Some(&mut ContentStatus::Ready(ref endpoint)) => {
(None, endpoint.to_async_handler(path, control))
},
// Content is already being fetched
Some(&mut ContentStatus::Fetching(ref fetch_control)) if !fetch_control.is_deadline_reached() => {
trace!(target: "dapps", "Content fetching in progress. Waiting...");
(None, fetch_control.to_async_handler(path, control))
},
// We need to start fetching the content
_ => {
trace!(target: "dapps", "Content unavailable. Fetching... {:?}", content_id);
let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
let content = self.resolver.resolve(content_hex);
let cache = self.cache.clone();
let id = content_id.clone();
let on_done = move |result: Option<LocalPageEndpoint>| {
let mut cache = cache.lock();
match result {
Some(endpoint) => cache.insert(id.clone(), ContentStatus::Ready(endpoint)),
// In case of error
None => cache.remove(&id),
};
};
match content {
// Don't serve dapps if we are still syncing (but serve content)
Some(URLHintResult::Dapp(_)) if self.sync.is_major_importing() => {
|
},
Some(URLHintResult::Dapp(dapp)) => {
let handler = ContentFetcherHandler::new(
dapp.url(),
path,
control,
installers::Dapp::new(
content_id.clone(),
self.dapps_path.clone(),
Box::new(on_done),
self.embeddable_on.clone(),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
Some(URLHintResult::Content(content)) => {
let handler = ContentFetcherHandler::new(
content.url,
path,
control,
installers::Content::new(
content_id.clone(),
content.mime,
self.dapps_path.clone(),
Box::new(on_done),
),
self.embeddable_on.clone(),
self.remote.clone(),
self.fetch.clone(),
);
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
},
None if self.sync.is_major_importing() => {
(None, Self::still_syncing(self.embeddable_on.clone()))
},
None => {
// This may happen when sync status changes in between
// `contains` and `to_handler`
(None, Box::new(ContentHandler::error(
StatusCode::NotFound,
"Resource Not Found",
"Requested resource was not found.",
None,
self.embeddable_on.clone(),
)) as Box<Handler>)
},
}
},
}
};
if let Some(status) = new_status {
cache.clear_garbage(MAX_CACHED_DAPPS);
cache.insert(content_id, status);
}
handler
}
}
#[cfg(test)]
mod tests {
use std::env;
use std::sync::Arc;
use util::Bytes;
use fetch::{Fetch, Client};
use hash_fetch::urlhint::{URLHint, URLHintResult};
use parity_reactor::Remote;
use apps::cache::ContentStatus;
use endpoint::EndpointInfo;
use page::LocalPageEndpoint;
use super::{ContentFetcher, Fetcher};
struct FakeResolver;
impl URLHint for FakeResolver {
fn resolve(&self, _id: Bytes) -> Option<URLHintResult> {
None
}
}
#[test]
fn should_true_if_contains_the_app() {
// given
let path = env::temp_dir();
let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false), None, Remote::new_sync(), Client::new().unwrap());
let handler = LocalPageEndpoint::new(path, EndpointInfo {
name: "fake".into(),
description: "".into(),
version: "".into(),
author: "".into(),
icon_url: "".into(),
}, Default::default(), None);
// when
fetcher.set_status("test", ContentStatus::Ready(handler));
fetcher.set_status("test2", ContentStatus::Fetching(Default::default()));
// then
assert_eq!(fetcher.contains("test"), true);
assert_eq!(fetcher.contains("test2"), true);
assert_eq!(fetcher.contains("test3"), false);
}
}
|
(None, Self::still_syncing(self.embeddable_on.clone()))
|
random_line_split
|
media_rule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! An [`@media`][media] rule.
//!
//! [media]: https://drafts.csswg.org/css-conditional/#at-ruledef-media
use cssparser::SourceLocation;
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::MediaList;
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard, ToCssWithGuard};
use std::fmt;
use style_traits::ToCss;
use stylesheets::CssRules;
/// An [`@media`][media] rule.
///
/// [media]: https://drafts.csswg.org/css-conditional/#at-ruledef-media
#[derive(Debug)]
pub struct MediaRule {
/// The list of media queries used by this media rule.
pub media_queries: Arc<Locked<MediaList>>,
/// The nested rules to this media rule.
pub rules: Arc<Locked<CssRules>>,
/// The source position where this media rule was found.
pub source_location: SourceLocation,
}
impl MediaRule {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize
|
}
impl ToCssWithGuard for MediaRule {
// Serialization of MediaRule is not specced.
// https://drafts.csswg.org/cssom/#serialize-a-css-rule CSSMediaRule
fn to_css<W>(&self, guard: &SharedRwLockReadGuard, dest: &mut W) -> fmt::Result
where W: fmt::Write {
dest.write_str("@media ")?;
self.media_queries.read_with(guard).to_css(dest)?;
dest.write_str(" {")?;
for rule in self.rules.read_with(guard).0.iter() {
dest.write_str(" ")?;
rule.to_css(guard, dest)?;
}
dest.write_str(" }")
}
}
impl DeepCloneWithLock for MediaRule {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
let media_queries = self.media_queries.read_with(guard);
let rules = self.rules.read_with(guard);
MediaRule {
media_queries: Arc::new(lock.wrap(media_queries.clone())),
rules: Arc::new(lock.wrap(rules.deep_clone_with_lock(lock, guard, params))),
source_location: self.source_location.clone(),
}
}
}
|
{
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
|
identifier_body
|
media_rule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! An [`@media`][media] rule.
//!
//! [media]: https://drafts.csswg.org/css-conditional/#at-ruledef-media
use cssparser::SourceLocation;
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::MediaList;
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard, ToCssWithGuard};
use std::fmt;
use style_traits::ToCss;
use stylesheets::CssRules;
/// An [`@media`][media] rule.
///
/// [media]: https://drafts.csswg.org/css-conditional/#at-ruledef-media
#[derive(Debug)]
pub struct
|
{
/// The list of media queries used by this media rule.
pub media_queries: Arc<Locked<MediaList>>,
/// The nested rules to this media rule.
pub rules: Arc<Locked<CssRules>>,
/// The source position where this media rule was found.
pub source_location: SourceLocation,
}
impl MediaRule {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl ToCssWithGuard for MediaRule {
// Serialization of MediaRule is not specced.
// https://drafts.csswg.org/cssom/#serialize-a-css-rule CSSMediaRule
fn to_css<W>(&self, guard: &SharedRwLockReadGuard, dest: &mut W) -> fmt::Result
where W: fmt::Write {
dest.write_str("@media ")?;
self.media_queries.read_with(guard).to_css(dest)?;
dest.write_str(" {")?;
for rule in self.rules.read_with(guard).0.iter() {
dest.write_str(" ")?;
rule.to_css(guard, dest)?;
}
dest.write_str(" }")
}
}
impl DeepCloneWithLock for MediaRule {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
let media_queries = self.media_queries.read_with(guard);
let rules = self.rules.read_with(guard);
MediaRule {
media_queries: Arc::new(lock.wrap(media_queries.clone())),
rules: Arc::new(lock.wrap(rules.deep_clone_with_lock(lock, guard, params))),
source_location: self.source_location.clone(),
}
}
}
|
MediaRule
|
identifier_name
|
media_rule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! An [`@media`][media] rule.
//!
//! [media]: https://drafts.csswg.org/css-conditional/#at-ruledef-media
use cssparser::SourceLocation;
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::MediaList;
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard, ToCssWithGuard};
use std::fmt;
use style_traits::ToCss;
use stylesheets::CssRules;
|
/// [media]: https://drafts.csswg.org/css-conditional/#at-ruledef-media
#[derive(Debug)]
pub struct MediaRule {
/// The list of media queries used by this media rule.
pub media_queries: Arc<Locked<MediaList>>,
/// The nested rules to this media rule.
pub rules: Arc<Locked<CssRules>>,
/// The source position where this media rule was found.
pub source_location: SourceLocation,
}
impl MediaRule {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl ToCssWithGuard for MediaRule {
// Serialization of MediaRule is not specced.
// https://drafts.csswg.org/cssom/#serialize-a-css-rule CSSMediaRule
fn to_css<W>(&self, guard: &SharedRwLockReadGuard, dest: &mut W) -> fmt::Result
where W: fmt::Write {
dest.write_str("@media ")?;
self.media_queries.read_with(guard).to_css(dest)?;
dest.write_str(" {")?;
for rule in self.rules.read_with(guard).0.iter() {
dest.write_str(" ")?;
rule.to_css(guard, dest)?;
}
dest.write_str(" }")
}
}
impl DeepCloneWithLock for MediaRule {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
let media_queries = self.media_queries.read_with(guard);
let rules = self.rules.read_with(guard);
MediaRule {
media_queries: Arc::new(lock.wrap(media_queries.clone())),
rules: Arc::new(lock.wrap(rules.deep_clone_with_lock(lock, guard, params))),
source_location: self.source_location.clone(),
}
}
}
|
/// An [`@media`][media] rule.
///
|
random_line_split
|
optimize.rs
|
// @lecorref - github.com/lecorref, @geam - github.com/geam,
// @adjivas - github.com/adjivas. See the LICENSE
// file at the top-level directory of this distribution and at
// https://github.com/adjivas/krpsim
//
// This file may not be copied, modified, or distributed
// except according to those terms.
extern crate std;
/// The `Optimize` structure is the list of keyword/stock names that
/// are given priority.
pub struct Optimize {
pub stock: Vec<String>,
pub time: bool,
}
impl Optimize {
/// The `new` constructor function returns the optimization's items.
pub fn
|
(
stock: Vec<String>,
time: bool,
) -> Self {
Optimize {
stock: stock,
time: time,
}
}
/// The `from_line` constructor function returns the optimization's item
/// for a parsed line.
pub fn from_line (
line: String,
) -> Self {
let stock: Vec<String> = line.split(&['(', ';', ')'][..])
.filter(|&a| !a.is_empty())
.map(|a| a.to_string())
.collect::<Vec<String>>();
Optimize::new(
stock.iter().filter(|&a| a != "time")
.map(|a| a.to_string())
.collect::<Vec<String>>(),
stock.iter().any(|a| a == "time"),
)
}
/// The `len` interface function returns the number of elements
/// in the list.
pub fn len (
&self,
) -> usize {
self.stock.len()
}
/// The `is_empty` interface function returns true if
/// the vector Optimize contains no elements.
pub fn is_empty (
&self
) -> bool {
self.stock.is_empty() && !self.time
}
}
impl std::default::Default for Optimize {
/// The `default` constructor function returns an empty optimize.
fn default() -> Self {
Optimize {
stock: Vec::new(),
time: false,
}
}
}
impl std::fmt::Display for Optimize {
/// The `fmt` function prints the Optimization's items.
fn fmt (
&self,
f: &mut std::fmt::Formatter,
) -> Result<(), std::fmt::Error> {
write!(f, "(optimize: {})", self.stock.iter().map(|a| format!("{}", a))
.collect::<Vec<String>>()
.concat())
}
}
|
new
|
identifier_name
|
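The `from_line` constructor in the optimize.rs rows splits a line on '(', ';' and ')', keeps the non-empty tokens, and treats every token other than the literal "time" as a stock name. Below is a minimal standalone sketch of that same split-and-filter step; the sample input "(gold;time)" is only an assumed illustration of the format, not something stated in the rows, and the sketch does not depend on the krpsim crate.

// Standalone sketch of the split-and-filter parsing used by `Optimize::from_line` above.
// The sample input "(gold;time)" is an assumption about the file format.
fn parse_optimize(line: &str) -> (Vec<String>, bool) {
    let tokens: Vec<String> = line.split(&['(', ';', ')'][..])
        .filter(|&a| !a.is_empty())
        .map(|a| a.to_string())
        .collect();
    let stock: Vec<String> = tokens.iter()
        .filter(|&a| a != "time")
        .map(|a| a.to_string())
        .collect();
    let time = tokens.iter().any(|a| a == "time");
    (stock, time)
}

fn main() {
    let (stock, time) = parse_optimize("(gold;time)");
    assert_eq!(stock, vec!["gold".to_string()]);
    assert!(time); // the "time" keyword is reported separately from the stock names
    println!("stock = {:?}, time = {}", stock, time);
}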
optimize.rs
|
// @lecorref - github.com/lecorref, @geam - github.com/geam,
// @adjivas - github.com/adjivas. See the LICENSE
// file at the top-level directory of this distribution and at
// https://github.com/adjivas/krpsim
//
// This file may not be copied, modified, or distributed
// except according to those terms.
extern crate std;
/// The `Optimize` structure is the list of keyword/stock names that
/// are given priority.
pub struct Optimize {
pub stock: Vec<String>,
pub time: bool,
}
impl Optimize {
/// The `new` constructor function returns the optimization's items.
pub fn new (
stock: Vec<String>,
time: bool,
) -> Self {
Optimize {
stock: stock,
time: time,
}
}
/// The `from_line` constructor function returns the optimization's item
/// for a parsed line.
pub fn from_line (
line: String,
) -> Self {
let stock: Vec<String> = line.split(&['(', ';', ')'][..])
.filter(|&a| !a.is_empty())
.map(|a| a.to_string())
.collect::<Vec<String>>();
Optimize::new(
stock.iter().filter(|&a| a != "time")
.map(|a| a.to_string())
.collect::<Vec<String>>(),
stock.iter().any(|a| a == "time"),
)
}
/// The `len` interface function returns the number of elements
/// in the list.
pub fn len (
&self,
) -> usize {
self.stock.len()
}
/// The `is_empty` interface function returns true if
/// the vector Optimize contains no elements.
pub fn is_empty (
&self
) -> bool {
self.stock.is_empty() && !self.time
}
}
impl std::default::Default for Optimize {
|
Optimize {
stock: Vec::new(),
time: false,
}
}
}
impl std::fmt::Display for Optimize {
/// The `fmt` function prints the Optimization's items.
fn fmt (
&self,
f: &mut std::fmt::Formatter,
) -> Result<(), std::fmt::Error> {
write!(f, "(optimize: {})", self.stock.iter().map(|a| format!("{}", a))
.collect::<Vec<String>>()
.concat())
}
}
|
/// The `default` constructor function returns an empty optimize.
fn default() -> Self {
|
random_line_split
|
mod.rs
|
#![cfg(target_os = "android")]
pub use api::android::*;
use ContextError;
pub struct HeadlessContext(i32);
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(_builder: BuilderAttribs) -> Result<HeadlessContext, CreationError> {
unimplemented!()
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) -> Result<(), ContextError> {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn is_current(&self) -> bool {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn
|
(&self, _addr: &str) -> *const () {
unimplemented!()
}
pub fn get_api(&self) -> ::Api {
::Api::OpenGlEs
}
}
unsafe impl Send for HeadlessContext {}
unsafe impl Sync for HeadlessContext {}
|
get_proc_address
|
identifier_name
|
mod.rs
|
#![cfg(target_os = "android")]
pub use api::android::*;
use ContextError;
pub struct HeadlessContext(i32);
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(_builder: BuilderAttribs) -> Result<HeadlessContext, CreationError> {
unimplemented!()
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) -> Result<(), ContextError>
|
/// See the docs in the crate root file.
pub fn is_current(&self) -> bool {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, _addr: &str) -> *const () {
unimplemented!()
}
pub fn get_api(&self) -> ::Api {
::Api::OpenGlEs
}
}
unsafe impl Send for HeadlessContext {}
unsafe impl Sync for HeadlessContext {}
|
{
unimplemented!()
}
|
identifier_body
|
mod.rs
|
#![cfg(target_os = "android")]
|
pub struct HeadlessContext(i32);
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(_builder: BuilderAttribs) -> Result<HeadlessContext, CreationError> {
unimplemented!()
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) -> Result<(), ContextError> {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn is_current(&self) -> bool {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, _addr: &str) -> *const () {
unimplemented!()
}
pub fn get_api(&self) -> ::Api {
::Api::OpenGlEs
}
}
unsafe impl Send for HeadlessContext {}
unsafe impl Sync for HeadlessContext {}
|
pub use api::android::*;
use ContextError;
|
random_line_split
|
main.rs
|
use instr::Instr;
use cpu::Cpu;
use cpu::CpuInterrupt;
use time::precise_time_ns;
use std::fs::File;
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use std::thread;
use std::sync::mpsc;
use std::sync::mpsc::TryRecvError;
use glium::DisplayBuild;
use glium::Surface;
use glium::SwapBuffersError;
use glium::glutin::Api;
use glium::glutin::GlRequest;
use glium::glutin::Event;
use mem::{RwMemory, WriteObserver};
extern crate time;
extern crate getopts;
#[macro_use]
extern crate glium;
extern crate cgmath;
mod instr;
mod cpu;
mod mem;
mod render;
#[derive(Copy, Clone)]
pub enum IntType {
Vblank,
Hblank,
IoTimer,
}
struct ClockInt {
pub int_target: u64,
pub int_type: IntType,
}
impl PartialEq for ClockInt {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.int_target == other.int_target
}
}
impl Eq for ClockInt {}
impl PartialOrd for ClockInt {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
match self.int_target.partial_cmp(&other.int_target) {
Some(Ordering::Less) => Some(Ordering::Greater),
Some(Ordering::Greater) => Some(Ordering::Less),
ord => ord,
}
}
}
impl Ord for ClockInt {
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
match self.int_target.cmp(&other.int_target) {
Ordering::Less => Ordering::Greater,
Ordering::Greater => Ordering::Less,
ord => ord,
}
}
}
const NS_PER_S: u64 = 1_000_000_000;
const NS_PER_MS: u64 = 1_000_000;
// 10ms
const BUSY_WAIT_THRESHOLD: u64 = 10_000_000;
pub struct Clock {
freq: u32,
period: u64,
int_heap: BinaryHeap<ClockInt>,
}
impl Clock {
pub fn new(freq: u32) -> Clock {
Clock {
freq: freq,
period: NS_PER_S / (freq as u64),
int_heap: BinaryHeap::new(),
}
}
pub fn set_interrupt(&mut self, itype: IntType, period: u64) {
let start = precise_time_ns();
let int = ClockInt {
int_type: itype,
int_target: start + period,
};
self.int_heap.push(int);
}
pub fn wait_cycles(&mut self, n: u32) -> Option<IntType> {
let start = precise_time_ns();
let real_wait = self.period * (n as u64);
let real_target = real_wait + start;
let (target, result) = if let Some(interrupt) = self.int_heap.pop() {
if real_target > interrupt.int_target {
(interrupt.int_target, Some(interrupt.int_type))
} else {
self.int_heap.push(interrupt);
(real_target, None)
}
} else {
(real_target, None)
};
let mut curtime = start;
if target > start && target - start > BUSY_WAIT_THRESHOLD {
std::thread::sleep_ms(((target - start) / NS_PER_MS) as u32);
return result;
} else {
loop {
if curtime >= target {
return result;
}
curtime = precise_time_ns();
}
}
}
}
pub enum WorkerCmd {
TakeSnapshot(Box<RwMemory>, WriteObserver),
Shutdown,
}
fn main() {
// Gather command line args
let args: Vec<String> = std::env::args().collect();
let mut opts = getopts::Options::new();
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m },
Err(e) => panic!("Error: {}", e),
};
let input = if !matches.free.is_empty() {
matches.free[0].clone()
} else {
println!("No input ROM");
return;
};
// Build graphics context and window
let display = glium::glutin::WindowBuilder::new()
.with_title("Gameboy Rust".to_string())
.with_gl(GlRequest::Specific(Api::OpenGl, (3, 2)))
.build_glium()
.unwrap();
// Do machine initialization
let mut cpu = Cpu::new();
cpu.init();
{
let mut ram = cpu.get_ram();
let mut biosfile = match File::open(std::path::Path::new("rom/bios.bin")) {
Ok(f) => { f },
Err(e) => {
println!("Error opening bios file");
return;
},
};
let mut romfile = match File::open(std::path::Path::new(&input)) {
Ok(f) => { f },
Err(e) => {
println!("Error opening file: {}", e);
return;
}
};
if let Err(e) = ram.load_bios(&mut biosfile) {
println!("Error loading bios data: {}", e);
return;
}
if let Err(e) = ram.load_rom(&mut romfile)
|
}
// Initialize virtual LCD
let mut lcd = render::GbDisplay::new(&display);
let mut viewport = {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
render::calculate_viewport(width, height)
};
let (io_tx, sim_rx) = mpsc::channel();
let (sim_tx, io_rx) = mpsc::channel();
let sim_worker = thread::Builder::new()
.name("simulation worker".to_string())
.spawn(move || {
// Initialize virtual hardware clocks
let mut clock = Clock::new(cpu::GB_FREQUENCY);
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
// TODO: Abstract LCD simulation better
// Track ly here
let mut ly = 0;
'main: loop {
// Simulate CPU and hardware timers
'sim: loop {
if let Some(int) = clock.wait_cycles(cpu.do_instr()) {
// Handle timer interrupt
match int {
// Interrupt at the start of the vblank period
IntType::Vblank => {
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
cpu.interrupt(CpuInterrupt::Vblank);
ly = 144; // set_ly_vblank
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
}
// ~10 H-Blanks occur after the V-Blank starts
IntType::Hblank => {
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
// inc_ly_counter
if ly >= 153 {
ly = 0;
} else {
ly += 1
}
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
// At the end, collect data from VRAM and render it
if ly == 0 {
break 'sim;
}
}
// Do timer computations
IntType::IoTimer => {
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
cpu.inc_io_timer();
}
}
}
}
// Check commands from master
match sim_rx.try_recv() {
Ok(WorkerCmd::TakeSnapshot(oldsnap, mut observer)) => {
let ram = cpu.get_ram();
let newsnap = ram.swap_backup(oldsnap);
ram.get_observer().apply(&mut observer);
sim_tx.send((newsnap, observer));
if !ram.verify_backup() {
println!("Backup verify failed!")
}
},
Ok(WorkerCmd::Shutdown) => break 'main,
Err(TryRecvError::Empty) => (),
Err(TryRecvError::Disconnected) => {
panic!("I/O thread disconnected without notifying");
}
}
}
});
// Create a memory snapshot, and write observer
let mut oldsnap = Some(Box::new(RwMemory::new()));
let mut oldobserver = Some(WriteObserver::new());
// Simulate CPU
'main: loop {
// Collect user input
for ev in display.poll_events() {
match ev {
Event::Closed => {
break 'main;
},
Event::Resized(..) => {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
viewport = render::calculate_viewport(width, height);
},
_ => (),
}
}
// Request memory snapshot from simulation
io_tx.send(WorkerCmd::TakeSnapshot(oldsnap.take().unwrap(), oldobserver.take().unwrap()));
let (snapshot, mut observer) = match io_rx.recv() {
Ok(v) => v,
Err(_) => panic!("Did not receive snapshot from simulation thread"),
};
// Redraw screen
let pre_clear = precise_time_ns();
let mut target = display.draw();
target.clear_color(0.0, 0.0, 0.0, 1.0);
lcd.clear_viewport(&mut target, viewport, (1.0, 1.0, 1.0, 1.0));
let post_clear = precise_time_ns();
let clear_time = (post_clear - pre_clear) as f32 / NS_PER_MS as f32;
if clear_time > 5.0f32 {
println!("clear time: {}ms", clear_time);
}
let pre_draw = precise_time_ns();
lcd.draw(&display, &mut target, viewport, &snapshot, &mut observer);
let post_draw = precise_time_ns();
let draw_time = (post_draw - pre_draw) as f32 / NS_PER_MS as f32;
if draw_time > 5.0f32 {
println!("lcd.draw time: {}ms", draw_time);
}
match target.finish().err() {
Some(SwapBuffersError::ContextLost) => {
panic!("OpenGL contetxt lost!");
},
Some(SwapBuffersError::AlreadySwapped) => {
println!("Warning: OpenGL buffer already swapped");
},
None => (),
}
let pre_flush = precise_time_ns();
display.flush();
let post_flush = precise_time_ns();
let flush_time = (post_flush - pre_flush) as f32 / NS_PER_MS as f32;
if flush_time > 5.0f32 {
println!("flush time: {}ms", flush_time);
}
oldsnap = Some(snapshot);
oldobserver = Some(observer);
}
// Shutdown sim thread
io_tx.send(WorkerCmd::Shutdown);
}
|
{
println!("Error loading rom data: {}", e);
return;
}
|
conditional_block
|
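The `ClockInt` ordering in the main.rs rows deliberately swaps `Ordering::Less` and `Ordering::Greater`, so that Rust's max-oriented `BinaryHeap` pops the interrupt with the smallest `int_target` first. A minimal sketch of the same inversion trick follows; `Deadline` and its nanosecond values are hypothetical stand-ins, not types from the emulator.

use std::cmp::Ordering;
use std::collections::BinaryHeap;

// Same inverted-ordering trick as `ClockInt`: flipping Less/Greater turns the
// max-heap `BinaryHeap` into an earliest-deadline-first queue.
#[derive(Eq, PartialEq)]
struct Deadline(u64); // nanosecond target; the values below are made up

impl Ord for Deadline {
    fn cmp(&self, other: &Self) -> Ordering {
        other.0.cmp(&self.0) // reversed on purpose
    }
}

impl PartialOrd for Deadline {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let mut heap = BinaryHeap::new();
    heap.push(Deadline(300));
    heap.push(Deadline(100));
    heap.push(Deadline(200));
    // The earliest deadline comes out first, even though BinaryHeap is a max-heap.
    assert_eq!(heap.pop().map(|d| d.0), Some(100));
    assert_eq!(heap.pop().map(|d| d.0), Some(200));
    assert_eq!(heap.pop().map(|d| d.0), Some(300));
}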
main.rs
|
use instr::Instr;
use cpu::Cpu;
use cpu::CpuInterrupt;
use time::precise_time_ns;
use std::fs::File;
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use std::thread;
use std::sync::mpsc;
use std::sync::mpsc::TryRecvError;
use glium::DisplayBuild;
use glium::Surface;
use glium::SwapBuffersError;
use glium::glutin::Api;
use glium::glutin::GlRequest;
use glium::glutin::Event;
use mem::{RwMemory, WriteObserver};
extern crate time;
extern crate getopts;
#[macro_use]
extern crate glium;
extern crate cgmath;
mod instr;
mod cpu;
mod mem;
mod render;
#[derive(Copy, Clone)]
pub enum IntType {
Vblank,
Hblank,
IoTimer,
}
struct ClockInt {
pub int_target: u64,
pub int_type: IntType,
}
impl PartialEq for ClockInt {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.int_target == other.int_target
}
}
|
match self.int_target.partial_cmp(&other.int_target) {
Some(Ordering::Less) => Some(Ordering::Greater),
Some(Ordering::Greater) => Some(Ordering::Less),
ord => ord,
}
}
}
impl Ord for ClockInt {
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
match self.int_target.cmp(&other.int_target) {
Ordering::Less => Ordering::Greater,
Ordering::Greater => Ordering::Less,
ord => ord,
}
}
}
const NS_PER_S: u64 = 1_000_000_000;
const NS_PER_MS: u64 = 1_000_000;
// 10ms
const BUSY_WAIT_THRESHOLD: u64 = 10_000_000;
pub struct Clock {
freq: u32,
period: u64,
int_heap: BinaryHeap<ClockInt>,
}
impl Clock {
pub fn new(freq: u32) -> Clock {
Clock {
freq: freq,
period: NS_PER_S / (freq as u64),
int_heap: BinaryHeap::new(),
}
}
pub fn set_interrupt(&mut self, itype: IntType, period: u64) {
let start = precise_time_ns();
let int = ClockInt {
int_type: itype,
int_target: start + period,
};
self.int_heap.push(int);
}
pub fn wait_cycles(&mut self, n: u32) -> Option<IntType> {
let start = precise_time_ns();
let real_wait = self.period * (n as u64);
let real_target = real_wait + start;
let (target, result) = if let Some(interrupt) = self.int_heap.pop() {
if real_target > interrupt.int_target {
(interrupt.int_target, Some(interrupt.int_type))
} else {
self.int_heap.push(interrupt);
(real_target, None)
}
} else {
(real_target, None)
};
let mut curtime = start;
if target > start && target - start > BUSY_WAIT_THRESHOLD {
std::thread::sleep_ms(((target - start) / NS_PER_MS) as u32);
return result;
} else {
loop {
if curtime >= target {
return result;
}
curtime = precise_time_ns();
}
}
}
}
pub enum WorkerCmd {
TakeSnapshot(Box<RwMemory>, WriteObserver),
Shutdown,
}
fn main() {
// Gather command line args
let args: Vec<String> = std::env::args().collect();
let mut opts = getopts::Options::new();
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m },
Err(e) => panic!("Error: {}", e),
};
let input = if !matches.free.is_empty() {
matches.free[0].clone()
} else {
println!("No input ROM");
return;
};
// Build graphics context and window
let display = glium::glutin::WindowBuilder::new()
.with_title("Gameboy Rust".to_string())
.with_gl(GlRequest::Specific(Api::OpenGl, (3, 2)))
.build_glium()
.unwrap();
// Do machine initialization
let mut cpu = Cpu::new();
cpu.init();
{
let mut ram = cpu.get_ram();
let mut biosfile = match File::open(std::path::Path::new("rom/bios.bin")) {
Ok(f) => { f },
Err(e) => {
println!("Error opening bios file");
return;
},
};
let mut romfile = match File::open(std::path::Path::new(&input)) {
Ok(f) => { f },
Err(e) => {
println!("Error opening file: {}", e);
return;
}
};
if let Err(e) = ram.load_bios(&mut biosfile) {
println!("Error loading bios data: {}", e);
return;
}
if let Err(e) = ram.load_rom(&mut romfile) {
println!("Error loading rom data: {}", e);
return;
}
}
// Initialize virtual LCD
let mut lcd = render::GbDisplay::new(&display);
let mut viewport = {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
render::calculate_viewport(width, height)
};
let (io_tx, sim_rx) = mpsc::channel();
let (sim_tx, io_rx) = mpsc::channel();
let sim_worker = thread::Builder::new()
.name("simulation worker".to_string())
.spawn(move || {
// Initialize virtual hardware clocks
let mut clock = Clock::new(cpu::GB_FREQUENCY);
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
// TODO: Abstract LCD simulation better
// Track ly here
let mut ly = 0;
'main: loop {
// Simulate CPU and hardware timers
'sim: loop {
if let Some(int) = clock.wait_cycles(cpu.do_instr()) {
// Handle timer interrupt
match int {
// Interrupt at the start of the vblank period
IntType::Vblank => {
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
cpu.interrupt(CpuInterrupt::Vblank);
ly = 144; // set_ly_vblank
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
}
// ~10 H-Blanks occur after the V-Blank starts
IntType::Hblank => {
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
// inc_ly_counter
if ly >= 153 {
ly = 0;
} else {
ly += 1
}
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
// At the end, collect data from VRAM and render it
if ly == 0 {
break 'sim;
}
}
// Do timer computations
IntType::IoTimer => {
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
cpu.inc_io_timer();
}
}
}
}
// Check commands from master
match sim_rx.try_recv() {
Ok(WorkerCmd::TakeSnapshot(oldsnap, mut observer)) => {
let ram = cpu.get_ram();
let newsnap = ram.swap_backup(oldsnap);
ram.get_observer().apply(&mut observer);
sim_tx.send((newsnap, observer));
if !ram.verify_backup() {
println!("Backup verify failed!")
}
},
Ok(WorkerCmd::Shutdown) => break 'main,
Err(TryRecvError::Empty) => (),
Err(TryRecvError::Disconnected) => {
panic!("I/O thread disconnected without notifying");
}
}
}
});
// Create a memory snapshot, and write observer
let mut oldsnap = Some(Box::new(RwMemory::new()));
let mut oldobserver = Some(WriteObserver::new());
// Simulate CPU
'main: loop {
// Collect user input
for ev in display.poll_events() {
match ev {
Event::Closed => {
break 'main;
},
Event::Resized(..) => {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
viewport = render::calculate_viewport(width, height);
},
_ => (),
}
}
// Request memory snapshot from simulation
io_tx.send(WorkerCmd::TakeSnapshot(oldsnap.take().unwrap(), oldobserver.take().unwrap()));
let (snapshot, mut observer) = match io_rx.recv() {
Ok(v) => v,
Err(_) => panic!("Did not receive snapshot from simulation thread"),
};
// Redraw screen
let pre_clear = precise_time_ns();
let mut target = display.draw();
target.clear_color(0.0, 0.0, 0.0, 1.0);
lcd.clear_viewport(&mut target, viewport, (1.0, 1.0, 1.0, 1.0));
let post_clear = precise_time_ns();
let clear_time = (post_clear - pre_clear) as f32 / NS_PER_MS as f32;
if clear_time > 5.0f32 {
println!("clear time: {}ms", clear_time);
}
let pre_draw = precise_time_ns();
lcd.draw(&display, &mut target, viewport, &snapshot, &mut observer);
let post_draw = precise_time_ns();
let draw_time = (post_draw - pre_draw) as f32 / NS_PER_MS as f32;
if draw_time > 5.0f32 {
println!("lcd.draw time: {}ms", draw_time);
}
match target.finish().err() {
Some(SwapBuffersError::ContextLost) => {
panic!("OpenGL contetxt lost!");
},
Some(SwapBuffersError::AlreadySwapped) => {
println!("Warning: OpenGL buffer already swapped");
},
None => (),
}
let pre_flush = precise_time_ns();
display.flush();
let post_flush = precise_time_ns();
let flush_time = (post_flush - pre_flush) as f32 / NS_PER_MS as f32;
if flush_time > 5.0f32 {
println!("flush time: {}ms", flush_time);
}
oldsnap = Some(snapshot);
oldobserver = Some(observer);
}
// Shutdown sim thread
io_tx.send(WorkerCmd::Shutdown);
}
|
impl Eq for ClockInt {}
impl PartialOrd for ClockInt {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
random_line_split
|
main.rs
|
use instr::Instr;
use cpu::Cpu;
use cpu::CpuInterrupt;
use time::precise_time_ns;
use std::fs::File;
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use std::thread;
use std::sync::mpsc;
use std::sync::mpsc::TryRecvError;
use glium::DisplayBuild;
use glium::Surface;
use glium::SwapBuffersError;
use glium::glutin::Api;
use glium::glutin::GlRequest;
use glium::glutin::Event;
use mem::{RwMemory, WriteObserver};
extern crate time;
extern crate getopts;
#[macro_use]
extern crate glium;
extern crate cgmath;
mod instr;
mod cpu;
mod mem;
mod render;
#[derive(Copy, Clone)]
pub enum IntType {
Vblank,
Hblank,
IoTimer,
}
struct ClockInt {
pub int_target: u64,
pub int_type: IntType,
}
impl PartialEq for ClockInt {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.int_target == other.int_target
}
}
impl Eq for ClockInt {}
impl PartialOrd for ClockInt {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
match self.int_target.partial_cmp(&other.int_target) {
Some(Ordering::Less) => Some(Ordering::Greater),
Some(Ordering::Greater) => Some(Ordering::Less),
ord => ord,
}
}
}
impl Ord for ClockInt {
#[inline]
fn cmp(&self, other: &Self) -> Ordering
|
}
const NS_PER_S: u64 = 1_000_000_000;
const NS_PER_MS: u64 = 1_000_000;
// 10ms
const BUSY_WAIT_THRESHOLD: u64 = 10_000_000;
pub struct Clock {
freq: u32,
period: u64,
int_heap: BinaryHeap<ClockInt>,
}
impl Clock {
pub fn new(freq: u32) -> Clock {
Clock {
freq: freq,
period: NS_PER_S / (freq as u64),
int_heap: BinaryHeap::new(),
}
}
pub fn set_interrupt(&mut self, itype: IntType, period: u64) {
let start = precise_time_ns();
let int = ClockInt {
int_type: itype,
int_target: start + period,
};
self.int_heap.push(int);
}
pub fn wait_cycles(&mut self, n: u32) -> Option<IntType> {
let start = precise_time_ns();
let real_wait = self.period * (n as u64);
let real_target = real_wait + start;
let (target, result) = if let Some(interrupt) = self.int_heap.pop() {
if real_target > interrupt.int_target {
(interrupt.int_target, Some(interrupt.int_type))
} else {
self.int_heap.push(interrupt);
(real_target, None)
}
} else {
(real_target, None)
};
let mut curtime = start;
if target > start && target - start > BUSY_WAIT_THRESHOLD {
std::thread::sleep_ms(((target - start) / NS_PER_MS) as u32);
return result;
} else {
loop {
if curtime >= target {
return result;
}
curtime = precise_time_ns();
}
}
}
}
pub enum WorkerCmd {
TakeSnapshot(Box<RwMemory>, WriteObserver),
Shutdown,
}
fn main() {
// Gather command line args
let args: Vec<String> = std::env::args().collect();
let mut opts = getopts::Options::new();
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m },
Err(e) => panic!("Error: {}", e),
};
let input = if !matches.free.is_empty() {
matches.free[0].clone()
} else {
println!("No input ROM");
return;
};
// Build graphics context and window
let display = glium::glutin::WindowBuilder::new()
.with_title("Gameboy Rust".to_string())
.with_gl(GlRequest::Specific(Api::OpenGl, (3, 2)))
.build_glium()
.unwrap();
// Do machine initialization
let mut cpu = Cpu::new();
cpu.init();
{
let mut ram = cpu.get_ram();
let mut biosfile = match File::open(std::path::Path::new("rom/bios.bin")) {
Ok(f) => { f },
Err(e) => {
println!("Error opening bios file");
return;
},
};
let mut romfile = match File::open(std::path::Path::new(&input)) {
Ok(f) => { f },
Err(e) => {
println!("Error opening file: {}", e);
return;
}
};
if let Err(e) = ram.load_bios(&mut biosfile) {
println!("Error loading bios data: {}", e);
return;
}
if let Err(e) = ram.load_rom(&mut romfile) {
println!("Error loading rom data: {}", e);
return;
}
}
// Initialize virtual LCD
let mut lcd = render::GbDisplay::new(&display);
let mut viewport = {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
render::calculate_viewport(width, height)
};
let (io_tx, sim_rx) = mpsc::channel();
let (sim_tx, io_rx) = mpsc::channel();
let sim_worker = thread::Builder::new()
.name("simulation worker".to_string())
.spawn(move || {
// Initialize virtual hardware clocks
let mut clock = Clock::new(cpu::GB_FREQUENCY);
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
// TODO: Abstract LCD simulation better
// Track ly here
let mut ly = 0;
'main: loop {
// Simulate CPU and hardware timers
'sim: loop {
if let Some(int) = clock.wait_cycles(cpu.do_instr()) {
// Handle timer interrupt
match int {
// Interrupt at the start of the vblank period
IntType::Vblank => {
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
cpu.interrupt(CpuInterrupt::Vblank);
ly = 144; // set_ly_vblank
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
}
// ~10 H-Blanks occur after the V-Blank starts
IntType::Hblank => {
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
// inc_ly_counter
if ly >= 153 {
ly = 0;
} else {
ly += 1
}
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
// At the end, collect data from VRAM and render it
if ly == 0 {
break 'sim;
}
}
// Do timer computations
IntType::IoTimer => {
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
cpu.inc_io_timer();
}
}
}
}
// Check commands from master
match sim_rx.try_recv() {
Ok(WorkerCmd::TakeSnapshot(oldsnap, mut observer)) => {
let ram = cpu.get_ram();
let newsnap = ram.swap_backup(oldsnap);
ram.get_observer().apply(&mut observer);
sim_tx.send((newsnap, observer));
if !ram.verify_backup() {
println!("Backup verify failed!")
}
},
Ok(WorkerCmd::Shutdown) => break 'main,
Err(TryRecvError::Empty) => (),
Err(TryRecvError::Disconnected) => {
panic!("I/O thread disconnected without notifying");
}
}
}
});
// Create a memory snapshot, and write observer
let mut oldsnap = Some(Box::new(RwMemory::new()));
let mut oldobserver = Some(WriteObserver::new());
// Simulate CPU
'main: loop {
// Collect user input
for ev in display.poll_events() {
match ev {
Event::Closed => {
break 'main;
},
Event::Resized(..) => {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
viewport = render::calculate_viewport(width, height);
},
_ => (),
}
}
// Request memory snapshot from simulation
io_tx.send(WorkerCmd::TakeSnapshot(oldsnap.take().unwrap(), oldobserver.take().unwrap()));
let (snapshot, mut observer) = match io_rx.recv() {
Ok(v) => v,
Err(_) => panic!("Did not receive snapshot from simulation thread"),
};
// Redraw screen
let pre_clear = precise_time_ns();
let mut target = display.draw();
target.clear_color(0.0, 0.0, 0.0, 1.0);
lcd.clear_viewport(&mut target, viewport, (1.0, 1.0, 1.0, 1.0));
let post_clear = precise_time_ns();
let clear_time = (post_clear - pre_clear) as f32 / NS_PER_MS as f32;
if clear_time > 5.0f32 {
println!("clear time: {}ms", clear_time);
}
let pre_draw = precise_time_ns();
lcd.draw(&display, &mut target, viewport, &snapshot, &mut observer);
let post_draw = precise_time_ns();
let draw_time = (post_draw - pre_draw) as f32 / NS_PER_MS as f32;
if draw_time > 5.0f32 {
println!("lcd.draw time: {}ms", draw_time);
}
match target.finish().err() {
Some(SwapBuffersError::ContextLost) => {
panic!("OpenGL contetxt lost!");
},
Some(SwapBuffersError::AlreadySwapped) => {
println!("Warning: OpenGL buffer already swapped");
},
None => (),
}
let pre_flush = precise_time_ns();
display.flush();
let post_flush = precise_time_ns();
let flush_time = (post_flush - pre_flush) as f32 / NS_PER_MS as f32;
if flush_time > 5.0f32 {
println!("flush time: {}ms", flush_time);
}
oldsnap = Some(snapshot);
oldobserver = Some(observer);
}
// Shutdown sim thread
io_tx.send(WorkerCmd::Shutdown);
}
|
{
match self.int_target.cmp(&other.int_target) {
Ordering::Less => Ordering::Greater,
Ordering::Greater => Ordering::Less,
ord => ord,
}
}
|
identifier_body
|
main.rs
|
use instr::Instr;
use cpu::Cpu;
use cpu::CpuInterrupt;
use time::precise_time_ns;
use std::fs::File;
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use std::thread;
use std::sync::mpsc;
use std::sync::mpsc::TryRecvError;
use glium::DisplayBuild;
use glium::Surface;
use glium::SwapBuffersError;
use glium::glutin::Api;
use glium::glutin::GlRequest;
use glium::glutin::Event;
use mem::{RwMemory, WriteObserver};
extern crate time;
extern crate getopts;
#[macro_use]
extern crate glium;
extern crate cgmath;
mod instr;
mod cpu;
mod mem;
mod render;
#[derive(Copy, Clone)]
pub enum IntType {
Vblank,
Hblank,
IoTimer,
}
struct ClockInt {
pub int_target: u64,
pub int_type: IntType,
}
impl PartialEq for ClockInt {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.int_target == other.int_target
}
}
impl Eq for ClockInt {}
impl PartialOrd for ClockInt {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
match self.int_target.partial_cmp(&other.int_target) {
Some(Ordering::Less) => Some(Ordering::Greater),
Some(Ordering::Greater) => Some(Ordering::Less),
ord => ord,
}
}
}
impl Ord for ClockInt {
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
match self.int_target.cmp(&other.int_target) {
Ordering::Less => Ordering::Greater,
Ordering::Greater => Ordering::Less,
ord => ord,
}
}
}
const NS_PER_S: u64 = 1_000_000_000;
const NS_PER_MS: u64 = 1_000_000;
// 10ms
const BUSY_WAIT_THRESHOLD: u64 = 10_000_000;
pub struct Clock {
freq: u32,
period: u64,
int_heap: BinaryHeap<ClockInt>,
}
impl Clock {
pub fn new(freq: u32) -> Clock {
Clock {
freq: freq,
period: NS_PER_S / (freq as u64),
int_heap: BinaryHeap::new(),
}
}
pub fn set_interrupt(&mut self, itype: IntType, period: u64) {
let start = precise_time_ns();
let int = ClockInt {
int_type: itype,
int_target: start + period,
};
self.int_heap.push(int);
}
pub fn wait_cycles(&mut self, n: u32) -> Option<IntType> {
let start = precise_time_ns();
let real_wait = self.period * (n as u64);
let real_target = real_wait + start;
let (target, result) = if let Some(interrupt) = self.int_heap.pop() {
if real_target > interrupt.int_target {
(interrupt.int_target, Some(interrupt.int_type))
} else {
self.int_heap.push(interrupt);
(real_target, None)
}
} else {
(real_target, None)
};
let mut curtime = start;
if target > start && target - start > BUSY_WAIT_THRESHOLD {
std::thread::sleep_ms(((target - start) / NS_PER_MS) as u32);
return result;
} else {
loop {
if curtime >= target {
return result;
}
curtime = precise_time_ns();
}
}
}
}
pub enum WorkerCmd {
TakeSnapshot(Box<RwMemory>, WriteObserver),
Shutdown,
}
fn
|
() {
// Gather command line args
let args: Vec<String> = std::env::args().collect();
let mut opts = getopts::Options::new();
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m },
Err(e) => panic!("Error: {}", e),
};
let input = if !matches.free.is_empty() {
matches.free[0].clone()
} else {
println!("No input ROM");
return;
};
// Build graphics context and window
let display = glium::glutin::WindowBuilder::new()
.with_title("Gameboy Rust".to_string())
.with_gl(GlRequest::Specific(Api::OpenGl, (3, 2)))
.build_glium()
.unwrap();
// Do machine initialization
let mut cpu = Cpu::new();
cpu.init();
{
let mut ram = cpu.get_ram();
let mut biosfile = match File::open(std::path::Path::new("rom/bios.bin")) {
Ok(f) => { f },
Err(e) => {
println!("Error opening bios file");
return;
},
};
let mut romfile = match File::open(std::path::Path::new(&input)) {
Ok(f) => { f },
Err(e) => {
println!("Error opening file: {}", e);
return;
}
};
if let Err(e) = ram.load_bios(&mut biosfile) {
println!("Error loading bios data: {}", e);
return;
}
if let Err(e) = ram.load_rom(&mut romfile) {
println!("Error loading rom data: {}", e);
return;
}
}
// Initialize virtual LCD
let mut lcd = render::GbDisplay::new(&display);
let mut viewport = {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
render::calculate_viewport(width, height)
};
let (io_tx, sim_rx) = mpsc::channel();
let (sim_tx, io_rx) = mpsc::channel();
let sim_worker = thread::Builder::new()
.name("simulation worker".to_string())
.spawn(move || {
// Initialize virtual hardware clocks
let mut clock = Clock::new(cpu::GB_FREQUENCY);
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
// TODO: Abstract LCD simulation better
// Track ly here
let mut ly = 0;
'main: loop {
// Simulate CPU and hardware timers
'sim: loop {
if let Some(int) = clock.wait_cycles(cpu.do_instr()) {
// Handle timer interrupt
match int {
// Interrupt at the start of the vblank period
IntType::Vblank => {
clock.set_interrupt(IntType::Vblank, render::VBLANK_PERIOD);
cpu.interrupt(CpuInterrupt::Vblank);
ly = 144; // set_ly_vblank
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
}
// ~10 H-Blanks occur after the V-Blank starts
IntType::Hblank => {
clock.set_interrupt(IntType::Hblank, render::HBLANK_PERIOD);
// inc_ly_counter
if ly >= 153 {
ly = 0;
} else {
ly += 1
}
let ram = cpu.get_ram();
ram.sys_write(mem::IOREG_LY, ly);
// At the end, collect data from VRAM and render it
if ly == 0 {
break 'sim;
}
}
// Do timer computations
IntType::IoTimer => {
clock.set_interrupt(IntType::IoTimer, cpu::TIMER_BASE_PERIOD_NS);
cpu.inc_io_timer();
}
}
}
}
// Check commands from master
match sim_rx.try_recv() {
Ok(WorkerCmd::TakeSnapshot(oldsnap, mut observer)) => {
let ram = cpu.get_ram();
let newsnap = ram.swap_backup(oldsnap);
ram.get_observer().apply(&mut observer);
sim_tx.send((newsnap, observer));
if !ram.verify_backup() {
println!("Backup verify failed!")
}
},
Ok(WorkerCmd::Shutdown) => break 'main,
Err(TryRecvError::Empty) => (),
Err(TryRecvError::Disconnected) => {
panic!("I/O thread disconnected without notifying");
}
}
}
});
// Create a memory snapshot, and write observer
let mut oldsnap = Some(Box::new(RwMemory::new()));
let mut oldobserver = Some(WriteObserver::new());
// Simulate CPU
'main: loop {
// Collect user input
for ev in display.poll_events() {
match ev {
Event::Closed => {
break 'main;
},
Event::Resized(..) => {
let window = display.get_window();
let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();
viewport = render::calculate_viewport(width, height);
},
_ => (),
}
}
// Request memory snapshot from simulation
io_tx.send(WorkerCmd::TakeSnapshot(oldsnap.take().unwrap(), oldobserver.take().unwrap()));
let (snapshot, mut observer) = match io_rx.recv() {
Ok(v) => v,
Err(_) => panic!("Did not receive snapshot from simulation thread"),
};
// Redraw screen
let pre_clear = precise_time_ns();
let mut target = display.draw();
target.clear_color(0.0, 0.0, 0.0, 1.0);
lcd.clear_viewport(&mut target, viewport, (1.0, 1.0, 1.0, 1.0));
let post_clear = precise_time_ns();
let clear_time = (post_clear - pre_clear) as f32 / NS_PER_MS as f32;
if clear_time > 5.0f32 {
println!("clear time: {}ms", clear_time);
}
let pre_draw = precise_time_ns();
lcd.draw(&display, &mut target, viewport, &snapshot, &mut observer);
let post_draw = precise_time_ns();
let draw_time = (post_draw - pre_draw) as f32 / NS_PER_MS as f32;
if draw_time > 5.0f32 {
println!("lcd.draw time: {}ms", draw_time);
}
match target.finish().err() {
Some(SwapBuffersError::ContextLost) => {
panic!("OpenGL contetxt lost!");
},
Some(SwapBuffersError::AlreadySwapped) => {
println!("Warning: OpenGL buffer already swapped");
},
None => (),
}
let pre_flush = precise_time_ns();
display.flush();
let post_flush = precise_time_ns();
let flush_time = (post_flush - pre_flush) as f32 / NS_PER_MS as f32;
if flush_time > 5.0f32 {
println!("flush time: {}ms", flush_time);
}
oldsnap = Some(snapshot);
oldobserver = Some(observer);
}
// Shutdown sim thread
io_tx.send(WorkerCmd::Shutdown);
}
|
main
|
identifier_name
|
mod.rs
|
//! Entry point for the CSS filters infrastructure.
use cssparser::{BasicParseError, Parser};
use markup5ever::{expanded_name, local_name, namespace_url, ns};
use std::rc::Rc;
use std::time::Instant;
use crate::bbox::BoundingBox;
use crate::document::AcquiredNodes;
use crate::drawing_ctx::DrawingCtx;
use crate::element::{Draw, ElementResult, SetAttributes};
use crate::error::{ElementError, ParseError, RenderingError};
use crate::filter::UserSpaceFilter;
use crate::length::*;
use crate::node::{Node, NodeBorrow};
use crate::paint_server::UserSpacePaintSource;
use crate::parsers::{CustomIdent, Parse, ParseValue};
use crate::properties::ColorInterpolationFilters;
use crate::surface_utils::shared_surface::{SharedImageSurface, SurfaceType};
use crate::transform::Transform;
use crate::xml::Attributes;
mod bounds;
use self::bounds::BoundsBuilder;
pub mod context;
use self::context::{FilterContext, FilterOutput, FilterResult};
mod error;
use self::error::FilterError;
pub use self::error::FilterResolveError;
/// A filter primitive interface.
pub trait FilterEffect: SetAttributes + Draw {
fn resolve(
&self,
acquired_nodes: &mut AcquiredNodes<'_>,
node: &Node,
) -> Result<ResolvedPrimitive, FilterResolveError>;
}
// Filter Effects do not need to draw themselves
impl<T: FilterEffect> Draw for T {}
pub mod blend;
pub mod color_matrix;
pub mod component_transfer;
pub mod composite;
pub mod convolve_matrix;
pub mod displacement_map;
pub mod flood;
pub mod gaussian_blur;
pub mod image;
pub mod lighting;
pub mod merge;
pub mod morphology;
pub mod offset;
pub mod tile;
pub mod turbulence;
pub struct FilterSpec {
pub user_space_filter: UserSpaceFilter,
pub primitives: Vec<UserSpacePrimitive>,
}
/// Resolved parameters for each filter primitive.
///
/// These gather all the data that a primitive may need during rendering:
/// the `feFoo` element's attributes, any computed values from its properties,
/// and parameters extracted from the element's children (for example,
/// `feMerge` gathers info from its `feMergeNode` children).
pub enum
|
{
Blend(blend::Blend),
ColorMatrix(color_matrix::ColorMatrix),
ComponentTransfer(component_transfer::ComponentTransfer),
Composite(composite::Composite),
ConvolveMatrix(convolve_matrix::ConvolveMatrix),
DiffuseLighting(lighting::DiffuseLighting),
DisplacementMap(displacement_map::DisplacementMap),
Flood(flood::Flood),
GaussianBlur(gaussian_blur::GaussianBlur),
Image(image::Image),
Merge(merge::Merge),
Morphology(morphology::Morphology),
Offset(offset::Offset),
SpecularLighting(lighting::SpecularLighting),
Tile(tile::Tile),
Turbulence(turbulence::Turbulence),
}
impl PrimitiveParams {
/// Returns a human-readable name for a primitive.
#[rustfmt::skip]
fn name(&self) -> &'static str {
use PrimitiveParams::*;
match self {
Blend(..) => "feBlend",
ColorMatrix(..) => "feColorMatrix",
ComponentTransfer(..) => "feComponentTransfer",
Composite(..) => "feComposite",
ConvolveMatrix(..) => "feConvolveMatrix",
DiffuseLighting(..) => "feDiffuseLighting",
DisplacementMap(..) => "feDisplacementMap",
Flood(..) => "feFlood",
GaussianBlur(..) => "feGaussianBlur",
Image(..) => "feImage",
Merge(..) => "feMerge",
Morphology(..) => "feMorphology",
Offset(..) => "feOffset",
SpecularLighting(..) => "feSpecularLighting",
Tile(..) => "feTile",
Turbulence(..) => "feTurbulence",
}
}
}
/// The base filter primitive node containing common properties.
#[derive(Default, Clone)]
pub struct Primitive {
pub x: Option<Length<Horizontal>>,
pub y: Option<Length<Vertical>>,
pub width: Option<ULength<Horizontal>>,
pub height: Option<ULength<Vertical>>,
pub result: Option<CustomIdent>,
}
pub struct ResolvedPrimitive {
pub primitive: Primitive,
pub params: PrimitiveParams,
}
/// A fully resolved filter primitive in user-space coordinates.
pub struct UserSpacePrimitive {
x: Option<f64>,
y: Option<f64>,
width: Option<f64>,
height: Option<f64>,
result: Option<CustomIdent>,
params: PrimitiveParams,
}
/// An enumeration of possible inputs for a filter primitive.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Input {
Unspecified,
SourceGraphic,
SourceAlpha,
BackgroundImage,
BackgroundAlpha,
FillPaint,
StrokePaint,
FilterOutput(CustomIdent),
}
enum_default!(Input, Input::Unspecified);
impl Parse for Input {
fn parse<'i>(parser: &mut Parser<'i, '_>) -> Result<Self, ParseError<'i>> {
parser
.try_parse(|p| {
parse_identifiers!(
p,
"SourceGraphic" => Input::SourceGraphic,
"SourceAlpha" => Input::SourceAlpha,
"BackgroundImage" => Input::BackgroundImage,
"BackgroundAlpha" => Input::BackgroundAlpha,
"FillPaint" => Input::FillPaint,
"StrokePaint" => Input::StrokePaint,
)
})
.or_else(|_: BasicParseError<'_>| {
let ident = CustomIdent::parse(parser)?;
Ok(Input::FilterOutput(ident))
})
}
}
impl ResolvedPrimitive {
pub fn into_user_space(self, params: &NormalizeParams) -> UserSpacePrimitive {
let x = self.primitive.x.map(|l| l.to_user(params));
let y = self.primitive.y.map(|l| l.to_user(params));
let width = self.primitive.width.map(|l| l.to_user(params));
let height = self.primitive.height.map(|l| l.to_user(params));
UserSpacePrimitive {
x,
y,
width,
height,
result: self.primitive.result,
params: self.params,
}
}
}
impl UserSpacePrimitive {
/// Validates attributes and returns the `BoundsBuilder` for bounds computation.
#[inline]
fn get_bounds(&self, ctx: &FilterContext) -> BoundsBuilder {
BoundsBuilder::new(self.x, self.y, self.width, self.height, ctx.paffine())
}
}
impl Primitive {
fn parse_standard_attributes(
&mut self,
attrs: &Attributes,
) -> Result<(Input, Input), ElementError> {
let mut input_1 = Input::Unspecified;
let mut input_2 = Input::Unspecified;
for (attr, value) in attrs.iter() {
match attr.expanded() {
expanded_name!("", "x") => self.x = attr.parse(value)?,
expanded_name!("", "y") => self.y = attr.parse(value)?,
expanded_name!("", "width") => self.width = attr.parse(value)?,
expanded_name!("", "height") => self.height = attr.parse(value)?,
expanded_name!("", "result") => self.result = attr.parse(value)?,
expanded_name!("", "in") => input_1 = attr.parse(value)?,
expanded_name!("", "in2") => input_2 = attr.parse(value)?,
_ => (),
}
}
Ok((input_1, input_2))
}
pub fn parse_no_inputs(&mut self, attrs: &Attributes) -> ElementResult {
let (_, _) = self.parse_standard_attributes(attrs)?;
Ok(())
}
pub fn parse_one_input(&mut self, attrs: &Attributes) -> Result<Input, ElementError> {
let (input_1, _) = self.parse_standard_attributes(attrs)?;
Ok(input_1)
}
pub fn parse_two_inputs(&mut self, attrs: &Attributes) -> Result<(Input, Input), ElementError> {
self.parse_standard_attributes(attrs)
}
}
pub fn extract_filter_from_filter_node(
filter_node: &Node,
acquired_nodes: &mut AcquiredNodes<'_>,
draw_ctx: &DrawingCtx,
) -> Result<FilterSpec, FilterResolveError> {
let filter_node = &*filter_node;
assert!(is_element_of_type!(filter_node, Filter));
let filter_element = filter_node.borrow_element();
let user_space_filter = {
let filter_values = filter_element.get_computed_values();
let filter = borrow_element_as!(filter_node, Filter);
filter.to_user_space(&NormalizeParams::new(
filter_values,
&draw_ctx.get_view_params_for_units(filter.get_filter_units()),
))
};
let primitives = filter_node
.children()
.filter(|c| c.is_element())
// Skip nodes in error.
.filter(|c| {
let in_error = c.borrow_element().is_in_error();
if in_error {
rsvg_log!("(ignoring filter primitive {} because it is in error)", c);
}
!in_error
})
// Keep only filter primitives (those that implement the Filter trait)
.filter(|c| c.borrow_element().as_filter_effect().is_some())
.map(|primitive_node| {
let elt = primitive_node.borrow_element();
let effect = elt.as_filter_effect().unwrap();
let primitive_name = format!("{}", primitive_node);
let primitive_values = elt.get_computed_values();
let params = NormalizeParams::new(
primitive_values,
&draw_ctx.get_view_params_for_units(user_space_filter.primitive_units),
);
effect
.resolve(acquired_nodes, &primitive_node)
.map_err(|e| {
rsvg_log!(
"(filter primitive {} returned an error: {})",
primitive_name,
e
);
e
})
.map(|primitive| primitive.into_user_space(¶ms))
})
.collect::<Result<Vec<UserSpacePrimitive>, FilterResolveError>>()?;
Ok(FilterSpec {
user_space_filter,
primitives,
})
}
/// Applies a filter and returns the resulting surface.
pub fn render(
filter: &FilterSpec,
stroke_paint_source: Rc<UserSpacePaintSource>,
fill_paint_source: Rc<UserSpacePaintSource>,
source_surface: SharedImageSurface,
acquired_nodes: &mut AcquiredNodes<'_>,
draw_ctx: &mut DrawingCtx,
transform: Transform,
node_bbox: BoundingBox,
) -> Result<SharedImageSurface, RenderingError> {
FilterContext::new(
&filter.user_space_filter,
stroke_paint_source,
fill_paint_source,
&source_surface,
transform,
node_bbox,
)
.and_then(|mut filter_ctx| {
for user_space_primitive in &filter.primitives {
let start = Instant::now();
match render_primitive(user_space_primitive, &filter_ctx, acquired_nodes, draw_ctx) {
Ok(output) => {
let elapsed = start.elapsed();
rsvg_log!(
"(rendered filter primitive {} in\n {} seconds)",
user_space_primitive.params.name(),
elapsed.as_secs() as f64 + f64::from(elapsed.subsec_nanos()) / 1e9
);
filter_ctx.store_result(FilterResult {
name: user_space_primitive.result.clone(),
output,
});
}
Err(err) => {
rsvg_log!(
"(filter primitive {} returned an error: {})",
user_space_primitive.params.name(),
err
);
// Exit early on Cairo errors. Continue rendering otherwise.
if let FilterError::CairoError(status) = err {
return Err(FilterError::CairoError(status));
}
}
}
}
Ok(filter_ctx.into_output()?)
})
.or_else(|err| match err {
FilterError::CairoError(status) => {
// Exit early on Cairo errors
Err(RenderingError::from(status))
}
_ => {
// ignore other filter errors and just return an empty surface
Ok(SharedImageSurface::empty(
source_surface.width(),
source_surface.height(),
SurfaceType::AlphaOnly,
)?)
}
})
}
#[rustfmt::skip]
fn render_primitive(
primitive: &UserSpacePrimitive,
ctx: &FilterContext,
acquired_nodes: &mut AcquiredNodes<'_>,
draw_ctx: &mut DrawingCtx,
) -> Result<FilterOutput, FilterError> {
use PrimitiveParams::*;
let bounds_builder = primitive.get_bounds(ctx);
match primitive.params {
Blend(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
ColorMatrix(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
ComponentTransfer(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Composite(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
ConvolveMatrix(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
DiffuseLighting(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
DisplacementMap(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Flood(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
GaussianBlur(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Image(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Merge(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Morphology(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Offset(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
SpecularLighting(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Tile(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Turbulence(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
}
}
impl From<ColorInterpolationFilters> for SurfaceType {
fn from(c: ColorInterpolationFilters) -> Self {
match c {
ColorInterpolationFilters::LinearRgb => SurfaceType::LinearRgb,
_ => SurfaceType::SRgb,
}
}
}
|
PrimitiveParams
|
identifier_name
|
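The filters mod.rs rows pair the `PrimitiveParams` enum with a `name()` match and a `render_primitive` dispatch. Below is a reduced sketch of that enum-dispatch pattern; `Blur` and `Offset` here are hypothetical stand-ins rather than the real rsvg primitive types, and `describe` replaces the actual rendering call.

// Reduced sketch of the enum-dispatch pattern used by `PrimitiveParams` in mod.rs.
// `Blur` and `Offset` are made-up stand-ins, not the real rsvg primitives.
struct Blur { std_deviation: f64 }
struct Offset { dx: f64, dy: f64 }

enum Params {
    Blur(Blur),
    Offset(Offset),
}

impl Params {
    /// Human-readable element name, mirroring `PrimitiveParams::name`.
    fn name(&self) -> &'static str {
        match self {
            Params::Blur(..) => "feGaussianBlur",
            Params::Offset(..) => "feOffset",
        }
    }

    /// Dispatch to variant-specific behaviour, mirroring `render_primitive`.
    fn describe(&self) -> String {
        match self {
            Params::Blur(p) => format!("{}: stdDeviation={}", self.name(), p.std_deviation),
            Params::Offset(p) => format!("{}: dx={} dy={}", self.name(), p.dx, p.dy),
        }
    }
}

fn main() {
    let prims = vec![
        Params::Blur(Blur { std_deviation: 2.0 }),
        Params::Offset(Offset { dx: 5.0, dy: -3.0 }),
    ];
    for p in &prims {
        println!("{}", p.describe());
    }
}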
mod.rs
|
//! Entry point for the CSS filters infrastructure.
use cssparser::{BasicParseError, Parser};
use markup5ever::{expanded_name, local_name, namespace_url, ns};
use std::rc::Rc;
use std::time::Instant;
use crate::bbox::BoundingBox;
use crate::document::AcquiredNodes;
use crate::drawing_ctx::DrawingCtx;
use crate::element::{Draw, ElementResult, SetAttributes};
use crate::error::{ElementError, ParseError, RenderingError};
use crate::filter::UserSpaceFilter;
use crate::length::*;
use crate::node::{Node, NodeBorrow};
use crate::paint_server::UserSpacePaintSource;
use crate::parsers::{CustomIdent, Parse, ParseValue};
use crate::properties::ColorInterpolationFilters;
use crate::surface_utils::shared_surface::{SharedImageSurface, SurfaceType};
use crate::transform::Transform;
use crate::xml::Attributes;
mod bounds;
use self::bounds::BoundsBuilder;
pub mod context;
use self::context::{FilterContext, FilterOutput, FilterResult};
mod error;
use self::error::FilterError;
pub use self::error::FilterResolveError;
/// A filter primitive interface.
pub trait FilterEffect: SetAttributes + Draw {
fn resolve(
&self,
acquired_nodes: &mut AcquiredNodes<'_>,
node: &Node,
) -> Result<ResolvedPrimitive, FilterResolveError>;
}
// Filter Effects do not need to draw themselves
impl<T: FilterEffect> Draw for T {}
pub mod blend;
pub mod color_matrix;
pub mod component_transfer;
pub mod composite;
pub mod convolve_matrix;
pub mod displacement_map;
pub mod flood;
pub mod gaussian_blur;
pub mod image;
pub mod lighting;
pub mod merge;
pub mod morphology;
pub mod offset;
pub mod tile;
pub mod turbulence;
pub struct FilterSpec {
pub user_space_filter: UserSpaceFilter,
pub primitives: Vec<UserSpacePrimitive>,
}
/// Resolved parameters for each filter primitive.
///
/// These gather all the data that a primitive may need during rendering:
/// the `feFoo` element's attributes, any computed values from its properties,
/// and parameters extracted from the element's children (for example,
/// `feMerge` gathers info from its `feMergeNode` children).
pub enum PrimitiveParams {
Blend(blend::Blend),
ColorMatrix(color_matrix::ColorMatrix),
|
DiffuseLighting(lighting::DiffuseLighting),
DisplacementMap(displacement_map::DisplacementMap),
Flood(flood::Flood),
GaussianBlur(gaussian_blur::GaussianBlur),
Image(image::Image),
Merge(merge::Merge),
Morphology(morphology::Morphology),
Offset(offset::Offset),
SpecularLighting(lighting::SpecularLighting),
Tile(tile::Tile),
Turbulence(turbulence::Turbulence),
}
impl PrimitiveParams {
/// Returns a human-readable name for a primitive.
#[rustfmt::skip]
fn name(&self) -> &'static str {
use PrimitiveParams::*;
match self {
Blend(..) => "feBlend",
ColorMatrix(..) => "feColorMatrix",
ComponentTransfer(..) => "feComponentTransfer",
Composite(..) => "feComposite",
ConvolveMatrix(..) => "feConvolveMatrix",
DiffuseLighting(..) => "feDiffuseLighting",
DisplacementMap(..) => "feDisplacementMap",
Flood(..) => "feFlood",
GaussianBlur(..) => "feGaussianBlur",
Image(..) => "feImage",
Merge(..) => "feMerge",
Morphology(..) => "feMorphology",
Offset(..) => "feOffset",
SpecularLighting(..) => "feSpecularLighting",
Tile(..) => "feTile",
Turbulence(..) => "feTurbulence",
}
}
}
/// The base filter primitive node containing common properties.
#[derive(Default, Clone)]
pub struct Primitive {
pub x: Option<Length<Horizontal>>,
pub y: Option<Length<Vertical>>,
pub width: Option<ULength<Horizontal>>,
pub height: Option<ULength<Vertical>>,
pub result: Option<CustomIdent>,
}
pub struct ResolvedPrimitive {
pub primitive: Primitive,
pub params: PrimitiveParams,
}
/// A fully resolved filter primitive in user-space coordinates.
pub struct UserSpacePrimitive {
x: Option<f64>,
y: Option<f64>,
width: Option<f64>,
height: Option<f64>,
result: Option<CustomIdent>,
params: PrimitiveParams,
}
/// An enumeration of possible inputs for a filter primitive.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Input {
Unspecified,
SourceGraphic,
SourceAlpha,
BackgroundImage,
BackgroundAlpha,
FillPaint,
StrokePaint,
FilterOutput(CustomIdent),
}
enum_default!(Input, Input::Unspecified);
impl Parse for Input {
fn parse<'i>(parser: &mut Parser<'i, '_>) -> Result<Self, ParseError<'i>> {
parser
.try_parse(|p| {
parse_identifiers!(
p,
"SourceGraphic" => Input::SourceGraphic,
"SourceAlpha" => Input::SourceAlpha,
"BackgroundImage" => Input::BackgroundImage,
"BackgroundAlpha" => Input::BackgroundAlpha,
"FillPaint" => Input::FillPaint,
"StrokePaint" => Input::StrokePaint,
)
})
.or_else(|_: BasicParseError<'_>| {
let ident = CustomIdent::parse(parser)?;
Ok(Input::FilterOutput(ident))
})
}
}
impl ResolvedPrimitive {
pub fn into_user_space(self, params: &NormalizeParams) -> UserSpacePrimitive {
let x = self.primitive.x.map(|l| l.to_user(params));
let y = self.primitive.y.map(|l| l.to_user(params));
let width = self.primitive.width.map(|l| l.to_user(params));
let height = self.primitive.height.map(|l| l.to_user(params));
UserSpacePrimitive {
x,
y,
width,
height,
result: self.primitive.result,
params: self.params,
}
}
}
impl UserSpacePrimitive {
/// Validates attributes and returns the `BoundsBuilder` for bounds computation.
#[inline]
fn get_bounds(&self, ctx: &FilterContext) -> BoundsBuilder {
BoundsBuilder::new(self.x, self.y, self.width, self.height, ctx.paffine())
}
}
impl Primitive {
fn parse_standard_attributes(
&mut self,
attrs: &Attributes,
) -> Result<(Input, Input), ElementError> {
let mut input_1 = Input::Unspecified;
let mut input_2 = Input::Unspecified;
for (attr, value) in attrs.iter() {
match attr.expanded() {
expanded_name!("", "x") => self.x = attr.parse(value)?,
expanded_name!("", "y") => self.y = attr.parse(value)?,
expanded_name!("", "width") => self.width = attr.parse(value)?,
expanded_name!("", "height") => self.height = attr.parse(value)?,
expanded_name!("", "result") => self.result = attr.parse(value)?,
expanded_name!("", "in") => input_1 = attr.parse(value)?,
expanded_name!("", "in2") => input_2 = attr.parse(value)?,
_ => (),
}
}
Ok((input_1, input_2))
}
pub fn parse_no_inputs(&mut self, attrs: &Attributes) -> ElementResult {
let (_, _) = self.parse_standard_attributes(attrs)?;
Ok(())
}
pub fn parse_one_input(&mut self, attrs: &Attributes) -> Result<Input, ElementError> {
let (input_1, _) = self.parse_standard_attributes(attrs)?;
Ok(input_1)
}
pub fn parse_two_inputs(&mut self, attrs: &Attributes) -> Result<(Input, Input), ElementError> {
self.parse_standard_attributes(attrs)
}
}
pub fn extract_filter_from_filter_node(
filter_node: &Node,
acquired_nodes: &mut AcquiredNodes<'_>,
draw_ctx: &DrawingCtx,
) -> Result<FilterSpec, FilterResolveError> {
let filter_node = &*filter_node;
assert!(is_element_of_type!(filter_node, Filter));
let filter_element = filter_node.borrow_element();
let user_space_filter = {
let filter_values = filter_element.get_computed_values();
let filter = borrow_element_as!(filter_node, Filter);
filter.to_user_space(&NormalizeParams::new(
filter_values,
&draw_ctx.get_view_params_for_units(filter.get_filter_units()),
))
};
let primitives = filter_node
.children()
.filter(|c| c.is_element())
// Skip nodes in error.
.filter(|c| {
let in_error = c.borrow_element().is_in_error();
if in_error {
rsvg_log!("(ignoring filter primitive {} because it is in error)", c);
}
!in_error
})
// Keep only filter primitives (those that implement the Filter trait)
.filter(|c| c.borrow_element().as_filter_effect().is_some())
.map(|primitive_node| {
let elt = primitive_node.borrow_element();
let effect = elt.as_filter_effect().unwrap();
let primitive_name = format!("{}", primitive_node);
let primitive_values = elt.get_computed_values();
let params = NormalizeParams::new(
primitive_values,
&draw_ctx.get_view_params_for_units(user_space_filter.primitive_units),
);
effect
.resolve(acquired_nodes, &primitive_node)
.map_err(|e| {
rsvg_log!(
"(filter primitive {} returned an error: {})",
primitive_name,
e
);
e
})
.map(|primitive| primitive.into_user_space(¶ms))
})
.collect::<Result<Vec<UserSpacePrimitive>, FilterResolveError>>()?;
Ok(FilterSpec {
user_space_filter,
primitives,
})
}
/// Applies a filter and returns the resulting surface.
pub fn render(
filter: &FilterSpec,
stroke_paint_source: Rc<UserSpacePaintSource>,
fill_paint_source: Rc<UserSpacePaintSource>,
source_surface: SharedImageSurface,
acquired_nodes: &mut AcquiredNodes<'_>,
draw_ctx: &mut DrawingCtx,
transform: Transform,
node_bbox: BoundingBox,
) -> Result<SharedImageSurface, RenderingError> {
FilterContext::new(
&filter.user_space_filter,
stroke_paint_source,
fill_paint_source,
&source_surface,
transform,
node_bbox,
)
.and_then(|mut filter_ctx| {
for user_space_primitive in &filter.primitives {
let start = Instant::now();
match render_primitive(user_space_primitive, &filter_ctx, acquired_nodes, draw_ctx) {
Ok(output) => {
let elapsed = start.elapsed();
rsvg_log!(
"(rendered filter primitive {} in\n {} seconds)",
user_space_primitive.params.name(),
elapsed.as_secs() as f64 + f64::from(elapsed.subsec_nanos()) / 1e9
);
filter_ctx.store_result(FilterResult {
name: user_space_primitive.result.clone(),
output,
});
}
Err(err) => {
rsvg_log!(
"(filter primitive {} returned an error: {})",
user_space_primitive.params.name(),
err
);
// Exit early on Cairo errors. Continue rendering otherwise.
if let FilterError::CairoError(status) = err {
return Err(FilterError::CairoError(status));
}
}
}
}
Ok(filter_ctx.into_output()?)
})
.or_else(|err| match err {
FilterError::CairoError(status) => {
// Exit early on Cairo errors
Err(RenderingError::from(status))
}
_ => {
// ignore other filter errors and just return an empty surface
Ok(SharedImageSurface::empty(
source_surface.width(),
source_surface.height(),
SurfaceType::AlphaOnly,
)?)
}
})
}
#[rustfmt::skip]
fn render_primitive(
primitive: &UserSpacePrimitive,
ctx: &FilterContext,
acquired_nodes: &mut AcquiredNodes<'_>,
draw_ctx: &mut DrawingCtx,
) -> Result<FilterOutput, FilterError> {
use PrimitiveParams::*;
let bounds_builder = primitive.get_bounds(ctx);
match primitive.params {
Blend(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
ColorMatrix(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
ComponentTransfer(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Composite(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
ConvolveMatrix(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
DiffuseLighting(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
DisplacementMap(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Flood(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
GaussianBlur(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Image(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Merge(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Morphology(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Offset(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
SpecularLighting(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Tile(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
Turbulence(ref p) => p.render(bounds_builder, ctx, acquired_nodes, draw_ctx),
}
}
impl From<ColorInterpolationFilters> for SurfaceType {
fn from(c: ColorInterpolationFilters) -> Self {
match c {
ColorInterpolationFilters::LinearRgb => SurfaceType::LinearRgb,
_ => SurfaceType::SRgb,
}
}
}
|
ComponentTransfer(component_transfer::ComponentTransfer),
Composite(composite::Composite),
ConvolveMatrix(convolve_matrix::ConvolveMatrix),
|
random_line_split
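For reference, a minimal standalone sketch of the enum-dispatch pattern that PrimitiveParams::name() and render_primitive() use above: one variant per concrete type, and a single match that forwards to methods with a shared signature. Circle and Square are made-up stand-ins for illustration only, not rsvg types.

enum Shape {
    Circle(f64), // radius
    Square(f64), // side length
}

impl Shape {
    fn name(&self) -> &'static str {
        match self {
            Shape::Circle(..) => "circle",
            Shape::Square(..) => "square",
        }
    }

    fn area(&self) -> f64 {
        match self {
            Shape::Circle(r) => std::f64::consts::PI * r * r,
            Shape::Square(s) => s * s,
        }
    }
}

fn main() {
    for shape in &[Shape::Circle(1.0), Shape::Square(2.0)] {
        println!("{} has area {}", shape.name(), shape.area());
    }
}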
|
parsed_jvm_command_lines.rs
|
use itertools::Itertools;
use std::slice::Iter;
/// Represents the result of parsing the args of a nailgunnable Process
/// TODO(#8481) We may want to split the classpath by the ":", and store it as a Vec<String>
/// to allow for deep fingerprinting.
#[derive(PartialEq, Eq, Debug)]
pub struct ParsedJVMCommandLines {
pub nailgun_args: Vec<String>,
pub client_args: Vec<String>,
pub client_main_class: String,
}
impl ParsedJVMCommandLines {
///
/// Given a list of args that one would likely pass to a java call,
/// we automatically split it to generate two argument lists:
/// - nailgun arguments: The list of arguments needed to start the nailgun server.
/// These arguments include everything in the arg list up to (but not including) the main class.
/// These arguments represent roughly JVM options (-Xmx...), and the classpath (-cp...).
///
/// - client arguments: The list of arguments that will be used to run the jvm program under nailgun.
/// These arguments can be thought of as "passthrough args" that are sent to the jvm via the nailgun client.
/// These arguments include everything starting from the main class.
///
/// We assume that:
/// - Every args list has a main class.
/// - There is exactly one argument that doesn't begin with a `-` in the command line before the main class,
/// and it's the value of the classpath (i.e. `-cp scala-library.jar`).
///
/// We think these assumptions are valid as per: https://github.com/pantsbuild/pants/issues/8387
///
pub fn parse_command_lines(args: &[String]) -> Result<ParsedJVMCommandLines, String> {
let mut args_to_consume = args.iter();
let nailgun_args_before_classpath = Self::parse_to_classpath(&mut args_to_consume)?;
let (classpath_flag, classpath_value) = Self::parse_classpath(&mut args_to_consume)?;
let nailgun_args_after_classpath = Self::parse_jvm_args(&mut args_to_consume)?;
let main_class = Self::parse_main_class(&mut args_to_consume)?;
let client_args = Self::parse_to_end(&mut args_to_consume)?;
if args_to_consume.clone().peekable().peek().is_some() {
return Err(format!(
"Malformed command line: There are still arguments to consume: {:?}",
&args_to_consume
));
}
let mut nailgun_args = nailgun_args_before_classpath;
nailgun_args.push(classpath_flag);
nailgun_args.push(classpath_value);
nailgun_args.extend(nailgun_args_after_classpath);
Ok(ParsedJVMCommandLines {
nailgun_args,
client_args,
client_main_class: main_class,
})
}
fn parse_to_classpath(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
        .take_while_ref(|elem| !ParsedJVMCommandLines::is_classpath_flag(elem))
.cloned()
.collect(),
)
}
fn parse_classpath(args_to_consume: &mut Iter<String>) -> Result<(String, String), String> {
let classpath_flag = args_to_consume
.next()
.filter(|e| ParsedJVMCommandLines::is_classpath_flag(e))
.ok_or_else(|| "No classpath flag found.".to_string())
.map(|e| e.clone())?;
let classpath_value = args_to_consume
.next()
.ok_or_else(|| "No classpath value found!".to_string())
.and_then(|elem| {
if ParsedJVMCommandLines::is_flag(elem)
|
else {
Ok(elem)
}
})?
.clone();
Ok((classpath_flag, classpath_value))
}
fn parse_jvm_args(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
.take_while_ref(|elem| ParsedJVMCommandLines::is_flag(elem))
.cloned()
.collect(),
)
}
fn parse_main_class(args_to_consume: &mut Iter<String>) -> Result<String, String> {
args_to_consume
.next()
      .filter(|e| !ParsedJVMCommandLines::is_flag(e))
.ok_or_else(|| "No main class provided.".to_string())
.map(|e| e.clone())
}
fn parse_to_end(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(args_to_consume.cloned().collect())
}
fn is_flag(arg: &str) -> bool {
arg.starts_with('-') || arg.starts_with('@')
}
fn is_classpath_flag(arg: &str) -> bool {
arg == "-cp" || arg == "-classpath"
}
}
|
{
Err(format!(
"Classpath value has incorrect formatting {}.",
elem
))
}
|
conditional_block
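A quick usage sketch of the split described in the doc comment above, assuming ParsedJVMCommandLines is in scope: JVM options and the classpath go to the nailgun server, while everything from the main class onward becomes client args.

fn example() -> Result<(), String> {
    let args: Vec<String> = ["-Xmx1g", "-cp", "scala-library.jar", "org.Main", "--verbose"]
        .iter()
        .map(|s| s.to_string())
        .collect();

    let parsed = ParsedJVMCommandLines::parse_command_lines(&args)?;

    // Everything up to and including the classpath is for the nailgun server.
    assert_eq!(parsed.nailgun_args, vec!["-Xmx1g", "-cp", "scala-library.jar"]);
    // The first non-flag token after the classpath is the main class.
    assert_eq!(parsed.client_main_class, "org.Main");
    // The rest is passed through to the client.
    assert_eq!(parsed.client_args, vec!["--verbose"]);
    Ok(())
}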
|
parsed_jvm_command_lines.rs
|
use itertools::Itertools;
use std::slice::Iter;
/// Represents the result of parsing the args of a nailgunnable Process
/// TODO(#8481) We may want to split the classpath by the ":", and store it as a Vec<String>
/// to allow for deep fingerprinting.
#[derive(PartialEq, Eq, Debug)]
pub struct ParsedJVMCommandLines {
pub nailgun_args: Vec<String>,
pub client_args: Vec<String>,
pub client_main_class: String,
}
impl ParsedJVMCommandLines {
///
/// Given a list of args that one would likely pass to a java call,
/// we automatically split it to generate two argument lists:
/// - nailgun arguments: The list of arguments needed to start the nailgun server.
/// These arguments include everything in the arg list up to (but not including) the main class.
/// These arguments represent roughly JVM options (-Xmx...), and the classpath (-cp...).
///
/// - client arguments: The list of arguments that will be used to run the jvm program under nailgun.
/// These arguments can be thought of as "passthrough args" that are sent to the jvm via the nailgun client.
/// These arguments include everything starting from the main class.
///
/// We assume that:
/// - Every args list has a main class.
/// - There is exactly one argument that doesn't begin with a `-` in the command line before the main class,
/// and it's the value of the classpath (i.e. `-cp scala-library.jar`).
///
/// We think these assumptions are valid as per: https://github.com/pantsbuild/pants/issues/8387
///
pub fn parse_command_lines(args: &[String]) -> Result<ParsedJVMCommandLines, String> {
let mut args_to_consume = args.iter();
let nailgun_args_before_classpath = Self::parse_to_classpath(&mut args_to_consume)?;
let (classpath_flag, classpath_value) = Self::parse_classpath(&mut args_to_consume)?;
let nailgun_args_after_classpath = Self::parse_jvm_args(&mut args_to_consume)?;
let main_class = Self::parse_main_class(&mut args_to_consume)?;
let client_args = Self::parse_to_end(&mut args_to_consume)?;
if args_to_consume.clone().peekable().peek().is_some() {
return Err(format!(
"Malformed command line: There are still arguments to consume: {:?}",
&args_to_consume
));
}
let mut nailgun_args = nailgun_args_before_classpath;
nailgun_args.push(classpath_flag);
nailgun_args.push(classpath_value);
nailgun_args.extend(nailgun_args_after_classpath);
Ok(ParsedJVMCommandLines {
nailgun_args,
client_args,
client_main_class: main_class,
})
}
fn parse_to_classpath(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
        .take_while_ref(|elem| !ParsedJVMCommandLines::is_classpath_flag(elem))
.cloned()
.collect(),
)
}
fn parse_classpath(args_to_consume: &mut Iter<String>) -> Result<(String, String), String> {
let classpath_flag = args_to_consume
.next()
.filter(|e| ParsedJVMCommandLines::is_classpath_flag(e))
.ok_or_else(|| "No classpath flag found.".to_string())
.map(|e| e.clone())?;
let classpath_value = args_to_consume
.next()
.ok_or_else(|| "No classpath value found!".to_string())
.and_then(|elem| {
if ParsedJVMCommandLines::is_flag(elem) {
Err(format!(
"Classpath value has incorrect formatting {}.",
elem
))
} else {
Ok(elem)
}
})?
.clone();
Ok((classpath_flag, classpath_value))
}
fn parse_jvm_args(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
.take_while_ref(|elem| ParsedJVMCommandLines::is_flag(elem))
.cloned()
.collect(),
)
}
fn parse_main_class(args_to_consume: &mut Iter<String>) -> Result<String, String> {
args_to_consume
.next()
      .filter(|e| !ParsedJVMCommandLines::is_flag(e))
.ok_or_else(|| "No main class provided.".to_string())
.map(|e| e.clone())
}
fn parse_to_end(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String>
|
fn is_flag(arg: &str) -> bool {
arg.starts_with('-') || arg.starts_with('@')
}
fn is_classpath_flag(arg: &str) -> bool {
arg == "-cp" || arg == "-classpath"
}
}
|
{
Ok(args_to_consume.cloned().collect())
}
|
identifier_body
|
parsed_jvm_command_lines.rs
|
use itertools::Itertools;
use std::slice::Iter;
/// Represents the result of parsing the args of a nailgunnable Process
/// TODO(#8481) We may want to split the classpath by the ":", and store it as a Vec<String>
/// to allow for deep fingerprinting.
#[derive(PartialEq, Eq, Debug)]
pub struct ParsedJVMCommandLines {
pub nailgun_args: Vec<String>,
pub client_args: Vec<String>,
pub client_main_class: String,
}
impl ParsedJVMCommandLines {
///
/// Given a list of args that one would likely pass to a java call,
/// we automatically split it to generate two argument lists:
/// - nailgun arguments: The list of arguments needed to start the nailgun server.
/// These arguments include everything in the arg list up to (but not including) the main class.
/// These arguments represent roughly JVM options (-Xmx...), and the classpath (-cp...).
///
/// - client arguments: The list of arguments that will be used to run the jvm program under nailgun.
/// These arguments can be thought of as "passthrough args" that are sent to the jvm via the nailgun client.
/// These arguments include everything starting from the main class.
///
/// We assume that:
/// - Every args list has a main class.
/// - There is exactly one argument that doesn't begin with a `-` in the command line before the main class,
/// and it's the value of the classpath (i.e. `-cp scala-library.jar`).
///
/// We think these assumptions are valid as per: https://github.com/pantsbuild/pants/issues/8387
///
pub fn parse_command_lines(args: &[String]) -> Result<ParsedJVMCommandLines, String> {
let mut args_to_consume = args.iter();
let nailgun_args_before_classpath = Self::parse_to_classpath(&mut args_to_consume)?;
let (classpath_flag, classpath_value) = Self::parse_classpath(&mut args_to_consume)?;
let nailgun_args_after_classpath = Self::parse_jvm_args(&mut args_to_consume)?;
let main_class = Self::parse_main_class(&mut args_to_consume)?;
let client_args = Self::parse_to_end(&mut args_to_consume)?;
if args_to_consume.clone().peekable().peek().is_some() {
return Err(format!(
"Malformed command line: There are still arguments to consume: {:?}",
&args_to_consume
));
}
let mut nailgun_args = nailgun_args_before_classpath;
nailgun_args.push(classpath_flag);
nailgun_args.push(classpath_value);
nailgun_args.extend(nailgun_args_after_classpath);
Ok(ParsedJVMCommandLines {
nailgun_args,
client_args,
client_main_class: main_class,
})
}
fn parse_to_classpath(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
        .take_while_ref(|elem| !ParsedJVMCommandLines::is_classpath_flag(elem))
.cloned()
.collect(),
)
}
fn parse_classpath(args_to_consume: &mut Iter<String>) -> Result<(String, String), String> {
let classpath_flag = args_to_consume
.next()
.filter(|e| ParsedJVMCommandLines::is_classpath_flag(e))
.ok_or_else(|| "No classpath flag found.".to_string())
.map(|e| e.clone())?;
let classpath_value = args_to_consume
.next()
.ok_or_else(|| "No classpath value found!".to_string())
.and_then(|elem| {
if ParsedJVMCommandLines::is_flag(elem) {
Err(format!(
"Classpath value has incorrect formatting {}.",
elem
))
} else {
Ok(elem)
}
})?
.clone();
Ok((classpath_flag, classpath_value))
}
fn parse_jvm_args(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
.take_while_ref(|elem| ParsedJVMCommandLines::is_flag(elem))
.cloned()
.collect(),
)
}
fn parse_main_class(args_to_consume: &mut Iter<String>) -> Result<String, String> {
args_to_consume
.next()
      .filter(|e| !ParsedJVMCommandLines::is_flag(e))
.ok_or_else(|| "No main class provided.".to_string())
.map(|e| e.clone())
}
fn parse_to_end(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(args_to_consume.cloned().collect())
}
fn
|
(arg: &str) -> bool {
arg.starts_with('-') || arg.starts_with('@')
}
fn is_classpath_flag(arg: &str) -> bool {
arg == "-cp" || arg == "-classpath"
}
}
|
is_flag
|
identifier_name
|
parsed_jvm_command_lines.rs
|
use itertools::Itertools;
use std::slice::Iter;
/// Represents the result of parsing the args of a nailgunnable Process
/// TODO(#8481) We may want to split the classpath by the ":", and store it as a Vec<String>
/// to allow for deep fingerprinting.
#[derive(PartialEq, Eq, Debug)]
pub struct ParsedJVMCommandLines {
pub nailgun_args: Vec<String>,
pub client_args: Vec<String>,
pub client_main_class: String,
}
impl ParsedJVMCommandLines {
///
/// Given a list of args that one would likely pass to a java call,
/// we automatically split it to generate two argument lists:
/// - nailgun arguments: The list of arguments needed to start the nailgun server.
/// These arguments include everything in the arg list up to (but not including) the main class.
/// These arguments represent roughly JVM options (-Xmx...), and the classpath (-cp...).
///
/// - client arguments: The list of arguments that will be used to run the jvm program under nailgun.
/// These arguments can be thought of as "passthrough args" that are sent to the jvm via the nailgun client.
/// These arguments include everything starting from the main class.
///
/// We assume that:
/// - Every args list has a main class.
/// - There is exactly one argument that doesn't begin with a `-` in the command line before the main class,
/// and it's the value of the classpath (i.e. `-cp scala-library.jar`).
///
/// We think these assumptions are valid as per: https://github.com/pantsbuild/pants/issues/8387
///
pub fn parse_command_lines(args: &[String]) -> Result<ParsedJVMCommandLines, String> {
let mut args_to_consume = args.iter();
let nailgun_args_before_classpath = Self::parse_to_classpath(&mut args_to_consume)?;
let (classpath_flag, classpath_value) = Self::parse_classpath(&mut args_to_consume)?;
let nailgun_args_after_classpath = Self::parse_jvm_args(&mut args_to_consume)?;
let main_class = Self::parse_main_class(&mut args_to_consume)?;
let client_args = Self::parse_to_end(&mut args_to_consume)?;
if args_to_consume.clone().peekable().peek().is_some() {
return Err(format!(
"Malformed command line: There are still arguments to consume: {:?}",
&args_to_consume
));
}
|
nailgun_args.push(classpath_value);
nailgun_args.extend(nailgun_args_after_classpath);
Ok(ParsedJVMCommandLines {
nailgun_args,
client_args,
client_main_class: main_class,
})
}
fn parse_to_classpath(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
        .take_while_ref(|elem| !ParsedJVMCommandLines::is_classpath_flag(elem))
.cloned()
.collect(),
)
}
fn parse_classpath(args_to_consume: &mut Iter<String>) -> Result<(String, String), String> {
let classpath_flag = args_to_consume
.next()
.filter(|e| ParsedJVMCommandLines::is_classpath_flag(e))
.ok_or_else(|| "No classpath flag found.".to_string())
.map(|e| e.clone())?;
let classpath_value = args_to_consume
.next()
.ok_or_else(|| "No classpath value found!".to_string())
.and_then(|elem| {
if ParsedJVMCommandLines::is_flag(elem) {
Err(format!(
"Classpath value has incorrect formatting {}.",
elem
))
} else {
Ok(elem)
}
})?
.clone();
Ok((classpath_flag, classpath_value))
}
fn parse_jvm_args(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(
args_to_consume
.take_while_ref(|elem| ParsedJVMCommandLines::is_flag(elem))
.cloned()
.collect(),
)
}
fn parse_main_class(args_to_consume: &mut Iter<String>) -> Result<String, String> {
args_to_consume
.next()
      .filter(|e| !ParsedJVMCommandLines::is_flag(e))
.ok_or_else(|| "No main class provided.".to_string())
.map(|e| e.clone())
}
fn parse_to_end(args_to_consume: &mut Iter<String>) -> Result<Vec<String>, String> {
Ok(args_to_consume.cloned().collect())
}
fn is_flag(arg: &str) -> bool {
arg.starts_with('-') || arg.starts_with('@')
}
fn is_classpath_flag(arg: &str) -> bool {
arg == "-cp" || arg == "-classpath"
}
}
|
let mut nailgun_args = nailgun_args_before_classpath;
nailgun_args.push(classpath_flag);
|
random_line_split
|
loads.rs
|
use bus::Bus;
use super::super::{AddressingMode, Cpu};
// LD
#[inline(always)]
pub fn ld<T>(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<T>, src: &dyn AddressingMode<T>) {
let val = src.read(cpu, bus);
dest.write(cpu, bus, val);
}
// LDHL SP, r8
// Affects flags: Z, N, H, C
#[inline(always)]
pub fn ldhl(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u8>) {
let sp = cpu.regs.sp();
let unsigned = src.read(cpu, bus) as u16;
let signed = src.read(cpu, bus) as i8;
if signed < 0 {
cpu.regs.set_hl(sp.wrapping_sub(signed.abs() as u16));
} else
|
cpu.regs.set_carry(((sp & 0xFF) + (unsigned & 0xFF)) & 0x100 == 0x100);
cpu.regs.set_halfcarry(((sp & 0xF) + (unsigned & 0xF)) & 0x10 == 0x10);
cpu.regs.set_subtract(false);
cpu.regs.set_zero(false);
}
// LDD
#[inline(always)]
pub fn ldd(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_sub(1));
}
// LDI
#[inline(always)]
pub fn ldi(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_add(1));
}
// PUSH
#[inline(always)]
pub fn push(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u16>) {
let val = src.read(cpu, bus);
cpu.push_stack(bus, val);
}
// POP
#[inline(always)]
pub fn pop(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u16>) {
let val = cpu.pop_stack(bus);
dest.write(cpu, bus, val);
}
|
{
cpu.regs.set_hl(sp.wrapping_add(signed.abs() as u16));
}
|
conditional_block
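The carry and half-carry rules in ldhl come from adding the unsigned byte operand to SP's low byte and low nibble. A self-contained sketch of just that flag computation, not wired to the Cpu/Bus types above:

// Standalone illustration of the LDHL SP, r8 flag rules.
fn ldhl_flags(sp: u16, r8: u8) -> (bool, bool) {
    let unsigned = r8 as u16;
    // Carry: overflow out of the low byte of SP + operand.
    let carry = ((sp & 0xFF) + (unsigned & 0xFF)) & 0x100 == 0x100;
    // Half-carry: overflow out of the low nibble.
    let half = ((sp & 0xF) + (unsigned & 0xF)) & 0x10 == 0x10;
    (carry, half)
}

fn main() {
    // e.g. SP = 0x00FF, operand = 0x01: both carry and half-carry are set,
    // and HL would become 0x0100.
    assert_eq!(ldhl_flags(0x00FF, 0x01), (true, true));
}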
|
loads.rs
|
use bus::Bus;
use super::super::{AddressingMode, Cpu};
// LD
#[inline(always)]
pub fn ld<T>(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<T>, src: &dyn AddressingMode<T>) {
let val = src.read(cpu, bus);
dest.write(cpu, bus, val);
}
// LDHL SP, r8
// Affects flags: Z, N, H, C
#[inline(always)]
pub fn ldhl(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u8>) {
let sp = cpu.regs.sp();
let unsigned = src.read(cpu, bus) as u16;
let signed = src.read(cpu, bus) as i8;
if signed < 0 {
cpu.regs.set_hl(sp.wrapping_sub(signed.abs() as u16));
} else {
cpu.regs.set_hl(sp.wrapping_add(signed.abs() as u16));
}
cpu.regs.set_carry(((sp & 0xFF) + (unsigned & 0xFF)) & 0x100 == 0x100);
cpu.regs.set_halfcarry(((sp & 0xF) + (unsigned & 0xF)) & 0x10 == 0x10);
cpu.regs.set_subtract(false);
cpu.regs.set_zero(false);
}
// LDD
#[inline(always)]
pub fn
|
(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_sub(1));
}
// LDI
#[inline(always)]
pub fn ldi(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_add(1));
}
// PUSH
#[inline(always)]
pub fn push(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u16>) {
let val = src.read(cpu, bus);
cpu.push_stack(bus, val);
}
// POP
#[inline(always)]
pub fn pop(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u16>) {
let val = cpu.pop_stack(bus);
dest.write(cpu, bus, val);
}
|
ldd
|
identifier_name
|
loads.rs
|
use bus::Bus;
use super::super::{AddressingMode, Cpu};
// LD
#[inline(always)]
pub fn ld<T>(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<T>, src: &dyn AddressingMode<T>) {
let val = src.read(cpu, bus);
dest.write(cpu, bus, val);
}
// LDHL SP, r8
// Affects flags: Z, N, H, C
#[inline(always)]
pub fn ldhl(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u8>) {
let sp = cpu.regs.sp();
let unsigned = src.read(cpu, bus) as u16;
let signed = src.read(cpu, bus) as i8;
if signed < 0 {
cpu.regs.set_hl(sp.wrapping_sub(signed.abs() as u16));
} else {
cpu.regs.set_hl(sp.wrapping_add(signed.abs() as u16));
}
cpu.regs.set_carry(((sp & 0xFF) + (unsigned & 0xFF)) & 0x100 == 0x100);
cpu.regs.set_halfcarry(((sp & 0xF) + (unsigned & 0xF)) & 0x10 == 0x10);
cpu.regs.set_subtract(false);
cpu.regs.set_zero(false);
}
// LDD
#[inline(always)]
pub fn ldd(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_sub(1));
}
// LDI
#[inline(always)]
pub fn ldi(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_add(1));
}
// PUSH
#[inline(always)]
pub fn push(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u16>) {
let val = src.read(cpu, bus);
cpu.push_stack(bus, val);
}
// POP
#[inline(always)]
pub fn pop(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u16>)
|
{
let val = cpu.pop_stack(bus);
dest.write(cpu, bus, val);
}
|
identifier_body
|
|
loads.rs
|
use bus::Bus;
use super::super::{AddressingMode, Cpu};
// LD
#[inline(always)]
pub fn ld<T>(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<T>, src: &dyn AddressingMode<T>) {
let val = src.read(cpu, bus);
dest.write(cpu, bus, val);
}
// LDHL SP, r8
// Affects flags: Z, N, H, C
#[inline(always)]
pub fn ldhl(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u8>) {
let sp = cpu.regs.sp();
let unsigned = src.read(cpu, bus) as u16;
let signed = src.read(cpu, bus) as i8;
if signed < 0 {
cpu.regs.set_hl(sp.wrapping_sub(signed.abs() as u16));
} else {
cpu.regs.set_hl(sp.wrapping_add(signed.abs() as u16));
}
cpu.regs.set_carry(((sp & 0xFF) + (unsigned & 0xFF)) & 0x100 == 0x100);
cpu.regs.set_halfcarry(((sp & 0xF) + (unsigned & 0xF)) & 0x10 == 0x10);
cpu.regs.set_subtract(false);
cpu.regs.set_zero(false);
}
// LDD
#[inline(always)]
pub fn ldd(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
|
// LDI
#[inline(always)]
pub fn ldi(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u8>, src: &dyn AddressingMode<u8>) {
let val = src.read(cpu, bus);
let hl = cpu.regs.hl();
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_add(1));
}
// PUSH
#[inline(always)]
pub fn push(cpu: &mut Cpu, bus: &mut Bus, src: &dyn AddressingMode<u16>) {
let val = src.read(cpu, bus);
cpu.push_stack(bus, val);
}
// POP
#[inline(always)]
pub fn pop(cpu: &mut Cpu, bus: &mut Bus, dest: &dyn AddressingMode<u16>) {
let val = cpu.pop_stack(bus);
dest.write(cpu, bus, val);
}
|
dest.write(cpu, bus, val);
cpu.regs.set_hl(hl.wrapping_sub(1));
}
|
random_line_split
|
lib.rs
|
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Butterfly is the [SWIM](https://www.cs.cornell.edu/~asdas/research/dsn02-swim.pdf)
//! implementation for Habitat, along with a ZeroMQ based gossip protocol.
//!
//! It implements SWIM+Susp+Inf. It uses Newscast-style "heat" tracking to share membership rumors,
//! while trying to keep UDP packet sizes below 512 bytes. It has the following changes:
//!
//! 1. It uses a single membership rumor with internal logic for applying the rumor's state, rather
//! than sending differential event messages.
//! 1. If an "Alive" membership rumor is received with a higher incarnation, it takes precedent
//! over "Confirmed" membership rumors.
//! 1. Members can be marked "persistent", which means that they will always be taken through the
//! Probe cycle, regardless of their status. This allows networks to heal from partitions.
//!
//! The SWIM implementation has three working threads:
//!
//! 1. An inbound thread, handling receipt of SWIM messages.
//! 1. An outbound thread, which handles the Ping->PingReq cycle and protocol timing.
//! 1. An expire thread, which handles timing out suspected members.
//!
//! The Gossip implementation has two working threads:
//!
//! 1. A 'push' thread, which fans out to 5 members every second (or longer, if it takes longer
//! than 1 second to send all the messages to all the members in the fan-out; no more frequently
//! than one second).
//! 1. A 'pull' thread, which takes messages from any push source and applies them locally.
//!
//! Start exploring the code base by following the thread of execution in the `server` module.
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
extern crate byteorder;
extern crate habitat_core;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate protobuf;
extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate time;
extern crate toml;
extern crate uuid;
extern crate zmq;
#[macro_use]
pub mod trace;
pub mod client;
pub mod error;
pub mod member;
pub mod message;
pub mod rumor;
pub mod server;
use std::cell::UnsafeCell;
pub use server::Server;
lazy_static! {
/// A threadsafe shared ZMQ context for consuming services.
///
/// You probably want to use this context to create new ZMQ sockets unless you *do not* want to
/// connect them together using an in-proc queue.
pub static ref ZMQ_CONTEXT: Box<ServerContext> = {
let ctx = ServerContext(UnsafeCell::new(zmq::Context::new()));
Box::new(ctx)
};
}
/// This is a wrapper to provide interior mutability of an underlying `zmq::Context` and allows
/// for sharing/sending of a `zmq::Context` between threads.
pub struct ServerContext(UnsafeCell<zmq::Context>);
impl ServerContext {
pub fn as_mut(&self) -> &mut zmq::Context {
unsafe { &mut *self.0.get() }
}
}
unsafe impl Send for ServerContext {}
unsafe impl Sync for ServerContext {}
|
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
|
random_line_split
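A reduced sketch of the ServerContext idea above: wrap a value in UnsafeCell so a shared instance can hand out mutable access from a shared reference, and promise thread-safety manually with unsafe Send/Sync impls. SharedCounter and its u64 payload are made up here to stand in for zmq::Context; for a plain integer a Cell or an atomic would normally be used, the UnsafeCell version only mirrors the wrapper above.

use std::cell::UnsafeCell;

pub struct SharedCounter(UnsafeCell<u64>);

impl SharedCounter {
    pub fn as_mut(&self) -> &mut u64 {
        unsafe { &mut *self.0.get() }
    }
}

// Callers are responsible for synchronizing access, exactly as with ServerContext.
unsafe impl Send for SharedCounter {}
unsafe impl Sync for SharedCounter {}

fn main() {
    let counter = SharedCounter(UnsafeCell::new(0));
    *counter.as_mut() += 1;
    assert_eq!(*counter.as_mut(), 1);
}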
|
|
lib.rs
|
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Butterfly is the [SWIM](https://www.cs.cornell.edu/~asdas/research/dsn02-swim.pdf)
//! implementation for Habitat, along with a ZeroMQ based gossip protocol.
//!
//! It implements SWIM+Susp+Inf. It uses Newscast-style "heat" tracking to share membership rumors,
//! while trying to keep UDP packet sizes below 512 bytes. It has the following changes:
//!
//! 1. It uses a single membership rumor with internal logic for applying the rumor's state, rather
//! than sending differential event messages.
//! 1. If an "Alive" membership rumor is received with a higher incarnation, it takes precedent
//! over "Confirmed" membership rumors.
//! 1. Members can be marked "persistent", which means that they will always be taken through the
//! Probe cycle, regardless of their status. This allows networks to heal from partitions.
//!
//! The SWIM implementation has three working threads:
//!
//! 1. An inbound thread, handling receipt of SWIM messages.
//! 1. An outbound thread, which handles the Ping->PingReq cycle and protocol timing.
//! 1. An expire thread, which handles timing out suspected members.
//!
//! The Gossip implementation has two working threads:
//!
//! 1. A 'push' thread, which fans out to 5 members every second (or longer, if it takes longer
//! than 1 second to send all the messages to all the members in the fan-out; no more frequently
//! than one second).
//! 1. A 'pull' thread, which takes messages from any push source and applies them locally.
//!
//! Start exploring the code base by following the thread of execution in the `server` module.
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
extern crate byteorder;
extern crate habitat_core;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate protobuf;
extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate time;
extern crate toml;
extern crate uuid;
extern crate zmq;
#[macro_use]
pub mod trace;
pub mod client;
pub mod error;
pub mod member;
pub mod message;
pub mod rumor;
pub mod server;
use std::cell::UnsafeCell;
pub use server::Server;
lazy_static! {
/// A threadsafe shared ZMQ context for consuming services.
///
/// You probably want to use this context to create new ZMQ sockets unless you *do not* want to
/// connect them together using an in-proc queue.
pub static ref ZMQ_CONTEXT: Box<ServerContext> = {
let ctx = ServerContext(UnsafeCell::new(zmq::Context::new()));
Box::new(ctx)
};
}
/// This is a wrapper to provide interior mutability of an underlying `zmq::Context` and allows
/// for sharing/sending of a `zmq::Context` between threads.
pub struct ServerContext(UnsafeCell<zmq::Context>);
impl ServerContext {
pub fn
|
(&self) -> &mut zmq::Context {
unsafe { &mut *self.0.get() }
}
}
unsafe impl Send for ServerContext {}
unsafe impl Sync for ServerContext {}
|
as_mut
|
identifier_name
|
lib.rs
|
pub struct WordProblem {
cmd: String,
}
#[derive(PartialEq, Debug)]
enum Token {
CmdWhat,
CmdIs,
Number,
Operator,
OperatorBy,
}
enum Operator {
Plus,
Minus,
Multiply,
Divide,
}
impl WordProblem {
pub fn new(command: &str) -> WordProblem {
WordProblem {
cmd: command.to_string(),
}
}
pub fn answer(&self) -> Result<i32, String> {
let command = self.cmd
.split(|x| x == '?' || char::is_whitespace(x))
            .filter(|w| !w.is_empty())
.collect::<Vec<_>>();
let mut result: i32 = 0;
let mut lastop = Operator::Plus;
let mut status = Token::CmdWhat;
for word in command {
match word {
"What" if status == Token::CmdWhat => status = Token::CmdIs,
"is" if status == Token::CmdIs => status = Token::Number,
"plus" if status == Token::Operator => {
lastop = Operator::Plus;
status = Token::Number
}
"minus" if status == Token::Operator => {
lastop = Operator::Minus;
status = Token::Number
}
"multiplied" if status == Token::Operator => {
lastop = Operator::Multiply;
status = Token::OperatorBy
}
"divided" if status == Token::Operator => {
lastop = Operator::Divide;
status = Token::OperatorBy
}
"by" if status == Token::OperatorBy => status = Token::Number,
_ if status == Token::Number => {
let value: i32;
if let Ok(v) = word.parse::<i32>() {
value = v;
} else
|
match lastop {
Operator::Plus => result += value,
Operator::Minus => result -= value,
Operator::Multiply => result *= value,
Operator::Divide => result /= value,
}
status = Token::Operator
}
_ => return Err("Invalid command".to_string()),
}
}
Ok(result)
}
}
|
{
return Err("Invalid number".to_string());
}
|
conditional_block
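A quick usage sketch, assuming WordProblem above is in scope. Operators are applied strictly left to right, so "5 plus 3 multiplied by 2" evaluates as (5 + 3) * 2, not 5 + (3 * 2).

fn example() {
    let problem = WordProblem::new("What is 5 plus 3 multiplied by 2?");
    assert_eq!(problem.answer(), Ok(16));

    // A word that is not a number where a number is expected is rejected.
    assert_eq!(
        WordProblem::new("What is five plus 3?").answer(),
        Err("Invalid number".to_string())
    );
}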
|
lib.rs
|
pub struct WordProblem {
cmd: String,
}
|
CmdIs,
Number,
Operator,
OperatorBy,
}
enum Operator {
Plus,
Minus,
Multiply,
Divide,
}
impl WordProblem {
pub fn new(command: &str) -> WordProblem {
WordProblem {
cmd: command.to_string(),
}
}
pub fn answer(&self) -> Result<i32, String> {
let command = self.cmd
.split(|x| x == '?' || char::is_whitespace(x))
            .filter(|w| !w.is_empty())
.collect::<Vec<_>>();
let mut result: i32 = 0;
let mut lastop = Operator::Plus;
let mut status = Token::CmdWhat;
for word in command {
match word {
"What" if status == Token::CmdWhat => status = Token::CmdIs,
"is" if status == Token::CmdIs => status = Token::Number,
"plus" if status == Token::Operator => {
lastop = Operator::Plus;
status = Token::Number
}
"minus" if status == Token::Operator => {
lastop = Operator::Minus;
status = Token::Number
}
"multiplied" if status == Token::Operator => {
lastop = Operator::Multiply;
status = Token::OperatorBy
}
"divided" if status == Token::Operator => {
lastop = Operator::Divide;
status = Token::OperatorBy
}
"by" if status == Token::OperatorBy => status = Token::Number,
_ if status == Token::Number => {
let value: i32;
if let Ok(v) = word.parse::<i32>() {
value = v;
} else {
return Err("Invalid number".to_string());
}
match lastop {
Operator::Plus => result += value,
Operator::Minus => result -= value,
Operator::Multiply => result *= value,
Operator::Divide => result /= value,
}
status = Token::Operator
}
_ => return Err("Invalid command".to_string()),
}
}
Ok(result)
}
}
|
#[derive(PartialEq, Debug)]
enum Token {
CmdWhat,
|
random_line_split
|
lib.rs
|
pub struct WordProblem {
cmd: String,
}
#[derive(PartialEq, Debug)]
enum Token {
CmdWhat,
CmdIs,
Number,
Operator,
OperatorBy,
}
enum Operator {
Plus,
Minus,
Multiply,
Divide,
}
impl WordProblem {
pub fn new(command: &str) -> WordProblem {
WordProblem {
cmd: command.to_string(),
}
}
pub fn answer(&self) -> Result<i32, String>
|
status = Token::Number
}
"multiplied" if status == Token::Operator => {
lastop = Operator::Multiply;
status = Token::OperatorBy
}
"divided" if status == Token::Operator => {
lastop = Operator::Divide;
status = Token::OperatorBy
}
"by" if status == Token::OperatorBy => status = Token::Number,
_ if status == Token::Number => {
let value: i32;
if let Ok(v) = word.parse::<i32>() {
value = v;
} else {
return Err("Invalid number".to_string());
}
match lastop {
Operator::Plus => result += value,
Operator::Minus => result -= value,
Operator::Multiply => result *= value,
Operator::Divide => result /= value,
}
status = Token::Operator
}
_ => return Err("Invalid command".to_string()),
}
}
Ok(result)
}
}
|
{
let command = self.cmd
.split(|x| x == '?' || char::is_whitespace(x))
.filter(|w| !w.is_empty())
.collect::<Vec<_>>();
let mut result: i32 = 0;
let mut lastop = Operator::Plus;
let mut status = Token::CmdWhat;
for word in command {
match word {
"What" if status == Token::CmdWhat => status = Token::CmdIs,
"is" if status == Token::CmdIs => status = Token::Number,
"plus" if status == Token::Operator => {
lastop = Operator::Plus;
status = Token::Number
}
"minus" if status == Token::Operator => {
lastop = Operator::Minus;
|
identifier_body
|
lib.rs
|
pub struct WordProblem {
cmd: String,
}
#[derive(PartialEq, Debug)]
enum Token {
CmdWhat,
CmdIs,
Number,
Operator,
OperatorBy,
}
enum Operator {
Plus,
Minus,
Multiply,
Divide,
}
impl WordProblem {
pub fn
|
(command: &str) -> WordProblem {
WordProblem {
cmd: command.to_string(),
}
}
pub fn answer(&self) -> Result<i32, String> {
let command = self.cmd
.split(|x| x == '?' || char::is_whitespace(x))
            .filter(|w| !w.is_empty())
.collect::<Vec<_>>();
let mut result: i32 = 0;
let mut lastop = Operator::Plus;
let mut status = Token::CmdWhat;
for word in command {
match word {
"What" if status == Token::CmdWhat => status = Token::CmdIs,
"is" if status == Token::CmdIs => status = Token::Number,
"plus" if status == Token::Operator => {
lastop = Operator::Plus;
status = Token::Number
}
"minus" if status == Token::Operator => {
lastop = Operator::Minus;
status = Token::Number
}
"multiplied" if status == Token::Operator => {
lastop = Operator::Multiply;
status = Token::OperatorBy
}
"divided" if status == Token::Operator => {
lastop = Operator::Divide;
status = Token::OperatorBy
}
"by" if status == Token::OperatorBy => status = Token::Number,
_ if status == Token::Number => {
let value: i32;
if let Ok(v) = word.parse::<i32>() {
value = v;
} else {
return Err("Invalid number".to_string());
}
match lastop {
Operator::Plus => result += value,
Operator::Minus => result -= value,
Operator::Multiply => result *= value,
Operator::Divide => result /= value,
}
status = Token::Operator
}
_ => return Err("Invalid command".to_string()),
}
}
Ok(result)
}
}
|
new
|
identifier_name
|
typetest.rs
|
// This file is part of Grust, GObject introspection bindings for Rust
//
// Copyright (C) 2015 Mikhail Zabaluev <[email protected]>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
use grust::flags;
use grust::value::Value;
use gio::FileAttributeInfoFlags;
use gio::flags::file_attribute_info::{NONE, COPY_WITH_FILE, COPY_WHEN_MOVED};
#[test]
fn flags() {
assert_eq!(NONE, FileAttributeInfoFlags::empty());
let flags: FileAttributeInfoFlags
= flags::from_uint(COPY_WITH_FILE.bits() | COPY_WHEN_MOVED.bits())
.unwrap();
assert_eq!(flags, COPY_WITH_FILE | COPY_WHEN_MOVED);
}
#[test]
fn unknown_flags() {
let a = COPY_WITH_FILE.bits() | 0b10000;
let unknown_flags = flags::from_uint::<FileAttributeInfoFlags>(a)
.err().unwrap();
assert_eq!(unknown_flags.actual(), a);
assert_eq!(unknown_flags.known(), COPY_WITH_FILE.bits());
assert_eq!(unknown_flags.unknown(), 0b10000);
}
#[test]
#[should_panic]
fn flags_unknown_panic()
|
#[test]
fn value_flags() {
let mut value = Value::new(flags::type_of::<FileAttributeInfoFlags>());
let flags = value.get_flags::<FileAttributeInfoFlags>().unwrap();
assert_eq!(flags, FileAttributeInfoFlags::empty());
value.set_flags(COPY_WITH_FILE | COPY_WHEN_MOVED);
let value = value.clone();
let flags = value.get_flags::<FileAttributeInfoFlags>().unwrap();
assert_eq!(flags, COPY_WITH_FILE | COPY_WHEN_MOVED);
}
|
{
let a = COPY_WITH_FILE.bits() | 0b10000;
let _ = flags::from_uint::<FileAttributeInfoFlags>(a).unwrap();
}
|
identifier_body
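A rough analogue of the known/unknown-bits behaviour exercised above, using the widely used bitflags crate (assuming v1.x) instead of grust's flags module; Attr and its bit values are invented for illustration.

#[macro_use]
extern crate bitflags;

bitflags! {
    struct Attr: u32 {
        const COPY_WITH_FILE  = 0b0001;
        const COPY_WHEN_MOVED = 0b0010;
    }
}

fn main() {
    let raw = Attr::COPY_WITH_FILE.bits() | 0b1_0000;
    // An unknown bit makes the strict conversion fail...
    assert!(Attr::from_bits(raw).is_none());
    // ...while the lossy conversion simply drops it.
    assert_eq!(Attr::from_bits_truncate(raw), Attr::COPY_WITH_FILE);
}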
|
typetest.rs
|
// This file is part of Grust, GObject introspection bindings for Rust
//
// Copyright (C) 2015 Mikhail Zabaluev <[email protected]>
//
// This library is free software; you can redistribute it and/or
|
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
use grust::flags;
use grust::value::Value;
use gio::FileAttributeInfoFlags;
use gio::flags::file_attribute_info::{NONE, COPY_WITH_FILE, COPY_WHEN_MOVED};
#[test]
fn flags() {
assert_eq!(NONE, FileAttributeInfoFlags::empty());
let flags: FileAttributeInfoFlags
= flags::from_uint(COPY_WITH_FILE.bits() | COPY_WHEN_MOVED.bits())
.unwrap();
assert_eq!(flags, COPY_WITH_FILE | COPY_WHEN_MOVED);
}
#[test]
fn unknown_flags() {
let a = COPY_WITH_FILE.bits() | 0b10000;
let unknown_flags = flags::from_uint::<FileAttributeInfoFlags>(a)
.err().unwrap();
assert_eq!(unknown_flags.actual(), a);
assert_eq!(unknown_flags.known(), COPY_WITH_FILE.bits());
assert_eq!(unknown_flags.unknown(), 0b10000);
}
#[test]
#[should_panic]
fn flags_unknown_panic() {
let a = COPY_WITH_FILE.bits() | 0b10000;
let _ = flags::from_uint::<FileAttributeInfoFlags>(a).unwrap();
}
#[test]
fn value_flags() {
let mut value = Value::new(flags::type_of::<FileAttributeInfoFlags>());
let flags = value.get_flags::<FileAttributeInfoFlags>().unwrap();
assert_eq!(flags, FileAttributeInfoFlags::empty());
value.set_flags(COPY_WITH_FILE | COPY_WHEN_MOVED);
let value = value.clone();
let flags = value.get_flags::<FileAttributeInfoFlags>().unwrap();
assert_eq!(flags, COPY_WITH_FILE | COPY_WHEN_MOVED);
}
|
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
|
random_line_split
|
typetest.rs
|
// This file is part of Grust, GObject introspection bindings for Rust
//
// Copyright (C) 2015 Mikhail Zabaluev <[email protected]>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
use grust::flags;
use grust::value::Value;
use gio::FileAttributeInfoFlags;
use gio::flags::file_attribute_info::{NONE, COPY_WITH_FILE, COPY_WHEN_MOVED};
#[test]
fn
|
() {
assert_eq!(NONE, FileAttributeInfoFlags::empty());
let flags: FileAttributeInfoFlags
= flags::from_uint(COPY_WITH_FILE.bits() | COPY_WHEN_MOVED.bits())
.unwrap();
assert_eq!(flags, COPY_WITH_FILE | COPY_WHEN_MOVED);
}
#[test]
fn unknown_flags() {
let a = COPY_WITH_FILE.bits() | 0b10000;
let unknown_flags = flags::from_uint::<FileAttributeInfoFlags>(a)
.err().unwrap();
assert_eq!(unknown_flags.actual(), a);
assert_eq!(unknown_flags.known(), COPY_WITH_FILE.bits());
assert_eq!(unknown_flags.unknown(), 0b10000);
}
#[test]
#[should_panic]
fn flags_unknown_panic() {
let a = COPY_WITH_FILE.bits() | 0b10000;
let _ = flags::from_uint::<FileAttributeInfoFlags>(a).unwrap();
}
#[test]
fn value_flags() {
let mut value = Value::new(flags::type_of::<FileAttributeInfoFlags>());
let flags = value.get_flags::<FileAttributeInfoFlags>().unwrap();
assert_eq!(flags, FileAttributeInfoFlags::empty());
value.set_flags(COPY_WITH_FILE | COPY_WHEN_MOVED);
let value = value.clone();
let flags = value.get_flags::<FileAttributeInfoFlags>().unwrap();
assert_eq!(flags, COPY_WITH_FILE | COPY_WHEN_MOVED);
}
|
flags
|
identifier_name
|
compiletest.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[crate_type = "bin"];
#[allow(non_camel_case_types)];
#[deny(warnings)];
extern mod extra;
use std::os;
use std::rt;
use std::io::fs;
use extra::getopts;
use extra::getopts::groups::{optopt, optflag, reqopt};
use extra::test;
use common::config;
use common::mode_run_pass;
use common::mode_run_fail;
use common::mode_compile_fail;
use common::mode_pretty;
use common::mode_debug_info;
use common::mode_codegen;
use common::mode;
use util::logv;
pub mod procsrv;
pub mod util;
pub mod header;
pub mod runtest;
pub mod common;
pub mod errors;
pub fn main() {
let args = os::args();
let config = parse_config(args);
log_config(&config);
run_tests(&config);
}
pub fn parse_config(args: ~[~str]) -> config
|
optopt("", "save-metrics", "file to save metrics to", "FILE"),
optopt("", "ratchet-metrics", "file to ratchet metrics against", "FILE"),
optopt("", "ratchet-noise-percent",
"percent change in metrics to consider noise", "N"),
optflag("", "jit", "run tests under the JIT"),
optopt("", "target", "the target to build for", "TARGET"),
optopt("", "adb-path", "path to the android debugger", "PATH"),
optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite", "A.B"),
optflag("h", "help", "show this message"),
];
assert!(!args.is_empty());
let argv0 = args[0].clone();
let args_ = args.tail();
if args[1] == ~"-h" || args[1] == ~"--help" {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
let matches =
&match getopts::groups::getopts(args_, groups) {
Ok(m) => m,
Err(f) => fail!("{}", f.to_err_msg())
};
if matches.opt_present("h") || matches.opt_present("help") {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
fn opt_path(m: &getopts::Matches, nm: &str) -> Path {
Path::new(m.opt_str(nm).unwrap())
}
config {
compile_lib_path: matches.opt_str("compile-lib-path").unwrap(),
run_lib_path: matches.opt_str("run-lib-path").unwrap(),
rustc_path: opt_path(matches, "rustc-path"),
clang_path: matches.opt_str("clang-path").map(|s| Path::new(s)),
llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| Path::new(s)),
src_base: opt_path(matches, "src-base"),
build_base: opt_path(matches, "build-base"),
aux_base: opt_path(matches, "aux-base"),
stage_id: matches.opt_str("stage-id").unwrap(),
mode: str_mode(matches.opt_str("mode").unwrap()),
run_ignored: matches.opt_present("ignored"),
filter:
            if !matches.free.is_empty() {
Some(matches.free[0].clone())
} else {
None
},
logfile: matches.opt_str("logfile").map(|s| Path::new(s)),
save_metrics: matches.opt_str("save-metrics").map(|s| Path::new(s)),
ratchet_metrics:
matches.opt_str("ratchet-metrics").map(|s| Path::new(s)),
ratchet_noise_percent:
matches.opt_str("ratchet-noise-percent").and_then(|s| from_str::<f64>(s)),
runtool: matches.opt_str("runtool"),
rustcflags: matches.opt_str("rustcflags"),
jit: matches.opt_present("jit"),
target: opt_str2(matches.opt_str("target")).to_str(),
adb_path: opt_str2(matches.opt_str("adb-path")).to_str(),
adb_test_dir:
opt_str2(matches.opt_str("adb-test-dir")).to_str(),
adb_device_status:
if (opt_str2(matches.opt_str("target")) ==
~"arm-linux-androideabi") {
if (opt_str2(matches.opt_str("adb-test-dir"))!=
~"(none)" &&
opt_str2(matches.opt_str("adb-test-dir"))!=
~"") { true }
else { false }
} else { false },
test_shard: test::opt_shard(matches.opt_str("test-shard")),
verbose: matches.opt_present("verbose")
}
}
pub fn log_config(config: &config) {
let c = config;
logv(c, format!("configuration:"));
logv(c, format!("compile_lib_path: {}", config.compile_lib_path));
logv(c, format!("run_lib_path: {}", config.run_lib_path));
logv(c, format!("rustc_path: {}", config.rustc_path.display()));
logv(c, format!("src_base: {}", config.src_base.display()));
logv(c, format!("build_base: {}", config.build_base.display()));
logv(c, format!("stage_id: {}", config.stage_id));
logv(c, format!("mode: {}", mode_str(config.mode)));
logv(c, format!("run_ignored: {}", config.run_ignored));
logv(c, format!("filter: {}", opt_str(&config.filter)));
logv(c, format!("runtool: {}", opt_str(&config.runtool)));
logv(c, format!("rustcflags: {}", opt_str(&config.rustcflags)));
logv(c, format!("jit: {}", config.jit));
logv(c, format!("target: {}", config.target));
logv(c, format!("adb_path: {}", config.adb_path));
logv(c, format!("adb_test_dir: {}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}", config.adb_device_status));
match config.test_shard {
None => logv(c, ~"test_shard: (all)"),
Some((a,b)) => logv(c, format!("test_shard: {}.{}", a, b))
}
logv(c, format!("verbose: {}", config.verbose));
logv(c, format!("\n"));
}
pub fn opt_str<'a>(maybestr: &'a Option<~str>) -> &'a str {
match *maybestr {
None => "(none)",
Some(ref s) => {
let s: &'a str = *s;
s
}
}
}
pub fn opt_str2(maybestr: Option<~str>) -> ~str {
match maybestr { None => ~"(none)", Some(s) => { s } }
}
pub fn str_mode(s: ~str) -> mode {
match s {
~"compile-fail" => mode_compile_fail,
~"run-fail" => mode_run_fail,
~"run-pass" => mode_run_pass,
~"pretty" => mode_pretty,
~"debug-info" => mode_debug_info,
~"codegen" => mode_codegen,
_ => fail!("invalid mode")
}
}
pub fn mode_str(mode: mode) -> ~str {
match mode {
mode_compile_fail => ~"compile-fail",
mode_run_fail => ~"run-fail",
mode_run_pass => ~"run-pass",
mode_pretty => ~"pretty",
mode_debug_info => ~"debug-info",
mode_codegen => ~"codegen",
}
}
pub fn run_tests(config: &config) {
if config.target == ~"arm-linux-androideabi" {
match config.mode {
mode_debug_info => {
println("arm-linux-androideabi debug-info \
test uses tcp 5039 port. please reserve it");
//arm-linux-androideabi debug-info test uses remote debugger
//so, we test 1 task at once
os::setenv("RUST_TEST_TASKS","1");
}
_ => {}
}
}
let opts = test_opts(config);
let tests = make_tests(config);
// sadly osx needs some file descriptor limits raised for running tests in
// parallel (especially when we have lots and lots of child processes).
// For context, see #8904
rt::test::prepare_for_lots_of_tests();
let res = test::run_tests_console(&opts, tests);
if !res { fail!("Some tests failed"); }
}
pub fn test_opts(config: &config) -> test::TestOpts {
test::TestOpts {
filter: config.filter.clone(),
run_ignored: config.run_ignored,
logfile: config.logfile.clone(),
run_tests: true,
run_benchmarks: true,
ratchet_metrics: config.ratchet_metrics.clone(),
ratchet_noise_percent: config.ratchet_noise_percent.clone(),
save_metrics: config.save_metrics.clone(),
test_shard: config.test_shard.clone()
}
}
pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
debug!("making tests from {}",
config.src_base.display());
let mut tests = ~[];
let dirs = fs::readdir(&config.src_base);
for file in dirs.iter() {
let file = file.clone();
debug!("inspecting file {}", file.display());
if is_test(config, &file) {
let t = make_test(config, &file, || {
match config.mode {
mode_codegen => make_metrics_test_closure(config, &file),
_ => make_test_closure(config, &file)
}
});
tests.push(t)
}
}
tests
}
pub fn is_test(config: &config, testfile: &Path) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
match config.mode {
mode_pretty => ~[~".rs"],
_ => ~[~".rc", ~".rs"]
};
let invalid_prefixes = ~[~".", ~"#", ~"~"];
let name = testfile.filename_str().unwrap();
let mut valid = false;
for ext in valid_extensions.iter() {
if name.ends_with(*ext) { valid = true; }
}
for pre in invalid_prefixes.iter() {
if name.starts_with(*pre) { valid = false; }
}
return valid;
}
pub fn make_test(config: &config, testfile: &Path, f: || -> test::TestFn)
-> test::TestDescAndFn {
test::TestDescAndFn {
desc: test::TestDesc {
name: make_test_name(config, testfile),
ignore: header::is_test_ignored(config, testfile),
should_fail: false
},
testfn: f(),
}
}
pub fn make_test_name(config: &config, testfile: &Path) -> test::TestName {
// Try to elide redundant long paths
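// For example (hypothetical path): "src/test/run-pass/foo.rs" is reported
// as "run-pass/foo.rs", i.e. the parent directory name plus the file name.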
fn shorten(path: &Path) -> ~str {
let filename = path.filename_str();
let p = path.dir_path();
let dir = p.filename_str();
format!("{}/{}", dir.unwrap_or(""), filename.unwrap_or(""))
}
test::DynTestName(format!("[{}] {}",
mode_str(config.mode),
shorten(testfile)))
}
pub fn make_test_closure(config: &config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
test::DynTestFn(proc() { runtest::run(config, testfile) })
}
pub fn make_metrics_test_closure(config: &config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
test::DynMetricFn(proc(mm) {
runtest::run_metrics(config, testfile, mm)
})
}
|
{
let groups : ~[getopts::groups::OptGroup] =
~[reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"),
reqopt("", "run-lib-path", "path to target shared libraries", "PATH"),
reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"),
optopt("", "clang-path", "path to executable for codegen tests", "PATH"),
optopt("", "llvm-bin-path", "path to directory holding llvm binaries", "DIR"),
reqopt("", "src-base", "directory to scan for test files", "PATH"),
reqopt("", "build-base", "directory to deposit test outputs", "PATH"),
reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"),
reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"),
reqopt("", "mode", "which sort of compile tests to run",
"(compile-fail|run-fail|run-pass|pretty|debug-info)"),
optflag("", "ignored", "run tests marked as ignored / xfailed"),
optopt("", "runtool", "supervisor program to run tests under \
(eg. emulator, valgrind)", "PROGRAM"),
optopt("", "rustcflags", "flags to pass to rustc", "FLAGS"),
optflag("", "verbose", "run tests verbosely, showing all output"),
optopt("", "logfile", "file to log test execution to", "FILE"),
|
identifier_body
|
compiletest.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[crate_type = "bin"];
#[allow(non_camel_case_types)];
#[deny(warnings)];
extern mod extra;
use std::os;
use std::rt;
use std::io::fs;
use extra::getopts;
use extra::getopts::groups::{optopt, optflag, reqopt};
use extra::test;
use common::config;
use common::mode_run_pass;
use common::mode_run_fail;
use common::mode_compile_fail;
use common::mode_pretty;
use common::mode_debug_info;
use common::mode_codegen;
use common::mode;
use util::logv;
pub mod procsrv;
pub mod util;
pub mod header;
pub mod runtest;
pub mod common;
pub mod errors;
pub fn main() {
let args = os::args();
let config = parse_config(args);
log_config(&config);
run_tests(&config);
}
pub fn parse_config(args: ~[~str]) -> config {
let groups : ~[getopts::groups::OptGroup] =
~[reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"),
reqopt("", "run-lib-path", "path to target shared libraries", "PATH"),
reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"),
optopt("", "clang-path", "path to executable for codegen tests", "PATH"),
optopt("", "llvm-bin-path", "path to directory holding llvm binaries", "DIR"),
reqopt("", "src-base", "directory to scan for test files", "PATH"),
reqopt("", "build-base", "directory to deposit test outputs", "PATH"),
reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"),
reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"),
reqopt("", "mode", "which sort of compile tests to run",
"(compile-fail|run-fail|run-pass|pretty|debug-info)"),
optflag("", "ignored", "run tests marked as ignored / xfailed"),
optopt("", "runtool", "supervisor program to run tests under \
(eg. emulator, valgrind)", "PROGRAM"),
optopt("", "rustcflags", "flags to pass to rustc", "FLAGS"),
optflag("", "verbose", "run tests verbosely, showing all output"),
optopt("", "logfile", "file to log test execution to", "FILE"),
optopt("", "save-metrics", "file to save metrics to", "FILE"),
optopt("", "ratchet-metrics", "file to ratchet metrics against", "FILE"),
optopt("", "ratchet-noise-percent",
"percent change in metrics to consider noise", "N"),
optflag("", "jit", "run tests under the JIT"),
optopt("", "target", "the target to build for", "TARGET"),
optopt("", "adb-path", "path to the android debugger", "PATH"),
optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite", "A.B"),
optflag("h", "help", "show this message"),
];
assert!(!args.is_empty());
let argv0 = args[0].clone();
let args_ = args.tail();
if args[1] == ~"-h" || args[1] == ~"--help" {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
let matches =
&match getopts::groups::getopts(args_, groups) {
Ok(m) => m,
Err(f) => fail!("{}", f.to_err_msg())
};
if matches.opt_present("h") || matches.opt_present("help") {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
fn opt_path(m: &getopts::Matches, nm: &str) -> Path {
Path::new(m.opt_str(nm).unwrap())
}
config {
compile_lib_path: matches.opt_str("compile-lib-path").unwrap(),
run_lib_path: matches.opt_str("run-lib-path").unwrap(),
rustc_path: opt_path(matches, "rustc-path"),
clang_path: matches.opt_str("clang-path").map(|s| Path::new(s)),
llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| Path::new(s)),
src_base: opt_path(matches, "src-base"),
build_base: opt_path(matches, "build-base"),
aux_base: opt_path(matches, "aux-base"),
stage_id: matches.opt_str("stage-id").unwrap(),
mode: str_mode(matches.opt_str("mode").unwrap()),
run_ignored: matches.opt_present("ignored"),
filter:
if !matches.free.is_empty() {
Some(matches.free[0].clone())
} else {
None
},
logfile: matches.opt_str("logfile").map(|s| Path::new(s)),
save_metrics: matches.opt_str("save-metrics").map(|s| Path::new(s)),
ratchet_metrics:
matches.opt_str("ratchet-metrics").map(|s| Path::new(s)),
ratchet_noise_percent:
matches.opt_str("ratchet-noise-percent").and_then(|s| from_str::<f64>(s)),
runtool: matches.opt_str("runtool"),
rustcflags: matches.opt_str("rustcflags"),
jit: matches.opt_present("jit"),
target: opt_str2(matches.opt_str("target")).to_str(),
adb_path: opt_str2(matches.opt_str("adb-path")).to_str(),
adb_test_dir:
opt_str2(matches.opt_str("adb-test-dir")).to_str(),
adb_device_status:
if (opt_str2(matches.opt_str("target")) ==
~"arm-linux-androideabi")
|
else { false },
test_shard: test::opt_shard(matches.opt_str("test-shard")),
verbose: matches.opt_present("verbose")
}
}
pub fn log_config(config: &config) {
let c = config;
logv(c, format!("configuration:"));
logv(c, format!("compile_lib_path: {}", config.compile_lib_path));
logv(c, format!("run_lib_path: {}", config.run_lib_path));
logv(c, format!("rustc_path: {}", config.rustc_path.display()));
logv(c, format!("src_base: {}", config.src_base.display()));
logv(c, format!("build_base: {}", config.build_base.display()));
logv(c, format!("stage_id: {}", config.stage_id));
logv(c, format!("mode: {}", mode_str(config.mode)));
logv(c, format!("run_ignored: {}", config.run_ignored));
logv(c, format!("filter: {}", opt_str(&config.filter)));
logv(c, format!("runtool: {}", opt_str(&config.runtool)));
logv(c, format!("rustcflags: {}", opt_str(&config.rustcflags)));
logv(c, format!("jit: {}", config.jit));
logv(c, format!("target: {}", config.target));
logv(c, format!("adb_path: {}", config.adb_path));
logv(c, format!("adb_test_dir: {}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}", config.adb_device_status));
match config.test_shard {
None => logv(c, ~"test_shard: (all)"),
Some((a,b)) => logv(c, format!("test_shard: {}.{}", a, b))
}
logv(c, format!("verbose: {}", config.verbose));
logv(c, format!("\n"));
}
pub fn opt_str<'a>(maybestr: &'a Option<~str>) -> &'a str {
match *maybestr {
None => "(none)",
Some(ref s) => {
let s: &'a str = *s;
s
}
}
}
pub fn opt_str2(maybestr: Option<~str>) -> ~str {
match maybestr { None => ~"(none)", Some(s) => { s } }
}
pub fn str_mode(s: ~str) -> mode {
match s {
~"compile-fail" => mode_compile_fail,
~"run-fail" => mode_run_fail,
~"run-pass" => mode_run_pass,
~"pretty" => mode_pretty,
~"debug-info" => mode_debug_info,
~"codegen" => mode_codegen,
_ => fail!("invalid mode")
}
}
pub fn mode_str(mode: mode) -> ~str {
match mode {
mode_compile_fail => ~"compile-fail",
mode_run_fail => ~"run-fail",
mode_run_pass => ~"run-pass",
mode_pretty => ~"pretty",
mode_debug_info => ~"debug-info",
mode_codegen => ~"codegen",
}
}
pub fn run_tests(config: &config) {
if config.target == ~"arm-linux-androideabi" {
match config.mode {
mode_debug_info => {
println("arm-linux-androideabi debug-info \
test uses tcp 5039 port. please reserve it");
//arm-linux-androideabi debug-info test uses remote debugger
//so, we test 1 task at once
os::setenv("RUST_TEST_TASKS","1");
}
_ => {}
}
}
let opts = test_opts(config);
let tests = make_tests(config);
// sadly osx needs some file descriptor limits raised for running tests in
// parallel (especially when we have lots and lots of child processes).
// For context, see #8904
rt::test::prepare_for_lots_of_tests();
let res = test::run_tests_console(&opts, tests);
if !res { fail!("Some tests failed"); }
}
pub fn test_opts(config: &config) -> test::TestOpts {
test::TestOpts {
filter: config.filter.clone(),
run_ignored: config.run_ignored,
logfile: config.logfile.clone(),
run_tests: true,
run_benchmarks: true,
ratchet_metrics: config.ratchet_metrics.clone(),
ratchet_noise_percent: config.ratchet_noise_percent.clone(),
save_metrics: config.save_metrics.clone(),
test_shard: config.test_shard.clone()
}
}
pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
debug!("making tests from {}",
config.src_base.display());
let mut tests = ~[];
let dirs = fs::readdir(&config.src_base);
for file in dirs.iter() {
let file = file.clone();
debug!("inspecting file {}", file.display());
if is_test(config, &file) {
let t = make_test(config, &file, || {
match config.mode {
mode_codegen => make_metrics_test_closure(config, &file),
_ => make_test_closure(config, &file)
}
});
tests.push(t)
}
}
tests
}
pub fn is_test(config: &config, testfile: &Path) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
match config.mode {
mode_pretty => ~[~".rs"],
_ => ~[~".rc", ~".rs"]
};
let invalid_prefixes = ~[~".", ~"#", ~"~"];
let name = testfile.filename_str().unwrap();
let mut valid = false;
for ext in valid_extensions.iter() {
if name.ends_with(*ext) { valid = true; }
}
for pre in invalid_prefixes.iter() {
if name.starts_with(*pre) { valid = false; }
}
return valid;
}
pub fn make_test(config: &config, testfile: &Path, f: || -> test::TestFn)
-> test::TestDescAndFn {
test::TestDescAndFn {
desc: test::TestDesc {
name: make_test_name(config, testfile),
ignore: header::is_test_ignored(config, testfile),
should_fail: false
},
testfn: f(),
}
}
pub fn make_test_name(config: &config, testfile: &Path) -> test::TestName {
// Try to elide redundant long paths
fn shorten(path: &Path) -> ~str {
let filename = path.filename_str();
let p = path.dir_path();
let dir = p.filename_str();
format!("{}/{}", dir.unwrap_or(""), filename.unwrap_or(""))
}
test::DynTestName(format!("[{}] {}",
mode_str(config.mode),
shorten(testfile)))
}
pub fn make_test_closure(config: &config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
test::DynTestFn(proc() { runtest::run(config, testfile) })
}
pub fn make_metrics_test_closure(config: &config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
test::DynMetricFn(proc(mm) {
runtest::run_metrics(config, testfile, mm)
})
}
|
{
if (opt_str2(matches.opt_str("adb-test-dir")) !=
~"(none)" &&
opt_str2(matches.opt_str("adb-test-dir")) !=
~"") { true }
else { false }
}
|
conditional_block
|
compiletest.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[crate_type = "bin"];
#[allow(non_camel_case_types)];
#[deny(warnings)];
extern mod extra;
use std::os;
use std::rt;
use std::io::fs;
use extra::getopts;
use extra::getopts::groups::{optopt, optflag, reqopt};
use extra::test;
use common::config;
use common::mode_run_pass;
use common::mode_run_fail;
use common::mode_compile_fail;
use common::mode_pretty;
use common::mode_debug_info;
use common::mode_codegen;
use common::mode;
use util::logv;
pub mod procsrv;
pub mod util;
pub mod header;
pub mod runtest;
pub mod common;
pub mod errors;
pub fn main() {
let args = os::args();
let config = parse_config(args);
log_config(&config);
run_tests(&config);
}
pub fn parse_config(args: ~[~str]) -> config {
let groups : ~[getopts::groups::OptGroup] =
~[reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"),
reqopt("", "run-lib-path", "path to target shared libraries", "PATH"),
reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"),
optopt("", "clang-path", "path to executable for codegen tests", "PATH"),
optopt("", "llvm-bin-path", "path to directory holding llvm binaries", "DIR"),
reqopt("", "src-base", "directory to scan for test files", "PATH"),
reqopt("", "build-base", "directory to deposit test outputs", "PATH"),
reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"),
reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"),
reqopt("", "mode", "which sort of compile tests to run",
"(compile-fail|run-fail|run-pass|pretty|debug-info)"),
optflag("", "ignored", "run tests marked as ignored / xfailed"),
optopt("", "runtool", "supervisor program to run tests under \
(eg. emulator, valgrind)", "PROGRAM"),
optopt("", "rustcflags", "flags to pass to rustc", "FLAGS"),
optflag("", "verbose", "run tests verbosely, showing all output"),
optopt("", "logfile", "file to log test execution to", "FILE"),
optopt("", "save-metrics", "file to save metrics to", "FILE"),
optopt("", "ratchet-metrics", "file to ratchet metrics against", "FILE"),
optopt("", "ratchet-noise-percent",
"percent change in metrics to consider noise", "N"),
optflag("", "jit", "run tests under the JIT"),
optopt("", "target", "the target to build for", "TARGET"),
optopt("", "adb-path", "path to the android debugger", "PATH"),
optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite", "A.B"),
optflag("h", "help", "show this message"),
];
assert!(!args.is_empty());
let argv0 = args[0].clone();
let args_ = args.tail();
if args[1] == ~"-h" || args[1] == ~"--help" {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
let matches =
&match getopts::groups::getopts(args_, groups) {
Ok(m) => m,
Err(f) => fail!("{}", f.to_err_msg())
};
if matches.opt_present("h") || matches.opt_present("help") {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
fn opt_path(m: &getopts::Matches, nm: &str) -> Path {
Path::new(m.opt_str(nm).unwrap())
}
config {
compile_lib_path: matches.opt_str("compile-lib-path").unwrap(),
run_lib_path: matches.opt_str("run-lib-path").unwrap(),
rustc_path: opt_path(matches, "rustc-path"),
clang_path: matches.opt_str("clang-path").map(|s| Path::new(s)),
llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| Path::new(s)),
src_base: opt_path(matches, "src-base"),
build_base: opt_path(matches, "build-base"),
aux_base: opt_path(matches, "aux-base"),
stage_id: matches.opt_str("stage-id").unwrap(),
mode: str_mode(matches.opt_str("mode").unwrap()),
run_ignored: matches.opt_present("ignored"),
filter:
if !matches.free.is_empty() {
Some(matches.free[0].clone())
} else {
None
},
logfile: matches.opt_str("logfile").map(|s| Path::new(s)),
save_metrics: matches.opt_str("save-metrics").map(|s| Path::new(s)),
ratchet_metrics:
matches.opt_str("ratchet-metrics").map(|s| Path::new(s)),
ratchet_noise_percent:
matches.opt_str("ratchet-noise-percent").and_then(|s| from_str::<f64>(s)),
runtool: matches.opt_str("runtool"),
rustcflags: matches.opt_str("rustcflags"),
jit: matches.opt_present("jit"),
target: opt_str2(matches.opt_str("target")).to_str(),
adb_path: opt_str2(matches.opt_str("adb-path")).to_str(),
adb_test_dir:
opt_str2(matches.opt_str("adb-test-dir")).to_str(),
adb_device_status:
if (opt_str2(matches.opt_str("target")) ==
~"arm-linux-androideabi") {
if (opt_str2(matches.opt_str("adb-test-dir")) !=
~"(none)" &&
opt_str2(matches.opt_str("adb-test-dir")) !=
~"") { true }
else { false }
} else { false },
test_shard: test::opt_shard(matches.opt_str("test-shard")),
verbose: matches.opt_present("verbose")
}
}
pub fn log_config(config: &config) {
let c = config;
logv(c, format!("configuration:"));
logv(c, format!("compile_lib_path: {}", config.compile_lib_path));
logv(c, format!("run_lib_path: {}", config.run_lib_path));
logv(c, format!("rustc_path: {}", config.rustc_path.display()));
logv(c, format!("src_base: {}", config.src_base.display()));
logv(c, format!("build_base: {}", config.build_base.display()));
logv(c, format!("stage_id: {}", config.stage_id));
logv(c, format!("mode: {}", mode_str(config.mode)));
logv(c, format!("run_ignored: {}", config.run_ignored));
logv(c, format!("filter: {}", opt_str(&config.filter)));
logv(c, format!("runtool: {}", opt_str(&config.runtool)));
logv(c, format!("rustcflags: {}", opt_str(&config.rustcflags)));
logv(c, format!("jit: {}", config.jit));
logv(c, format!("target: {}", config.target));
logv(c, format!("adb_path: {}", config.adb_path));
logv(c, format!("adb_test_dir: {}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}", config.adb_device_status));
match config.test_shard {
None => logv(c, ~"test_shard: (all)"),
Some((a,b)) => logv(c, format!("test_shard: {}.{}", a, b))
}
logv(c, format!("verbose: {}", config.verbose));
logv(c, format!("\n"));
}
pub fn opt_str<'a>(maybestr: &'a Option<~str>) -> &'a str {
match *maybestr {
None => "(none)",
Some(ref s) => {
let s: &'a str = *s;
s
}
}
}
pub fn opt_str2(maybestr: Option<~str>) -> ~str {
match maybestr { None => ~"(none)", Some(s) => { s } }
}
pub fn str_mode(s: ~str) -> mode {
match s {
~"compile-fail" => mode_compile_fail,
~"run-fail" => mode_run_fail,
~"run-pass" => mode_run_pass,
~"pretty" => mode_pretty,
~"debug-info" => mode_debug_info,
~"codegen" => mode_codegen,
_ => fail!("invalid mode")
}
}
pub fn mode_str(mode: mode) -> ~str {
match mode {
mode_compile_fail => ~"compile-fail",
mode_run_fail => ~"run-fail",
mode_run_pass => ~"run-pass",
mode_pretty => ~"pretty",
mode_debug_info => ~"debug-info",
mode_codegen => ~"codegen",
}
}
pub fn run_tests(config: &config) {
if config.target == ~"arm-linux-androideabi" {
match config.mode {
mode_debug_info => {
println("arm-linux-androideabi debug-info \
test uses tcp 5039 port. please reserve it");
//arm-linux-androideabi debug-info test uses remote debugger
//so, we test 1 task at once
os::setenv("RUST_TEST_TASKS","1");
}
_ => {}
}
}
let opts = test_opts(config);
let tests = make_tests(config);
// sadly osx needs some file descriptor limits raised for running tests in
// parallel (especially when we have lots and lots of child processes).
// For context, see #8904
rt::test::prepare_for_lots_of_tests();
let res = test::run_tests_console(&opts, tests);
if !res { fail!("Some tests failed"); }
}
pub fn test_opts(config: &config) -> test::TestOpts {
test::TestOpts {
filter: config.filter.clone(),
run_ignored: config.run_ignored,
logfile: config.logfile.clone(),
run_tests: true,
run_benchmarks: true,
ratchet_metrics: config.ratchet_metrics.clone(),
ratchet_noise_percent: config.ratchet_noise_percent.clone(),
save_metrics: config.save_metrics.clone(),
test_shard: config.test_shard.clone()
}
}
pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
debug!("making tests from {}",
config.src_base.display());
let mut tests = ~[];
let dirs = fs::readdir(&config.src_base);
for file in dirs.iter() {
let file = file.clone();
debug!("inspecting file {}", file.display());
if is_test(config, &file) {
let t = make_test(config, &file, || {
match config.mode {
mode_codegen => make_metrics_test_closure(config, &file),
_ => make_test_closure(config, &file)
}
});
tests.push(t)
}
}
tests
}
pub fn is_test(config: &config, testfile: &Path) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
match config.mode {
mode_pretty => ~[~".rs"],
_ => ~[~".rc", ~".rs"]
};
let invalid_prefixes = ~[~".", ~"#", ~"~"];
let name = testfile.filename_str().unwrap();
let mut valid = false;
for ext in valid_extensions.iter() {
if name.ends_with(*ext) { valid = true; }
}
for pre in invalid_prefixes.iter() {
if name.starts_with(*pre) { valid = false; }
}
return valid;
}
pub fn make_test(config: &config, testfile: &Path, f: || -> test::TestFn)
-> test::TestDescAndFn {
test::TestDescAndFn {
desc: test::TestDesc {
name: make_test_name(config, testfile),
ignore: header::is_test_ignored(config, testfile),
should_fail: false
},
testfn: f(),
}
}
pub fn make_test_name(config: &config, testfile: &Path) -> test::TestName {
// Try to elide redundant long paths
fn shorten(path: &Path) -> ~str {
let filename = path.filename_str();
let p = path.dir_path();
let dir = p.filename_str();
format!("{}/{}", dir.unwrap_or(""), filename.unwrap_or(""))
}
test::DynTestName(format!("[{}] {}",
mode_str(config.mode),
shorten(testfile)))
}
pub fn make_test_closure(config: &config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
|
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
test::DynMetricFn(proc(mm) {
runtest::run_metrics(config, testfile, mm)
})
}
|
test::DynTestFn(proc() { runtest::run(config, testfile) })
}
pub fn make_metrics_test_closure(config: &config, testfile: &Path) -> test::TestFn {
|
random_line_split
|
compiletest.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[crate_type = "bin"];
#[allow(non_camel_case_types)];
#[deny(warnings)];
extern mod extra;
use std::os;
use std::rt;
use std::io::fs;
use extra::getopts;
use extra::getopts::groups::{optopt, optflag, reqopt};
use extra::test;
use common::config;
use common::mode_run_pass;
use common::mode_run_fail;
use common::mode_compile_fail;
use common::mode_pretty;
use common::mode_debug_info;
use common::mode_codegen;
use common::mode;
use util::logv;
pub mod procsrv;
pub mod util;
pub mod header;
pub mod runtest;
pub mod common;
pub mod errors;
pub fn main() {
let args = os::args();
let config = parse_config(args);
log_config(&config);
run_tests(&config);
}
pub fn parse_config(args: ~[~str]) -> config {
let groups : ~[getopts::groups::OptGroup] =
~[reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"),
reqopt("", "run-lib-path", "path to target shared libraries", "PATH"),
reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"),
optopt("", "clang-path", "path to executable for codegen tests", "PATH"),
optopt("", "llvm-bin-path", "path to directory holding llvm binaries", "DIR"),
reqopt("", "src-base", "directory to scan for test files", "PATH"),
reqopt("", "build-base", "directory to deposit test outputs", "PATH"),
reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"),
reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"),
reqopt("", "mode", "which sort of compile tests to run",
"(compile-fail|run-fail|run-pass|pretty|debug-info)"),
optflag("", "ignored", "run tests marked as ignored / xfailed"),
optopt("", "runtool", "supervisor program to run tests under \
(eg. emulator, valgrind)", "PROGRAM"),
optopt("", "rustcflags", "flags to pass to rustc", "FLAGS"),
optflag("", "verbose", "run tests verbosely, showing all output"),
optopt("", "logfile", "file to log test execution to", "FILE"),
optopt("", "save-metrics", "file to save metrics to", "FILE"),
optopt("", "ratchet-metrics", "file to ratchet metrics against", "FILE"),
optopt("", "ratchet-noise-percent",
"percent change in metrics to consider noise", "N"),
optflag("", "jit", "run tests under the JIT"),
optopt("", "target", "the target to build for", "TARGET"),
optopt("", "adb-path", "path to the android debugger", "PATH"),
optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite", "A.B"),
optflag("h", "help", "show this message"),
];
assert!(!args.is_empty());
let argv0 = args[0].clone();
let args_ = args.tail();
if args[1] == ~"-h" || args[1] == ~"--help" {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
let matches =
&match getopts::groups::getopts(args_, groups) {
Ok(m) => m,
Err(f) => fail!("{}", f.to_err_msg())
};
if matches.opt_present("h") || matches.opt_present("help") {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println(getopts::groups::usage(message, groups));
println("");
fail!()
}
fn opt_path(m: &getopts::Matches, nm: &str) -> Path {
Path::new(m.opt_str(nm).unwrap())
}
config {
compile_lib_path: matches.opt_str("compile-lib-path").unwrap(),
run_lib_path: matches.opt_str("run-lib-path").unwrap(),
rustc_path: opt_path(matches, "rustc-path"),
clang_path: matches.opt_str("clang-path").map(|s| Path::new(s)),
llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| Path::new(s)),
src_base: opt_path(matches, "src-base"),
build_base: opt_path(matches, "build-base"),
aux_base: opt_path(matches, "aux-base"),
stage_id: matches.opt_str("stage-id").unwrap(),
mode: str_mode(matches.opt_str("mode").unwrap()),
run_ignored: matches.opt_present("ignored"),
filter:
if !matches.free.is_empty() {
Some(matches.free[0].clone())
} else {
None
},
logfile: matches.opt_str("logfile").map(|s| Path::new(s)),
save_metrics: matches.opt_str("save-metrics").map(|s| Path::new(s)),
ratchet_metrics:
matches.opt_str("ratchet-metrics").map(|s| Path::new(s)),
ratchet_noise_percent:
matches.opt_str("ratchet-noise-percent").and_then(|s| from_str::<f64>(s)),
runtool: matches.opt_str("runtool"),
rustcflags: matches.opt_str("rustcflags"),
jit: matches.opt_present("jit"),
target: opt_str2(matches.opt_str("target")).to_str(),
adb_path: opt_str2(matches.opt_str("adb-path")).to_str(),
adb_test_dir:
opt_str2(matches.opt_str("adb-test-dir")).to_str(),
adb_device_status:
if (opt_str2(matches.opt_str("target")) ==
~"arm-linux-androideabi") {
if (opt_str2(matches.opt_str("adb-test-dir")) !=
~"(none)" &&
opt_str2(matches.opt_str("adb-test-dir")) !=
~"") { true }
else { false }
} else { false },
test_shard: test::opt_shard(matches.opt_str("test-shard")),
verbose: matches.opt_present("verbose")
}
}
pub fn
|
(config: &config) {
let c = config;
logv(c, format!("configuration:"));
logv(c, format!("compile_lib_path: {}", config.compile_lib_path));
logv(c, format!("run_lib_path: {}", config.run_lib_path));
logv(c, format!("rustc_path: {}", config.rustc_path.display()));
logv(c, format!("src_base: {}", config.src_base.display()));
logv(c, format!("build_base: {}", config.build_base.display()));
logv(c, format!("stage_id: {}", config.stage_id));
logv(c, format!("mode: {}", mode_str(config.mode)));
logv(c, format!("run_ignored: {}", config.run_ignored));
logv(c, format!("filter: {}", opt_str(&config.filter)));
logv(c, format!("runtool: {}", opt_str(&config.runtool)));
logv(c, format!("rustcflags: {}", opt_str(&config.rustcflags)));
logv(c, format!("jit: {}", config.jit));
logv(c, format!("target: {}", config.target));
logv(c, format!("adb_path: {}", config.adb_path));
logv(c, format!("adb_test_dir: {}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}", config.adb_device_status));
match config.test_shard {
None => logv(c, ~"test_shard: (all)"),
Some((a,b)) => logv(c, format!("test_shard: {}.{}", a, b))
}
logv(c, format!("verbose: {}", config.verbose));
logv(c, format!("\n"));
}
pub fn opt_str<'a>(maybestr: &'a Option<~str>) -> &'a str {
match *maybestr {
None => "(none)",
Some(ref s) => {
let s: &'a str = *s;
s
}
}
}
pub fn opt_str2(maybestr: Option<~str>) -> ~str {
match maybestr { None => ~"(none)", Some(s) => { s } }
}
pub fn str_mode(s: ~str) -> mode {
match s {
~"compile-fail" => mode_compile_fail,
~"run-fail" => mode_run_fail,
~"run-pass" => mode_run_pass,
~"pretty" => mode_pretty,
~"debug-info" => mode_debug_info,
~"codegen" => mode_codegen,
_ => fail!("invalid mode")
}
}
pub fn mode_str(mode: mode) -> ~str {
match mode {
mode_compile_fail => ~"compile-fail",
mode_run_fail => ~"run-fail",
mode_run_pass => ~"run-pass",
mode_pretty => ~"pretty",
mode_debug_info => ~"debug-info",
mode_codegen => ~"codegen",
}
}
pub fn run_tests(config: &config) {
if config.target == ~"arm-linux-androideabi" {
match config.mode {
mode_debug_info => {
println("arm-linux-androideabi debug-info \
test uses tcp 5039 port. please reserve it");
//arm-linux-androideabi debug-info test uses remote debugger
//so, we test 1 task at once
os::setenv("RUST_TEST_TASKS","1");
}
_ => {}
}
}
let opts = test_opts(config);
let tests = make_tests(config);
// sadly osx needs some file descriptor limits raised for running tests in
// parallel (especially when we have lots and lots of child processes).
// For context, see #8904
rt::test::prepare_for_lots_of_tests();
let res = test::run_tests_console(&opts, tests);
if !res { fail!("Some tests failed"); }
}
pub fn test_opts(config: &config) -> test::TestOpts {
test::TestOpts {
filter: config.filter.clone(),
run_ignored: config.run_ignored,
logfile: config.logfile.clone(),
run_tests: true,
run_benchmarks: true,
ratchet_metrics: config.ratchet_metrics.clone(),
ratchet_noise_percent: config.ratchet_noise_percent.clone(),
save_metrics: config.save_metrics.clone(),
test_shard: config.test_shard.clone()
}
}
pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
debug!("making tests from {}",
config.src_base.display());
let mut tests = ~[];
let dirs = fs::readdir(&config.src_base);
for file in dirs.iter() {
let file = file.clone();
debug!("inspecting file {}", file.display());
if is_test(config, &file) {
let t = make_test(config, &file, || {
match config.mode {
mode_codegen => make_metrics_test_closure(config, &file),
_ => make_test_closure(config, &file)
}
});
tests.push(t)
}
}
tests
}
pub fn is_test(config: &config, testfile: &Path) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
match config.mode {
mode_pretty => ~[~".rs"],
_ => ~[~".rc", ~".rs"]
};
let invalid_prefixes = ~[~".", ~"#", ~"~"];
let name = testfile.filename_str().unwrap();
let mut valid = false;
for ext in valid_extensions.iter() {
if name.ends_with(*ext) { valid = true; }
}
for pre in invalid_prefixes.iter() {
if name.starts_with(*pre) { valid = false; }
}
return valid;
}
pub fn make_test(config: &config, testfile: &Path, f: || -> test::TestFn)
-> test::TestDescAndFn {
test::TestDescAndFn {
desc: test::TestDesc {
name: make_test_name(config, testfile),
ignore: header::is_test_ignored(config, testfile),
should_fail: false
},
testfn: f(),
}
}
pub fn make_test_name(config: &config, testfile: &Path) -> test::TestName {
// Try to elide redundant long paths
fn shorten(path: &Path) -> ~str {
let filename = path.filename_str();
let p = path.dir_path();
let dir = p.filename_str();
format!("{}/{}", dir.unwrap_or(""), filename.unwrap_or(""))
}
test::DynTestName(format!("[{}] {}",
mode_str(config.mode),
shorten(testfile)))
}
pub fn make_test_closure(config: &config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
test::DynTestFn(proc() { runtest::run(config, testfile) })
}
pub fn make_metrics_test_closure(config: &config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_owned();
test::DynMetricFn(proc(mm) {
runtest::run_metrics(config, testfile, mm)
})
}
|
log_config
|
identifier_name
|
uploader.rs
|
// External Dependencies ------------------------------------------------------
use diesel;
use diesel::prelude::*;
// Internal Dependencies ------------------------------------------------------
use super::super::Server;
use ::db::models::User;
use ::db::schema::users::dsl::{server_id, nickname as user_nickname, is_uploader};
use ::db::schema::users::table as userTable;
// Server Uploader Interface --------------------------------------------------
impl Server {
pub fn list_uploaders(&self) -> Vec<User> {
userTable.filter(
server_id.eq(&self.config.table_id)
).filter(is_uploader.eq(true))
.order(user_nickname)
.load::<User>(&self.config.connection)
.unwrap_or_else(|_| vec![])
}
pub fn add_uploader(&mut self, nickname: &str) -> bool {
::db::create_user_if_not_exists(&self.config, nickname).ok();
self.update_upload_user(nickname, true)
}
pub fn remove_uploader(&mut self, nickname: &str) -> bool {
self.update_upload_user(nickname, false)
}
fn update_upload_user(&self, nickname: &str, set_uploader: bool) -> bool {
if ::db::user_exists(&self.config, nickname) {
diesel::update(
userTable.filter(
server_id.eq(&self.config.table_id)
).filter(
user_nickname.eq(nickname)
)
).set(is_uploader.eq(set_uploader)).execute(
&self.config.connection
).ok();
true
} else
|
}
}
|
{
false
}
|
conditional_block
|
uploader.rs
|
// External Dependencies ------------------------------------------------------
use diesel;
use diesel::prelude::*;
// Internal Dependencies ------------------------------------------------------
use super::super::Server;
use ::db::models::User;
use ::db::schema::users::dsl::{server_id, nickname as user_nickname, is_uploader};
use ::db::schema::users::table as userTable;
// Server Uploader Interface --------------------------------------------------
impl Server {
pub fn list_uploaders(&self) -> Vec<User> {
userTable.filter(
server_id.eq(&self.config.table_id)
|
.load::<User>(&self.config.connection)
.unwrap_or_else(|_| vec![])
}
pub fn add_uploader(&mut self, nickname: &str) -> bool {
::db::create_user_if_not_exists(&self.config, nickname).ok();
self.update_upload_user(nickname, true)
}
pub fn remove_uploader(&mut self, nickname: &str) -> bool {
self.update_upload_user(nickname, false)
}
fn update_upload_user(&self, nickname: &str, set_uploader: bool) -> bool {
if ::db::user_exists(&self.config, nickname) {
diesel::update(
userTable.filter(
server_id.eq(&self.config.table_id)
).filter(
user_nickname.eq(nickname)
)
).set(is_uploader.eq(set_uploader)).execute(
&self.config.connection
).ok();
true
} else {
false
}
}
}
|
).filter(is_uploader.eq(true))
.order(user_nickname)
|
random_line_split
|
uploader.rs
|
// External Dependencies ------------------------------------------------------
use diesel;
use diesel::prelude::*;
// Internal Dependencies ------------------------------------------------------
use super::super::Server;
use ::db::models::User;
use ::db::schema::users::dsl::{server_id, nickname as user_nickname, is_uploader};
use ::db::schema::users::table as userTable;
// Server Uploader Interface --------------------------------------------------
impl Server {
pub fn list_uploaders(&self) -> Vec<User> {
userTable.filter(
server_id.eq(&self.config.table_id)
).filter(is_uploader.eq(true))
.order(user_nickname)
.load::<User>(&self.config.connection)
.unwrap_or_else(|_| vec![])
}
pub fn add_uploader(&mut self, nickname: &str) -> bool {
::db::create_user_if_not_exists(&self.config, nickname).ok();
self.update_upload_user(nickname, true)
}
pub fn remove_uploader(&mut self, nickname: &str) -> bool {
self.update_upload_user(nickname, false)
}
fn update_upload_user(&self, nickname: &str, set_uploader: bool) -> bool
|
}
|
{
if ::db::user_exists(&self.config, nickname) {
diesel::update(
userTable.filter(
server_id.eq(&self.config.table_id)
).filter(
user_nickname.eq(nickname)
)
).set(is_uploader.eq(set_uploader)).execute(
&self.config.connection
).ok();
true
} else {
false
}
}
|
identifier_body
|
uploader.rs
|
// External Dependencies ------------------------------------------------------
use diesel;
use diesel::prelude::*;
// Internal Dependencies ------------------------------------------------------
use super::super::Server;
use ::db::models::User;
use ::db::schema::users::dsl::{server_id, nickname as user_nickname, is_uploader};
use ::db::schema::users::table as userTable;
// Server Uploader Interface --------------------------------------------------
impl Server {
pub fn list_uploaders(&self) -> Vec<User> {
userTable.filter(
server_id.eq(&self.config.table_id)
).filter(is_uploader.eq(true))
.order(user_nickname)
.load::<User>(&self.config.connection)
.unwrap_or_else(|_| vec![])
}
pub fn
|
(&mut self, nickname: &str) -> bool {
::db::create_user_if_not_exists(&self.config, nickname).ok();
self.update_upload_user(nickname, true)
}
pub fn remove_uploader(&mut self, nickname: &str) -> bool {
self.update_upload_user(nickname, false)
}
fn update_upload_user(&self, nickname: &str, set_uploader: bool) -> bool {
if ::db::user_exists(&self.config, nickname) {
diesel::update(
userTable.filter(
server_id.eq(&self.config.table_id)
).filter(
user_nickname.eq(nickname)
)
).set(is_uploader.eq(set_uploader)).execute(
&self.config.connection
).ok();
true
} else {
false
}
}
}
|
add_uploader
|
identifier_name
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This traversal is based on Rayon, and therefore its safety is largely
//! verified by the type system.
//!
//! The primary trickiness and fine print for the above relates to the
//! thread safety of the DOM nodes themselves. Accessing a DOM element
//! concurrently on multiple threads is actually mostly "safe", since all
//! the mutable state is protected by an AtomicRefCell, and so we'll
//! generally panic if something goes wrong. Still, we try to enforce our
//! thread invariants at compile time whenever possible. As such, TNode and
//! TElement are not Send, so ordinary style system code cannot accidentally
//! share them with other threads. In the parallel traversal, we explicitly
//! invoke |unsafe { SendNode::new(n) }| to put nodes in containers that may
//! be sent to other threads. This occurs in only a handful of places and is
//! easy to grep for. At the time of this writing, there is no other unsafe
//! code in the parallel traversal.
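// A minimal sketch (hypothetical names, not the real SendNode definition) of
// the wrapper pattern described above: a newtype that unsafely asserts `Send`
// and can only be built at an explicit `unsafe` call site, keeping every
// cross-thread hand-off of a node easy to audit:
//
//     struct SendPtr<T>(*const T);
//     unsafe impl<T> Send for SendPtr<T> {}
//     impl<T> SendPtr<T> {
//         unsafe fn new(p: *const T) -> Self { SendPtr(p) }
//     }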
#![deny(missing_docs)]
use arrayvec::ArrayVec;
use context::{StyleContext, ThreadLocalStyleContext, TraversalStatistics};
use dom::{OpaqueNode, SendNode, TElement, TNode};
use rayon;
use scoped_tls::ScopedTLS;
use smallvec::SmallVec;
use std::borrow::Borrow;
use time;
use traversal::{DomTraversal, PerLevelTraversalData, PreTraverseToken};
/// The maximum number of child nodes that we will process as a single unit.
///
/// Larger values will increase style sharing cache hits and general DOM locality
/// at the expense of decreased opportunities for parallelism. This value has not
/// been measured and could potentially be tuned.
pub const WORK_UNIT_MAX: usize = 16;
/// A set of nodes, sized to the work unit. This gets copied when sent to other
/// threads, so we keep it compact.
type WorkUnit<N> = ArrayVec<[SendNode<N>; WORK_UNIT_MAX]>;
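// Illustrative example (numbers are hypothetical): with WORK_UNIT_MAX = 16, a
// sibling list of 40 discovered nodes is dispatched as three units of 16, 16
// and 8 nodes via `nodes.chunks(WORK_UNIT_MAX)` in `traverse_nodes` below.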
/// Entry point for the parallel traversal.
#[allow(unsafe_code)]
pub fn traverse_dom<E, D>(traversal: &D,
root: E,
token: PreTraverseToken,
pool: &rayon::ThreadPool)
where E: TElement,
D: DomTraversal<E>,
{
let dump_stats = traversal.shared_context().options.dump_style_statistics;
let start_time = if dump_stats { Some(time::precise_time_s()) } else { None };
// Set up the SmallVec. We need to move this, and in most cases this is just
// one node, so keep it small.
let mut nodes = SmallVec::<[SendNode<E::ConcreteNode>; 8]>::new();
debug_assert!(traversal.is_parallel());
// Handle Gecko's eager initial styling. We don't currently support it
// in conjunction with bottom-up traversal. If we did, we'd need to put
// it on the context to make it available to the bottom-up phase.
let depth = if token.traverse_unstyled_children_only() {
debug_assert!(!D::needs_postorder_traversal());
for kid in root.as_node().traversal_children() {
if kid.as_element().map_or(false, |el| el.get_data().is_none()) {
nodes.push(unsafe { SendNode::new(kid) });
}
}
root.depth() + 1
} else {
nodes.push(unsafe { SendNode::new(root.as_node()) });
root.depth()
};
if nodes.is_empty() {
return;
}
let traversal_data = PerLevelTraversalData {
current_dom_depth: depth,
};
let tls = ScopedTLS::<ThreadLocalStyleContext<E>>::new(pool);
let root = root.as_node().opaque();
pool.install(|| {
rayon::scope(|scope| {
let nodes = nodes;
traverse_nodes(&*nodes,
DispatchMode::TailCall,
0,
root,
traversal_data,
scope,
pool,
traversal,
&tls);
});
});
// Dump statistics to stdout if requested.
if dump_stats
|
}
/// A callback to create our thread local context. This needs to be
/// out of line so we don't allocate stack space for the entire struct
/// in the caller.
#[inline(never)]
fn create_thread_local_context<'scope, E, D>(
traversal: &'scope D,
slot: &mut Option<ThreadLocalStyleContext<E>>)
where E: TElement + 'scope,
D: DomTraversal<E>
{
*slot = Some(ThreadLocalStyleContext::new(traversal.shared_context()));
}
/// A parallel top-down DOM traversal.
///
/// This algorithm traverses the DOM in a breadth-first, top-down manner. The
/// goals are:
/// * Never process a child before its parent (since child style depends on
/// parent style). If this were to happen, the styling algorithm would panic.
/// * Prioritize discovering nodes as quickly as possible to maximize
/// opportunities for parallelism.
/// * Style all the children of a given node (i.e. all sibling nodes) on
/// a single thread (with an upper bound to handle nodes with an
/// abnormally large number of children). This is important because we use
/// a thread-local cache to share styles between siblings.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a, 'scope, E, D>(nodes: &'a [SendNode<E::ConcreteNode>],
recursion_depth: usize,
root: OpaqueNode,
mut traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
where E: TElement + 'scope,
D: DomTraversal<E>,
{
debug_assert!(nodes.len() <= WORK_UNIT_MAX);
// Collect all the children of the elements in our work unit. This will
// contain the combined children of up to WORK_UNIT_MAX nodes, which may
// be numerous. As such, we store it in a large SmallVec to minimize heap-
// spilling, and never move it.
let mut discovered_child_nodes = SmallVec::<[SendNode<E::ConcreteNode>; 128]>::new();
{
// Scope the borrow of the TLS so that the borrow is dropped before
// a potential recursive call when we pass TailCall.
let mut tlc = tls.ensure(
|slot: &mut Option<ThreadLocalStyleContext<E>>| create_thread_local_context(traversal, slot));
let mut context = StyleContext {
shared: traversal.shared_context(),
thread_local: &mut *tlc,
};
for n in nodes {
// If the last node we processed produced children, spawn them off
// into a work item. We do this at the beginning of the loop (rather
// than at the end) so that we can traverse the children of the last
// sibling directly on this thread without a spawn call.
//
// This has the important effect of removing the allocation and
// context-switching overhead of the parallel traversal for perfectly
// linear regions of the DOM, i.e.:
//
// <russian><doll><tag><nesting></nesting></tag></doll></russian>
//
// Which are not at all uncommon.
if !discovered_child_nodes.is_empty() {
let mut traversal_data_copy = traversal_data.clone();
traversal_data_copy.current_dom_depth += 1;
traverse_nodes(&*discovered_child_nodes,
DispatchMode::NotTailCall,
recursion_depth,
root,
traversal_data_copy,
scope,
pool,
traversal,
tls);
discovered_child_nodes.clear();
}
let node = **n;
let mut children_to_process = 0isize;
traversal.process_preorder(&traversal_data, &mut context, node, |n| {
children_to_process += 1;
let send_n = unsafe { SendNode::new(n) };
discovered_child_nodes.push(send_n);
});
traversal.handle_postorder_traversal(&mut context, root, node,
children_to_process);
}
}
// Handle the children of the last element in this work unit. If any exist,
// we can process them (or at least one work unit's worth of them) directly
// on this thread by passing TailCall.
if !discovered_child_nodes.is_empty() {
traversal_data.current_dom_depth += 1;
traverse_nodes(&discovered_child_nodes,
DispatchMode::TailCall,
recursion_depth,
root,
traversal_data,
scope,
pool,
traversal,
tls);
}
}
/// Controls whether traverse_nodes may make a recursive call to continue
/// doing work, or whether it should always dispatch work asynchronously.
#[derive(Clone, Copy, PartialEq)]
enum DispatchMode {
TailCall,
NotTailCall,
}
impl DispatchMode {
fn is_tail_call(&self) -> bool { matches!(*self, DispatchMode::TailCall) }
}
// On x86_64-linux, a recursive cycle requires 3472 bytes of stack. Limiting
// the depth to 150 therefore should keep the stack use by the recursion to
// 520800 bytes, which would give a generously conservative margin should we
// decide to reduce the thread stack size from its default of 2MB down to 1MB.
const RECURSION_DEPTH_LIMIT: usize = 150;
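// Worked check of the figures quoted above: 3472 bytes per recursive cycle
// times a limit of 150 gives 3472 * 150 = 520,800 bytes of stack, comfortably
// under a 1 MiB (1,048,576 byte) stack and the default 2 MiB.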
#[inline]
fn traverse_nodes<'a, 'scope, E, D>(nodes: &[SendNode<E::ConcreteNode>],
mode: DispatchMode,
recursion_depth: usize,
root: OpaqueNode,
traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
where E: TElement + 'scope,
D: DomTraversal<E>,
{
debug_assert!(!nodes.is_empty());
// This is a tail call from the perspective of the caller. However, we only
// want to actually dispatch the job as a tail call if there's nothing left
// in our local queue. Otherwise we need to return to it to maintain proper
// breadth-first ordering. We also need to take care to avoid stack
// overflow due to excessive tail recursion. The stack overflow isn't
// observable to content -- we're still completely correct, just not
// using tail recursion any more. See bug 1368302.
debug_assert!(recursion_depth <= RECURSION_DEPTH_LIMIT);
let may_dispatch_tail = mode.is_tail_call() &&
recursion_depth != RECURSION_DEPTH_LIMIT &&
!pool.current_thread_has_pending_tasks().unwrap();
// In the common case, our children fit within a single work unit, in which
// case we can pass the SmallVec directly and avoid extra allocation.
if nodes.len() <= WORK_UNIT_MAX {
let work = nodes.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
if may_dispatch_tail {
top_down_dom(&work, recursion_depth + 1, root,
traversal_data, scope, pool, traversal, tls);
} else {
scope.spawn(move |scope| {
let work = work;
top_down_dom(&work, 0, root,
traversal_data, scope, pool, traversal, tls);
});
}
} else {
for chunk in nodes.chunks(WORK_UNIT_MAX) {
let nodes = chunk.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
let traversal_data_copy = traversal_data.clone();
scope.spawn(move |scope| {
let n = nodes;
top_down_dom(&*n, 0, root,
traversal_data_copy, scope, pool, traversal, tls)
});
}
}
}
|
{
let slots = unsafe { tls.unsafe_get() };
let mut aggregate = slots.iter().fold(TraversalStatistics::default(), |acc, t| {
match *t.borrow() {
None => acc,
Some(ref cx) => &cx.borrow().statistics + &acc,
}
});
aggregate.finish(traversal, start_time.unwrap());
if aggregate.is_large_traversal() {
println!("{}", aggregate);
}
}
|
conditional_block
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This traversal is based on Rayon, and therefore its safety is largely
//! verified by the type system.
//!
//! The primary trickiness and fine print for the above relates to the
//! thread safety of the DOM nodes themselves. Accessing a DOM element
//! concurrently on multiple threads is actually mostly "safe", since all
//! the mutable state is protected by an AtomicRefCell, and so we'll
//! generally panic if something goes wrong. Still, we try to enforce our
//! thread invariants at compile time whenever possible. As such, TNode and
//! TElement are not Send, so ordinary style system code cannot accidentally
//! share them with other threads. In the parallel traversal, we explicitly
//! invoke |unsafe { SendNode::new(n) }| to put nodes in containers that may
//! be sent to other threads. This occurs in only a handful of places and is
//! easy to grep for. At the time of this writing, there is no other unsafe
//! code in the parallel traversal.
#![deny(missing_docs)]
use arrayvec::ArrayVec;
use context::{StyleContext, ThreadLocalStyleContext, TraversalStatistics};
use dom::{OpaqueNode, SendNode, TElement, TNode};
use rayon;
use scoped_tls::ScopedTLS;
use smallvec::SmallVec;
use std::borrow::Borrow;
use time;
use traversal::{DomTraversal, PerLevelTraversalData, PreTraverseToken};
/// The maximum number of child nodes that we will process as a single unit.
///
/// Larger values will increase style sharing cache hits and general DOM locality
/// at the expense of decreased opportunities for parallelism. This value has not
/// been measured and could potentially be tuned.
pub const WORK_UNIT_MAX: usize = 16;
/// A set of nodes, sized to the work unit. This gets copied when sent to other
/// threads, so we keep it compact.
type WorkUnit<N> = ArrayVec<[SendNode<N>; WORK_UNIT_MAX]>;
/// Entry point for the parallel traversal.
#[allow(unsafe_code)]
pub fn traverse_dom<E, D>(traversal: &D,
root: E,
token: PreTraverseToken,
pool: &rayon::ThreadPool)
where E: TElement,
D: DomTraversal<E>,
{
let dump_stats = traversal.shared_context().options.dump_style_statistics;
let start_time = if dump_stats { Some(time::precise_time_s()) } else { None };
// Set up the SmallVec. We need to move this, and in most cases this is just
// one node, so keep it small.
let mut nodes = SmallVec::<[SendNode<E::ConcreteNode>; 8]>::new();
debug_assert!(traversal.is_parallel());
// Handle Gecko's eager initial styling. We don't currently support it
// in conjunction with bottom-up traversal. If we did, we'd need to put
// it on the context to make it available to the bottom-up phase.
let depth = if token.traverse_unstyled_children_only() {
debug_assert!(!D::needs_postorder_traversal());
for kid in root.as_node().traversal_children() {
if kid.as_element().map_or(false, |el| el.get_data().is_none()) {
nodes.push(unsafe { SendNode::new(kid) });
}
}
root.depth() + 1
} else {
nodes.push(unsafe { SendNode::new(root.as_node()) });
root.depth()
};
if nodes.is_empty() {
return;
}
let traversal_data = PerLevelTraversalData {
current_dom_depth: depth,
};
let tls = ScopedTLS::<ThreadLocalStyleContext<E>>::new(pool);
let root = root.as_node().opaque();
pool.install(|| {
rayon::scope(|scope| {
let nodes = nodes;
traverse_nodes(&*nodes,
DispatchMode::TailCall,
0,
root,
traversal_data,
scope,
pool,
traversal,
&tls);
});
});
// Dump statistics to stdout if requested.
if dump_stats {
let slots = unsafe { tls.unsafe_get() };
let mut aggregate = slots.iter().fold(TraversalStatistics::default(), |acc, t| {
match *t.borrow() {
None => acc,
Some(ref cx) => &cx.borrow().statistics + &acc,
}
});
aggregate.finish(traversal, start_time.unwrap());
if aggregate.is_large_traversal() {
println!("{}", aggregate);
}
|
/// out of line so we don't allocate stack space for the entire struct
/// in the caller.
#[inline(never)]
fn create_thread_local_context<'scope, E, D>(
traversal: &'scope D,
slot: &mut Option<ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>
{
*slot = Some(ThreadLocalStyleContext::new(traversal.shared_context()));
}
/// A parallel top-down DOM traversal.
///
/// This algorithm traverses the DOM in a breadth-first, top-down manner. The
/// goals are:
/// * Never process a child before its parent (since child style depends on
/// parent style). If this were to happen, the styling algorithm would panic.
/// * Prioritize discovering nodes as quickly as possible to maximize
/// opportunities for parallelism.
/// * Style all the children of a given node (i.e. all sibling nodes) on
/// a single thread (with an upper bound to handle nodes with an
/// abnormally large number of children). This is important because we use
/// a thread-local cache to share styles between siblings.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a,'scope, E, D>(nodes: &'a [SendNode<E::ConcreteNode>],
recursion_depth: usize,
root: OpaqueNode,
mut traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>,
{
debug_assert!(nodes.len() <= WORK_UNIT_MAX);
// Collect all the children of the elements in our work unit. This will
// contain the combined children of up to WORK_UNIT_MAX nodes, which may
// be numerous. As such, we store it in a large SmallVec to minimize heap-
// spilling, and never move it.
let mut discovered_child_nodes = SmallVec::<[SendNode<E::ConcreteNode>; 128]>::new();
{
// Scope the borrow of the TLS so that the borrow is dropped before
// a potential recursive call when we pass TailCall.
let mut tlc = tls.ensure(
|slot: &mut Option<ThreadLocalStyleContext<E>>| create_thread_local_context(traversal, slot));
let mut context = StyleContext {
shared: traversal.shared_context(),
thread_local: &mut *tlc,
};
for n in nodes {
// If the last node we processed produced children, spawn them off
// into a work item. We do this at the beginning of the loop (rather
// than at the end) so that we can traverse the children of the last
// sibling directly on this thread without a spawn call.
//
// This has the important effect of removing the allocation and
// context-switching overhead of the parallel traversal for perfectly
// linear regions of the DOM, i.e.:
//
// <russian><doll><tag><nesting></nesting></tag></doll></russian>
//
// Which are not at all uncommon.
if !discovered_child_nodes.is_empty() {
let mut traversal_data_copy = traversal_data.clone();
traversal_data_copy.current_dom_depth += 1;
traverse_nodes(&*discovered_child_nodes,
DispatchMode::NotTailCall,
recursion_depth,
root,
traversal_data_copy,
scope,
pool,
traversal,
tls);
discovered_child_nodes.clear();
}
let node = **n;
let mut children_to_process = 0isize;
traversal.process_preorder(&traversal_data, &mut context, node, |n| {
children_to_process += 1;
let send_n = unsafe { SendNode::new(n) };
discovered_child_nodes.push(send_n);
});
traversal.handle_postorder_traversal(&mut context, root, node,
children_to_process);
}
}
// Handle the children of the last element in this work unit. If any exist,
// we can process them (or at least one work unit's worth of them) directly
// on this thread by passing TailCall.
if !discovered_child_nodes.is_empty() {
traversal_data.current_dom_depth += 1;
traverse_nodes(&discovered_child_nodes,
DispatchMode::TailCall,
recursion_depth,
root,
traversal_data,
scope,
pool,
traversal,
tls);
}
}
/// Controls whether traverse_nodes may make a recursive call to continue
/// doing work, or whether it should always dispatch work asynchronously.
#[derive(Clone, Copy, PartialEq)]
enum DispatchMode {
TailCall,
NotTailCall,
}
impl DispatchMode {
fn is_tail_call(&self) -> bool { matches!(*self, DispatchMode::TailCall) }
}
// On x86_64-linux, a recursive cycle requires 3472 bytes of stack. Limiting
// the depth to 150 therefore should keep the stack use by the recursion to
// 520800 bytes, which would give a generously conservative margin should we
// decide to reduce the thread stack size from its default of 2MB down to 1MB.
const RECURSION_DEPTH_LIMIT: usize = 150;
#[inline]
fn traverse_nodes<'a,'scope, E, D>(nodes: &[SendNode<E::ConcreteNode>],
mode: DispatchMode,
recursion_depth: usize,
root: OpaqueNode,
traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>,
{
debug_assert!(!nodes.is_empty());
// This is a tail call from the perspective of the caller. However, we only
// want to actually dispatch the job as a tail call if there's nothing left
// in our local queue. Otherwise we need to return to it to maintain proper
// breadth-first ordering. We also need to take care to avoid stack
// overflow due to excessive tail recursion. The stack overflow isn't
// observable to content -- we're still completely correct, just not
// using tail recursion any more. See bug 1368302.
debug_assert!(recursion_depth <= RECURSION_DEPTH_LIMIT);
let may_dispatch_tail = mode.is_tail_call() &&
        recursion_depth != RECURSION_DEPTH_LIMIT &&
!pool.current_thread_has_pending_tasks().unwrap();
// In the common case, our children fit within a single work unit, in which
// case we can pass the SmallVec directly and avoid extra allocation.
if nodes.len() <= WORK_UNIT_MAX {
let work = nodes.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
if may_dispatch_tail {
top_down_dom(&work, recursion_depth + 1, root,
traversal_data, scope, pool, traversal, tls);
} else {
scope.spawn(move |scope| {
let work = work;
top_down_dom(&work, 0, root,
traversal_data, scope, pool, traversal, tls);
});
}
} else {
for chunk in nodes.chunks(WORK_UNIT_MAX) {
let nodes = chunk.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
let traversal_data_copy = traversal_data.clone();
scope.spawn(move |scope| {
let n = nodes;
top_down_dom(&*n, 0, root,
traversal_data_copy, scope, pool, traversal, tls)
});
}
}
}
|
}
}
/// A callback to create our thread local context. This needs to be
|
random_line_split
|
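The parallel.rs row above splits discovered children into fixed-size work units of WORK_UNIT_MAX = 16 nodes before dispatching them to rayon. Below is a minimal, self-contained sketch of just that chunking policy over plain integers; only the constant is taken from the code above, while the function name and sample values are illustrative.

```rust
/// Illustrative sketch of the work-unit chunking used by `traverse_nodes`.
const WORK_UNIT_MAX: usize = 16;

fn split_into_work_units(children: &[u32]) -> Vec<Vec<u32>> {
    // `chunks` yields at most WORK_UNIT_MAX items per unit, matching the
    // `nodes.chunks(WORK_UNIT_MAX)` loop in the entry above.
    children.chunks(WORK_UNIT_MAX).map(|c| c.to_vec()).collect()
}

fn main() {
    let discovered: Vec<u32> = (0..40).collect();
    let units = split_into_work_units(&discovered);
    // 40 children become three units of 16, 16 and 8 nodes.
    assert_eq!(units.iter().map(Vec::len).collect::<Vec<_>>(), vec![16, 16, 8]);
}
```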
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This traversal is based on Rayon, and therefore its safety is largely
//! verified by the type system.
//!
//! The primary trickiness and fine print for the above relates to the
//! thread safety of the DOM nodes themselves. Accessing a DOM element
//! concurrently on multiple threads is actually mostly "safe", since all
//! the mutable state is protected by an AtomicRefCell, and so we'll
//! generally panic if something goes wrong. Still, we try to enforce our
//! thread invariants at compile time whenever possible. As such, TNode and
//! TElement are not Send, so ordinary style system code cannot accidentally
//! share them with other threads. In the parallel traversal, we explicitly
//! invoke |unsafe { SendNode::new(n) }| to put nodes in containers that may
//! be sent to other threads. This occurs in only a handful of places and is
//! easy to grep for. At the time of this writing, there is no other unsafe
//! code in the parallel traversal.
#![deny(missing_docs)]
use arrayvec::ArrayVec;
use context::{StyleContext, ThreadLocalStyleContext, TraversalStatistics};
use dom::{OpaqueNode, SendNode, TElement, TNode};
use rayon;
use scoped_tls::ScopedTLS;
use smallvec::SmallVec;
use std::borrow::Borrow;
use time;
use traversal::{DomTraversal, PerLevelTraversalData, PreTraverseToken};
/// The maximum number of child nodes that we will process as a single unit.
///
/// Larger values will increase style sharing cache hits and general DOM locality
/// at the expense of decreased opportunities for parallelism. This value has not
/// been measured and could potentially be tuned.
pub const WORK_UNIT_MAX: usize = 16;
/// A set of nodes, sized to the work unit. This gets copied when sent to other
/// threads, so we keep it compact.
type WorkUnit<N> = ArrayVec<[SendNode<N>; WORK_UNIT_MAX]>;
/// Entry point for the parallel traversal.
#[allow(unsafe_code)]
pub fn traverse_dom<E, D>(traversal: &D,
root: E,
token: PreTraverseToken,
pool: &rayon::ThreadPool)
where E: TElement,
D: DomTraversal<E>,
{
let dump_stats = traversal.shared_context().options.dump_style_statistics;
let start_time = if dump_stats { Some(time::precise_time_s()) } else { None };
// Set up the SmallVec. We need to move this, and in most cases this is just
// one node, so keep it small.
let mut nodes = SmallVec::<[SendNode<E::ConcreteNode>; 8]>::new();
debug_assert!(traversal.is_parallel());
// Handle Gecko's eager initial styling. We don't currently support it
// in conjunction with bottom-up traversal. If we did, we'd need to put
// it on the context to make it available to the bottom-up phase.
let depth = if token.traverse_unstyled_children_only() {
debug_assert!(!D::needs_postorder_traversal());
for kid in root.as_node().traversal_children() {
if kid.as_element().map_or(false, |el| el.get_data().is_none()) {
nodes.push(unsafe { SendNode::new(kid) });
}
}
root.depth() + 1
} else {
nodes.push(unsafe { SendNode::new(root.as_node()) });
root.depth()
};
if nodes.is_empty() {
return;
}
let traversal_data = PerLevelTraversalData {
current_dom_depth: depth,
};
let tls = ScopedTLS::<ThreadLocalStyleContext<E>>::new(pool);
let root = root.as_node().opaque();
pool.install(|| {
rayon::scope(|scope| {
let nodes = nodes;
traverse_nodes(&*nodes,
DispatchMode::TailCall,
0,
root,
traversal_data,
scope,
pool,
traversal,
&tls);
});
});
// Dump statistics to stdout if requested.
if dump_stats {
let slots = unsafe { tls.unsafe_get() };
let mut aggregate = slots.iter().fold(TraversalStatistics::default(), |acc, t| {
match *t.borrow() {
None => acc,
Some(ref cx) => &cx.borrow().statistics + &acc,
}
});
aggregate.finish(traversal, start_time.unwrap());
if aggregate.is_large_traversal() {
println!("{}", aggregate);
}
}
}
/// A callback to create our thread local context. This needs to be
/// out of line so we don't allocate stack space for the entire struct
/// in the caller.
#[inline(never)]
fn create_thread_local_context<'scope, E, D>(
traversal: &'scope D,
slot: &mut Option<ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>
{
*slot = Some(ThreadLocalStyleContext::new(traversal.shared_context()));
}
/// A parallel top-down DOM traversal.
///
/// This algorithm traverses the DOM in a breadth-first, top-down manner. The
/// goals are:
/// * Never process a child before its parent (since child style depends on
/// parent style). If this were to happen, the styling algorithm would panic.
/// * Prioritize discovering nodes as quickly as possible to maximize
/// opportunities for parallelism.
/// * Style all the children of a given node (i.e. all sibling nodes) on
/// a single thread (with an upper bound to handle nodes with an
/// abnormally large number of children). This is important because we use
/// a thread-local cache to share styles between siblings.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a,'scope, E, D>(nodes: &'a [SendNode<E::ConcreteNode>],
recursion_depth: usize,
root: OpaqueNode,
mut traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>,
{
debug_assert!(nodes.len() <= WORK_UNIT_MAX);
// Collect all the children of the elements in our work unit. This will
// contain the combined children of up to WORK_UNIT_MAX nodes, which may
// be numerous. As such, we store it in a large SmallVec to minimize heap-
// spilling, and never move it.
let mut discovered_child_nodes = SmallVec::<[SendNode<E::ConcreteNode>; 128]>::new();
{
// Scope the borrow of the TLS so that the borrow is dropped before
// a potential recursive call when we pass TailCall.
let mut tlc = tls.ensure(
|slot: &mut Option<ThreadLocalStyleContext<E>>| create_thread_local_context(traversal, slot));
let mut context = StyleContext {
shared: traversal.shared_context(),
thread_local: &mut *tlc,
};
for n in nodes {
// If the last node we processed produced children, spawn them off
// into a work item. We do this at the beginning of the loop (rather
// than at the end) so that we can traverse the children of the last
// sibling directly on this thread without a spawn call.
//
// This has the important effect of removing the allocation and
// context-switching overhead of the parallel traversal for perfectly
// linear regions of the DOM, i.e.:
//
// <russian><doll><tag><nesting></nesting></tag></doll></russian>
//
// Which are not at all uncommon.
if !discovered_child_nodes.is_empty() {
let mut traversal_data_copy = traversal_data.clone();
traversal_data_copy.current_dom_depth += 1;
traverse_nodes(&*discovered_child_nodes,
DispatchMode::NotTailCall,
recursion_depth,
root,
traversal_data_copy,
scope,
pool,
traversal,
tls);
discovered_child_nodes.clear();
}
let node = **n;
let mut children_to_process = 0isize;
traversal.process_preorder(&traversal_data, &mut context, node, |n| {
children_to_process += 1;
let send_n = unsafe { SendNode::new(n) };
discovered_child_nodes.push(send_n);
});
traversal.handle_postorder_traversal(&mut context, root, node,
children_to_process);
}
}
// Handle the children of the last element in this work unit. If any exist,
// we can process them (or at least one work unit's worth of them) directly
// on this thread by passing TailCall.
if !discovered_child_nodes.is_empty() {
traversal_data.current_dom_depth += 1;
traverse_nodes(&discovered_child_nodes,
DispatchMode::TailCall,
recursion_depth,
root,
traversal_data,
scope,
pool,
traversal,
tls);
}
}
/// Controls whether traverse_nodes may make a recursive call to continue
/// doing work, or whether it should always dispatch work asynchronously.
#[derive(Clone, Copy, PartialEq)]
enum
|
{
TailCall,
NotTailCall,
}
impl DispatchMode {
fn is_tail_call(&self) -> bool { matches!(*self, DispatchMode::TailCall) }
}
// On x86_64-linux, a recursive cycle requires 3472 bytes of stack. Limiting
// the depth to 150 therefore should keep the stack use by the recursion to
// 520800 bytes, which would give a generously conservative margin should we
// decide to reduce the thread stack size from its default of 2MB down to 1MB.
const RECURSION_DEPTH_LIMIT: usize = 150;
#[inline]
fn traverse_nodes<'a,'scope, E, D>(nodes: &[SendNode<E::ConcreteNode>],
mode: DispatchMode,
recursion_depth: usize,
root: OpaqueNode,
traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>,
{
debug_assert!(!nodes.is_empty());
// This is a tail call from the perspective of the caller. However, we only
// want to actually dispatch the job as a tail call if there's nothing left
// in our local queue. Otherwise we need to return to it to maintain proper
// breadth-first ordering. We also need to take care to avoid stack
// overflow due to excessive tail recursion. The stack overflow isn't
// observable to content -- we're still completely correct, just not
// using tail recursion any more. See bug 1368302.
debug_assert!(recursion_depth <= RECURSION_DEPTH_LIMIT);
let may_dispatch_tail = mode.is_tail_call() &&
        recursion_depth != RECURSION_DEPTH_LIMIT &&
!pool.current_thread_has_pending_tasks().unwrap();
// In the common case, our children fit within a single work unit, in which
// case we can pass the SmallVec directly and avoid extra allocation.
if nodes.len() <= WORK_UNIT_MAX {
let work = nodes.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
if may_dispatch_tail {
top_down_dom(&work, recursion_depth + 1, root,
traversal_data, scope, pool, traversal, tls);
} else {
scope.spawn(move |scope| {
let work = work;
top_down_dom(&work, 0, root,
traversal_data, scope, pool, traversal, tls);
});
}
} else {
for chunk in nodes.chunks(WORK_UNIT_MAX) {
let nodes = chunk.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
let traversal_data_copy = traversal_data.clone();
scope.spawn(move |scope| {
let n = nodes;
top_down_dom(&*n, 0, root,
traversal_data_copy, scope, pool, traversal, tls)
});
}
}
}
|
DispatchMode
|
identifier_name
|
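traverse_nodes in the rows above only recurses directly when three conditions hold: the caller passed TailCall, the recursion depth is below RECURSION_DEPTH_LIMIT, and the local rayon queue has no pending tasks. The following is a pure-function restatement of that predicate, a sketch rather than Servo code, with the rayon queue check reduced to a plain bool for illustration.

```rust
const RECURSION_DEPTH_LIMIT: usize = 150;

#[derive(Clone, Copy, PartialEq)]
enum DispatchMode { TailCall, NotTailCall }

/// Mirrors the `may_dispatch_tail` condition from `traverse_nodes`;
/// `has_pending_local_work` stands in for `pool.current_thread_has_pending_tasks()`.
fn may_dispatch_tail(mode: DispatchMode, depth: usize, has_pending_local_work: bool) -> bool {
    mode == DispatchMode::TailCall
        && depth != RECURSION_DEPTH_LIMIT
        && !has_pending_local_work
}

fn main() {
    assert!(may_dispatch_tail(DispatchMode::TailCall, 0, false));
    // At the depth limit we fall back to spawning, capping stack growth.
    assert!(!may_dispatch_tail(DispatchMode::TailCall, RECURSION_DEPTH_LIMIT, false));
    assert!(!may_dispatch_tail(DispatchMode::NotTailCall, 10, false));
}
```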
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This traversal is based on Rayon, and therefore its safety is largely
//! verified by the type system.
//!
//! The primary trickiness and fine print for the above relates to the
//! thread safety of the DOM nodes themselves. Accessing a DOM element
//! concurrently on multiple threads is actually mostly "safe", since all
//! the mutable state is protected by an AtomicRefCell, and so we'll
//! generally panic if something goes wrong. Still, we try to enforce our
//! thread invariants at compile time whenever possible. As such, TNode and
//! TElement are not Send, so ordinary style system code cannot accidentally
//! share them with other threads. In the parallel traversal, we explicitly
//! invoke |unsafe { SendNode::new(n) }| to put nodes in containers that may
//! be sent to other threads. This occurs in only a handful of places and is
//! easy to grep for. At the time of this writing, there is no other unsafe
//! code in the parallel traversal.
#![deny(missing_docs)]
use arrayvec::ArrayVec;
use context::{StyleContext, ThreadLocalStyleContext, TraversalStatistics};
use dom::{OpaqueNode, SendNode, TElement, TNode};
use rayon;
use scoped_tls::ScopedTLS;
use smallvec::SmallVec;
use std::borrow::Borrow;
use time;
use traversal::{DomTraversal, PerLevelTraversalData, PreTraverseToken};
/// The maximum number of child nodes that we will process as a single unit.
///
/// Larger values will increase style sharing cache hits and general DOM locality
/// at the expense of decreased opportunities for parallelism. This value has not
/// been measured and could potentially be tuned.
pub const WORK_UNIT_MAX: usize = 16;
/// A set of nodes, sized to the work unit. This gets copied when sent to other
/// threads, so we keep it compact.
type WorkUnit<N> = ArrayVec<[SendNode<N>; WORK_UNIT_MAX]>;
/// Entry point for the parallel traversal.
#[allow(unsafe_code)]
pub fn traverse_dom<E, D>(traversal: &D,
root: E,
token: PreTraverseToken,
pool: &rayon::ThreadPool)
where E: TElement,
D: DomTraversal<E>,
{
let dump_stats = traversal.shared_context().options.dump_style_statistics;
let start_time = if dump_stats { Some(time::precise_time_s()) } else { None };
// Set up the SmallVec. We need to move this, and in most cases this is just
// one node, so keep it small.
let mut nodes = SmallVec::<[SendNode<E::ConcreteNode>; 8]>::new();
debug_assert!(traversal.is_parallel());
// Handle Gecko's eager initial styling. We don't currently support it
// in conjunction with bottom-up traversal. If we did, we'd need to put
// it on the context to make it available to the bottom-up phase.
let depth = if token.traverse_unstyled_children_only() {
debug_assert!(!D::needs_postorder_traversal());
for kid in root.as_node().traversal_children() {
if kid.as_element().map_or(false, |el| el.get_data().is_none()) {
nodes.push(unsafe { SendNode::new(kid) });
}
}
root.depth() + 1
} else {
nodes.push(unsafe { SendNode::new(root.as_node()) });
root.depth()
};
if nodes.is_empty() {
return;
}
let traversal_data = PerLevelTraversalData {
current_dom_depth: depth,
};
let tls = ScopedTLS::<ThreadLocalStyleContext<E>>::new(pool);
let root = root.as_node().opaque();
pool.install(|| {
rayon::scope(|scope| {
let nodes = nodes;
traverse_nodes(&*nodes,
DispatchMode::TailCall,
0,
root,
traversal_data,
scope,
pool,
traversal,
&tls);
});
});
// Dump statistics to stdout if requested.
if dump_stats {
let slots = unsafe { tls.unsafe_get() };
let mut aggregate = slots.iter().fold(TraversalStatistics::default(), |acc, t| {
match *t.borrow() {
None => acc,
Some(ref cx) => &cx.borrow().statistics + &acc,
}
});
aggregate.finish(traversal, start_time.unwrap());
if aggregate.is_large_traversal() {
println!("{}", aggregate);
}
}
}
/// A callback to create our thread local context. This needs to be
/// out of line so we don't allocate stack space for the entire struct
/// in the caller.
#[inline(never)]
fn create_thread_local_context<'scope, E, D>(
traversal: &'scope D,
slot: &mut Option<ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>
{
*slot = Some(ThreadLocalStyleContext::new(traversal.shared_context()));
}
/// A parallel top-down DOM traversal.
///
/// This algorithm traverses the DOM in a breadth-first, top-down manner. The
/// goals are:
/// * Never process a child before its parent (since child style depends on
/// parent style). If this were to happen, the styling algorithm would panic.
/// * Prioritize discovering nodes as quickly as possible to maximize
/// opportunities for parallelism.
/// * Style all the children of a given node (i.e. all sibling nodes) on
/// a single thread (with an upper bound to handle nodes with an
/// abnormally large number of children). This is important because we use
/// a thread-local cache to share styles between siblings.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a,'scope, E, D>(nodes: &'a [SendNode<E::ConcreteNode>],
recursion_depth: usize,
root: OpaqueNode,
mut traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>,
|
// into a work item. We do this at the beginning of the loop (rather
// than at the end) so that we can traverse the children of the last
// sibling directly on this thread without a spawn call.
//
// This has the important effect of removing the allocation and
// context-switching overhead of the parallel traversal for perfectly
// linear regions of the DOM, i.e.:
//
// <russian><doll><tag><nesting></nesting></tag></doll></russian>
//
// Which are not at all uncommon.
if !discovered_child_nodes.is_empty() {
let mut traversal_data_copy = traversal_data.clone();
traversal_data_copy.current_dom_depth += 1;
traverse_nodes(&*discovered_child_nodes,
DispatchMode::NotTailCall,
recursion_depth,
root,
traversal_data_copy,
scope,
pool,
traversal,
tls);
discovered_child_nodes.clear();
}
let node = **n;
let mut children_to_process = 0isize;
traversal.process_preorder(&traversal_data, &mut context, node, |n| {
children_to_process += 1;
let send_n = unsafe { SendNode::new(n) };
discovered_child_nodes.push(send_n);
});
traversal.handle_postorder_traversal(&mut context, root, node,
children_to_process);
}
}
// Handle the children of the last element in this work unit. If any exist,
// we can process them (or at least one work unit's worth of them) directly
// on this thread by passing TailCall.
if !discovered_child_nodes.is_empty() {
traversal_data.current_dom_depth += 1;
traverse_nodes(&discovered_child_nodes,
DispatchMode::TailCall,
recursion_depth,
root,
traversal_data,
scope,
pool,
traversal,
tls);
}
}
/// Controls whether traverse_nodes may make a recursive call to continue
/// doing work, or whether it should always dispatch work asynchronously.
#[derive(Clone, Copy, PartialEq)]
enum DispatchMode {
TailCall,
NotTailCall,
}
impl DispatchMode {
fn is_tail_call(&self) -> bool { matches!(*self, DispatchMode::TailCall) }
}
// On x86_64-linux, a recursive cycle requires 3472 bytes of stack. Limiting
// the depth to 150 therefore should keep the stack use by the recursion to
// 520800 bytes, which would give a generously conservative margin should we
// decide to reduce the thread stack size from its default of 2MB down to 1MB.
const RECURSION_DEPTH_LIMIT: usize = 150;
#[inline]
fn traverse_nodes<'a,'scope, E, D>(nodes: &[SendNode<E::ConcreteNode>],
mode: DispatchMode,
recursion_depth: usize,
root: OpaqueNode,
traversal_data: PerLevelTraversalData,
scope: &'a rayon::Scope<'scope>,
pool: &'scope rayon::ThreadPool,
traversal: &'scope D,
tls: &'scope ScopedTLS<'scope, ThreadLocalStyleContext<E>>)
    where E: TElement + 'scope,
D: DomTraversal<E>,
{
debug_assert!(!nodes.is_empty());
// This is a tail call from the perspective of the caller. However, we only
// want to actually dispatch the job as a tail call if there's nothing left
// in our local queue. Otherwise we need to return to it to maintain proper
// breadth-first ordering. We also need to take care to avoid stack
// overflow due to excessive tail recursion. The stack overflow isn't
// observable to content -- we're still completely correct, just not
// using tail recursion any more. See bug 1368302.
debug_assert!(recursion_depth <= RECURSION_DEPTH_LIMIT);
let may_dispatch_tail = mode.is_tail_call() &&
        recursion_depth != RECURSION_DEPTH_LIMIT &&
!pool.current_thread_has_pending_tasks().unwrap();
// In the common case, our children fit within a single work unit, in which
// case we can pass the SmallVec directly and avoid extra allocation.
if nodes.len() <= WORK_UNIT_MAX {
let work = nodes.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
if may_dispatch_tail {
top_down_dom(&work, recursion_depth + 1, root,
traversal_data, scope, pool, traversal, tls);
} else {
scope.spawn(move |scope| {
let work = work;
top_down_dom(&work, 0, root,
traversal_data, scope, pool, traversal, tls);
});
}
} else {
for chunk in nodes.chunks(WORK_UNIT_MAX) {
let nodes = chunk.iter().cloned().collect::<WorkUnit<E::ConcreteNode>>();
let traversal_data_copy = traversal_data.clone();
scope.spawn(move |scope| {
let n = nodes;
top_down_dom(&*n, 0, root,
traversal_data_copy, scope, pool, traversal, tls)
});
}
}
}
|
{
debug_assert!(nodes.len() <= WORK_UNIT_MAX);
// Collect all the children of the elements in our work unit. This will
// contain the combined children of up to WORK_UNIT_MAX nodes, which may
// be numerous. As such, we store it in a large SmallVec to minimize heap-
// spilling, and never move it.
let mut discovered_child_nodes = SmallVec::<[SendNode<E::ConcreteNode>; 128]>::new();
{
// Scope the borrow of the TLS so that the borrow is dropped before
// a potential recursive call when we pass TailCall.
let mut tlc = tls.ensure(
|slot: &mut Option<ThreadLocalStyleContext<E>>| create_thread_local_context(traversal, slot));
let mut context = StyleContext {
shared: traversal.shared_context(),
thread_local: &mut *tlc,
};
for n in nodes {
// If the last node we processed produced children, spawn them off
|
identifier_body
|
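The RECURSION_DEPTH_LIMIT comment in the parallel.rs rows above bounds stack use as 3472 bytes per recursive cycle times a depth limit of 150. A trivial check of that arithmetic; the byte figure is quoted from the comment, nothing here is measured.

```rust
fn main() {
    // Values quoted from the RECURSION_DEPTH_LIMIT comment above.
    let bytes_per_cycle: usize = 3472;
    let depth_limit: usize = 150;
    let worst_case = bytes_per_cycle * depth_limit;
    assert_eq!(worst_case, 520_800);
    // Comfortably below a 1 MiB thread stack (1_048_576 bytes).
    assert!(worst_case < 1 << 20);
    println!("worst-case recursion stack: {} bytes", worst_case);
}
```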
window.rs
|
extern crate ncurses;
use self::ncurses::*;
// Windows are actually implemented as ncursesw subwindows
// of stdscr
pub struct Window {
start_y: i32,
start_x: i32,
size_y: i32,
|
size_x: i32,
handle: WINDOW, // ncurses subwin handle
}
impl Window {
pub fn new(start_y: i32, start_x: i32, size_y: i32, size_x: i32) -> Window {
unsafe {
Window {
start_y: start_y,
start_x: start_x,
size_y: size_y,
size_x: size_x,
handle: stdscr, // hack init the subwin to the whole window
}
}
}
pub fn init(&mut self) {
unsafe {
self.handle =
ncurses::subwin(stdscr, self.size_y, self.size_x, self.start_y, self.start_x);
}
ncurses::box_(self.handle, 0, 0);
}
pub fn write(&self, text: String) {
ncurses::waddstr(self.handle, text.as_ref());
}
pub fn mvaddch(&self, y: i32, x: i32, ch: u64) {
ncurses::mvwaddch(self.handle, y, x, ch);
}
pub fn refresh(&self) {
ncurses::wrefresh(self.handle);
}
pub fn delwin(&self) {
ncurses::delwin(self.handle);
}
}
|
random_line_split
|
|
window.rs
|
extern crate ncurses;
use self::ncurses::*;
// Windows are actually implemented as ncursesw subwindows
// of stdscr
pub struct
|
{
start_y: i32,
start_x: i32,
size_y: i32,
size_x: i32,
handle: WINDOW, // ncurses subwin handle
}
impl Window {
pub fn new(start_y: i32, start_x: i32, size_y: i32, size_x: i32) -> Window {
unsafe {
Window {
start_y: start_y,
start_x: start_x,
size_y: size_y,
size_x: size_x,
handle: stdscr, // hack init the subwin to the whole window
}
}
}
pub fn init(&mut self) {
unsafe {
self.handle =
ncurses::subwin(stdscr, self.size_y, self.size_x, self.start_y, self.start_x);
}
ncurses::box_(self.handle, 0, 0);
}
pub fn write(&self, text: String) {
ncurses::waddstr(self.handle, text.as_ref());
}
pub fn mvaddch(&self, y: i32, x: i32, ch: u64) {
ncurses::mvwaddch(self.handle, y, x, ch);
}
pub fn refresh(&self) {
ncurses::wrefresh(self.handle);
}
pub fn delwin(&self) {
ncurses::delwin(self.handle);
}
}
|
Window
|
identifier_name
|
window.rs
|
extern crate ncurses;
use self::ncurses::*;
// Windows are actually implemented as ncursesw subwindows
// of stdscr
pub struct Window {
start_y: i32,
start_x: i32,
size_y: i32,
size_x: i32,
handle: WINDOW, // ncurses subwin handle
}
impl Window {
pub fn new(start_y: i32, start_x: i32, size_y: i32, size_x: i32) -> Window {
unsafe {
Window {
start_y: start_y,
start_x: start_x,
size_y: size_y,
size_x: size_x,
handle: stdscr, // hack init the subwin to the whole window
}
}
}
pub fn init(&mut self)
|
pub fn write(&self, text: String) {
ncurses::waddstr(self.handle, text.as_ref());
}
pub fn mvaddch(&self, y: i32, x: i32, ch: u64) {
ncurses::mvwaddch(self.handle, y, x, ch);
}
pub fn refresh(&self) {
ncurses::wrefresh(self.handle);
}
pub fn delwin(&self) {
ncurses::delwin(self.handle);
}
}
|
{
unsafe {
self.handle =
ncurses::subwin(stdscr, self.size_y, self.size_x, self.start_y, self.start_x);
}
ncurses::box_(self.handle, 0, 0);
}
|
identifier_body
|
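A hedged usage sketch for the Window wrapper defined in the window.rs rows above. It assumes that type is available as a sibling `window` module and that the same pre-0.9 `ncurses` crate the wrapper is written against (where `stdscr` is a value, not a function) is in use; the coordinates and text are arbitrary.

```rust
extern crate ncurses;

mod window;            // assumes window.rs from the rows above sits next to main.rs
use window::Window;

fn main() {
    ncurses::initscr();                      // set up stdscr before creating subwindows
    let mut win = Window::new(2, 4, 10, 30); // start_y, start_x, size_y, size_x (arbitrary)
    win.init();                              // allocate the subwin and draw its border
    win.write(String::from("hello"));
    win.refresh();
    ncurses::getch();                        // wait for a key so the window stays visible
    win.delwin();
    ncurses::endwin();
}
```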
cell.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A shareable mutable container for the DOM.
use dom::bindings::trace::JSTraceable;
use js::jsapi::{JSTracer};
use util::task_state;
use util::task_state::SCRIPT;
use std::cell::{BorrowState, RefCell, Ref, RefMut};
/// A mutable field in the DOM.
///
/// This extends the API of `core::cell::RefCell` to allow unsafe access in
/// certain situations, with dynamic checking in debug builds.
#[derive(Clone, HeapSizeOf)]
pub struct DOMRefCell<T> {
value: RefCell<T>,
}
// Functionality specific to Servo's `DOMRefCell` type
// ===================================================
impl<T> DOMRefCell<T> {
/// Return a reference to the contents.
///
/// For use in the layout task only.
#[allow(unsafe_code)]
pub unsafe fn borrow_for_layout<'a>(&'a self) -> &'a T {
debug_assert!(task_state::get().is_layout());
&*self.value.as_unsafe_cell().get()
}
/// Borrow the contents for the purpose of GC tracing.
///
/// This succeeds even if the object is mutably borrowed,
/// so you have to be careful in trace code!
#[allow(unsafe_code)]
pub unsafe fn borrow_for_gc_trace<'a>(&'a self) -> &'a T {
// FIXME: IN_GC isn't reliable enough - doesn't catch minor GCs
// https://github.com/servo/servo/issues/6389
//debug_assert!(task_state::get().contains(SCRIPT | IN_GC));
&*self.value.as_unsafe_cell().get()
}
/// Borrow the contents for the purpose of script deallocation.
///
#[allow(unsafe_code)]
pub unsafe fn borrow_for_script_deallocation<'a>(&'a self) -> &'a mut T {
debug_assert!(task_state::get().contains(SCRIPT));
&mut *self.value.as_unsafe_cell().get()
}
/// Is the cell mutably borrowed?
///
/// For safety checks in debug builds only.
pub fn is_mutably_borrowed(&self) -> bool {
self.value.borrow_state() == BorrowState::Writing
}
/// Attempts to immutably borrow the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// Returns `None` if the value is currently mutably borrowed.
///
/// # Panics
///
/// Panics if this is called off the script thread.
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
debug_assert!(task_state::get().is_script());
match self.value.borrow_state() {
BorrowState::Writing => None,
_ => Some(self.value.borrow()),
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// Returns `None` if the value is currently borrowed.
///
/// # Panics
///
/// Panics if this is called off the script thread.
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
debug_assert!(task_state::get().is_script());
match self.value.borrow_state() {
BorrowState::Unused => Some(self.value.borrow_mut()),
_ => None,
}
}
}
impl<T: JSTraceable> JSTraceable for DOMRefCell<T> {
fn
|
(&self, trc: *mut JSTracer) {
unsafe {
(*self).borrow_for_gc_trace().trace(trc)
}
}
}
// Functionality duplicated with `core::cell::RefCell`
// ===================================================
impl<T> DOMRefCell<T> {
/// Create a new `DOMRefCell` containing `value`.
pub fn new(value: T) -> DOMRefCell<T> {
DOMRefCell {
value: RefCell::new(value),
}
}
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// # Panics
///
/// Panics if this is called off the script thread.
///
/// Panics if the value is currently mutably borrowed.
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
self.try_borrow().expect("DOMRefCell<T> already mutably borrowed")
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// # Panics
///
/// Panics if this is called off the script thread.
///
/// Panics if the value is currently borrowed.
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {
self.try_borrow_mut().expect("DOMRefCell<T> already borrowed")
}
}
|
trace
|
identifier_name
|
cell.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A shareable mutable container for the DOM.
use dom::bindings::trace::JSTraceable;
use js::jsapi::{JSTracer};
use util::task_state;
use util::task_state::SCRIPT;
use std::cell::{BorrowState, RefCell, Ref, RefMut};
/// A mutable field in the DOM.
///
/// This extends the API of `core::cell::RefCell` to allow unsafe access in
/// certain situations, with dynamic checking in debug builds.
#[derive(Clone, HeapSizeOf)]
pub struct DOMRefCell<T> {
value: RefCell<T>,
}
// Functionality specific to Servo's `DOMRefCell` type
// ===================================================
impl<T> DOMRefCell<T> {
/// Return a reference to the contents.
///
/// For use in the layout task only.
#[allow(unsafe_code)]
pub unsafe fn borrow_for_layout<'a>(&'a self) -> &'a T {
debug_assert!(task_state::get().is_layout());
&*self.value.as_unsafe_cell().get()
}
/// Borrow the contents for the purpose of GC tracing.
///
/// This succeeds even if the object is mutably borrowed,
/// so you have to be careful in trace code!
#[allow(unsafe_code)]
pub unsafe fn borrow_for_gc_trace<'a>(&'a self) -> &'a T {
// FIXME: IN_GC isn't reliable enough - doesn't catch minor GCs
// https://github.com/servo/servo/issues/6389
//debug_assert!(task_state::get().contains(SCRIPT | IN_GC));
&*self.value.as_unsafe_cell().get()
}
/// Borrow the contents for the purpose of script deallocation.
///
#[allow(unsafe_code)]
pub unsafe fn borrow_for_script_deallocation<'a>(&'a self) -> &'a mut T {
debug_assert!(task_state::get().contains(SCRIPT));
&mut *self.value.as_unsafe_cell().get()
}
/// Is the cell mutably borrowed?
///
/// For safety checks in debug builds only.
pub fn is_mutably_borrowed(&self) -> bool {
self.value.borrow_state() == BorrowState::Writing
}
/// Attempts to immutably borrow the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// Returns `None` if the value is currently mutably borrowed.
///
/// # Panics
///
/// Panics if this is called off the script thread.
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
debug_assert!(task_state::get().is_script());
match self.value.borrow_state() {
|
_ => Some(self.value.borrow()),
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// Returns `None` if the value is currently borrowed.
///
/// # Panics
///
/// Panics if this is called off the script thread.
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
debug_assert!(task_state::get().is_script());
match self.value.borrow_state() {
BorrowState::Unused => Some(self.value.borrow_mut()),
_ => None,
}
}
}
impl<T: JSTraceable> JSTraceable for DOMRefCell<T> {
fn trace(&self, trc: *mut JSTracer) {
unsafe {
(*self).borrow_for_gc_trace().trace(trc)
}
}
}
// Functionality duplicated with `core::cell::RefCell`
// ===================================================
impl<T> DOMRefCell<T> {
/// Create a new `DOMRefCell` containing `value`.
pub fn new(value: T) -> DOMRefCell<T> {
DOMRefCell {
value: RefCell::new(value),
}
}
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// # Panics
///
/// Panics if this is called off the script thread.
///
/// Panics if the value is currently mutably borrowed.
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
self.try_borrow().expect("DOMRefCell<T> already mutably borrowed")
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// # Panics
///
/// Panics if this is called off the script thread.
///
/// Panics if the value is currently borrowed.
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {
self.try_borrow_mut().expect("DOMRefCell<T> already borrowed")
}
}
|
BorrowState::Writing => None,
|
random_line_split
|
cell.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A shareable mutable container for the DOM.
use dom::bindings::trace::JSTraceable;
use js::jsapi::{JSTracer};
use util::task_state;
use util::task_state::SCRIPT;
use std::cell::{BorrowState, RefCell, Ref, RefMut};
/// A mutable field in the DOM.
///
/// This extends the API of `core::cell::RefCell` to allow unsafe access in
/// certain situations, with dynamic checking in debug builds.
#[derive(Clone, HeapSizeOf)]
pub struct DOMRefCell<T> {
value: RefCell<T>,
}
// Functionality specific to Servo's `DOMRefCell` type
// ===================================================
impl<T> DOMRefCell<T> {
/// Return a reference to the contents.
///
/// For use in the layout task only.
#[allow(unsafe_code)]
pub unsafe fn borrow_for_layout<'a>(&'a self) -> &'a T {
debug_assert!(task_state::get().is_layout());
&*self.value.as_unsafe_cell().get()
}
/// Borrow the contents for the purpose of GC tracing.
///
/// This succeeds even if the object is mutably borrowed,
/// so you have to be careful in trace code!
#[allow(unsafe_code)]
pub unsafe fn borrow_for_gc_trace<'a>(&'a self) -> &'a T {
// FIXME: IN_GC isn't reliable enough - doesn't catch minor GCs
// https://github.com/servo/servo/issues/6389
//debug_assert!(task_state::get().contains(SCRIPT | IN_GC));
&*self.value.as_unsafe_cell().get()
}
/// Borrow the contents for the purpose of script deallocation.
///
#[allow(unsafe_code)]
pub unsafe fn borrow_for_script_deallocation<'a>(&'a self) -> &'a mut T {
debug_assert!(task_state::get().contains(SCRIPT));
&mut *self.value.as_unsafe_cell().get()
}
/// Is the cell mutably borrowed?
///
/// For safety checks in debug builds only.
pub fn is_mutably_borrowed(&self) -> bool {
self.value.borrow_state() == BorrowState::Writing
}
/// Attempts to immutably borrow the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// Returns `None` if the value is currently mutably borrowed.
///
/// # Panics
///
/// Panics if this is called off the script thread.
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
debug_assert!(task_state::get().is_script());
match self.value.borrow_state() {
BorrowState::Writing => None,
_ => Some(self.value.borrow()),
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// Returns `None` if the value is currently borrowed.
///
/// # Panics
///
/// Panics if this is called off the script thread.
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
debug_assert!(task_state::get().is_script());
match self.value.borrow_state() {
BorrowState::Unused => Some(self.value.borrow_mut()),
_ => None,
}
}
}
impl<T: JSTraceable> JSTraceable for DOMRefCell<T> {
fn trace(&self, trc: *mut JSTracer) {
unsafe {
(*self).borrow_for_gc_trace().trace(trc)
}
}
}
// Functionality duplicated with `core::cell::RefCell`
// ===================================================
impl<T> DOMRefCell<T> {
/// Create a new `DOMRefCell` containing `value`.
pub fn new(value: T) -> DOMRefCell<T> {
DOMRefCell {
value: RefCell::new(value),
}
}
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// # Panics
///
/// Panics if this is called off the script thread.
///
/// Panics if the value is currently mutably borrowed.
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
self.try_borrow().expect("DOMRefCell<T> already mutably borrowed")
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// # Panics
///
/// Panics if this is called off the script thread.
///
/// Panics if the value is currently borrowed.
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T>
|
}
|
{
self.try_borrow_mut().expect("DOMRefCell<T> already borrowed")
}
|
identifier_body
|
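The DOMRefCell rows above layer script-thread assertions over RefCell's borrow rules; `try_borrow` returns None while a writer is active and `try_borrow_mut` returns None while any borrow is active. A minimal sketch of just that borrow-state behaviour using today's `std::cell::RefCell` (whose `try_borrow`/`try_borrow_mut` replace the long-removed `borrow_state` API used above); DOMRefCell itself is not reproduced here.

```rust
use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(5_i32);

    // While an immutable borrow is live, a mutable borrow must fail,
    // mirroring DOMRefCell::try_borrow_mut returning None.
    let shared = cell.borrow();
    assert!(cell.try_borrow_mut().is_err());
    assert!(cell.try_borrow().is_ok()); // multiple readers are fine
    drop(shared);

    // With no outstanding borrows, a mutable borrow succeeds.
    *cell.borrow_mut() += 1;
    assert_eq!(*cell.borrow(), 6);
}
```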
macro_crate_test.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, macro_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
use syntax::ast::{Name, TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::token;
#[macro_export]
macro_rules! exported_macro (() => (2))
macro_rules! unexported_macro (() => (3))
#[macro_registrar]
pub fn
|
(register: |Name, SyntaxExtension|) {
register(token::intern("make_a_1"),
NormalTT(~BasicMacroExpander {
expander: expand_make_a_1,
span: None,
},
None));
register(token::intern("into_foo"), ItemModifier(expand_into_foo));
}
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> ~MacResult {
if !tts.is_empty() {
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
MacExpr::new(quote_expr!(cx, 1i))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: @MetaItem, it: @Item)
-> @Item {
@Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo() {}
|
macro_registrar
|
identifier_name
|
macro_crate_test.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, macro_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
use syntax::ast::{Name, TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::token;
#[macro_export]
macro_rules! exported_macro (() => (2))
macro_rules! unexported_macro (() => (3))
#[macro_registrar]
pub fn macro_registrar(register: |Name, SyntaxExtension|) {
register(token::intern("make_a_1"),
NormalTT(~BasicMacroExpander {
expander: expand_make_a_1,
span: None,
},
None));
register(token::intern("into_foo"), ItemModifier(expand_into_foo));
}
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> ~MacResult {
if !tts.is_empty()
|
MacExpr::new(quote_expr!(cx, 1i))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: @MetaItem, it: @Item)
-> @Item {
@Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo() {}
|
{
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
|
conditional_block
|
macro_crate_test.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, macro_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
|
use syntax::parse::token;
#[macro_export]
macro_rules! exported_macro (() => (2))
macro_rules! unexported_macro (() => (3))
#[macro_registrar]
pub fn macro_registrar(register: |Name, SyntaxExtension|) {
register(token::intern("make_a_1"),
NormalTT(~BasicMacroExpander {
expander: expand_make_a_1,
span: None,
},
None));
register(token::intern("into_foo"), ItemModifier(expand_into_foo));
}
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> ~MacResult {
if !tts.is_empty() {
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
MacExpr::new(quote_expr!(cx, 1i))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: @MetaItem, it: @Item)
-> @Item {
@Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo() {}
|
use syntax::ast::{Name, TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
|
random_line_split
|
macro_crate_test.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, macro_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
use syntax::ast::{Name, TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::token;
#[macro_export]
macro_rules! exported_macro (() => (2))
macro_rules! unexported_macro (() => (3))
#[macro_registrar]
pub fn macro_registrar(register: |Name, SyntaxExtension|) {
register(token::intern("make_a_1"),
NormalTT(~BasicMacroExpander {
expander: expand_make_a_1,
span: None,
},
None));
register(token::intern("into_foo"), ItemModifier(expand_into_foo));
}
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> ~MacResult {
if !tts.is_empty() {
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
MacExpr::new(quote_expr!(cx, 1i))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: @MetaItem, it: @Item)
-> @Item {
@Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo()
|
{}
|
identifier_body
|
|
ui.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
//! Computed values for UI properties
use crate::values::computed::color::Color;
use crate::values::computed::url::ComputedImageUrl;
use crate::values::computed::Number;
use crate::values::generics::ui as generics;
pub use crate::values::specified::ui::CursorKind;
pub use crate::values::specified::ui::{MozForceBrokenImageIcon, UserSelect};
/// A computed value for the `cursor` property.
pub type Cursor = generics::Cursor<CursorImage>;
/// A computed value for item of `image cursors`.
pub type CursorImage = generics::CursorImage<ComputedImageUrl, Number>;
/// A computed value for `scrollbar-color` property.
pub type ScrollbarColor = generics::ScrollbarColor<Color>;
|
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
random_line_split
|
display.rs
|
use sdl2::video::Window;
use sdl2::render::Renderer;
use sdl2::pixels::Color::RGB;
use sdl2::rect::{Point, Rect};
use sdl2::sdl::Sdl;
use gpu::Color;
pub struct Display {
renderer: Renderer<'static>,
/// Upscaling factor, log2.
upscale: u8,
}
impl Display {
pub fn new(sdl2: &Sdl, upscale: u8) -> Display {
let up = 1 << (upscale as usize);
let xres = 160 * up;
let yres = 144 * up;
let window = match Window::new(sdl2, "gb-rs",
::sdl2::video::WindowPos::PosCentered,
::sdl2::video::WindowPos::PosCentered,
xres, yres, ::sdl2::video::OPENGL) {
Ok(window) => window,
Err(err) => panic!("failed to create SDL2 window: {}", err)
};
let renderer =
match Renderer::from_window(window,
::sdl2::render::RenderDriverIndex::Auto,
::sdl2::render::SOFTWARE) {
Ok(renderer) => renderer,
Err(err) => panic!("failed to create SDL2 renderer: {}", err)
};
Display { renderer: renderer, upscale: upscale }
}
}
impl ::ui::Display for Display {
fn clear(&mut self) {
let mut drawer = self.renderer.drawer();
let _ = drawer.set_draw_color(RGB(0xff, 0x00, 0x00));
let _ = drawer.clear();
}
fn
|
(&mut self, x: u32, y: u32, color: Color) {
let color = match color {
Color::Black => RGB(0x00, 0x00, 0x00),
Color::DarkGrey => RGB(0x55, 0x55, 0x55),
Color::LightGrey => RGB(0xab, 0xab, 0xab),
Color::White => RGB(0xff, 0xff, 0xff),
};
let mut drawer = self.renderer.drawer();
let _ = drawer.set_draw_color(color);
if self.upscale == 0 {
let _ = drawer.draw_point(Point::new(x as i32, y as i32));
} else {
let up = 1 << (self.upscale as usize);
// Translate coordinates
let x = x as i32 * up;
let y = y as i32 * up;
let _ = drawer.fill_rect(Rect::new(x, y, up, up));
}
}
fn flip(&mut self) {
self.renderer.drawer().present();
self.clear();
}
}
|
set_pixel
|
identifier_name
|
display.rs
|
use sdl2::video::Window;
use sdl2::render::Renderer;
use sdl2::pixels::Color::RGB;
use sdl2::rect::{Point, Rect};
use sdl2::sdl::Sdl;
use gpu::Color;
pub struct Display {
renderer: Renderer<'static>,
/// Upscaling factor, log2.
upscale: u8,
}
impl Display {
pub fn new(sdl2: &Sdl, upscale: u8) -> Display
|
};
Display { renderer: renderer, upscale: upscale }
}
}
impl ::ui::Display for Display {
fn clear(&mut self) {
let mut drawer = self.renderer.drawer();
let _ = drawer.set_draw_color(RGB(0xff, 0x00, 0x00));
let _ = drawer.clear();
}
fn set_pixel(&mut self, x: u32, y: u32, color: Color) {
let color = match color {
Color::Black => RGB(0x00, 0x00, 0x00),
Color::DarkGrey => RGB(0x55, 0x55, 0x55),
Color::LightGrey => RGB(0xab, 0xab, 0xab),
Color::White => RGB(0xff, 0xff, 0xff),
};
let mut drawer = self.renderer.drawer();
let _ = drawer.set_draw_color(color);
if self.upscale == 0 {
let _ = drawer.draw_point(Point::new(x as i32, y as i32));
} else {
let up = 1 << (self.upscale as usize);
// Translate coordinates
let x = x as i32 * up;
let y = y as i32 * up;
let _ = drawer.fill_rect(Rect::new(x, y, up, up));
}
}
fn flip(&mut self) {
self.renderer.drawer().present();
self.clear();
}
}
|
{
let up = 1 << (upscale as usize);
let xres = 160 * up;
let yres = 144 * up;
let window = match Window::new(sdl2, "gb-rs",
::sdl2::video::WindowPos::PosCentered,
::sdl2::video::WindowPos::PosCentered,
xres, yres, ::sdl2::video::OPENGL) {
Ok(window) => window,
Err(err) => panic!("failed to create SDL2 window: {}", err)
};
let renderer =
match Renderer::from_window(window,
::sdl2::render::RenderDriverIndex::Auto,
::sdl2::render::SOFTWARE) {
Ok(renderer) => renderer,
Err(err) => panic!("failed to create SDL2 renderer: {}", err)
|
identifier_body
|
display.rs
|
use sdl2::video::Window;
use sdl2::render::Renderer;
use sdl2::pixels::Color::RGB;
use sdl2::rect::{Point, Rect};
use sdl2::sdl::Sdl;
use gpu::Color;
pub struct Display {
renderer: Renderer<'static>,
/// Upscaling factor, log2.
upscale: u8,
}
impl Display {
pub fn new(sdl2: &Sdl, upscale: u8) -> Display {
let up = 1 << (upscale as usize);
let xres = 160 * up;
let yres = 144 * up;
let window = match Window::new(sdl2, "gb-rs",
::sdl2::video::WindowPos::PosCentered,
::sdl2::video::WindowPos::PosCentered,
xres, yres, ::sdl2::video::OPENGL) {
Ok(window) => window,
Err(err) => panic!("failed to create SDL2 window: {}", err)
};
let renderer =
match Renderer::from_window(window,
::sdl2::render::RenderDriverIndex::Auto,
::sdl2::render::SOFTWARE) {
Ok(renderer) => renderer,
Err(err) => panic!("failed to create SDL2 renderer: {}", err)
|
Display { renderer: renderer, upscale: upscale }
}
}
impl ::ui::Display for Display {
fn clear(&mut self) {
let mut drawer = self.renderer.drawer();
let _ = drawer.set_draw_color(RGB(0xff, 0x00, 0x00));
let _ = drawer.clear();
}
fn set_pixel(&mut self, x: u32, y: u32, color: Color) {
let color = match color {
Color::Black => RGB(0x00, 0x00, 0x00),
Color::DarkGrey => RGB(0x55, 0x55, 0x55),
Color::LightGrey => RGB(0xab, 0xab, 0xab),
Color::White => RGB(0xff, 0xff, 0xff),
};
let mut drawer = self.renderer.drawer();
let _ = drawer.set_draw_color(color);
if self.upscale == 0 {
let _ = drawer.draw_point(Point::new(x as i32, y as i32));
} else {
let up = 1 << (self.upscale as usize);
// Translate coordinates
let x = x as i32 * up;
let y = y as i32 * up;
let _ = drawer.fill_rect(Rect::new(x, y, up, up));
}
}
fn flip(&mut self) {
self.renderer.drawer().present();
self.clear();
}
}
|
};
|
random_line_split
|
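The display.rs rows above scale each 160x144 Game Boy pixel to an up x up square, with up = 1 << upscale. A small, SDL-free sketch of that coordinate translation; the returned tuple is illustrative and is not sdl2's Rect type.

```rust
/// (x, y, w, h) of the on-screen square for a GB pixel; illustrative only.
fn dest_rect(x: u32, y: u32, upscale: u8) -> (i32, i32, i32, i32) {
    let up = 1i32 << upscale; // same shift as in Display::set_pixel
    (x as i32 * up, y as i32 * up, up, up)
}

fn main() {
    // upscale = 2 => 4x4 squares; pixel (10, 7) lands at (40, 28).
    assert_eq!(dest_rect(10, 7, 2), (40, 28, 4, 4));
    // upscale = 0 degenerates to a single point, as the code above draws.
    assert_eq!(dest_rect(159, 143, 0), (159, 143, 1, 1));
    println!("ok");
}
```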
query.rs
|
use async_trait::async_trait;
use super::ElasticsearchStorage;
use crate::domain::ports::secondary::get::{Error as GetError, Get, Parameters as GetParameters};
use crate::domain::ports::secondary::search::{
Error as SearchError, Parameters as SearchParameters, Search,
};
#[async_trait]
impl Search for ElasticsearchStorage {
type Doc = serde_json::Value;
async fn
|
(
&self,
parameters: SearchParameters,
) -> Result<Vec<Self::Doc>, SearchError> {
self.search_documents(
parameters.es_indices_to_search_in,
parameters.query,
parameters.result_limit,
parameters.timeout,
)
.await
.map_err(|err| SearchError::DocumentRetrievalError { source: err.into() })
}
}
#[async_trait]
impl Get for ElasticsearchStorage {
type Doc = serde_json::Value;
async fn get_documents_by_id(
&self,
parameters: GetParameters,
) -> Result<Vec<Self::Doc>, GetError> {
self.get_documents_by_id(parameters.query, parameters.timeout)
.await
.map_err(|err| GetError::DocumentRetrievalError { source: err.into() })
}
}
|
search_documents
|
identifier_name
|
query.rs
|
use async_trait::async_trait;
use super::ElasticsearchStorage;
use crate::domain::ports::secondary::get::{Error as GetError, Get, Parameters as GetParameters};
use crate::domain::ports::secondary::search::{
Error as SearchError, Parameters as SearchParameters, Search,
};
#[async_trait]
impl Search for ElasticsearchStorage {
type Doc = serde_json::Value;
async fn search_documents(
&self,
parameters: SearchParameters,
) -> Result<Vec<Self::Doc>, SearchError>
|
}
#[async_trait]
impl Get for ElasticsearchStorage {
type Doc = serde_json::Value;
async fn get_documents_by_id(
&self,
parameters: GetParameters,
) -> Result<Vec<Self::Doc>, GetError> {
self.get_documents_by_id(parameters.query, parameters.timeout)
.await
.map_err(|err| GetError::DocumentRetrievalError { source: err.into() })
}
}
|
{
self.search_documents(
parameters.es_indices_to_search_in,
parameters.query,
parameters.result_limit,
parameters.timeout,
)
.await
.map_err(|err| SearchError::DocumentRetrievalError { source: err.into() })
}
|
identifier_body
|
query.rs
|
use async_trait::async_trait;
use super::ElasticsearchStorage;
use crate::domain::ports::secondary::get::{Error as GetError, Get, Parameters as GetParameters};
use crate::domain::ports::secondary::search::{
Error as SearchError, Parameters as SearchParameters, Search,
};
|
type Doc = serde_json::Value;
async fn search_documents(
&self,
parameters: SearchParameters,
) -> Result<Vec<Self::Doc>, SearchError> {
self.search_documents(
parameters.es_indices_to_search_in,
parameters.query,
parameters.result_limit,
parameters.timeout,
)
.await
.map_err(|err| SearchError::DocumentRetrievalError { source: err.into() })
}
}
#[async_trait]
impl Get for ElasticsearchStorage {
type Doc = serde_json::Value;
async fn get_documents_by_id(
&self,
parameters: GetParameters,
) -> Result<Vec<Self::Doc>, GetError> {
self.get_documents_by_id(parameters.query, parameters.timeout)
.await
.map_err(|err| GetError::DocumentRetrievalError { source: err.into() })
}
}
|
#[async_trait]
impl Search for ElasticsearchStorage {
|
random_line_split
|
main.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Example of service which uses the time oracle.
//!
//! This example shows an implementation of a simple service which interacts
//! with `exonum-time` service to obtain time.
//!
//! The `main` function of the example runs the `testkit` with both the `exonum-time` and
//! example services, and demonstrates their interaction.
use chrono::{DateTime, Duration, TimeZone, Utc};
use exonum::{
crypto::{KeyPair, PublicKey},
helpers::Height,
merkledb::{
access::{Access, FromAccess},
ProofMapIndex,
},
runtime::{ExecutionContext, ExecutionError, InstanceId, SnapshotExt},
};
use exonum_derive::{
exonum_interface, BinaryValue, FromAccess, ObjectHash, RequireArtifact, ServiceDispatcher,
ServiceFactory,
};
use exonum_rust_runtime::Service;
use exonum_testkit::{Spec, TestKitBuilder};
use serde_derive::{Deserialize, Serialize};
use exonum_time::{MockTimeProvider, TimeProvider, TimeSchema, TimeServiceFactory};
use std::sync::Arc;
/// The argument of the `MarkerInterface::mark` method.
#[derive(Clone, Debug)]
#[derive(Serialize, Deserialize)]
#[derive(BinaryValue, ObjectHash)]
#[binary_value(codec = "bincode")]
pub struct TxMarker {
mark: i32,
time: DateTime<Utc>,
}
impl TxMarker {
fn new(mark: i32, time: DateTime<Utc>) -> Self {
Self { mark, time }
}
}
/// Marker service transactions interface definition.
#[exonum_interface(auto_ids)]
pub trait MarkerTransactions<Ctx> {
/// Output returned by the interface methods.
type Output;
/// Transaction which must be executed no later than the specified time (field `time`).
fn mark(&self, context: Ctx, arg: TxMarker) -> Self::Output;
}
#[derive(Debug, ServiceDispatcher, ServiceFactory)]
#[service_factory(artifact_name = "marker", artifact_version = "0.1.0")]
#[service_dispatcher(implements("MarkerTransactions"))]
struct MarkerService;
/// Marker service database schema.
#[derive(Debug, FromAccess, RequireArtifact)]
#[require_artifact(name = "marker", version = "0.1.x")]
// ^-- Must match the name / version specified for `MarkerService`.
pub struct MarkerSchema<T: Access> {
pub marks: ProofMapIndex<T::Base, PublicKey, i32>,
}
impl<T: Access> MarkerSchema<T> {
fn new(access: T) -> Self {
Self::from_root(access).unwrap()
}
}
impl MarkerTransactions<ExecutionContext<'_>> for MarkerService {
type Output = Result<(), ExecutionError>;
fn mark(&self, context: ExecutionContext<'_>, arg: TxMarker) -> Result<(), ExecutionError> {
let author = context
.caller()
.author()
.expect("Wrong `TxMarker` initiator");
let data = context.data();
let time_schema: TimeSchema<_> = data.service_schema(TIME_SERVICE_NAME)?;
let time = time_schema.time.get();
match time {
Some(current_time) if current_time <= arg.time => {
let mut schema = MarkerSchema::new(context.service_data());
schema.marks.put(&author, arg.mark);
}
_ =>
|
}
Ok(())
}
}
impl Service for MarkerService {}
// Several helpers for testkit.
/// Time oracle instance ID.
const TIME_SERVICE_ID: InstanceId = 112;
/// Time oracle instance name.
const TIME_SERVICE_NAME: &str = "time-oracle";
/// Marker service ID.
const SERVICE_ID: InstanceId = 128;
/// Marker service name.
const SERVICE_NAME: &str = "marker";
fn main() {
let mock_provider = Arc::new(MockTimeProvider::default());
// Create testkit for network with one validator.
let time_service =
TimeServiceFactory::with_provider(mock_provider.clone() as Arc<dyn TimeProvider>);
let time_service =
Spec::new(time_service).with_instance(TIME_SERVICE_ID, TIME_SERVICE_NAME, ());
let marker_service = Spec::new(MarkerService).with_instance(SERVICE_ID, SERVICE_NAME, ());
let mut testkit = TestKitBuilder::validator()
.with(time_service)
.with(marker_service)
.build();
mock_provider.set_time(Utc.timestamp(10, 0));
testkit.create_blocks_until(Height(2));
let snapshot = testkit.snapshot();
let time_schema: TimeSchema<_> = snapshot.service_schema(TIME_SERVICE_NAME).unwrap();
assert_eq!(time_schema.time.get(), Some(mock_provider.time()));
let keypair1 = KeyPair::random();
let keypair2 = KeyPair::random();
let keypair3 = KeyPair::random();
let tx1 = keypair1.mark(SERVICE_ID, TxMarker::new(1, mock_provider.time()));
let tx2 = keypair2.mark(
SERVICE_ID,
TxMarker::new(2, mock_provider.time() + Duration::seconds(10)),
);
let tx3 = keypair3.mark(
SERVICE_ID,
TxMarker::new(3, mock_provider.time() - Duration::seconds(5)),
);
testkit.create_block_with_transactions(vec![tx1, tx2, tx3]);
let snapshot = testkit.snapshot();
let schema: MarkerSchema<_> = snapshot.service_schema(SERVICE_NAME).unwrap();
assert_eq!(schema.marks.get(&keypair1.public_key()), Some(1));
assert_eq!(schema.marks.get(&keypair2.public_key()), Some(2));
assert_eq!(schema.marks.get(&keypair3.public_key()), None);
let tx4 = keypair3.mark(SERVICE_ID, TxMarker::new(4, Utc.timestamp(15, 0)));
testkit.create_block_with_transactions(vec![tx4]);
let snapshot = testkit.snapshot();
let schema: MarkerSchema<_> = snapshot.service_schema(SERVICE_NAME).unwrap();
assert_eq!(schema.marks.get(&keypair3.public_key()), Some(4));
}
|
{}
|
conditional_block
|
main.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Example of service which uses the time oracle.
//!
//! This example shows an implementation of a simple service which interacts
//! with `exonum-time` service to obtain time.
//!
//! The `main` function of the example runs the `testkit` with both the `exonum-time` and
//! example services, and demonstrates their interaction.
use chrono::{DateTime, Duration, TimeZone, Utc};
use exonum::{
crypto::{KeyPair, PublicKey},
helpers::Height,
merkledb::{
|
runtime::{ExecutionContext, ExecutionError, InstanceId, SnapshotExt},
};
use exonum_derive::{
exonum_interface, BinaryValue, FromAccess, ObjectHash, RequireArtifact, ServiceDispatcher,
ServiceFactory,
};
use exonum_rust_runtime::Service;
use exonum_testkit::{Spec, TestKitBuilder};
use serde_derive::{Deserialize, Serialize};
use exonum_time::{MockTimeProvider, TimeProvider, TimeSchema, TimeServiceFactory};
use std::sync::Arc;
/// The argument of the `MarkerInterface::mark` method.
#[derive(Clone, Debug)]
#[derive(Serialize, Deserialize)]
#[derive(BinaryValue, ObjectHash)]
#[binary_value(codec = "bincode")]
pub struct TxMarker {
mark: i32,
time: DateTime<Utc>,
}
impl TxMarker {
fn new(mark: i32, time: DateTime<Utc>) -> Self {
Self { mark, time }
}
}
/// Marker service transactions interface definition.
#[exonum_interface(auto_ids)]
pub trait MarkerTransactions<Ctx> {
/// Output returned by the interface methods.
type Output;
/// Transaction which must be executed no later than the specified time (field `time`).
fn mark(&self, context: Ctx, arg: TxMarker) -> Self::Output;
}
#[derive(Debug, ServiceDispatcher, ServiceFactory)]
#[service_factory(artifact_name = "marker", artifact_version = "0.1.0")]
#[service_dispatcher(implements("MarkerTransactions"))]
struct MarkerService;
/// Marker service database schema.
#[derive(Debug, FromAccess, RequireArtifact)]
#[require_artifact(name = "marker", version = "0.1.x")]
// ^-- Must match the name / version specified for `MarkerService`.
pub struct MarkerSchema<T: Access> {
pub marks: ProofMapIndex<T::Base, PublicKey, i32>,
}
impl<T: Access> MarkerSchema<T> {
fn new(access: T) -> Self {
Self::from_root(access).unwrap()
}
}
impl MarkerTransactions<ExecutionContext<'_>> for MarkerService {
type Output = Result<(), ExecutionError>;
fn mark(&self, context: ExecutionContext<'_>, arg: TxMarker) -> Result<(), ExecutionError> {
let author = context
.caller()
.author()
.expect("Wrong `TxMarker` initiator");
let data = context.data();
let time_schema: TimeSchema<_> = data.service_schema(TIME_SERVICE_NAME)?;
let time = time_schema.time.get();
match time {
Some(current_time) if current_time <= arg.time => {
let mut schema = MarkerSchema::new(context.service_data());
schema.marks.put(&author, arg.mark);
}
_ => {}
}
Ok(())
}
}
impl Service for MarkerService {}
// Several helpers for testkit.
/// Time oracle instance ID.
const TIME_SERVICE_ID: InstanceId = 112;
/// Time oracle instance name.
const TIME_SERVICE_NAME: &str = "time-oracle";
/// Marker service ID.
const SERVICE_ID: InstanceId = 128;
/// Marker service name.
const SERVICE_NAME: &str = "marker";
fn main() {
let mock_provider = Arc::new(MockTimeProvider::default());
// Create testkit for network with one validator.
let time_service =
TimeServiceFactory::with_provider(mock_provider.clone() as Arc<dyn TimeProvider>);
let time_service =
Spec::new(time_service).with_instance(TIME_SERVICE_ID, TIME_SERVICE_NAME, ());
let marker_service = Spec::new(MarkerService).with_instance(SERVICE_ID, SERVICE_NAME, ());
let mut testkit = TestKitBuilder::validator()
.with(time_service)
.with(marker_service)
.build();
mock_provider.set_time(Utc.timestamp(10, 0));
testkit.create_blocks_until(Height(2));
let snapshot = testkit.snapshot();
let time_schema: TimeSchema<_> = snapshot.service_schema(TIME_SERVICE_NAME).unwrap();
assert_eq!(time_schema.time.get(), Some(mock_provider.time()));
let keypair1 = KeyPair::random();
let keypair2 = KeyPair::random();
let keypair3 = KeyPair::random();
let tx1 = keypair1.mark(SERVICE_ID, TxMarker::new(1, mock_provider.time()));
let tx2 = keypair2.mark(
SERVICE_ID,
TxMarker::new(2, mock_provider.time() + Duration::seconds(10)),
);
let tx3 = keypair3.mark(
SERVICE_ID,
TxMarker::new(3, mock_provider.time() - Duration::seconds(5)),
);
testkit.create_block_with_transactions(vec![tx1, tx2, tx3]);
let snapshot = testkit.snapshot();
let schema: MarkerSchema<_> = snapshot.service_schema(SERVICE_NAME).unwrap();
assert_eq!(schema.marks.get(&keypair1.public_key()), Some(1));
assert_eq!(schema.marks.get(&keypair2.public_key()), Some(2));
assert_eq!(schema.marks.get(&keypair3.public_key()), None);
let tx4 = keypair3.mark(SERVICE_ID, TxMarker::new(4, Utc.timestamp(15, 0)));
testkit.create_block_with_transactions(vec![tx4]);
let snapshot = testkit.snapshot();
let schema: MarkerSchema<_> = snapshot.service_schema(SERVICE_NAME).unwrap();
assert_eq!(schema.marks.get(&keypair3.public_key()), Some(4));
}
|
access::{Access, FromAccess},
ProofMapIndex,
},
|
random_line_split
|
main.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Example of service which uses the time oracle.
//!
//! This example shows an implementation of a simple service which interacts
//! with `exonum-time` service to obtain time.
//!
//! The `main` function of the example runs the `testkit` with both the `exonum-time` and
//! example services, and demonstrates their interaction.
use chrono::{DateTime, Duration, TimeZone, Utc};
use exonum::{
crypto::{KeyPair, PublicKey},
helpers::Height,
merkledb::{
access::{Access, FromAccess},
ProofMapIndex,
},
runtime::{ExecutionContext, ExecutionError, InstanceId, SnapshotExt},
};
use exonum_derive::{
exonum_interface, BinaryValue, FromAccess, ObjectHash, RequireArtifact, ServiceDispatcher,
ServiceFactory,
};
use exonum_rust_runtime::Service;
use exonum_testkit::{Spec, TestKitBuilder};
use serde_derive::{Deserialize, Serialize};
use exonum_time::{MockTimeProvider, TimeProvider, TimeSchema, TimeServiceFactory};
use std::sync::Arc;
/// The argument of the `MarkerInterface::mark` method.
#[derive(Clone, Debug)]
#[derive(Serialize, Deserialize)]
#[derive(BinaryValue, ObjectHash)]
#[binary_value(codec = "bincode")]
pub struct TxMarker {
mark: i32,
time: DateTime<Utc>,
}
impl TxMarker {
fn new(mark: i32, time: DateTime<Utc>) -> Self {
Self { mark, time }
}
}
/// Marker service transactions interface definition.
#[exonum_interface(auto_ids)]
pub trait MarkerTransactions<Ctx> {
/// Output returned by the interface methods.
type Output;
/// Transaction which must be executed no later than the specified time (field `time`).
fn mark(&self, context: Ctx, arg: TxMarker) -> Self::Output;
}
#[derive(Debug, ServiceDispatcher, ServiceFactory)]
#[service_factory(artifact_name = "marker", artifact_version = "0.1.0")]
#[service_dispatcher(implements("MarkerTransactions"))]
struct MarkerService;
/// Marker service database schema.
#[derive(Debug, FromAccess, RequireArtifact)]
#[require_artifact(name = "marker", version = "0.1.x")]
// ^-- Must match the name / version specified for `MarkerService`.
pub struct
|
<T: Access> {
pub marks: ProofMapIndex<T::Base, PublicKey, i32>,
}
impl<T: Access> MarkerSchema<T> {
fn new(access: T) -> Self {
Self::from_root(access).unwrap()
}
}
impl MarkerTransactions<ExecutionContext<'_>> for MarkerService {
type Output = Result<(), ExecutionError>;
fn mark(&self, context: ExecutionContext<'_>, arg: TxMarker) -> Result<(), ExecutionError> {
let author = context
.caller()
.author()
.expect("Wrong `TxMarker` initiator");
let data = context.data();
let time_schema: TimeSchema<_> = data.service_schema(TIME_SERVICE_NAME)?;
let time = time_schema.time.get();
match time {
Some(current_time) if current_time <= arg.time => {
let mut schema = MarkerSchema::new(context.service_data());
schema.marks.put(&author, arg.mark);
}
_ => {}
}
Ok(())
}
}
impl Service for MarkerService {}
// Several helpers for testkit.
/// Time oracle instance ID.
const TIME_SERVICE_ID: InstanceId = 112;
/// Time oracle instance name.
const TIME_SERVICE_NAME: &str = "time-oracle";
/// Marker service ID.
const SERVICE_ID: InstanceId = 128;
/// Marker service name.
const SERVICE_NAME: &str = "marker";
fn main() {
let mock_provider = Arc::new(MockTimeProvider::default());
// Create testkit for network with one validator.
let time_service =
TimeServiceFactory::with_provider(mock_provider.clone() as Arc<dyn TimeProvider>);
let time_service =
Spec::new(time_service).with_instance(TIME_SERVICE_ID, TIME_SERVICE_NAME, ());
let marker_service = Spec::new(MarkerService).with_instance(SERVICE_ID, SERVICE_NAME, ());
let mut testkit = TestKitBuilder::validator()
.with(time_service)
.with(marker_service)
.build();
mock_provider.set_time(Utc.timestamp(10, 0));
testkit.create_blocks_until(Height(2));
let snapshot = testkit.snapshot();
let time_schema: TimeSchema<_> = snapshot.service_schema(TIME_SERVICE_NAME).unwrap();
assert_eq!(time_schema.time.get(), Some(mock_provider.time()));
let keypair1 = KeyPair::random();
let keypair2 = KeyPair::random();
let keypair3 = KeyPair::random();
let tx1 = keypair1.mark(SERVICE_ID, TxMarker::new(1, mock_provider.time()));
let tx2 = keypair2.mark(
SERVICE_ID,
TxMarker::new(2, mock_provider.time() + Duration::seconds(10)),
);
let tx3 = keypair3.mark(
SERVICE_ID,
TxMarker::new(3, mock_provider.time() - Duration::seconds(5)),
);
testkit.create_block_with_transactions(vec![tx1, tx2, tx3]);
let snapshot = testkit.snapshot();
let schema: MarkerSchema<_> = snapshot.service_schema(SERVICE_NAME).unwrap();
assert_eq!(schema.marks.get(&keypair1.public_key()), Some(1));
assert_eq!(schema.marks.get(&keypair2.public_key()), Some(2));
assert_eq!(schema.marks.get(&keypair3.public_key()), None);
let tx4 = keypair3.mark(SERVICE_ID, TxMarker::new(4, Utc.timestamp(15, 0)));
testkit.create_block_with_transactions(vec![tx4]);
let snapshot = testkit.snapshot();
let schema: MarkerSchema<_> = snapshot.service_schema(SERVICE_NAME).unwrap();
assert_eq!(schema.marks.get(&keypair3.public_key()), Some(4));
}
|
MarkerSchema
|
identifier_name
|
version.rs
|
/*
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
use chrono::{DateTime, Utc};
use exempi::Xmp;
use std::path::Path;
use crate::audit::{
audit_get_array_value, audit_get_bool_value, audit_get_date_value, audit_get_dict_value,
audit_get_int_value, audit_get_str_value, Report, SkipReason,
};
use crate::custominfo::CustomInfoProperties;
use crate::exif::ExifProperties;
use crate::iptc::IptcProperties;
use crate::plutils::Value;
use crate::store;
use crate::xmp::ToXmp;
use crate::AplibObject;
use crate::AplibType;
use crate::PlistLoadable;
/// A rendered image. There is one for the original, and one per
/// actual version. `Version`s are associated with a `Master`.
pub struct
|
{
uuid: Option<String>,
model_id: Option<i64>,
/// The associated `Master`.
master_uuid: Option<String>,
/// uuid of the `Folder` project this resides in.
pub project_uuid: Option<String>,
/// uuid of the raw `Master`.
pub raw_master_uuid: Option<String>,
/// uuid of the non raw `Master`.
pub nonraw_master_uuid: Option<String>,
pub timezone_name: Option<String>,
pub create_date: Option<DateTime<Utc>>,
pub image_date: Option<DateTime<Utc>>,
pub export_image_change_date: Option<DateTime<Utc>>,
pub export_metadata_change_date: Option<DateTime<Utc>>,
pub version_number: Option<i64>,
pub db_version: Option<i64>,
pub db_minor_version: Option<i64>,
pub is_flagged: Option<bool>,
/// Indicate the version is the original.
pub is_original: Option<bool>,
pub is_editable: Option<bool>,
pub is_hidden: Option<bool>,
pub is_in_trash: Option<bool>,
pub file_name: Option<String>,
pub name: Option<String>,
pub rating: Option<i64>,
pub rotation: Option<i64>,
pub colour_label_index: Option<i64>,
pub iptc: Option<IptcProperties>,
pub exif: Option<ExifProperties>,
pub custom_info: Option<CustomInfoProperties>,
pub keywords: Option<Vec<Value>>,
}
impl PlistLoadable for Version {
/// Load the version object from the plist at plist_path.
fn from_path<P>(plist_path: P, mut auditor: Option<&mut Report>) -> Option<Version>
where
P: AsRef<Path>,
{
use crate::plutils::*;
let plist = parse_plist(plist_path);
match plist {
Value::Dictionary(ref dict) => {
let iptc = audit_get_dict_value(dict, "iptcProperties", &mut auditor);
let exif = audit_get_dict_value(dict, "exifProperties", &mut auditor);
let custom_info = audit_get_dict_value(dict, "customInfo", &mut auditor);
let result = Some(Version {
uuid: audit_get_str_value(dict, "uuid", &mut auditor),
master_uuid: audit_get_str_value(dict, "masterUuid", &mut auditor),
project_uuid: audit_get_str_value(dict, "projectUuid", &mut auditor),
raw_master_uuid: audit_get_str_value(dict, "rawMasterUuid", &mut auditor),
nonraw_master_uuid: audit_get_str_value(dict, "nonRawMasterUuid", &mut auditor),
timezone_name: audit_get_str_value(dict, "imageTimeZoneName", &mut auditor),
create_date: audit_get_date_value(dict, "createDate", &mut auditor),
image_date: audit_get_date_value(dict, "imageDate", &mut auditor),
export_image_change_date: audit_get_date_value(
dict,
"exportImageChangeDate",
&mut auditor,
),
export_metadata_change_date: audit_get_date_value(
dict,
"exportMetadataChangeDate",
&mut auditor,
),
version_number: audit_get_int_value(dict, "versionNumber", &mut auditor),
db_version: audit_get_int_value(dict, "version", &mut auditor),
db_minor_version: audit_get_int_value(dict, "minorVersion", &mut auditor),
is_flagged: audit_get_bool_value(dict, "isFlagged", &mut auditor),
is_original: audit_get_bool_value(dict, "isOriginal", &mut auditor),
is_editable: audit_get_bool_value(dict, "isEditable", &mut auditor),
is_hidden: audit_get_bool_value(dict, "isHidden", &mut auditor),
is_in_trash: audit_get_bool_value(dict, "isInTrash", &mut auditor),
file_name: audit_get_str_value(dict, "fileName", &mut auditor),
name: audit_get_str_value(dict, "name", &mut auditor),
model_id: audit_get_int_value(dict, "modelId", &mut auditor),
rating: audit_get_int_value(dict, "mainRating", &mut auditor),
rotation: audit_get_int_value(dict, "rotation", &mut auditor),
colour_label_index: audit_get_int_value(dict, "colorLabelIndex", &mut auditor),
iptc: IptcProperties::from(&iptc, &mut auditor),
exif: ExifProperties::from(&exif, &mut auditor),
custom_info: CustomInfoProperties::from(&custom_info, &mut auditor),
keywords: audit_get_array_value(dict, "keywords", &mut auditor),
});
if let Some(auditor) = &mut auditor {
auditor.skip("statistics", SkipReason::Ignore);
auditor.skip("thumbnailGroup", SkipReason::Ignore);
auditor.skip("faceDetectionIsFromPreview", SkipReason::Ignore);
auditor.skip("processedHeight", SkipReason::Ignore);
auditor.skip("processedWidth", SkipReason::Ignore);
auditor.skip("masterHeight", SkipReason::Ignore);
auditor.skip("masterWidth", SkipReason::Ignore);
auditor.skip("supportedStatus", SkipReason::Ignore);
auditor.skip("showInLibrary", SkipReason::Ignore);
auditor.skip("adjustmentProperties", SkipReason::Ignore); // don't know what to do yet
auditor.skip("RKImageAdjustments", SkipReason::Ignore);
auditor.skip("hasAdjustments", SkipReason::Ignore);
auditor.skip("hasEnabledAdjustments", SkipReason::Ignore);
auditor.skip("renderVersion", SkipReason::Ignore);
auditor.skip("imageProxyState", SkipReason::Ignore);
auditor.skip("plistWriteTimestamp", SkipReason::Ignore);
auditor.audit_ignored(dict, None);
}
result
}
_ => None,
}
}
}
impl AplibObject for Version {
fn obj_type(&self) -> AplibType {
AplibType::Version
}
fn uuid(&self) -> &Option<String> {
&self.uuid
}
fn parent(&self) -> &Option<String> {
&self.master_uuid
}
fn model_id(&self) -> i64 {
self.model_id.unwrap_or(0)
}
fn is_valid(&self) -> bool {
self.uuid.is_some()
}
fn wrap(obj: Version) -> store::Wrapper {
store::Wrapper::Version(Box::new(obj))
}
}
impl ToXmp for Version {
fn to_xmp(&self, xmp: &mut Xmp) -> bool {
// Here we make sure the Exif data are
// processed before Iptc.
if let Some(ref exif) = self.exif {
exif.to_xmp(xmp);
}
if let Some(ref iptc) = self.iptc {
iptc.to_xmp(xmp);
}
true
}
}
#[cfg(test)]
#[test]
fn test_version_parse() {
use crate::testutils;
use crate::xmp;
use exempi;
let version = Version::from_path(
testutils::get_test_file_path("Version-0.apversion").as_path(),
None,
);
assert!(version.is_some());
let version = version.unwrap();
assert_eq!(version.uuid.as_ref().unwrap(), "MHMIbw5CQaiMgQ3n7g2w2A");
assert!(version.is_original.unwrap());
assert_eq!(
version.master_uuid.as_ref().unwrap(),
"WZMCPPRHR%C3nffgeeS4IQ"
);
assert_eq!(version.name.as_ref().unwrap(), "img_3136");
assert!(version.iptc.is_some());
let iptc = version.iptc.as_ref().unwrap();
assert!(iptc.bag.contains_key("Byline"));
assert!(iptc.bag.contains_key("CiAdrCity"));
let exif = version.exif.as_ref().unwrap();
assert!(exif.bag.contains_key("ApertureValue"));
assert!(exif.bag.contains_key("Depth"));
// XXX fix when have actual audit.
// println!("report {:?}", report);
exempi::init();
let mut xmp = Xmp::new();
let result = version.to_xmp(&mut xmp);
assert!(result);
let mut options: exempi::PropFlags = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_DC, "creator", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "Hubert Figuiere");
options = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_EXIF, "ApertureValue", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "4");
}
|
Version
|
identifier_name
|
version.rs
|
/*
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
use chrono::{DateTime, Utc};
use exempi::Xmp;
use std::path::Path;
use crate::audit::{
audit_get_array_value, audit_get_bool_value, audit_get_date_value, audit_get_dict_value,
audit_get_int_value, audit_get_str_value, Report, SkipReason,
};
use crate::custominfo::CustomInfoProperties;
use crate::exif::ExifProperties;
use crate::iptc::IptcProperties;
use crate::plutils::Value;
use crate::store;
use crate::xmp::ToXmp;
use crate::AplibObject;
use crate::AplibType;
use crate::PlistLoadable;
/// A rendered image. There is one for the original, and one per
/// actual version. `Version`s are associated with a `Master`.
pub struct Version {
uuid: Option<String>,
model_id: Option<i64>,
/// The associated `Master`.
master_uuid: Option<String>,
/// uuid of the `Folder` project this resides in.
pub project_uuid: Option<String>,
/// uuid of the raw `Master`.
pub raw_master_uuid: Option<String>,
/// uuid of the non raw `Master`.
pub nonraw_master_uuid: Option<String>,
pub timezone_name: Option<String>,
pub create_date: Option<DateTime<Utc>>,
pub image_date: Option<DateTime<Utc>>,
pub export_image_change_date: Option<DateTime<Utc>>,
pub export_metadata_change_date: Option<DateTime<Utc>>,
pub version_number: Option<i64>,
pub db_version: Option<i64>,
pub db_minor_version: Option<i64>,
pub is_flagged: Option<bool>,
/// Indicate the version is the original.
pub is_original: Option<bool>,
pub is_editable: Option<bool>,
pub is_hidden: Option<bool>,
pub is_in_trash: Option<bool>,
pub file_name: Option<String>,
pub name: Option<String>,
pub rating: Option<i64>,
pub rotation: Option<i64>,
pub colour_label_index: Option<i64>,
pub iptc: Option<IptcProperties>,
pub exif: Option<ExifProperties>,
pub custom_info: Option<CustomInfoProperties>,
pub keywords: Option<Vec<Value>>,
}
impl PlistLoadable for Version {
/// Load the version object from the plist at plist_path.
fn from_path<P>(plist_path: P, mut auditor: Option<&mut Report>) -> Option<Version>
where
P: AsRef<Path>,
{
use crate::plutils::*;
let plist = parse_plist(plist_path);
match plist {
Value::Dictionary(ref dict) => {
let iptc = audit_get_dict_value(dict, "iptcProperties", &mut auditor);
let exif = audit_get_dict_value(dict, "exifProperties", &mut auditor);
let custom_info = audit_get_dict_value(dict, "customInfo", &mut auditor);
let result = Some(Version {
uuid: audit_get_str_value(dict, "uuid", &mut auditor),
master_uuid: audit_get_str_value(dict, "masterUuid", &mut auditor),
project_uuid: audit_get_str_value(dict, "projectUuid", &mut auditor),
raw_master_uuid: audit_get_str_value(dict, "rawMasterUuid", &mut auditor),
nonraw_master_uuid: audit_get_str_value(dict, "nonRawMasterUuid", &mut auditor),
timezone_name: audit_get_str_value(dict, "imageTimeZoneName", &mut auditor),
create_date: audit_get_date_value(dict, "createDate", &mut auditor),
image_date: audit_get_date_value(dict, "imageDate", &mut auditor),
export_image_change_date: audit_get_date_value(
dict,
"exportImageChangeDate",
&mut auditor,
),
export_metadata_change_date: audit_get_date_value(
dict,
"exportMetadataChangeDate",
&mut auditor,
),
version_number: audit_get_int_value(dict, "versionNumber", &mut auditor),
db_version: audit_get_int_value(dict, "version", &mut auditor),
db_minor_version: audit_get_int_value(dict, "minorVersion", &mut auditor),
is_flagged: audit_get_bool_value(dict, "isFlagged", &mut auditor),
is_original: audit_get_bool_value(dict, "isOriginal", &mut auditor),
is_editable: audit_get_bool_value(dict, "isEditable", &mut auditor),
is_hidden: audit_get_bool_value(dict, "isHidden", &mut auditor),
is_in_trash: audit_get_bool_value(dict, "isInTrash", &mut auditor),
file_name: audit_get_str_value(dict, "fileName", &mut auditor),
name: audit_get_str_value(dict, "name", &mut auditor),
model_id: audit_get_int_value(dict, "modelId", &mut auditor),
rating: audit_get_int_value(dict, "mainRating", &mut auditor),
rotation: audit_get_int_value(dict, "rotation", &mut auditor),
colour_label_index: audit_get_int_value(dict, "colorLabelIndex", &mut auditor),
iptc: IptcProperties::from(&iptc, &mut auditor),
exif: ExifProperties::from(&exif, &mut auditor),
custom_info: CustomInfoProperties::from(&custom_info, &mut auditor),
keywords: audit_get_array_value(dict, "keywords", &mut auditor),
});
if let Some(auditor) = &mut auditor {
|
auditor.skip("thumbnailGroup", SkipReason::Ignore);
auditor.skip("faceDetectionIsFromPreview", SkipReason::Ignore);
auditor.skip("processedHeight", SkipReason::Ignore);
auditor.skip("processedWidth", SkipReason::Ignore);
auditor.skip("masterHeight", SkipReason::Ignore);
auditor.skip("masterWidth", SkipReason::Ignore);
auditor.skip("supportedStatus", SkipReason::Ignore);
auditor.skip("showInLibrary", SkipReason::Ignore);
auditor.skip("adjustmentProperties", SkipReason::Ignore); // don't know what to do yet
auditor.skip("RKImageAdjustments", SkipReason::Ignore);
auditor.skip("hasAdjustments", SkipReason::Ignore);
auditor.skip("hasEnabledAdjustments", SkipReason::Ignore);
auditor.skip("renderVersion", SkipReason::Ignore);
auditor.skip("imageProxyState", SkipReason::Ignore);
auditor.skip("plistWriteTimestamp", SkipReason::Ignore);
auditor.audit_ignored(dict, None);
}
result
}
_ => None,
}
}
}
impl AplibObject for Version {
fn obj_type(&self) -> AplibType {
AplibType::Version
}
fn uuid(&self) -> &Option<String> {
&self.uuid
}
fn parent(&self) -> &Option<String> {
&self.master_uuid
}
fn model_id(&self) -> i64 {
self.model_id.unwrap_or(0)
}
fn is_valid(&self) -> bool {
self.uuid.is_some()
}
fn wrap(obj: Version) -> store::Wrapper {
store::Wrapper::Version(Box::new(obj))
}
}
impl ToXmp for Version {
fn to_xmp(&self, xmp: &mut Xmp) -> bool {
// Here we make sure the Exif data are
// processed before Iptc.
if let Some(ref exif) = self.exif {
exif.to_xmp(xmp);
}
if let Some(ref iptc) = self.iptc {
iptc.to_xmp(xmp);
}
true
}
}
#[cfg(test)]
#[test]
fn test_version_parse() {
use crate::testutils;
use crate::xmp;
use exempi;
let version = Version::from_path(
testutils::get_test_file_path("Version-0.apversion").as_path(),
None,
);
assert!(version.is_some());
let version = version.unwrap();
assert_eq!(version.uuid.as_ref().unwrap(), "MHMIbw5CQaiMgQ3n7g2w2A");
assert!(version.is_original.unwrap());
assert_eq!(
version.master_uuid.as_ref().unwrap(),
"WZMCPPRHR%C3nffgeeS4IQ"
);
assert_eq!(version.name.as_ref().unwrap(), "img_3136");
assert!(version.iptc.is_some());
let iptc = version.iptc.as_ref().unwrap();
assert!(iptc.bag.contains_key("Byline"));
assert!(iptc.bag.contains_key("CiAdrCity"));
let exif = version.exif.as_ref().unwrap();
assert!(exif.bag.contains_key("ApertureValue"));
assert!(exif.bag.contains_key("Depth"));
// XXX fix when have actual audit.
// println!("report {:?}", report);
exempi::init();
let mut xmp = Xmp::new();
let result = version.to_xmp(&mut xmp);
assert!(result);
let mut options: exempi::PropFlags = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_DC, "creator", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "Hubert Figuiere");
options = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_EXIF, "ApertureValue", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "4");
}
|
auditor.skip("statistics", SkipReason::Ignore);
|
random_line_split
|
version.rs
|
/*
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
use chrono::{DateTime, Utc};
use exempi::Xmp;
use std::path::Path;
use crate::audit::{
audit_get_array_value, audit_get_bool_value, audit_get_date_value, audit_get_dict_value,
audit_get_int_value, audit_get_str_value, Report, SkipReason,
};
use crate::custominfo::CustomInfoProperties;
use crate::exif::ExifProperties;
use crate::iptc::IptcProperties;
use crate::plutils::Value;
use crate::store;
use crate::xmp::ToXmp;
use crate::AplibObject;
use crate::AplibType;
use crate::PlistLoadable;
/// A rendered image. There is one for the original, and one per
/// actual version. `Version`s are associated with a `Master`.
pub struct Version {
uuid: Option<String>,
model_id: Option<i64>,
/// The associated `Master`.
master_uuid: Option<String>,
/// uuid of the `Folder` project this resides in.
pub project_uuid: Option<String>,
/// uuid of the raw `Master`.
pub raw_master_uuid: Option<String>,
/// uuid of the non raw `Master`.
pub nonraw_master_uuid: Option<String>,
pub timezone_name: Option<String>,
pub create_date: Option<DateTime<Utc>>,
pub image_date: Option<DateTime<Utc>>,
pub export_image_change_date: Option<DateTime<Utc>>,
pub export_metadata_change_date: Option<DateTime<Utc>>,
pub version_number: Option<i64>,
pub db_version: Option<i64>,
pub db_minor_version: Option<i64>,
pub is_flagged: Option<bool>,
/// Indicate the version is the original.
pub is_original: Option<bool>,
pub is_editable: Option<bool>,
pub is_hidden: Option<bool>,
pub is_in_trash: Option<bool>,
pub file_name: Option<String>,
pub name: Option<String>,
pub rating: Option<i64>,
pub rotation: Option<i64>,
pub colour_label_index: Option<i64>,
pub iptc: Option<IptcProperties>,
pub exif: Option<ExifProperties>,
pub custom_info: Option<CustomInfoProperties>,
pub keywords: Option<Vec<Value>>,
}
impl PlistLoadable for Version {
/// Load the version object from the plist at plist_path.
fn from_path<P>(plist_path: P, mut auditor: Option<&mut Report>) -> Option<Version>
where
P: AsRef<Path>,
{
use crate::plutils::*;
let plist = parse_plist(plist_path);
match plist {
Value::Dictionary(ref dict) => {
let iptc = audit_get_dict_value(dict, "iptcProperties", &mut auditor);
let exif = audit_get_dict_value(dict, "exifProperties", &mut auditor);
let custom_info = audit_get_dict_value(dict, "customInfo", &mut auditor);
let result = Some(Version {
uuid: audit_get_str_value(dict, "uuid", &mut auditor),
master_uuid: audit_get_str_value(dict, "masterUuid", &mut auditor),
project_uuid: audit_get_str_value(dict, "projectUuid", &mut auditor),
raw_master_uuid: audit_get_str_value(dict, "rawMasterUuid", &mut auditor),
nonraw_master_uuid: audit_get_str_value(dict, "nonRawMasterUuid", &mut auditor),
timezone_name: audit_get_str_value(dict, "imageTimeZoneName", &mut auditor),
create_date: audit_get_date_value(dict, "createDate", &mut auditor),
image_date: audit_get_date_value(dict, "imageDate", &mut auditor),
export_image_change_date: audit_get_date_value(
dict,
"exportImageChangeDate",
&mut auditor,
),
export_metadata_change_date: audit_get_date_value(
dict,
"exportMetadataChangeDate",
&mut auditor,
),
version_number: audit_get_int_value(dict, "versionNumber", &mut auditor),
db_version: audit_get_int_value(dict, "version", &mut auditor),
db_minor_version: audit_get_int_value(dict, "minorVersion", &mut auditor),
is_flagged: audit_get_bool_value(dict, "isFlagged", &mut auditor),
is_original: audit_get_bool_value(dict, "isOriginal", &mut auditor),
is_editable: audit_get_bool_value(dict, "isEditable", &mut auditor),
is_hidden: audit_get_bool_value(dict, "isHidden", &mut auditor),
is_in_trash: audit_get_bool_value(dict, "isInTrash", &mut auditor),
file_name: audit_get_str_value(dict, "fileName", &mut auditor),
name: audit_get_str_value(dict, "name", &mut auditor),
model_id: audit_get_int_value(dict, "modelId", &mut auditor),
rating: audit_get_int_value(dict, "mainRating", &mut auditor),
rotation: audit_get_int_value(dict, "rotation", &mut auditor),
colour_label_index: audit_get_int_value(dict, "colorLabelIndex", &mut auditor),
iptc: IptcProperties::from(&iptc, &mut auditor),
exif: ExifProperties::from(&exif, &mut auditor),
custom_info: CustomInfoProperties::from(&custom_info, &mut auditor),
keywords: audit_get_array_value(dict, "keywords", &mut auditor),
});
if let Some(auditor) = &mut auditor
|
}
result
}
_ => None,
}
}
}
impl AplibObject for Version {
fn obj_type(&self) -> AplibType {
AplibType::Version
}
fn uuid(&self) -> &Option<String> {
&self.uuid
}
fn parent(&self) -> &Option<String> {
&self.master_uuid
}
fn model_id(&self) -> i64 {
self.model_id.unwrap_or(0)
}
fn is_valid(&self) -> bool {
self.uuid.is_some()
}
fn wrap(obj: Version) -> store::Wrapper {
store::Wrapper::Version(Box::new(obj))
}
}
impl ToXmp for Version {
fn to_xmp(&self, xmp: &mut Xmp) -> bool {
// Here we make sure the Exif data are
// processed before Iptc.
if let Some(ref exif) = self.exif {
exif.to_xmp(xmp);
}
if let Some(ref iptc) = self.iptc {
iptc.to_xmp(xmp);
}
true
}
}
#[cfg(test)]
#[test]
fn test_version_parse() {
use crate::testutils;
use crate::xmp;
use exempi;
let version = Version::from_path(
testutils::get_test_file_path("Version-0.apversion").as_path(),
None,
);
assert!(version.is_some());
let version = version.unwrap();
assert_eq!(version.uuid.as_ref().unwrap(), "MHMIbw5CQaiMgQ3n7g2w2A");
assert!(version.is_original.unwrap());
assert_eq!(
version.master_uuid.as_ref().unwrap(),
"WZMCPPRHR%C3nffgeeS4IQ"
);
assert_eq!(version.name.as_ref().unwrap(), "img_3136");
assert!(version.iptc.is_some());
let iptc = version.iptc.as_ref().unwrap();
assert!(iptc.bag.contains_key("Byline"));
assert!(iptc.bag.contains_key("CiAdrCity"));
let exif = version.exif.as_ref().unwrap();
assert!(exif.bag.contains_key("ApertureValue"));
assert!(exif.bag.contains_key("Depth"));
// XXX fix when have actual audit.
// println!("report {:?}", report);
exempi::init();
let mut xmp = Xmp::new();
let result = version.to_xmp(&mut xmp);
assert!(result);
let mut options: exempi::PropFlags = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_DC, "creator", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "Hubert Figuiere");
options = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_EXIF, "ApertureValue", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "4");
}
|
{
auditor.skip("statistics", SkipReason::Ignore);
auditor.skip("thumbnailGroup", SkipReason::Ignore);
auditor.skip("faceDetectionIsFromPreview", SkipReason::Ignore);
auditor.skip("processedHeight", SkipReason::Ignore);
auditor.skip("processedWidth", SkipReason::Ignore);
auditor.skip("masterHeight", SkipReason::Ignore);
auditor.skip("masterWidth", SkipReason::Ignore);
auditor.skip("supportedStatus", SkipReason::Ignore);
auditor.skip("showInLibrary", SkipReason::Ignore);
auditor.skip("adjustmentProperties", SkipReason::Ignore); // don't know what to do yet
auditor.skip("RKImageAdjustments", SkipReason::Ignore);
auditor.skip("hasAdjustments", SkipReason::Ignore);
auditor.skip("hasEnabledAdjustments", SkipReason::Ignore);
auditor.skip("renderVersion", SkipReason::Ignore);
auditor.skip("imageProxyState", SkipReason::Ignore);
auditor.skip("plistWriteTimestamp", SkipReason::Ignore);
auditor.audit_ignored(dict, None);
|
conditional_block
|
version.rs
|
/*
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
use chrono::{DateTime, Utc};
use exempi::Xmp;
use std::path::Path;
use crate::audit::{
audit_get_array_value, audit_get_bool_value, audit_get_date_value, audit_get_dict_value,
audit_get_int_value, audit_get_str_value, Report, SkipReason,
};
use crate::custominfo::CustomInfoProperties;
use crate::exif::ExifProperties;
use crate::iptc::IptcProperties;
use crate::plutils::Value;
use crate::store;
use crate::xmp::ToXmp;
use crate::AplibObject;
use crate::AplibType;
use crate::PlistLoadable;
/// A rendered image. There is one for the original, and one per
/// actual version. `Version`s are associated with a `Master`.
pub struct Version {
uuid: Option<String>,
model_id: Option<i64>,
/// The associated `Master`.
master_uuid: Option<String>,
/// uuid of the `Folder` project this resides in.
pub project_uuid: Option<String>,
/// uuid of the raw `Master`.
pub raw_master_uuid: Option<String>,
/// uuid of the non raw `Master`.
pub nonraw_master_uuid: Option<String>,
pub timezone_name: Option<String>,
pub create_date: Option<DateTime<Utc>>,
pub image_date: Option<DateTime<Utc>>,
pub export_image_change_date: Option<DateTime<Utc>>,
pub export_metadata_change_date: Option<DateTime<Utc>>,
pub version_number: Option<i64>,
pub db_version: Option<i64>,
pub db_minor_version: Option<i64>,
pub is_flagged: Option<bool>,
/// Indicate the version is the original.
pub is_original: Option<bool>,
pub is_editable: Option<bool>,
pub is_hidden: Option<bool>,
pub is_in_trash: Option<bool>,
pub file_name: Option<String>,
pub name: Option<String>,
pub rating: Option<i64>,
pub rotation: Option<i64>,
pub colour_label_index: Option<i64>,
pub iptc: Option<IptcProperties>,
pub exif: Option<ExifProperties>,
pub custom_info: Option<CustomInfoProperties>,
pub keywords: Option<Vec<Value>>,
}
impl PlistLoadable for Version {
/// Load the version object from the plist at plist_path.
fn from_path<P>(plist_path: P, mut auditor: Option<&mut Report>) -> Option<Version>
where
P: AsRef<Path>,
{
use crate::plutils::*;
let plist = parse_plist(plist_path);
match plist {
Value::Dictionary(ref dict) => {
let iptc = audit_get_dict_value(dict, "iptcProperties", &mut auditor);
let exif = audit_get_dict_value(dict, "exifProperties", &mut auditor);
let custom_info = audit_get_dict_value(dict, "customInfo", &mut auditor);
let result = Some(Version {
uuid: audit_get_str_value(dict, "uuid", &mut auditor),
master_uuid: audit_get_str_value(dict, "masterUuid", &mut auditor),
project_uuid: audit_get_str_value(dict, "projectUuid", &mut auditor),
raw_master_uuid: audit_get_str_value(dict, "rawMasterUuid", &mut auditor),
nonraw_master_uuid: audit_get_str_value(dict, "nonRawMasterUuid", &mut auditor),
timezone_name: audit_get_str_value(dict, "imageTimeZoneName", &mut auditor),
create_date: audit_get_date_value(dict, "createDate", &mut auditor),
image_date: audit_get_date_value(dict, "imageDate", &mut auditor),
export_image_change_date: audit_get_date_value(
dict,
"exportImageChangeDate",
&mut auditor,
),
export_metadata_change_date: audit_get_date_value(
dict,
"exportMetadataChangeDate",
&mut auditor,
),
version_number: audit_get_int_value(dict, "versionNumber", &mut auditor),
db_version: audit_get_int_value(dict, "version", &mut auditor),
db_minor_version: audit_get_int_value(dict, "minorVersion", &mut auditor),
is_flagged: audit_get_bool_value(dict, "isFlagged", &mut auditor),
is_original: audit_get_bool_value(dict, "isOriginal", &mut auditor),
is_editable: audit_get_bool_value(dict, "isEditable", &mut auditor),
is_hidden: audit_get_bool_value(dict, "isHidden", &mut auditor),
is_in_trash: audit_get_bool_value(dict, "isInTrash", &mut auditor),
file_name: audit_get_str_value(dict, "fileName", &mut auditor),
name: audit_get_str_value(dict, "name", &mut auditor),
model_id: audit_get_int_value(dict, "modelId", &mut auditor),
rating: audit_get_int_value(dict, "mainRating", &mut auditor),
rotation: audit_get_int_value(dict, "rotation", &mut auditor),
colour_label_index: audit_get_int_value(dict, "colorLabelIndex", &mut auditor),
iptc: IptcProperties::from(&iptc, &mut auditor),
exif: ExifProperties::from(&exif, &mut auditor),
custom_info: CustomInfoProperties::from(&custom_info, &mut auditor),
keywords: audit_get_array_value(dict, "keywords", &mut auditor),
});
if let Some(auditor) = &mut auditor {
auditor.skip("statistics", SkipReason::Ignore);
auditor.skip("thumbnailGroup", SkipReason::Ignore);
auditor.skip("faceDetectionIsFromPreview", SkipReason::Ignore);
auditor.skip("processedHeight", SkipReason::Ignore);
auditor.skip("processedWidth", SkipReason::Ignore);
auditor.skip("masterHeight", SkipReason::Ignore);
auditor.skip("masterWidth", SkipReason::Ignore);
auditor.skip("supportedStatus", SkipReason::Ignore);
auditor.skip("showInLibrary", SkipReason::Ignore);
auditor.skip("adjustmentProperties", SkipReason::Ignore); // don't know what to do yet
auditor.skip("RKImageAdjustments", SkipReason::Ignore);
auditor.skip("hasAdjustments", SkipReason::Ignore);
auditor.skip("hasEnabledAdjustments", SkipReason::Ignore);
auditor.skip("renderVersion", SkipReason::Ignore);
auditor.skip("imageProxyState", SkipReason::Ignore);
auditor.skip("plistWriteTimestamp", SkipReason::Ignore);
auditor.audit_ignored(dict, None);
}
result
}
_ => None,
}
}
}
impl AplibObject for Version {
fn obj_type(&self) -> AplibType {
AplibType::Version
}
fn uuid(&self) -> &Option<String> {
&self.uuid
}
fn parent(&self) -> &Option<String> {
&self.master_uuid
}
fn model_id(&self) -> i64 {
self.model_id.unwrap_or(0)
}
fn is_valid(&self) -> bool {
self.uuid.is_some()
}
fn wrap(obj: Version) -> store::Wrapper
|
}
impl ToXmp for Version {
fn to_xmp(&self, xmp: &mut Xmp) -> bool {
// Here we make sure the Exif data are
// processed before Iptc.
if let Some(ref exif) = self.exif {
exif.to_xmp(xmp);
}
if let Some(ref iptc) = self.iptc {
iptc.to_xmp(xmp);
}
true
}
}
#[cfg(test)]
#[test]
fn test_version_parse() {
use crate::testutils;
use crate::xmp;
use exempi;
let version = Version::from_path(
testutils::get_test_file_path("Version-0.apversion").as_path(),
None,
);
assert!(version.is_some());
let version = version.unwrap();
assert_eq!(version.uuid.as_ref().unwrap(), "MHMIbw5CQaiMgQ3n7g2w2A");
assert!(version.is_original.unwrap());
assert_eq!(
version.master_uuid.as_ref().unwrap(),
"WZMCPPRHR%C3nffgeeS4IQ"
);
assert_eq!(version.name.as_ref().unwrap(), "img_3136");
assert!(version.iptc.is_some());
let iptc = version.iptc.as_ref().unwrap();
assert!(iptc.bag.contains_key("Byline"));
assert!(iptc.bag.contains_key("CiAdrCity"));
let exif = version.exif.as_ref().unwrap();
assert!(exif.bag.contains_key("ApertureValue"));
assert!(exif.bag.contains_key("Depth"));
// XXX fix when have actual audit.
// println!("report {:?}", report);
exempi::init();
let mut xmp = Xmp::new();
let result = version.to_xmp(&mut xmp);
assert!(result);
let mut options: exempi::PropFlags = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_DC, "creator", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "Hubert Figuiere");
options = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_EXIF, "ApertureValue", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "4");
}
|
{
store::Wrapper::Version(Box::new(obj))
}
|
identifier_body
|
readme_example.rs
|
// This file is released into Public Domain.
use crate::common::*;
use gnuplot::*;
mod common;
fn example(c: Common)
{
let mut fg = Figure::new();
fg.axes2d()
.set_title("A plot", &[])
.set_legend(Graph(0.5), Graph(0.9), &[], &[])
.set_x_label("x", &[])
.set_y_label("y^2", &[])
.lines(
&[-3., -2., -1., 0., 1., 2., 3.],
&[9., 4., 1., 0., 1., 4., 9.],
&[Caption("Parabola")],
);
c.show(&mut fg, "readme_example");
	if !c.no_show
|
}
fn main()
{
Common::new().map(|c| example(c));
}
|
{
fg.set_terminal("pngcairo", "readme_example.png");
fg.show().unwrap();
}
|
conditional_block
|
readme_example.rs
|
// This file is released into Public Domain.
use crate::common::*;
use gnuplot::*;
mod common;
fn example(c: Common)
{
let mut fg = Figure::new();
fg.axes2d()
.set_title("A plot", &[])
.set_legend(Graph(0.5), Graph(0.9), &[], &[])
.set_x_label("x", &[])
.set_y_label("y^2", &[])
.lines(
&[-3., -2., -1., 0., 1., 2., 3.],
&[9., 4., 1., 0., 1., 4., 9.],
&[Caption("Parabola")],
);
c.show(&mut fg, "readme_example");
	if !c.no_show
{
fg.set_terminal("pngcairo", "readme_example.png");
fg.show().unwrap();
}
}
fn
|
()
{
Common::new().map(|c| example(c));
}
|
main
|
identifier_name
|
readme_example.rs
|
// This file is released into Public Domain.
use crate::common::*;
use gnuplot::*;
mod common;
fn example(c: Common)
{
let mut fg = Figure::new();
fg.axes2d()
.set_title("A plot", &[])
.set_legend(Graph(0.5), Graph(0.9), &[], &[])
.set_x_label("x", &[])
.set_y_label("y^2", &[])
.lines(
&[-3., -2., -1., 0., 1., 2., 3.],
&[9., 4., 1., 0., 1., 4., 9.],
&[Caption("Parabola")],
);
c.show(&mut fg, "readme_example");
	if !c.no_show
{
fg.set_terminal("pngcairo", "readme_example.png");
fg.show().unwrap();
}
}
fn main()
|
{
Common::new().map(|c| example(c));
}
|
identifier_body
|
|
readme_example.rs
|
// This file is released into Public Domain.
use crate::common::*;
use gnuplot::*;
mod common;
fn example(c: Common)
{
let mut fg = Figure::new();
fg.axes2d()
.set_title("A plot", &[])
.set_legend(Graph(0.5), Graph(0.9), &[], &[])
.set_x_label("x", &[])
.set_y_label("y^2", &[])
.lines(
&[-3., -2., -1., 0., 1., 2., 3.],
&[9., 4., 1., 0., 1., 4., 9.],
&[Caption("Parabola")],
);
c.show(&mut fg, "readme_example");
	if !c.no_show
{
fg.set_terminal("pngcairo", "readme_example.png");
fg.show().unwrap();
}
}
fn main()
{
Common::new().map(|c| example(c));
|
}
|
random_line_split
|
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
#![crate_name = "servo_url"]
#![crate_type = "rlib"]
#[macro_use]
extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
#[macro_use]
extern crate serde;
pub mod origin;
pub use crate::origin::{ImmutableOrigin, MutableOrigin, OpaqueOrigin};
use std::collections::hash_map::DefaultHasher;
use std::fmt;
use std::hash::Hasher;
use std::net::IpAddr;
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
use std::path::Path;
use std::sync::Arc;
use to_shmem::{SharedMemoryBuilder, ToShmem};
use url::{Position, Url};
pub use url::Host;
#[derive(Clone, Deserialize, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd, Serialize)]
pub struct ServoUrl(#[ignore_malloc_size_of = "Arc"] Arc<Url>);
impl ToShmem for ServoUrl {
fn to_shmem(&self, _builder: &mut SharedMemoryBuilder) -> to_shmem::Result<Self> {
unimplemented!("If servo wants to share stylesheets across processes, ToShmem for Url must be implemented")
}
}
impl ServoUrl {
pub fn from_url(url: Url) -> Self {
ServoUrl(Arc::new(url))
}
pub fn parse_with_base(base: Option<&Self>, input: &str) -> Result<Self, url::ParseError> {
Url::options()
.base_url(base.map(|b| &*b.0))
.parse(input)
.map(Self::from_url)
}
pub fn into_string(self) -> String {
Arc::try_unwrap(self.0)
.unwrap_or_else(|s| (*s).clone())
.into_string()
}
pub fn into_url(self) -> Url {
Arc::try_unwrap(self.0).unwrap_or_else(|s| (*s).clone())
}
pub fn as_url(&self) -> &Url {
&self.0
}
pub fn parse(input: &str) -> Result<Self, url::ParseError> {
Url::parse(input).map(Self::from_url)
}
pub fn cannot_be_a_base(&self) -> bool {
self.0.cannot_be_a_base()
}
pub fn domain(&self) -> Option<&str> {
self.0.domain()
}
pub fn fragment(&self) -> Option<&str> {
self.0.fragment()
}
pub fn path(&self) -> &str {
self.0.path()
}
pub fn origin(&self) -> ImmutableOrigin {
ImmutableOrigin::new(self.0.origin())
}
pub fn scheme(&self) -> &str {
self.0.scheme()
}
pub fn is_secure_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "https" || scheme == "wss"
}
/// <https://fetch.spec.whatwg.org/#local-scheme>
pub fn is_local_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "about" || scheme == "blob" || scheme == "data"
}
pub fn is_chrome(&self) -> bool {
self.scheme() == "chrome"
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
pub fn as_mut_url(&mut self) -> &mut Url {
Arc::make_mut(&mut self.0)
}
pub fn set_username(&mut self, user: &str) -> Result<(), ()> {
self.as_mut_url().set_username(user)
}
pub fn set_ip_host(&mut self, addr: IpAddr) -> Result<(), ()> {
self.as_mut_url().set_ip_host(addr)
}
pub fn set_password(&mut self, pass: Option<&str>) -> Result<(), ()> {
self.as_mut_url().set_password(pass)
}
pub fn set_fragment(&mut self, fragment: Option<&str>) {
self.as_mut_url().set_fragment(fragment)
}
pub fn username(&self) -> &str {
self.0.username()
}
pub fn password(&self) -> Option<&str> {
self.0.password()
}
pub fn to_file_path(&self) -> Result<::std::path::PathBuf, ()> {
self.0.to_file_path()
}
pub fn host(&self) -> Option<url::Host<&str>> {
self.0.host()
}
pub fn host_str(&self) -> Option<&str> {
self.0.host_str()
}
pub fn port(&self) -> Option<u16> {
self.0.port()
}
pub fn port_or_known_default(&self) -> Option<u16> {
self.0.port_or_known_default()
}
pub fn join(&self, input: &str) -> Result<ServoUrl, url::ParseError> {
self.0.join(input).map(Self::from_url)
}
pub fn path_segments(&self) -> Option<::std::str::Split<char>> {
self.0.path_segments()
}
pub fn query(&self) -> Option<&str> {
self.0.query()
}
pub fn from_file_path<P: AsRef<Path>>(path: P) -> Result<Self, ()> {
Ok(Self::from_url(Url::from_file_path(path)?))
}
/// <https://w3c.github.io/webappsec-secure-contexts/#potentially-trustworthy-url>
pub fn is_potentially_trustworthy(&self) -> bool {
// Step 1
if self.as_str() == "about:blank" || self.as_str() == "about:srcdoc" {
return true;
}
// Step 2
if self.scheme() == "data" {
return true;
}
// Step 3
self.is_origin_trustworthy()
}
/// <https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy>
pub fn is_origin_trustworthy(&self) -> bool {
// Step 1
        if !self.origin().is_tuple() {
return false;
}
// Step 3
if self.scheme() == "https" || self.scheme() == "wss" {
true
// Steps 4-5
} else if self.host().is_some() {
let host = self.host_str().unwrap();
// Step 4
if let Ok(ip_addr) = host.parse::<IpAddr>() {
ip_addr.is_loopback()
// Step 5
} else
|
// Step 6
} else {
self.scheme() == "file"
}
}
}
impl fmt::Display for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl fmt::Debug for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
if self.0.as_str().len() > 40 {
let mut hasher = DefaultHasher::new();
hasher.write(self.0.as_str().as_bytes());
let truncated: String = self.0.as_str().chars().take(40).collect();
let result = format!("{}... ({:x})", truncated, hasher.finish());
return result.fmt(formatter);
}
self.0.fmt(formatter)
}
}
impl Index<RangeFull> for ServoUrl {
type Output = str;
fn index(&self, _: RangeFull) -> &str {
&self.0[..]
}
}
impl Index<RangeFrom<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeFrom<Position>) -> &str {
&self.0[range]
}
}
impl Index<RangeTo<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeTo<Position>) -> &str {
&self.0[range]
}
}
impl Index<Range<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: Range<Position>) -> &str {
&self.0[range]
}
}
impl From<Url> for ServoUrl {
fn from(url: Url) -> Self {
ServoUrl::from_url(url)
}
}
|
{
host == "localhost" || host.ends_with(".localhost")
}
|
conditional_block
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
#![crate_name = "servo_url"]
#![crate_type = "rlib"]
#[macro_use]
extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
#[macro_use]
extern crate serde;
pub mod origin;
pub use crate::origin::{ImmutableOrigin, MutableOrigin, OpaqueOrigin};
use std::collections::hash_map::DefaultHasher;
use std::fmt;
use std::hash::Hasher;
use std::net::IpAddr;
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
use std::path::Path;
use std::sync::Arc;
use to_shmem::{SharedMemoryBuilder, ToShmem};
use url::{Position, Url};
pub use url::Host;
#[derive(Clone, Deserialize, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd, Serialize)]
pub struct ServoUrl(#[ignore_malloc_size_of = "Arc"] Arc<Url>);
impl ToShmem for ServoUrl {
fn to_shmem(&self, _builder: &mut SharedMemoryBuilder) -> to_shmem::Result<Self> {
unimplemented!("If servo wants to share stylesheets across processes, ToShmem for Url must be implemented")
}
}
impl ServoUrl {
pub fn from_url(url: Url) -> Self {
ServoUrl(Arc::new(url))
}
pub fn parse_with_base(base: Option<&Self>, input: &str) -> Result<Self, url::ParseError> {
Url::options()
.base_url(base.map(|b| &*b.0))
.parse(input)
.map(Self::from_url)
}
pub fn into_string(self) -> String {
Arc::try_unwrap(self.0)
.unwrap_or_else(|s| (*s).clone())
.into_string()
}
pub fn into_url(self) -> Url {
Arc::try_unwrap(self.0).unwrap_or_else(|s| (*s).clone())
}
pub fn as_url(&self) -> &Url {
&self.0
}
pub fn parse(input: &str) -> Result<Self, url::ParseError> {
Url::parse(input).map(Self::from_url)
}
pub fn cannot_be_a_base(&self) -> bool {
self.0.cannot_be_a_base()
}
pub fn domain(&self) -> Option<&str> {
self.0.domain()
}
pub fn fragment(&self) -> Option<&str> {
self.0.fragment()
}
pub fn path(&self) -> &str {
self.0.path()
}
pub fn origin(&self) -> ImmutableOrigin {
ImmutableOrigin::new(self.0.origin())
}
pub fn scheme(&self) -> &str {
self.0.scheme()
}
pub fn is_secure_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "https" || scheme == "wss"
}
/// <https://fetch.spec.whatwg.org/#local-scheme>
pub fn is_local_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "about" || scheme == "blob" || scheme == "data"
}
pub fn is_chrome(&self) -> bool {
self.scheme() == "chrome"
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
pub fn as_mut_url(&mut self) -> &mut Url {
Arc::make_mut(&mut self.0)
}
pub fn set_username(&mut self, user: &str) -> Result<(), ()> {
self.as_mut_url().set_username(user)
}
pub fn set_ip_host(&mut self, addr: IpAddr) -> Result<(), ()> {
self.as_mut_url().set_ip_host(addr)
}
pub fn set_password(&mut self, pass: Option<&str>) -> Result<(), ()> {
self.as_mut_url().set_password(pass)
}
pub fn set_fragment(&mut self, fragment: Option<&str>) {
self.as_mut_url().set_fragment(fragment)
}
pub fn username(&self) -> &str {
self.0.username()
}
pub fn password(&self) -> Option<&str> {
self.0.password()
}
pub fn to_file_path(&self) -> Result<::std::path::PathBuf, ()> {
self.0.to_file_path()
}
pub fn host(&self) -> Option<url::Host<&str>> {
self.0.host()
}
pub fn host_str(&self) -> Option<&str> {
self.0.host_str()
}
pub fn port(&self) -> Option<u16> {
self.0.port()
}
pub fn port_or_known_default(&self) -> Option<u16> {
self.0.port_or_known_default()
}
pub fn join(&self, input: &str) -> Result<ServoUrl, url::ParseError> {
self.0.join(input).map(Self::from_url)
}
pub fn path_segments(&self) -> Option<::std::str::Split<char>> {
self.0.path_segments()
}
pub fn query(&self) -> Option<&str> {
self.0.query()
}
pub fn from_file_path<P: AsRef<Path>>(path: P) -> Result<Self, ()> {
Ok(Self::from_url(Url::from_file_path(path)?))
}
/// <https://w3c.github.io/webappsec-secure-contexts/#potentially-trustworthy-url>
pub fn is_potentially_trustworthy(&self) -> bool {
// Step 1
if self.as_str() == "about:blank" || self.as_str() == "about:srcdoc" {
return true;
}
// Step 2
if self.scheme() == "data" {
return true;
}
// Step 3
self.is_origin_trustworthy()
}
/// <https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy>
pub fn is_origin_trustworthy(&self) -> bool {
// Step 1
if !self.origin().is_tuple() {
|
return false;
}
// Step 3
if self.scheme() == "https" || self.scheme() == "wss" {
true
// Steps 4-5
} else if self.host().is_some() {
let host = self.host_str().unwrap();
// Step 4
if let Ok(ip_addr) = host.parse::<IpAddr>() {
ip_addr.is_loopback()
// Step 5
} else {
host == "localhost" || host.ends_with(".localhost")
}
// Step 6
} else {
self.scheme() == "file"
}
}
}
impl fmt::Display for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl fmt::Debug for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
if self.0.as_str().len() > 40 {
let mut hasher = DefaultHasher::new();
hasher.write(self.0.as_str().as_bytes());
let truncated: String = self.0.as_str().chars().take(40).collect();
let result = format!("{}... ({:x})", truncated, hasher.finish());
return result.fmt(formatter);
}
self.0.fmt(formatter)
}
}
impl Index<RangeFull> for ServoUrl {
type Output = str;
fn index(&self, _: RangeFull) -> &str {
&self.0[..]
}
}
impl Index<RangeFrom<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeFrom<Position>) -> &str {
&self.0[range]
}
}
impl Index<RangeTo<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeTo<Position>) -> &str {
&self.0[range]
}
}
impl Index<Range<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: Range<Position>) -> &str {
&self.0[range]
}
}
impl From<Url> for ServoUrl {
fn from(url: Url) -> Self {
ServoUrl::from_url(url)
}
}
|
random_line_split
|
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
#![crate_name = "servo_url"]
#![crate_type = "rlib"]
#[macro_use]
extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
#[macro_use]
extern crate serde;
pub mod origin;
pub use crate::origin::{ImmutableOrigin, MutableOrigin, OpaqueOrigin};
use std::collections::hash_map::DefaultHasher;
use std::fmt;
use std::hash::Hasher;
use std::net::IpAddr;
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
use std::path::Path;
use std::sync::Arc;
use to_shmem::{SharedMemoryBuilder, ToShmem};
use url::{Position, Url};
pub use url::Host;
#[derive(Clone, Deserialize, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd, Serialize)]
pub struct ServoUrl(#[ignore_malloc_size_of = "Arc"] Arc<Url>);
impl ToShmem for ServoUrl {
fn to_shmem(&self, _builder: &mut SharedMemoryBuilder) -> to_shmem::Result<Self> {
unimplemented!("If servo wants to share stylesheets across processes, ToShmem for Url must be implemented")
}
}
impl ServoUrl {
pub fn from_url(url: Url) -> Self {
ServoUrl(Arc::new(url))
}
pub fn parse_with_base(base: Option<&Self>, input: &str) -> Result<Self, url::ParseError> {
Url::options()
.base_url(base.map(|b| &*b.0))
.parse(input)
.map(Self::from_url)
}
pub fn into_string(self) -> String {
Arc::try_unwrap(self.0)
.unwrap_or_else(|s| (*s).clone())
.into_string()
}
pub fn into_url(self) -> Url {
Arc::try_unwrap(self.0).unwrap_or_else(|s| (*s).clone())
}
pub fn as_url(&self) -> &Url {
&self.0
}
pub fn parse(input: &str) -> Result<Self, url::ParseError> {
Url::parse(input).map(Self::from_url)
}
pub fn cannot_be_a_base(&self) -> bool {
self.0.cannot_be_a_base()
}
pub fn domain(&self) -> Option<&str> {
self.0.domain()
}
pub fn fragment(&self) -> Option<&str> {
self.0.fragment()
}
pub fn path(&self) -> &str {
self.0.path()
}
pub fn origin(&self) -> ImmutableOrigin {
ImmutableOrigin::new(self.0.origin())
}
pub fn scheme(&self) -> &str {
self.0.scheme()
}
pub fn is_secure_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "https" || scheme == "wss"
}
/// <https://fetch.spec.whatwg.org/#local-scheme>
pub fn is_local_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "about" || scheme == "blob" || scheme == "data"
}
pub fn is_chrome(&self) -> bool {
self.scheme() == "chrome"
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
pub fn as_mut_url(&mut self) -> &mut Url {
Arc::make_mut(&mut self.0)
}
pub fn set_username(&mut self, user: &str) -> Result<(), ()> {
self.as_mut_url().set_username(user)
}
pub fn set_ip_host(&mut self, addr: IpAddr) -> Result<(), ()> {
self.as_mut_url().set_ip_host(addr)
}
pub fn set_password(&mut self, pass: Option<&str>) -> Result<(), ()> {
self.as_mut_url().set_password(pass)
}
pub fn set_fragment(&mut self, fragment: Option<&str>)
|
pub fn username(&self) -> &str {
self.0.username()
}
pub fn password(&self) -> Option<&str> {
self.0.password()
}
pub fn to_file_path(&self) -> Result<::std::path::PathBuf, ()> {
self.0.to_file_path()
}
pub fn host(&self) -> Option<url::Host<&str>> {
self.0.host()
}
pub fn host_str(&self) -> Option<&str> {
self.0.host_str()
}
pub fn port(&self) -> Option<u16> {
self.0.port()
}
pub fn port_or_known_default(&self) -> Option<u16> {
self.0.port_or_known_default()
}
pub fn join(&self, input: &str) -> Result<ServoUrl, url::ParseError> {
self.0.join(input).map(Self::from_url)
}
pub fn path_segments(&self) -> Option<::std::str::Split<char>> {
self.0.path_segments()
}
pub fn query(&self) -> Option<&str> {
self.0.query()
}
pub fn from_file_path<P: AsRef<Path>>(path: P) -> Result<Self, ()> {
Ok(Self::from_url(Url::from_file_path(path)?))
}
/// <https://w3c.github.io/webappsec-secure-contexts/#potentially-trustworthy-url>
pub fn is_potentially_trustworthy(&self) -> bool {
// Step 1
if self.as_str() == "about:blank" || self.as_str() == "about:srcdoc" {
return true;
}
// Step 2
if self.scheme() == "data" {
return true;
}
// Step 3
self.is_origin_trustworthy()
}
/// <https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy>
pub fn is_origin_trustworthy(&self) -> bool {
// Step 1
if !self.origin().is_tuple() {
return false;
}
// Step 3
if self.scheme() == "https" || self.scheme() == "wss" {
true
// Steps 4-5
} else if self.host().is_some() {
let host = self.host_str().unwrap();
// Step 4
if let Ok(ip_addr) = host.parse::<IpAddr>() {
ip_addr.is_loopback()
// Step 5
} else {
host == "localhost" || host.ends_with(".localhost")
}
// Step 6
} else {
self.scheme() == "file"
}
}
}
impl fmt::Display for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl fmt::Debug for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
if self.0.as_str().len() > 40 {
let mut hasher = DefaultHasher::new();
hasher.write(self.0.as_str().as_bytes());
let truncated: String = self.0.as_str().chars().take(40).collect();
let result = format!("{}... ({:x})", truncated, hasher.finish());
return result.fmt(formatter);
}
self.0.fmt(formatter)
}
}
impl Index<RangeFull> for ServoUrl {
type Output = str;
fn index(&self, _: RangeFull) -> &str {
&self.0[..]
}
}
impl Index<RangeFrom<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeFrom<Position>) -> &str {
&self.0[range]
}
}
impl Index<RangeTo<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeTo<Position>) -> &str {
&self.0[range]
}
}
impl Index<Range<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: Range<Position>) -> &str {
&self.0[range]
}
}
impl From<Url> for ServoUrl {
fn from(url: Url) -> Self {
ServoUrl::from_url(url)
}
}
|
{
self.as_mut_url().set_fragment(fragment)
}
|
identifier_body
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
#![crate_name = "servo_url"]
#![crate_type = "rlib"]
#[macro_use]
extern crate malloc_size_of;
#[macro_use]
extern crate malloc_size_of_derive;
#[macro_use]
extern crate serde;
pub mod origin;
pub use crate::origin::{ImmutableOrigin, MutableOrigin, OpaqueOrigin};
use std::collections::hash_map::DefaultHasher;
use std::fmt;
use std::hash::Hasher;
use std::net::IpAddr;
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
use std::path::Path;
use std::sync::Arc;
use to_shmem::{SharedMemoryBuilder, ToShmem};
use url::{Position, Url};
pub use url::Host;
#[derive(Clone, Deserialize, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd, Serialize)]
pub struct ServoUrl(#[ignore_malloc_size_of = "Arc"] Arc<Url>);
impl ToShmem for ServoUrl {
fn to_shmem(&self, _builder: &mut SharedMemoryBuilder) -> to_shmem::Result<Self> {
unimplemented!("If servo wants to share stylesheets across processes, ToShmem for Url must be implemented")
}
}
impl ServoUrl {
pub fn from_url(url: Url) -> Self {
ServoUrl(Arc::new(url))
}
pub fn parse_with_base(base: Option<&Self>, input: &str) -> Result<Self, url::ParseError> {
Url::options()
.base_url(base.map(|b| &*b.0))
.parse(input)
.map(Self::from_url)
}
pub fn into_string(self) -> String {
Arc::try_unwrap(self.0)
.unwrap_or_else(|s| (*s).clone())
.into_string()
}
pub fn into_url(self) -> Url {
Arc::try_unwrap(self.0).unwrap_or_else(|s| (*s).clone())
}
pub fn as_url(&self) -> &Url {
&self.0
}
pub fn parse(input: &str) -> Result<Self, url::ParseError> {
Url::parse(input).map(Self::from_url)
}
pub fn cannot_be_a_base(&self) -> bool {
self.0.cannot_be_a_base()
}
pub fn domain(&self) -> Option<&str> {
self.0.domain()
}
pub fn fragment(&self) -> Option<&str> {
self.0.fragment()
}
pub fn path(&self) -> &str {
self.0.path()
}
pub fn origin(&self) -> ImmutableOrigin {
ImmutableOrigin::new(self.0.origin())
}
pub fn scheme(&self) -> &str {
self.0.scheme()
}
pub fn is_secure_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "https" || scheme == "wss"
}
/// <https://fetch.spec.whatwg.org/#local-scheme>
pub fn is_local_scheme(&self) -> bool {
let scheme = self.scheme();
scheme == "about" || scheme == "blob" || scheme == "data"
}
pub fn is_chrome(&self) -> bool {
self.scheme() == "chrome"
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
pub fn
|
(&mut self) -> &mut Url {
Arc::make_mut(&mut self.0)
}
pub fn set_username(&mut self, user: &str) -> Result<(), ()> {
self.as_mut_url().set_username(user)
}
pub fn set_ip_host(&mut self, addr: IpAddr) -> Result<(), ()> {
self.as_mut_url().set_ip_host(addr)
}
pub fn set_password(&mut self, pass: Option<&str>) -> Result<(), ()> {
self.as_mut_url().set_password(pass)
}
pub fn set_fragment(&mut self, fragment: Option<&str>) {
self.as_mut_url().set_fragment(fragment)
}
pub fn username(&self) -> &str {
self.0.username()
}
pub fn password(&self) -> Option<&str> {
self.0.password()
}
pub fn to_file_path(&self) -> Result<::std::path::PathBuf, ()> {
self.0.to_file_path()
}
pub fn host(&self) -> Option<url::Host<&str>> {
self.0.host()
}
pub fn host_str(&self) -> Option<&str> {
self.0.host_str()
}
pub fn port(&self) -> Option<u16> {
self.0.port()
}
pub fn port_or_known_default(&self) -> Option<u16> {
self.0.port_or_known_default()
}
pub fn join(&self, input: &str) -> Result<ServoUrl, url::ParseError> {
self.0.join(input).map(Self::from_url)
}
pub fn path_segments(&self) -> Option<::std::str::Split<char>> {
self.0.path_segments()
}
pub fn query(&self) -> Option<&str> {
self.0.query()
}
pub fn from_file_path<P: AsRef<Path>>(path: P) -> Result<Self, ()> {
Ok(Self::from_url(Url::from_file_path(path)?))
}
/// <https://w3c.github.io/webappsec-secure-contexts/#potentially-trustworthy-url>
pub fn is_potentially_trustworthy(&self) -> bool {
// Step 1
if self.as_str() == "about:blank" || self.as_str() == "about:srcdoc" {
return true;
}
// Step 2
if self.scheme() == "data" {
return true;
}
// Step 3
self.is_origin_trustworthy()
}
/// <https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy>
pub fn is_origin_trustworthy(&self) -> bool {
// Step 1
if !self.origin().is_tuple() {
return false;
}
// Step 3
if self.scheme() == "https" || self.scheme() == "wss" {
true
// Steps 4-5
} else if self.host().is_some() {
let host = self.host_str().unwrap();
// Step 4
if let Ok(ip_addr) = host.parse::<IpAddr>() {
ip_addr.is_loopback()
// Step 5
} else {
host == "localhost" || host.ends_with(".localhost")
}
// Step 6
} else {
self.scheme() == "file"
}
}
}
impl fmt::Display for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl fmt::Debug for ServoUrl {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
if self.0.as_str().len() > 40 {
let mut hasher = DefaultHasher::new();
hasher.write(self.0.as_str().as_bytes());
let truncated: String = self.0.as_str().chars().take(40).collect();
let result = format!("{}... ({:x})", truncated, hasher.finish());
return result.fmt(formatter);
}
self.0.fmt(formatter)
}
}
impl Index<RangeFull> for ServoUrl {
type Output = str;
fn index(&self, _: RangeFull) -> &str {
&self.0[..]
}
}
impl Index<RangeFrom<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeFrom<Position>) -> &str {
&self.0[range]
}
}
impl Index<RangeTo<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: RangeTo<Position>) -> &str {
&self.0[range]
}
}
impl Index<Range<Position>> for ServoUrl {
type Output = str;
fn index(&self, range: Range<Position>) -> &str {
&self.0[range]
}
}
impl From<Url> for ServoUrl {
fn from(url: Url) -> Self {
ServoUrl::from_url(url)
}
}
|
as_mut_url
|
identifier_name
|
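A minimal usage sketch for the trustworthiness checks defined in the lib.rs rows above, assuming the servo_url crate shown there is available as a dependency; the example URLs are illustrative only, not taken from the source.
use servo_url::ServoUrl;
fn main() {
    // https (and wss) schemes are trustworthy by scheme alone.
    let secure = ServoUrl::parse("https://example.org/").unwrap();
    // Loopback addresses pass the host-based branch of is_origin_trustworthy.
    let local = ServoUrl::parse("http://127.0.0.1:8000/").unwrap();
    // Plain http on a public, non-localhost host fails every branch.
    let plain = ServoUrl::parse("http://example.org/").unwrap();
    assert!(secure.is_origin_trustworthy());
    assert!(local.is_origin_trustworthy());
    assert!(!plain.is_origin_trustworthy());
}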
last_modified.rs
|
use header::HttpDate;
header! {
#[doc="`Last-Modified` header, defined in [RFC7232](http://tools.ietf.org/html/rfc7232#section-2.2)"]
#[doc=""]
#[doc="The `Last-Modified` header field in a response provides a timestamp"]
#[doc="indicating the date and time at which the origin server believes the"]
#[doc="selected representation was last modified, as determined at the"]
#[doc="conclusion of handling the request."]
#[doc=""]
#[doc="# ABNF"]
#[doc="```plain"]
#[doc="Expires = HTTP-date"]
|
bench_header!(imf_fixdate, LastModified, { vec![b"Sun, 07 Nov 1994 08:48:37 GMT".to_vec()] });
bench_header!(rfc_850, LastModified, { vec![b"Sunday, 06-Nov-94 08:49:37 GMT".to_vec()] });
bench_header!(asctime, LastModified, { vec![b"Sun Nov 6 08:49:37 1994".to_vec()] });
|
#[doc="```"]
(LastModified, "Last-Modified") => [HttpDate]
}
|
random_line_split
|
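A hedged sketch of how the LastModified type generated by the header! macro above might be set and read back; it assumes a hyper 0.x dependency, and the exact import paths are an assumption rather than something stated in this file.
extern crate hyper;
use hyper::header::{Headers, HttpDate, LastModified};
fn main() {
    // Parse one of the IMF-fixdate strings exercised by the benchmarks above.
    let date: HttpDate = "Sun, 07 Nov 1994 08:48:37 GMT".parse().unwrap();
    let mut headers = Headers::new();
    headers.set(LastModified(date));
    assert!(headers.get::<LastModified>().is_some());
}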
wordlist.rs
|
extern crate regex;
extern crate rustc_serialize;
extern crate strsim;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use dopt::Docopt;
use parse::{Atom, Parser};
// cheat until we get syntax extensions back :-(
macro_rules! regex(
($s:expr) => (::regex::Regex::new($s).unwrap());
);
macro_rules! werr(
($($arg:tt)*) => ({
use std::io::{Write, stderr};
write!(&mut stderr(), $($arg)*).unwrap();
})
);
#[allow(dead_code)]
mod dopt;
#[allow(dead_code)]
mod parse;
#[allow(dead_code)]
mod synonym;
|
given usage (provided on stdin).
Example use:
your-command --help | docopt-wordlist
This command also supports completing positional arguments when given a list of
choices. The choices are included in the word list if and only if the argument
name appears in the usage string. For example:
your-command --help | docopt-wordlist 'arg' 'a b c'
Which will only include 'a', 'b' and 'c' in the wordlist if
'your-command --help' contains a positional argument named 'arg'.
";
#[derive(Debug, RustcDecodable)]
struct Args {
arg_name: Vec<String>,
arg_possibles: Vec<String>,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
match run(args) {
Ok(_) => {},
Err(err) => {
write!(&mut io::stderr(), "{}", err).unwrap();
::std::process::exit(1)
}
}
}
fn run(args: Args) -> Result<(), String> {
let mut usage = String::new();
try!(io::stdin().read_to_string(&mut usage).map_err(|e| e.to_string()));
let parsed = try!(Parser::new(&usage).map_err(|e| e.to_string()));
let arg_possibles: HashMap<String, Vec<String>> =
args.arg_name.iter()
.zip(args.arg_possibles.iter())
.map(|(name, possibles)| {
let choices =
regex!(r"[ \t]+").split(&**possibles)
.map(|s| s.to_string())
.collect::<Vec<String>>();
(name.clone(), choices)
})
.collect();
let mut words = vec![];
for k in parsed.descs.keys() {
if let Atom::Positional(ref arg_name) = *k {
if let Some(choices) = arg_possibles.get(arg_name) {
words.extend(choices.iter().map(|s| s.clone()));
}
// If the user hasn't given choices for this positional argument,
// then there's really nothing to complete here.
} else {
words.push(k.to_string());
}
}
for (k, _) in parsed.descs.synonyms() {
// We don't need to do anything special here since synonyms can
// only be flags, which we always include in the wordlist.
words.push(k.to_string());
}
println!("{}", words.join(" "));
Ok(())
}
|
const USAGE: &'static str = "
Usage: docopt-wordlist [(<name> <possibles>)] ...
docopt-wordlist prints a list of available flags and commands arguments for the
|
random_line_split
|
wordlist.rs
|
extern crate regex;
extern crate rustc_serialize;
extern crate strsim;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use dopt::Docopt;
use parse::{Atom, Parser};
// cheat until we get syntax extensions back :-(
macro_rules! regex(
($s:expr) => (::regex::Regex::new($s).unwrap());
);
macro_rules! werr(
($($arg:tt)*) => ({
use std::io::{Write, stderr};
write!(&mut stderr(), $($arg)*).unwrap();
})
);
#[allow(dead_code)]
mod dopt;
#[allow(dead_code)]
mod parse;
#[allow(dead_code)]
mod synonym;
const USAGE: &'static str = "
Usage: docopt-wordlist [(<name> <possibles>)] ...
docopt-wordlist prints a list of available flags and commands arguments for the
given usage (provided on stdin).
Example use:
your-command --help | docopt-wordlist
This command also supports completing positional arguments when given a list of
choices. The choices are included in the word list if and only if the argument
name appears in the usage string. For example:
your-command --help | docopt-wordlist 'arg' 'a b c'
Which will only include 'a', 'b' and 'c' in the wordlist if
'your-command --help' contains a positional argument named 'arg'.
";
#[derive(Debug, RustcDecodable)]
struct Args {
arg_name: Vec<String>,
arg_possibles: Vec<String>,
}
fn main()
|
fn run(args: Args) -> Result<(), String> {
let mut usage = String::new();
try!(io::stdin().read_to_string(&mut usage).map_err(|e| e.to_string()));
let parsed = try!(Parser::new(&usage).map_err(|e| e.to_string()));
let arg_possibles: HashMap<String, Vec<String>> =
args.arg_name.iter()
.zip(args.arg_possibles.iter())
.map(|(name, possibles)| {
let choices =
regex!(r"[ \t]+").split(&**possibles)
.map(|s| s.to_string())
.collect::<Vec<String>>();
(name.clone(), choices)
})
.collect();
let mut words = vec![];
for k in parsed.descs.keys() {
if let Atom::Positional(ref arg_name) = *k {
if let Some(choices) = arg_possibles.get(arg_name) {
words.extend(choices.iter().map(|s| s.clone()));
}
// If the user hasn't given choices for this positional argument,
// then there's really nothing to complete here.
} else {
words.push(k.to_string());
}
}
for (k, _) in parsed.descs.synonyms() {
// We don't need to do anything special here since synonyms can
// only be flags, which we always include in the wordlist.
words.push(k.to_string());
}
println!("{}", words.join(" "));
Ok(())
}
|
{
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
match run(args) {
Ok(_) => {},
Err(err) => {
write!(&mut io::stderr(), "{}", err).unwrap();
::std::process::exit(1)
}
}
}
|
identifier_body
|
wordlist.rs
|
extern crate regex;
extern crate rustc_serialize;
extern crate strsim;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use dopt::Docopt;
use parse::{Atom, Parser};
// cheat until we get syntax extensions back :-(
macro_rules! regex(
($s:expr) => (::regex::Regex::new($s).unwrap());
);
macro_rules! werr(
($($arg:tt)*) => ({
use std::io::{Write, stderr};
write!(&mut stderr(), $($arg)*).unwrap();
})
);
#[allow(dead_code)]
mod dopt;
#[allow(dead_code)]
mod parse;
#[allow(dead_code)]
mod synonym;
const USAGE: &'static str = "
Usage: docopt-wordlist [(<name> <possibles>)] ...
docopt-wordlist prints a list of available flags and commands arguments for the
given usage (provided on stdin).
Example use:
your-command --help | docopt-wordlist
This command also supports completing positional arguments when given a list of
choices. The choices are included in the word list if and only if the argument
name appears in the usage string. For example:
your-command --help | docopt-wordlist 'arg' 'a b c'
Which will only include 'a', 'b' and 'c' in the wordlist if
'your-command --help' contains a positional argument named 'arg'.
";
#[derive(Debug, RustcDecodable)]
struct Args {
arg_name: Vec<String>,
arg_possibles: Vec<String>,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
match run(args) {
Ok(_) => {},
Err(err) =>
|
}
}
fn run(args: Args) -> Result<(), String> {
let mut usage = String::new();
try!(io::stdin().read_to_string(&mut usage).map_err(|e| e.to_string()));
let parsed = try!(Parser::new(&usage).map_err(|e| e.to_string()));
let arg_possibles: HashMap<String, Vec<String>> =
args.arg_name.iter()
.zip(args.arg_possibles.iter())
.map(|(name, possibles)| {
let choices =
regex!(r"[ \t]+").split(&**possibles)
.map(|s| s.to_string())
.collect::<Vec<String>>();
(name.clone(), choices)
})
.collect();
let mut words = vec![];
for k in parsed.descs.keys() {
if let Atom::Positional(ref arg_name) = *k {
if let Some(choices) = arg_possibles.get(arg_name) {
words.extend(choices.iter().map(|s| s.clone()));
}
// If the user hasn't given choices for this positional argument,
// then there's really nothing to complete here.
} else {
words.push(k.to_string());
}
}
for (k, _) in parsed.descs.synonyms() {
// We don't need to do anything special here since synonyms can
// only be flags, which we always include in the wordlist.
words.push(k.to_string());
}
println!("{}", words.join(" "));
Ok(())
}
|
{
write!(&mut io::stderr(), "{}", err).unwrap();
::std::process::exit(1)
}
|
conditional_block
|
wordlist.rs
|
extern crate regex;
extern crate rustc_serialize;
extern crate strsim;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use dopt::Docopt;
use parse::{Atom, Parser};
// cheat until we get syntax extensions back :-(
macro_rules! regex(
($s:expr) => (::regex::Regex::new($s).unwrap());
);
macro_rules! werr(
($($arg:tt)*) => ({
use std::io::{Write, stderr};
write!(&mut stderr(), $($arg)*).unwrap();
})
);
#[allow(dead_code)]
mod dopt;
#[allow(dead_code)]
mod parse;
#[allow(dead_code)]
mod synonym;
const USAGE: &'static str = "
Usage: docopt-wordlist [(<name> <possibles>)] ...
docopt-wordlist prints a list of available flags and commands arguments for the
given usage (provided on stdin).
Example use:
your-command --help | docopt-wordlist
This command also supports completing positional arguments when given a list of
choices. The choices are included in the word list if and only if the argument
name appears in the usage string. For example:
your-command --help | docopt-wordlist 'arg' 'a b c'
Which will only include 'a', 'b' and 'c' in the wordlist if
'your-command --help' contains a positional argument named 'arg'.
";
#[derive(Debug, RustcDecodable)]
struct
|
{
arg_name: Vec<String>,
arg_possibles: Vec<String>,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
match run(args) {
Ok(_) => {},
Err(err) => {
write!(&mut io::stderr(), "{}", err).unwrap();
::std::process::exit(1)
}
}
}
fn run(args: Args) -> Result<(), String> {
let mut usage = String::new();
try!(io::stdin().read_to_string(&mut usage).map_err(|e| e.to_string()));
let parsed = try!(Parser::new(&usage).map_err(|e| e.to_string()));
let arg_possibles: HashMap<String, Vec<String>> =
args.arg_name.iter()
.zip(args.arg_possibles.iter())
.map(|(name, possibles)| {
let choices =
regex!(r"[ \t]+").split(&**possibles)
.map(|s| s.to_string())
.collect::<Vec<String>>();
(name.clone(), choices)
})
.collect();
let mut words = vec![];
for k in parsed.descs.keys() {
if let Atom::Positional(ref arg_name) = *k {
if let Some(choices) = arg_possibles.get(arg_name) {
words.extend(choices.iter().map(|s| s.clone()));
}
// If the user hasn't given choices for this positional argument,
// then there's really nothing to complete here.
} else {
words.push(k.to_string());
}
}
for (k, _) in parsed.descs.synonyms() {
// We don't need to do anything special here since synonyms can
// only be flags, which we always include in the wordlist.
words.push(k.to_string());
}
println!("{}", words.join(" "));
Ok(())
}
|
Args
|
identifier_name
|
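A standalone sketch of the name-to-choices map that run() builds in wordlist.rs above, with the regex! macro replaced by split_whitespace so it needs no external crates; the argument names and choice strings are made up for illustration.
use std::collections::HashMap;
fn main() {
    // Hypothetical stand-ins for Args::arg_name and Args::arg_possibles.
    let arg_name = vec!["arg".to_string(), "mode".to_string()];
    let arg_possibles = vec!["a b c".to_string(), "fast slow".to_string()];
    // Same zip-and-collect shape as in run(), splitting each possibles string on whitespace.
    let map: HashMap<String, Vec<String>> = arg_name
        .iter()
        .zip(arg_possibles.iter())
        .map(|(name, possibles)| {
            let choices = possibles
                .split_whitespace()
                .map(|s| s.to_string())
                .collect::<Vec<String>>();
            (name.clone(), choices)
        })
        .collect();
    assert_eq!(map["arg"], vec!["a", "b", "c"]);
    assert_eq!(map["mode"].len(), 2);
}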
cuda_af_app.rs
|
use arrayfire::{af_print, dim4, info, set_device, Array};
use rustacuda::prelude::*;
fn main()
|
};
let mut in_x = DeviceBuffer::from_slice(&[1.0f32; 10]).unwrap();
let mut in_y = DeviceBuffer::from_slice(&[2.0f32; 10]).unwrap();
// wait for any prior kernels to finish before passing
// the device pointers to ArrayFire
match stream.synchronize() {
Ok(()) => {}
Err(e) => panic!("Stream sync failure: {:?}", e),
};
set_device(0);
info();
let x = Array::new_from_device_ptr(in_x.as_device_ptr().as_raw_mut(), dim4!(10));
let y = Array::new_from_device_ptr(in_y.as_device_ptr().as_raw_mut(), dim4!(10));
// Lock so that ArrayFire doesn't free pointers from RustaCUDA
// But we have to make sure these pointers stay in valid scope
// as long as the associated ArrayFire Array objects are valid
x.lock();
y.lock();
af_print!("x", x);
af_print!("y", y);
let o = x + y;
af_print!("out", o);
let _o_dptr = unsafe { o.device_ptr() }; // Calls an implicit lock
// User has to call unlock if they want to relinquish control to ArrayFire
// Once the non-arrayfire operations are done, call unlock.
o.unlock(); // After this, there is no guarantee that value of o_dptr is valid
}
|
{
// MAKE SURE to do all rustacuda initialization before arrayfire API's
// first call. It seems like some CUDA context state is getting messed up
// if we mix CUDA context init(device, context, module, stream) with ArrayFire API
match rustacuda::init(CudaFlags::empty()) {
Ok(()) => {}
Err(e) => panic!("rustacuda init failure: {:?}", e),
}
let device = match Device::get_device(0) {
Ok(d) => d,
Err(e) => panic!("Failed to get device: {:?}", e),
};
let _context =
match Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device) {
Ok(c) => c,
Err(e) => panic!("Failed to create context: {:?}", e),
};
let stream = match Stream::new(StreamFlags::NON_BLOCKING, None) {
Ok(s) => s,
Err(e) => panic!("Failed to create stream: {:?}", e),
|
identifier_body
|
cuda_af_app.rs
|
use arrayfire::{af_print, dim4, info, set_device, Array};
use rustacuda::prelude::*;
fn
|
() {
// MAKE SURE to do all rustacuda initialization before arrayfire API's
// first call. It seems like some CUDA context state is getting messed up
// if we mix CUDA context init(device, context, module, stream) with ArrayFire API
match rustacuda::init(CudaFlags::empty()) {
Ok(()) => {}
Err(e) => panic!("rustacuda init failure: {:?}", e),
}
let device = match Device::get_device(0) {
Ok(d) => d,
Err(e) => panic!("Failed to get device: {:?}", e),
};
let _context =
match Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device) {
Ok(c) => c,
Err(e) => panic!("Failed to create context: {:?}", e),
};
let stream = match Stream::new(StreamFlags::NON_BLOCKING, None) {
Ok(s) => s,
Err(e) => panic!("Failed to create stream: {:?}", e),
};
let mut in_x = DeviceBuffer::from_slice(&[1.0f32; 10]).unwrap();
let mut in_y = DeviceBuffer::from_slice(&[2.0f32; 10]).unwrap();
// wait for any prior kernels to finish before passing
// the device pointers to ArrayFire
match stream.synchronize() {
Ok(()) => {}
Err(e) => panic!("Stream sync failure: {:?}", e),
};
set_device(0);
info();
let x = Array::new_from_device_ptr(in_x.as_device_ptr().as_raw_mut(), dim4!(10));
let y = Array::new_from_device_ptr(in_y.as_device_ptr().as_raw_mut(), dim4!(10));
// Lock so that ArrayFire doesn't free pointers from RustaCUDA
// But we have to make sure these pointers stay in valid scope
// as long as the associated ArrayFire Array objects are valid
x.lock();
y.lock();
af_print!("x", x);
af_print!("y", y);
let o = x + y;
af_print!("out", o);
let _o_dptr = unsafe { o.device_ptr() }; // Calls an implicit lock
// User has to call unlock if they want to relinquish control to ArrayFire
// Once the non-arrayfire operations are done, call unlock.
o.unlock(); // After this, there is no guarantee that value of o_dptr is valid
}
|
main
|
identifier_name
|
cuda_af_app.rs
|
use arrayfire::{af_print, dim4, info, set_device, Array};
use rustacuda::prelude::*;
fn main() {
// MAKE SURE to do all rustacuda initialization before arrayfire API's
// first call. It seems like some CUDA context state is getting messed up
// if we mix CUDA context init(device, context, module, stream) with ArrayFire API
match rustacuda::init(CudaFlags::empty()) {
Ok(()) => {}
Err(e) => panic!("rustacuda init failure: {:?}", e),
}
let device = match Device::get_device(0) {
Ok(d) => d,
Err(e) => panic!("Failed to get device: {:?}", e),
};
let _context =
match Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device) {
Ok(c) => c,
Err(e) => panic!("Failed to create context: {:?}", e),
};
let stream = match Stream::new(StreamFlags::NON_BLOCKING, None) {
Ok(s) => s,
Err(e) => panic!("Failed to create stream: {:?}", e),
};
let mut in_x = DeviceBuffer::from_slice(&[1.0f32; 10]).unwrap();
let mut in_y = DeviceBuffer::from_slice(&[2.0f32; 10]).unwrap();
// wait for any prior kernels to finish before passing
// the device pointers to ArrayFire
match stream.synchronize() {
Ok(()) => {}
Err(e) => panic!("Stream sync failure: {:?}", e),
};
set_device(0);
info();
let x = Array::new_from_device_ptr(in_x.as_device_ptr().as_raw_mut(), dim4!(10));
let y = Array::new_from_device_ptr(in_y.as_device_ptr().as_raw_mut(), dim4!(10));
|
x.lock();
y.lock();
af_print!("x", x);
af_print!("y", y);
let o = x + y;
af_print!("out", o);
let _o_dptr = unsafe { o.device_ptr() }; // Calls an implicit lock
// User has to call unlock if they want to relinquish control to ArrayFire
// Once the non-arrayfire operations are done, call unlock.
o.unlock(); // After this, there is no guarantee that value of o_dptr is valid
}
|
// Lock so that ArrayFire doesn't free pointers from RustaCUDA
// But we have to make sure these pointers stay in valid scope
// as long as the associated ArrayFire Array objects are valid
|
random_line_split
|
derive_form.rs
|
#![feature(plugin, custom_derive)]
#![plugin(rocket_codegen)]
extern crate rocket;
use rocket::request::{FromForm, FromFormValue, FormItems};
use rocket::http::RawStr;
#[derive(Debug, PartialEq, FromForm)]
struct TodoTask {
description: String,
completed: bool
}
// TODO: Make deriving `FromForm` for this enum possible.
#[derive(Debug, PartialEq)]
enum FormOption {
A, B, C
}
impl<'v> FromFormValue<'v> for FormOption {
type Error = &'v str;
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
let variant = match v.as_str() {
"a" => FormOption::A,
"b" => FormOption::B,
"c" => FormOption::C,
_ => return Err(v)
};
Ok(variant)
}
}
#[derive(Debug, PartialEq, FromForm)]
struct FormInput<'r> {
checkbox: bool,
number: usize,
radio: FormOption,
password: &'r RawStr,
textarea: String,
select: FormOption,
}
#[derive(Debug, PartialEq, FromForm)]
struct DefaultInput<'r> {
arg: Option<&'r RawStr>,
}
#[derive(Debug, PartialEq, FromForm)]
struct ManualMethod<'r> {
_method: Option<&'r RawStr>,
done: bool
}
#[derive(Debug, PartialEq, FromForm)]
struct UnpresentCheckbox {
checkbox: bool
}
#[derive(Debug, PartialEq, FromForm)]
struct UnpresentCheckboxTwo<'r> {
checkbox: bool,
something: &'r RawStr
}
#[derive(Debug, PartialEq, FromForm)]
struct FieldNamedV<'r> {
v: &'r RawStr,
}
fn parse<'f, T: FromForm<'f>>(string: &'f str, strict: bool) -> Option<T> {
let mut items = FormItems::from(string);
let result = T::from_form(items.by_ref(), strict);
if !items.exhaust() {
panic!("Invalid form input.");
}
result.ok()
}
fn strict<'f, T: FromForm<'f>>(string: &'f str) -> Option<T> {
parse(string, true)
}
fn lenient<'f, T: FromForm<'f>>(string: &'f str) -> Option<T> {
parse(string, false)
}
fn
|
() {
// Same number of arguments: simple case.
let task: Option<TodoTask> = strict("description=Hello&completed=on");
assert_eq!(task, Some(TodoTask {
description: "Hello".to_string(),
completed: true
}));
// Argument in string but not in form.
let task: Option<TodoTask> = strict("other=a&description=Hello&completed=on");
assert!(task.is_none());
// Ensure _method isn't required.
let task: Option<TodoTask> = strict("_method=patch&description=Hello&completed=off");
assert_eq!(task, Some(TodoTask {
description: "Hello".to_string(),
completed: false
}));
let form_string = &[
"password=testing", "checkbox=off", "checkbox=on", "number=10",
"checkbox=off", "textarea=", "select=a", "radio=c",
].join("&");
let input: Option<FormInput> = strict(&form_string);
assert_eq!(input, Some(FormInput {
checkbox: false,
number: 10,
radio: FormOption::C,
password: "testing".into(),
textarea: "".to_string(),
select: FormOption::A,
}));
// Argument not in string with default in form.
let default: Option<DefaultInput> = strict("");
assert_eq!(default, Some(DefaultInput {
arg: None
}));
// Ensure _method can be captured if desired.
let manual: Option<ManualMethod> = strict("_method=put&done=true");
assert_eq!(manual, Some(ManualMethod {
_method: Some("put".into()),
done: true
}));
let manual: Option<ManualMethod> = lenient("_method=put&done=true");
assert_eq!(manual, Some(ManualMethod {
_method: Some("put".into()),
done: true
}));
// And ignored when not present.
let manual: Option<ManualMethod> = strict("done=true");
assert_eq!(manual, Some(ManualMethod {
_method: None,
done: true
}));
// Check that a `bool` value that isn't in the form is marked as `false`.
let manual: Option<UnpresentCheckbox> = strict("");
assert_eq!(manual, Some(UnpresentCheckbox {
checkbox: false
}));
// Check that a `bool` value that isn't in the form is marked as `false`.
let manual: Option<UnpresentCheckboxTwo> = strict("something=hello");
assert_eq!(manual, Some(UnpresentCheckboxTwo {
checkbox: false,
something: "hello".into()
}));
// Check that a structure with one field `v` parses correctly.
let manual: Option<FieldNamedV> = strict("v=abc");
assert_eq!(manual, Some(FieldNamedV {
v: "abc".into()
}));
// Check that a structure with one field `v` parses correctly (lenient).
let manual: Option<FieldNamedV> = lenient("v=abc");
assert_eq!(manual, Some(FieldNamedV { v: "abc".into() }));
let manual: Option<FieldNamedV> = lenient("v=abc&a=123");
assert_eq!(manual, Some(FieldNamedV { v: "abc".into() }));
let manual: Option<FieldNamedV> = lenient("c=abcddef&v=abc&a=123");
assert_eq!(manual, Some(FieldNamedV { v: "abc".into() }));
// Check default values (bool) with lenient parsing.
let manual: Option<UnpresentCheckboxTwo> = lenient("something=hello");
assert_eq!(manual, Some(UnpresentCheckboxTwo {
checkbox: false,
something: "hello".into()
}));
let manual: Option<UnpresentCheckboxTwo> = lenient("hi=hi&something=hello");
assert_eq!(manual, Some(UnpresentCheckboxTwo {
checkbox: false,
something: "hello".into()
}));
// Check that a missing field doesn't parse, even leniently.
let manual: Option<FieldNamedV> = lenient("a=abc");
assert!(manual.is_none());
let manual: Option<FieldNamedV> = lenient("_method=abc");
assert!(manual.is_none());
}
|
main
|
identifier_name
|
derive_form.rs
|
#![feature(plugin, custom_derive)]
#![plugin(rocket_codegen)]
extern crate rocket;
use rocket::request::{FromForm, FromFormValue, FormItems};
use rocket::http::RawStr;
#[derive(Debug, PartialEq, FromForm)]
struct TodoTask {
description: String,
completed: bool
}
// TODO: Make deriving `FromForm` for this enum possible.
#[derive(Debug, PartialEq)]
enum FormOption {
A, B, C
}
impl<'v> FromFormValue<'v> for FormOption {
type Error = &'v str;
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
let variant = match v.as_str() {
"a" => FormOption::A,
"b" => FormOption::B,
"c" => FormOption::C,
_ => return Err(v)
};
Ok(variant)
}
}
#[derive(Debug, PartialEq, FromForm)]
struct FormInput<'r> {
checkbox: bool,
number: usize,
radio: FormOption,
password: &'r RawStr,
textarea: String,
select: FormOption,
}
#[derive(Debug, PartialEq, FromForm)]
struct DefaultInput<'r> {
arg: Option<&'r RawStr>,
}
|
}
#[derive(Debug, PartialEq, FromForm)]
struct UnpresentCheckbox {
checkbox: bool
}
#[derive(Debug, PartialEq, FromForm)]
struct UnpresentCheckboxTwo<'r> {
checkbox: bool,
something: &'r RawStr
}
#[derive(Debug, PartialEq, FromForm)]
struct FieldNamedV<'r> {
v: &'r RawStr,
}
fn parse<'f, T: FromForm<'f>>(string: &'f str, strict: bool) -> Option<T> {
let mut items = FormItems::from(string);
let result = T::from_form(items.by_ref(), strict);
if !items.exhaust() {
panic!("Invalid form input.");
}
result.ok()
}
fn strict<'f, T: FromForm<'f>>(string: &'f str) -> Option<T> {
parse(string, true)
}
fn lenient<'f, T: FromForm<'f>>(string: &'f str) -> Option<T> {
parse(string, false)
}
fn main() {
// Same number of arguments: simple case.
let task: Option<TodoTask> = strict("description=Hello&completed=on");
assert_eq!(task, Some(TodoTask {
description: "Hello".to_string(),
completed: true
}));
// Argument in string but not in form.
let task: Option<TodoTask> = strict("other=a&description=Hello&completed=on");
assert!(task.is_none());
// Ensure _method isn't required.
let task: Option<TodoTask> = strict("_method=patch&description=Hello&completed=off");
assert_eq!(task, Some(TodoTask {
description: "Hello".to_string(),
completed: false
}));
let form_string = &[
"password=testing", "checkbox=off", "checkbox=on", "number=10",
"checkbox=off", "textarea=", "select=a", "radio=c",
].join("&");
let input: Option<FormInput> = strict(&form_string);
assert_eq!(input, Some(FormInput {
checkbox: false,
number: 10,
radio: FormOption::C,
password: "testing".into(),
textarea: "".to_string(),
select: FormOption::A,
}));
// Argument not in string with default in form.
let default: Option<DefaultInput> = strict("");
assert_eq!(default, Some(DefaultInput {
arg: None
}));
// Ensure _method can be captured if desired.
let manual: Option<ManualMethod> = strict("_method=put&done=true");
assert_eq!(manual, Some(ManualMethod {
_method: Some("put".into()),
done: true
}));
let manual: Option<ManualMethod> = lenient("_method=put&done=true");
assert_eq!(manual, Some(ManualMethod {
_method: Some("put".into()),
done: true
}));
// And ignored when not present.
let manual: Option<ManualMethod> = strict("done=true");
assert_eq!(manual, Some(ManualMethod {
_method: None,
done: true
}));
// Check that a `bool` value that isn't in the form is marked as `false`.
let manual: Option<UnpresentCheckbox> = strict("");
assert_eq!(manual, Some(UnpresentCheckbox {
checkbox: false
}));
// Check that a `bool` value that isn't in the form is marked as `false`.
let manual: Option<UnpresentCheckboxTwo> = strict("something=hello");
assert_eq!(manual, Some(UnpresentCheckboxTwo {
checkbox: false,
something: "hello".into()
}));
// Check that a structure with one field `v` parses correctly.
let manual: Option<FieldNamedV> = strict("v=abc");
assert_eq!(manual, Some(FieldNamedV {
v: "abc".into()
}));
// Check that a structure with one field `v` parses correctly (lenient).
let manual: Option<FieldNamedV> = lenient("v=abc");
assert_eq!(manual, Some(FieldNamedV { v: "abc".into() }));
let manual: Option<FieldNamedV> = lenient("v=abc&a=123");
assert_eq!(manual, Some(FieldNamedV { v: "abc".into() }));
let manual: Option<FieldNamedV> = lenient("c=abcddef&v=abc&a=123");
assert_eq!(manual, Some(FieldNamedV { v: "abc".into() }));
// Check default values (bool) with lenient parsing.
let manual: Option<UnpresentCheckboxTwo> = lenient("something=hello");
assert_eq!(manual, Some(UnpresentCheckboxTwo {
checkbox: false,
something: "hello".into()
}));
let manual: Option<UnpresentCheckboxTwo> = lenient("hi=hi&something=hello");
assert_eq!(manual, Some(UnpresentCheckboxTwo {
checkbox: false,
something: "hello".into()
}));
// Check that a missing field doesn't parse, even leniently.
let manual: Option<FieldNamedV> = lenient("a=abc");
assert!(manual.is_none());
let manual: Option<FieldNamedV> = lenient("_method=abc");
assert!(manual.is_none());
}
|
#[derive(Debug, PartialEq, FromForm)]
struct ManualMethod<'r> {
_method: Option<&'r RawStr>,
done: bool
|
random_line_split
|
to_twos_complement_limbs.rs
|
use malachite_base::num::arithmetic::traits::WrappingNegAssign;
use malachite_nz::natural::arithmetic::sub::limbs_sub_limb_in_place;
use malachite_nz::natural::logic::not::limbs_not_in_place;
use malachite_nz::platform::Limb;
pub fn limbs_twos_complement_in_place_alt_1(limbs: &mut [Limb]) -> bool {
let i = limbs.iter().cloned().take_while(|&x| x == 0).count();
let len = limbs.len();
if i == len
|
limbs[i].wrapping_neg_assign();
let j = i + 1;
if j!= len {
limbs_not_in_place(&mut limbs[j..]);
}
false
}
pub fn limbs_twos_complement_in_place_alt_2(limbs: &mut [Limb]) -> bool {
let carry = limbs_sub_limb_in_place(limbs, 1);
limbs_not_in_place(limbs);
carry
}
|
{
return true;
}
|
conditional_block
|
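A self-contained sketch, using u32 limbs and hypothetical function names, of the two equivalent two's-complement strategies shown in to_twos_complement_limbs.rs above: wrapping-negate the first nonzero limb and NOT everything after it, versus subtract one with borrow and NOT every limb.
fn twos_complement_v1(limbs: &mut [u32]) -> bool {
    // Skip leading zero limbs (the slice is little-endian, least significant first).
    let i = limbs.iter().take_while(|&&x| x == 0).count();
    if i == limbs.len() {
        return true; // the value was zero: report the carry out, leave the limbs alone
    }
    limbs[i] = limbs[i].wrapping_neg();
    for limb in &mut limbs[i + 1..] {
        *limb = !*limb;
    }
    false
}
fn twos_complement_v2(limbs: &mut [u32]) -> bool {
    // Subtract one with borrow propagation, then bitwise NOT every limb.
    let mut borrow = true;
    for limb in limbs.iter_mut() {
        if borrow {
            let (v, b) = limb.overflowing_sub(1);
            *limb = v;
            borrow = b;
        }
        *limb = !*limb;
    }
    borrow
}
fn main() {
    let mut a = [0u32, 5, 7];
    let mut b = a;
    assert_eq!(twos_complement_v1(&mut a), twos_complement_v2(&mut b));
    assert_eq!(a, b);
}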