| file_name | prefix | suffix | middle | fim_type |
|---|---|---|---|---|
| large_string, lengths 4 to 69 | large_string, lengths 0 to 26.7k | large_string, lengths 0 to 24.8k | large_string, lengths 0 to 2.12k | large_string, 4 classes |
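Each row below is one fill-in-the-middle (FIM) example: `prefix` and `suffix` are the surrounding source text from `file_name`, `middle` is the held-out span to be predicted, and `fim_type` records how the span was cut (`identifier_name`, `identifier_body`, `conditional_block`, or `random_line_split`). As a rough illustration only, a row could be modeled in Rust as in the sketch below; the struct, the serde derives, and the sample JSON are assumptions made for this sketch, not the dataset's actual storage format or tooling.

```rust
// Minimal sketch (assumption, not part of the dataset): one row modeled as a
// Rust struct. Field names mirror the schema table above; `FimType` covers the
// four classes that appear in the rows below. Requires the `serde` crate with
// the "derive" feature plus `serde_json`.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
enum FimType {
    #[serde(rename = "identifier_name")]
    IdentifierName,
    #[serde(rename = "identifier_body")]
    IdentifierBody,
    #[serde(rename = "conditional_block")]
    ConditionalBlock,
    #[serde(rename = "random_line_split")]
    RandomLineSplit,
}

#[derive(Debug, Deserialize)]
struct FimRow {
    file_name: String, // 4 to 69 chars
    prefix: String,    // code before the held-out span, up to ~26.7k chars
    suffix: String,    // code after the held-out span, up to ~24.8k chars
    middle: String,    // the held-out span itself, up to ~2.12k chars
    fim_type: FimType, // how the span was cut
}

fn main() {
    // A tiny, made-up row used only to show the shape of the data.
    let raw = r#"{
        "file_name": "main.rs",
        "prefix": "fn ",
        "suffix": "() {}",
        "middle": "main",
        "fim_type": "identifier_name"
    }"#;
    let row: FimRow = serde_json::from_str(raw).expect("well-formed row");
    println!("{}: {:?}, middle is {} bytes", row.file_name, row.fim_type, row.middle.len());
}
```

Modeling `fim_type` as an enum rather than a free string is a design choice for the sketch: any row with an unexpected class fails loudly at load time instead of propagating silently.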
htmlstyleelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLStyleElementBinding;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::InheritTypes::{ElementCast, HTMLElementCast, HTMLStyleElementDerived, NodeCast};
use dom::bindings::js::{JSRef, Temporary, OptionalRootable};
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::{Element, ElementTypeId};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeHelpers, NodeTypeId, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::window::WindowHelpers;
use layout_interface::{LayoutChan, Msg};
use util::str::DOMString;
use style::stylesheets::{Origin, Stylesheet};
use style::media_queries::parse_media_query_list;
use style::node::TElement;
use cssparser::Parser as CssParser;
#[dom_struct]
pub struct HTMLStyleElement {
htmlelement: HTMLElement,
}
impl HTMLStyleElementDerived for EventTarget {
fn
|
(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLStyleElement)))
}
}
impl HTMLStyleElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLStyleElement {
HTMLStyleElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLStyleElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLStyleElement> {
let element = HTMLStyleElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLStyleElementBinding::Wrap)
}
}
pub trait StyleElementHelpers {
fn parse_own_css(self);
}
impl<'a> StyleElementHelpers for JSRef<'a, HTMLStyleElement> {
fn parse_own_css(self) {
let node: JSRef<Node> = NodeCast::from_ref(self);
let element: JSRef<Element> = ElementCast::from_ref(self);
assert!(node.is_in_doc());
let win = window_from_node(node).root();
let win = win.r();
let url = win.get_url();
let mq_str = element.get_attr(&ns!(""), &atom!("media")).unwrap_or("");
let mut css_parser = CssParser::new(&mq_str);
let media = parse_media_query_list(&mut css_parser);
let data = node.GetTextContent().expect("Element.textContent must be a string");
let sheet = Stylesheet::from_str(data.as_slice(), url, Origin::Author);
let LayoutChan(ref layout_chan) = win.layout_chan();
layout_chan.send(Msg::AddStylesheet(sheet, media)).unwrap();
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLStyleElement> {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn child_inserted(&self, child: JSRef<Node>) {
if let Some(ref s) = self.super_type() {
s.child_inserted(child);
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
if node.is_in_doc() {
self.parse_own_css();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc {
self.parse_own_css();
}
}
}
|
is_htmlstyleelement
|
identifier_name
|
htmlstyleelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLStyleElementBinding;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::InheritTypes::{ElementCast, HTMLElementCast, HTMLStyleElementDerived, NodeCast};
use dom::bindings::js::{JSRef, Temporary, OptionalRootable};
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::{Element, ElementTypeId};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeHelpers, NodeTypeId, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::window::WindowHelpers;
use layout_interface::{LayoutChan, Msg};
use util::str::DOMString;
use style::stylesheets::{Origin, Stylesheet};
use style::media_queries::parse_media_query_list;
use style::node::TElement;
use cssparser::Parser as CssParser;
#[dom_struct]
pub struct HTMLStyleElement {
htmlelement: HTMLElement,
}
impl HTMLStyleElementDerived for EventTarget {
fn is_htmlstyleelement(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLStyleElement)))
}
}
impl HTMLStyleElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLStyleElement {
HTMLStyleElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLStyleElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLStyleElement> {
let element = HTMLStyleElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLStyleElementBinding::Wrap)
}
}
pub trait StyleElementHelpers {
fn parse_own_css(self);
}
impl<'a> StyleElementHelpers for JSRef<'a, HTMLStyleElement> {
fn parse_own_css(self) {
let node: JSRef<Node> = NodeCast::from_ref(self);
let element: JSRef<Element> = ElementCast::from_ref(self);
assert!(node.is_in_doc());
let win = window_from_node(node).root();
let win = win.r();
let url = win.get_url();
let mq_str = element.get_attr(&ns!(""), &atom!("media")).unwrap_or("");
let mut css_parser = CssParser::new(&mq_str);
let media = parse_media_query_list(&mut css_parser);
let data = node.GetTextContent().expect("Element.textContent must be a string");
let sheet = Stylesheet::from_str(data.as_slice(), url, Origin::Author);
let LayoutChan(ref layout_chan) = win.layout_chan();
layout_chan.send(Msg::AddStylesheet(sheet, media)).unwrap();
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLStyleElement> {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn child_inserted(&self, child: JSRef<Node>) {
if let Some(ref s) = self.super_type() {
s.child_inserted(child);
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
if node.is_in_doc() {
self.parse_own_css();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc
|
}
}
|
{
self.parse_own_css();
}
|
conditional_block
|
utils.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use rustc::lint::Context;
use rustc::middle::{ty, def};
use syntax::ptr::P;
use syntax::{ast, ast_map};
use syntax::ast::{TyPath, Path, AngleBracketedParameters, PathSegment, Ty};
use syntax::attr::mark_used;
/// Matches a type with a provided string, and returns its type parameters if successful
///
/// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes)
pub fn match_ty_unwrap<'a>(ty: &'a Ty, segments: &[&str]) -> Option<&'a [P<Ty>]> {
match ty.node {
TyPath(Path {segments: ref seg,..}, _) => {
// So ast::Path isn't the full path, just the tokens that were provided.
// I could muck around with the maps and find the full path
// however the more efficient way is to simply reverse the iterators and zip them
// which will compare them in reverse until one of them runs out of segments
if seg.iter().rev().zip(segments.iter().rev()).all(|(a,b)| a.identifier.as_str() == *b) {
match seg.as_slice().last() {
Some(&PathSegment {parameters: AngleBracketedParameters(ref a),..}) => {
Some(a.types.as_slice())
}
_ => None
}
} else {
None
}
},
_ => None
}
}
/// Checks if a type has a #[servo_lang = "str"] attribute
pub fn match_lang_ty(cx: &Context, ty: &Ty, value: &str) -> bool {
let ty_id = match ty.node {
TyPath(_, ty_id) => ty_id,
_ => return false,
};
let def_id = match cx.tcx.def_map.borrow().get(&ty_id).cloned() {
Some(def::DefTy(def_id, _)) => def_id,
_ => return false,
};
ty::get_attrs(cx.tcx, def_id).iter().any(|attr| {
match attr.node.value.node {
ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => {
match val.node {
ast::LitStr(ref v, _) if &**v == value => {
mark_used(attr);
true
|
},
_ => false,
}
}
_ => false,
}
})
}
// Determines if a block is in an unsafe context so that an unhelpful
// lint can be aborted.
pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool {
match map.find(map.get_parent(id)) {
Some(ast_map::NodeImplItem(itm)) => {
match *itm {
ast::MethodImplItem(ref meth) => match meth.node {
ast::MethDecl(_, _, _, _, style, _, _, _) => match style {
ast::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
},
_ => false,
}
},
Some(ast_map::NodeItem(itm)) => {
match itm.node {
ast::ItemFn(_, style, _, _, _) => match style {
ast::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
}
}
_ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need to be added.
}
}
|
random_line_split
|
|
utils.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use rustc::lint::Context;
use rustc::middle::{ty, def};
use syntax::ptr::P;
use syntax::{ast, ast_map};
use syntax::ast::{TyPath, Path, AngleBracketedParameters, PathSegment, Ty};
use syntax::attr::mark_used;
/// Matches a type with a provided string, and returns its type parameters if successful
///
/// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes)
pub fn
|
<'a>(ty: &'a Ty, segments: &[&str]) -> Option<&'a [P<Ty>]> {
match ty.node {
TyPath(Path {segments: ref seg,..}, _) => {
// So ast::Path isn't the full path, just the tokens that were provided.
// I could muck around with the maps and find the full path
// however the more efficient way is to simply reverse the iterators and zip them
// which will compare them in reverse until one of them runs out of segments
if seg.iter().rev().zip(segments.iter().rev()).all(|(a,b)| a.identifier.as_str() == *b) {
match seg.as_slice().last() {
Some(&PathSegment {parameters: AngleBracketedParameters(ref a),..}) => {
Some(a.types.as_slice())
}
_ => None
}
} else {
None
}
},
_ => None
}
}
/// Checks if a type has a #[servo_lang = "str"] attribute
pub fn match_lang_ty(cx: &Context, ty: &Ty, value: &str) -> bool {
let ty_id = match ty.node {
TyPath(_, ty_id) => ty_id,
_ => return false,
};
let def_id = match cx.tcx.def_map.borrow().get(&ty_id).cloned() {
Some(def::DefTy(def_id, _)) => def_id,
_ => return false,
};
ty::get_attrs(cx.tcx, def_id).iter().any(|attr| {
match attr.node.value.node {
ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => {
match val.node {
ast::LitStr(ref v, _) if &**v == value => {
mark_used(attr);
true
},
_ => false,
}
}
_ => false,
}
})
}
// Determines if a block is in an unsafe context so that an unhelpful
// lint can be aborted.
pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool {
match map.find(map.get_parent(id)) {
Some(ast_map::NodeImplItem(itm)) => {
match *itm {
ast::MethodImplItem(ref meth) => match meth.node {
ast::MethDecl(_, _, _, _, style, _, _, _) => match style {
ast::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
},
_ => false,
}
},
Some(ast_map::NodeItem(itm)) => {
match itm.node {
ast::ItemFn(_, style, _, _, _) => match style {
ast::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
}
}
_ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need to be added.
}
}
|
match_ty_unwrap
|
identifier_name
|
utils.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use rustc::lint::Context;
use rustc::middle::{ty, def};
use syntax::ptr::P;
use syntax::{ast, ast_map};
use syntax::ast::{TyPath, Path, AngleBracketedParameters, PathSegment, Ty};
use syntax::attr::mark_used;
/// Matches a type with a provided string, and returns its type parameters if successful
///
/// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes)
pub fn match_ty_unwrap<'a>(ty: &'a Ty, segments: &[&str]) -> Option<&'a [P<Ty>]> {
match ty.node {
TyPath(Path {segments: ref seg,..}, _) => {
// So ast::Path isn't the full path, just the tokens that were provided.
// I could muck around with the maps and find the full path
// however the more efficient way is to simply reverse the iterators and zip them
// which will compare them in reverse until one of them runs out of segments
if seg.iter().rev().zip(segments.iter().rev()).all(|(a,b)| a.identifier.as_str() == *b) {
match seg.as_slice().last() {
Some(&PathSegment {parameters: AngleBracketedParameters(ref a),..}) => {
Some(a.types.as_slice())
}
_ => None
}
} else {
None
}
},
_ => None
}
}
/// Checks if a type has a #[servo_lang = "str"] attribute
pub fn match_lang_ty(cx: &Context, ty: &Ty, value: &str) -> bool {
let ty_id = match ty.node {
TyPath(_, ty_id) => ty_id,
_ => return false,
};
let def_id = match cx.tcx.def_map.borrow().get(&ty_id).cloned() {
Some(def::DefTy(def_id, _)) => def_id,
_ => return false,
};
ty::get_attrs(cx.tcx, def_id).iter().any(|attr| {
match attr.node.value.node {
ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => {
match val.node {
ast::LitStr(ref v, _) if &**v == value => {
mark_used(attr);
true
},
_ => false,
}
}
_ => false,
}
})
}
// Determines if a block is in an unsafe context so that an unhelpful
// lint can be aborted.
pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool
|
_ => false,
}
}
_ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need to be added.
}
}
|
{
match map.find(map.get_parent(id)) {
Some(ast_map::NodeImplItem(itm)) => {
match *itm {
ast::MethodImplItem(ref meth) => match meth.node {
ast::MethDecl(_, _, _, _, style, _, _, _) => match style {
ast::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
},
_ => false,
}
},
Some(ast_map::NodeItem(itm)) => {
match itm.node {
ast::ItemFn(_, style, _, _, _) => match style {
ast::Unsafety::Unsafe => true,
_ => false,
},
|
identifier_body
|
rust_string.rs
|
use std::ffi::CString;
use std::mem::forget;
use libc;
/// Compatibility wrapper for strings allocated in Rust and passed to C.
///
/// Rust doesn't ensure the safety of freeing memory across an FFI boundary, so
/// we need to take special care to ensure we're not accidentally calling
/// `tor_free`() on any string allocated in Rust. To more easily differentiate
/// between strings that possibly (if Rust support is enabled) were allocated
/// in Rust, C has the `rust_str_t` helper type. The equivalent on the Rust
/// side is `RustString`.
///
/// Note: This type must not be used for strings allocated in C.
#[repr(C)]
#[derive(Debug)]
pub struct RustString(*mut libc::c_char);
impl RustString {
/// Returns a pointer to the underlying NUL-terminated byte array.
///
/// Note that this function is not typically useful for Rust callers,
/// except in a direct FFI context.
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let r = RustString::from(CString::new("asdf").unwrap());
/// let c_str = r.as_ptr();
/// assert_eq!(b'a', unsafe { *c_str as u8});
/// ```
pub fn as_ptr(&self) -> *const libc::c_char {
self.0 as *const libc::c_char
}
}
impl From<CString> for RustString {
/// Constructs a new `RustString`
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let r = RustString::from(CString::new("asdf").unwrap());
/// ```
fn from(str: CString) -> RustString {
RustString(str.into_raw())
}
}
impl Into<CString> for RustString {
/// Reconstructs a `CString` from this `RustString`.
///
/// Useful to take ownership back from a `RustString` that was given to C
/// code.
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let cs = CString::new("asdf").unwrap();
/// let r = RustString::from(cs.clone());
/// let cs2 = r.into();
/// assert_eq!(cs, cs2);
/// ```
fn into(self) -> CString {
// Calling from_raw is always OK here: We only construct self using
// valid CStrings and don't expose anything that could mutate it
let ret = unsafe { CString::from_raw(self.0) };
forget(self);
ret
}
}
impl Drop for RustString {
fn drop(&mut self) {
// Don't use into() here, because we would need to move out of
// self. Same safety consideration. Immediately drop the created
// CString, which takes care of freeing the wrapped string.
unsafe { CString::from_raw(self.0) };
}
}
#[cfg(test)]
mod test {
use std::mem;
use super::*;
use libc;
/// Ensures we're not adding overhead by using RustString.
#[test]
fn size_of()
|
}
|
{
assert_eq!(mem::size_of::<*mut libc::c_char>(),
mem::size_of::<RustString>())
}
|
identifier_body
|
rust_string.rs
|
use std::ffi::CString;
use std::mem::forget;
use libc;
/// Compatibility wrapper for strings allocated in Rust and passed to C.
///
/// Rust doesn't ensure the safety of freeing memory across an FFI boundary, so
/// we need to take special care to ensure we're not accidentally calling
/// `tor_free`() on any string allocated in Rust. To more easily differentiate
/// between strings that possibly (if Rust support is enabled) were allocated
/// in Rust, C has the `rust_str_t` helper type. The equivalent on the Rust
/// side is `RustString`.
///
/// Note: This type must not be used for strings allocated in C.
#[repr(C)]
#[derive(Debug)]
pub struct RustString(*mut libc::c_char);
impl RustString {
/// Returns a pointer to the underlying NUL-terminated byte array.
///
/// Note that this function is not typically useful for Rust callers,
/// except in a direct FFI context.
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let r = RustString::from(CString::new("asdf").unwrap());
/// let c_str = r.as_ptr();
/// assert_eq!(b'a', unsafe { *c_str as u8});
/// ```
pub fn as_ptr(&self) -> *const libc::c_char {
self.0 as *const libc::c_char
}
}
impl From<CString> for RustString {
/// Constructs a new `RustString`
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let r = RustString::from(CString::new("asdf").unwrap());
/// ```
fn from(str: CString) -> RustString {
RustString(str.into_raw())
}
}
impl Into<CString> for RustString {
/// Reconstructs a `CString` from this `RustString`.
///
/// Useful to take ownership back from a `RustString` that was given to C
/// code.
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let cs = CString::new("asdf").unwrap();
/// let r = RustString::from(cs.clone());
/// let cs2 = r.into();
/// assert_eq!(cs, cs2);
/// ```
fn into(self) -> CString {
// Calling from_raw is always OK here: We only construct self using
// valid CStrings and don't expose anything that could mutate it
let ret = unsafe { CString::from_raw(self.0) };
forget(self);
ret
}
}
impl Drop for RustString {
fn drop(&mut self) {
// Don't use into() here, because we would need to move out of
// self. Same safety consideration. Immediately drop the created
// CString, which takes care of freeing the wrapped string.
unsafe { CString::from_raw(self.0) };
}
}
#[cfg(test)]
mod test {
use std::mem;
use super::*;
use libc;
/// Ensures we're not adding overhead by using RustString.
|
#[test]
fn size_of() {
assert_eq!(mem::size_of::<*mut libc::c_char>(),
mem::size_of::<RustString>())
}
}
|
random_line_split
|
|
rust_string.rs
|
use std::ffi::CString;
use std::mem::forget;
use libc;
/// Compatibility wrapper for strings allocated in Rust and passed to C.
///
/// Rust doesn't ensure the safety of freeing memory across an FFI boundary, so
/// we need to take special care to ensure we're not accidentally calling
/// `tor_free`() on any string allocated in Rust. To more easily differentiate
/// between strings that possibly (if Rust support is enabled) were allocated
/// in Rust, C has the `rust_str_t` helper type. The equivalent on the Rust
/// side is `RustString`.
///
/// Note: This type must not be used for strings allocated in C.
#[repr(C)]
#[derive(Debug)]
pub struct RustString(*mut libc::c_char);
impl RustString {
/// Returns a pointer to the underlying NUL-terminated byte array.
///
/// Note that this function is not typically useful for Rust callers,
/// except in a direct FFI context.
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let r = RustString::from(CString::new("asdf").unwrap());
/// let c_str = r.as_ptr();
/// assert_eq!(b'a', unsafe { *c_str as u8});
/// ```
pub fn
|
(&self) -> *const libc::c_char {
self.0 as *const libc::c_char
}
}
impl From<CString> for RustString {
/// Constructs a new `RustString`
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let r = RustString::from(CString::new("asdf").unwrap());
/// ```
fn from(str: CString) -> RustString {
RustString(str.into_raw())
}
}
impl Into<CString> for RustString {
/// Reconstructs a `CString` from this `RustString`.
///
/// Useful to take ownership back from a `RustString` that was given to C
/// code.
///
/// # Examples
/// ```
/// # use tor_util::RustString;
/// use std::ffi::CString;
///
/// let cs = CString::new("asdf").unwrap();
/// let r = RustString::from(cs.clone());
/// let cs2 = r.into();
/// assert_eq!(cs, cs2);
/// ```
fn into(self) -> CString {
// Calling from_raw is always OK here: We only construct self using
// valid CStrings and don't expose anything that could mutate it
let ret = unsafe { CString::from_raw(self.0) };
forget(self);
ret
}
}
impl Drop for RustString {
fn drop(&mut self) {
// Don't use into() here, because we would need to move out of
// self. Same safety consideration. Immediately drop the created
// CString, which takes care of freeing the wrapped string.
unsafe { CString::from_raw(self.0) };
}
}
#[cfg(test)]
mod test {
use std::mem;
use super::*;
use libc;
/// Ensures we're not adding overhead by using RustString.
#[test]
fn size_of() {
assert_eq!(mem::size_of::<*mut libc::c_char>(),
mem::size_of::<RustString>())
}
}
|
as_ptr
|
identifier_name
|
storage.rs
|
use std::borrow::Cow;
use std::ffi::CStr;
/// Structure containing information about a camera's storage.
///
/// ## Example
///
/// A `Storage` object can be used to retrieve information about a camera's storage:
///
/// ```no_run
/// let mut context = gphoto::Context::new().unwrap();
/// let mut camera = gphoto::Camera::autodetect(&mut context).unwrap();
///
/// for storage in camera.storage(&mut context).unwrap() {
/// println!(" base dir = {:?}", storage.base_dir());
/// println!(" label = {:?}", storage.label());
/// println!(" description = {:?}", storage.description());
/// println!(" storage type = {:?}", storage.storage_type());
/// println!("filesystem type = {:?}", storage.filesystem_type());
/// println!(" access type = {:?}", storage.access_type());
/// println!(" capacity kb = {:?}", storage.capacity_kbytes());
/// println!(" free kb = {:?}", storage.free_kbytes());
/// println!(" free images = {:?}", storage.free_images());
/// }
/// ```
///
/// The above example might print something like the following:
///
/// ```text
/// base dir = Some("/store_00010001")
/// label = Some("NIKON D750 [Slot 1]")
/// description = None
/// storage type = Some(RemoveableRam)
/// filesystem type = Some(DCF)
/// access type = Some(ReadDelete)
/// capacity kb = Some(31154688)
/// free kb = Some(30833088)
/// free images = Some(580)
/// ```
#[repr(C)]
pub struct Storage {
inner: ::gphoto2::CameraStorageInformation,
}
impl Storage {
/// Base directory of the storage.
pub fn base_dir(&self) -> Option<Cow<str>> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_BASE!= 0 {
Some(unsafe {
String::from_utf8_lossy(CStr::from_ptr(self.inner.basedir.as_ptr()).to_bytes())
})
}
else {
None
}
}
/// The storage's label.
pub fn label(&self) -> Option<Cow<str>> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_LABEL!= 0 {
Some(unsafe {
String::from_utf8_lossy(CStr::from_ptr(self.inner.label.as_ptr()).to_bytes())
})
}
else {
None
}
}
/// A description of the storage.
pub fn description(&self) -> Option<Cow<str>> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_DESCRIPTION!= 0 {
Some(unsafe {
String::from_utf8_lossy(CStr::from_ptr(self.inner.description.as_ptr()).to_bytes())
})
}
else {
None
}
}
/// The storage's hardware type.
pub fn storage_type(&self) -> Option<StorageType> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_STORAGETYPE!= 0 {
Some(match self.inner.storage_type {
::gphoto2::GP_STORAGEINFO_ST_FIXED_ROM => StorageType::FixedRom,
::gphoto2::GP_STORAGEINFO_ST_REMOVABLE_ROM => StorageType::RemovableRom,
::gphoto2::GP_STORAGEINFO_ST_FIXED_RAM => StorageType::FixedRam,
::gphoto2::GP_STORAGEINFO_ST_REMOVABLE_RAM => StorageType::RemoveableRam,
::gphoto2::GP_STORAGEINFO_ST_UNKNOWN => StorageType::Unknown,
})
}
else {
None
}
}
/// The hiearchy type of the storage's filesystem.
|
::gphoto2::GP_STORAGEINFO_FST_DCF => FilesystemType::DCF,
::gphoto2::GP_STORAGEINFO_FST_UNDEFINED => FilesystemType::Unknown,
})
}
else {
None
}
}
/// The storage's access permissions.
pub fn access_type(&self) -> Option<AccessType> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_ACCESS!= 0 {
Some(match self.inner.access {
::gphoto2::GP_STORAGEINFO_AC_READWRITE => AccessType::ReadWrite,
::gphoto2::GP_STORAGEINFO_AC_READONLY => AccessType::ReadOnly,
::gphoto2::GP_STORAGEINFO_AC_READONLY_WITH_DELETE => AccessType::ReadDelete,
})
}
else {
None
}
}
/// The storage's total capacity in kilobytes.
pub fn capacity_kbytes(&self) -> Option<u64> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_MAXCAPACITY!= 0 {
Some(self.inner.capacitykbytes)
}
else {
None
}
}
/// The storage's free space in kilobytes.
pub fn free_kbytes(&self) -> Option<u64> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FREESPACEKBYTES!= 0 {
Some(self.inner.freekbytes)
}
else {
None
}
}
/// An estimate of the number of images that could fit in the storage's remaining space.
///
/// This value is estimated by the camera.
pub fn free_images(&self) -> Option<u64> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FREESPACEIMAGES!= 0 {
Some(self.inner.freeimages)
}
else {
None
}
}
}
/// Types of storage hardware.
#[derive(Debug)]
pub enum StorageType {
/// A fixed ROM storage.
FixedRom,
/// A removable ROM storage.
RemovableRom,
/// A fixed RAM storage.
FixedRam,
/// A removable RAM storage.
///
/// This includes any kind of removable cards (SD card, CompactFlash, etc).
RemoveableRam,
/// Unknown storage type.
Unknown,
}
/// Types of filesystem hierarchies.
#[derive(Debug)]
pub enum FilesystemType {
/// All files stored in one directory.
Flat,
/// Files are stored in a generic tree-like hierarchy.
Hierarchical,
/// Files are stored in a DCF-compatible hierarchy.
///
/// Design rule for Camera File system (DCF) is a standard that defines a directory structure
/// (among other things) for use on digital cameras. A filesystem that follows the DCF standard
/// will store its media in a `DCIM` directory.
DCF,
/// Filesystem hierarchy is unknown.
Unknown,
}
/// Types of access permissions.
#[derive(Debug)]
pub enum AccessType {
/// Read and write operations are allowed.
ReadWrite,
/// Read and delete operations are allowed.
ReadDelete,
/// Only read operations are allowed.
ReadOnly,
}
|
pub fn filesystem_type(&self) -> Option<FilesystemType> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FILESYSTEMTYPE != 0 {
Some(match self.inner.fstype {
::gphoto2::GP_STORAGEINFO_FST_GENERICFLAT => FilesystemType::Flat,
::gphoto2::GP_STORAGEINFO_FST_GENERICHIERARCHICAL => FilesystemType::Hierarchical,
|
random_line_split
|
storage.rs
|
use std::borrow::Cow;
use std::ffi::CStr;
/// Structure containing information about a camera's storage.
///
/// ## Example
///
/// A `Storage` object can be used to retrieve information about a camera's storage:
///
/// ```no_run
/// let mut context = gphoto::Context::new().unwrap();
/// let mut camera = gphoto::Camera::autodetect(&mut context).unwrap();
///
/// for storage in camera.storage(&mut context).unwrap() {
/// println!(" base dir = {:?}", storage.base_dir());
/// println!(" label = {:?}", storage.label());
/// println!(" description = {:?}", storage.description());
/// println!(" storage type = {:?}", storage.storage_type());
/// println!("filesystem type = {:?}", storage.filesystem_type());
/// println!(" access type = {:?}", storage.access_type());
/// println!(" capacity kb = {:?}", storage.capacity_kbytes());
/// println!(" free kb = {:?}", storage.free_kbytes());
/// println!(" free images = {:?}", storage.free_images());
/// }
/// ```
///
/// The above example might print something like the following:
///
/// ```text
/// base dir = Some("/store_00010001")
/// label = Some("NIKON D750 [Slot 1]")
/// description = None
/// storage type = Some(RemoveableRam)
/// filesystem type = Some(DCF)
/// access type = Some(ReadDelete)
/// capacity kb = Some(31154688)
/// free kb = Some(30833088)
/// free images = Some(580)
/// ```
#[repr(C)]
pub struct Storage {
inner: ::gphoto2::CameraStorageInformation,
}
impl Storage {
/// Base directory of the storage.
pub fn
|
(&self) -> Option<Cow<str>> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_BASE!= 0 {
Some(unsafe {
String::from_utf8_lossy(CStr::from_ptr(self.inner.basedir.as_ptr()).to_bytes())
})
}
else {
None
}
}
/// The storage's label.
pub fn label(&self) -> Option<Cow<str>> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_LABEL!= 0 {
Some(unsafe {
String::from_utf8_lossy(CStr::from_ptr(self.inner.label.as_ptr()).to_bytes())
})
}
else {
None
}
}
/// A description of the storage.
pub fn description(&self) -> Option<Cow<str>> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_DESCRIPTION!= 0 {
Some(unsafe {
String::from_utf8_lossy(CStr::from_ptr(self.inner.description.as_ptr()).to_bytes())
})
}
else {
None
}
}
/// The storage's hardware type.
pub fn storage_type(&self) -> Option<StorageType> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_STORAGETYPE!= 0 {
Some(match self.inner.storage_type {
::gphoto2::GP_STORAGEINFO_ST_FIXED_ROM => StorageType::FixedRom,
::gphoto2::GP_STORAGEINFO_ST_REMOVABLE_ROM => StorageType::RemovableRom,
::gphoto2::GP_STORAGEINFO_ST_FIXED_RAM => StorageType::FixedRam,
::gphoto2::GP_STORAGEINFO_ST_REMOVABLE_RAM => StorageType::RemoveableRam,
::gphoto2::GP_STORAGEINFO_ST_UNKNOWN => StorageType::Unknown,
})
}
else {
None
}
}
/// The hiearchy type of the storage's filesystem.
pub fn filesystem_type(&self) -> Option<FilesystemType> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FILESYSTEMTYPE!= 0 {
Some(match self.inner.fstype {
::gphoto2::GP_STORAGEINFO_FST_GENERICFLAT => FilesystemType::Flat,
::gphoto2::GP_STORAGEINFO_FST_GENERICHIERARCHICAL => FilesystemType::Hierarchical,
::gphoto2::GP_STORAGEINFO_FST_DCF => FilesystemType::DCF,
::gphoto2::GP_STORAGEINFO_FST_UNDEFINED => FilesystemType::Unknown,
})
}
else {
None
}
}
/// The storage's access permissions.
pub fn access_type(&self) -> Option<AccessType> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_ACCESS!= 0 {
Some(match self.inner.access {
::gphoto2::GP_STORAGEINFO_AC_READWRITE => AccessType::ReadWrite,
::gphoto2::GP_STORAGEINFO_AC_READONLY => AccessType::ReadOnly,
::gphoto2::GP_STORAGEINFO_AC_READONLY_WITH_DELETE => AccessType::ReadDelete,
})
}
else {
None
}
}
/// The storage's total capacity in kilobytes.
pub fn capacity_kbytes(&self) -> Option<u64> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_MAXCAPACITY!= 0 {
Some(self.inner.capacitykbytes)
}
else {
None
}
}
/// The storage's free space in kilobytes.
pub fn free_kbytes(&self) -> Option<u64> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FREESPACEKBYTES!= 0 {
Some(self.inner.freekbytes)
}
else {
None
}
}
/// An estimate of the number of images that could fit in the storage's remaining space.
///
/// This value is estimated by the camera.
pub fn free_images(&self) -> Option<u64> {
if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FREESPACEIMAGES!= 0 {
Some(self.inner.freeimages)
}
else {
None
}
}
}
/// Types of storage hardware.
#[derive(Debug)]
pub enum StorageType {
/// A fixed ROM storage.
FixedRom,
/// A removable ROM storage.
RemovableRom,
/// A fixed RAM storage.
FixedRam,
/// A removable RAM storage.
///
/// This includes any kind of removable cards (SD card, CompactFlash, etc).
RemoveableRam,
/// Unknown storage type.
Unknown,
}
/// Types of filesystem hierarchies.
#[derive(Debug)]
pub enum FilesystemType {
/// All files stored in one directory.
Flat,
/// Files are stored in a generic tree-like hierarchy.
Hierarchical,
/// Files are stored in a DCF-compatible hierarchy.
///
/// Design rule for Camera File system (DCF) is a standard that defines a directory structure
/// (among other things) for use on digital cameras. A filesystem that follows the DCF standard
/// will store its media in a `DCIM` directory.
DCF,
/// Filesystem hierarchy is unknown.
Unknown,
}
/// Types of access permissions.
#[derive(Debug)]
pub enum AccessType {
/// Read and write operations are allowed.
ReadWrite,
/// Read and delete operations are allowed.
ReadDelete,
/// Only read operations are allowed.
ReadOnly,
}
|
base_dir
|
identifier_name
|
ast.rs
|
use clingo::*;
use std::env;
pub struct OnStatementData<'a, 'b> {
atom: &'b ast::Term<'b>,
control: &'a mut Control,
}
impl<'a, 'b> StatementHandler for OnStatementData<'a, 'b> {
// adds atom enable to all rule bodies
fn on_statement(&mut self, stm: &ast::Statement) -> bool {
// pass through all statements that are not rules
let mut builder = ast::ProgramBuilder::from(self.control).unwrap();
match stm.statement_type() {
ast::StatementType::Rule(rule) => {
let body = rule.body();
let mut extended_body = std::vec::Vec::with_capacity(body.len() + 1);
for e in body {
extended_body.push(e.clone());
}
// create atom enable
let lit = ast::Literal::from_term(ast::Sign::None, &self.atom);
// add atom enable to the rule body
let blit = ast::BodyLiteral::from_literal(ast::Sign::None, &lit);
extended_body.push(blit);
// initialize the rule
let head = rule.head();
let rule = ast::Rule::new(*head, &extended_body);
// initialize the statement
let stm2 = rule.ast_statement();
// add the rewritten statement to the program
builder
.add(&stm2)
.expect("Failed to add statement to ProgramBuilder.");
true
}
_ => {
builder
.add(stm)
.expect("Failed to add statement to ProgramBuilder.");
true
}
}
}
}
fn print_model(model: &Model) {
// retrieve the symbols in the model
let atoms = model
.symbols(ShowType::SHOWN)
.expect("Failed to retrieve symbols in the model.");
print!("Model:");
for atom in atoms {
// retrieve and print the symbol's string
print!(" {}", atom.to_string().unwrap());
}
println!();
}
fn solve(ctl: &mut Control) {
// get a solve handle
let mut handle = ctl
.solve(SolveMode::YIELD, &[])
.expect("Failed retrieving solve handle.");
// loop over all models
loop {
handle.resume().expect("Failed resume on solve handle.");
match handle.model() {
// print the model
Ok(Some(model)) => print_model(model),
// stop if there are no more models
Ok(None) => break,
Err(e) => panic!("Error: {}", e),
}
}
// close the solve handle
handle
.get()
.expect("Failed to get result from solve handle.");
handle.close().expect("Failed to close solve handle.");
}
fn
|
() {
// collect clingo options from the command line
let options = env::args().skip(1).collect();
let mut ctl = Control::new(options).expect("Failed creating Control.");
let sym = Symbol::create_id("enable", true).unwrap();
{
// initilize atom to add and the program builder
let mut data = OnStatementData {
atom: &ast::Term::from(sym),
control: &mut ctl,
};
// get the AST of the program
parse_program("a :- not b. b :- not a.", &mut data)
.expect("Failed to parse logic program.");
// add the external statement: #external enable. [false]
let ext = ast::External::new(ast::Term::from(sym), &[]);
let mut builder = ast::ProgramBuilder::from(&mut ctl).unwrap();
let stm = ext.ast_statement();
builder
.add(&stm)
.expect("Failed to add statement to ProgramBuilder.");
// finish building a program
builder.end().expect("Failed to finish building a program.");
}
// ground the base part
let part = Part::new("base", &[]).unwrap();
let parts = vec![part];
ctl.ground(&parts)
.expect("Failed to ground a logic program.");
// get the program literal corresponding to the external atom
let atoms = ctl.symbolic_atoms().unwrap();
let mut atm_it = atoms.iter().unwrap();
let item = atm_it.find(|e| e.symbol().unwrap() == sym).unwrap();
let atm = item.literal().unwrap();
// solve with external enable = false
println!("Solving with enable = false...");
solve(&mut ctl);
// solve with external enable = true
println!("Solving with enable = true...");
ctl.assign_external(atm, TruthValue::True)
.expect("Failed to assign #external enable true.");
solve(&mut ctl);
// solve with external enable = false
println!("Solving with enable = false...");
ctl.assign_external(atm, TruthValue::False)
.expect("Failed to assign #external enable false.");
solve(&mut ctl);
}
|
main
|
identifier_name
|
ast.rs
|
use clingo::*;
use std::env;
pub struct OnStatementData<'a, 'b> {
atom: &'b ast::Term<'b>,
control: &'a mut Control,
}
impl<'a, 'b> StatementHandler for OnStatementData<'a, 'b> {
// adds atom enable to all rule bodies
fn on_statement(&mut self, stm: &ast::Statement) -> bool {
// pass through all statements that are not rules
let mut builder = ast::ProgramBuilder::from(self.control).unwrap();
match stm.statement_type() {
ast::StatementType::Rule(rule) => {
let body = rule.body();
let mut extended_body = std::vec::Vec::with_capacity(body.len() + 1);
for e in body {
extended_body.push(e.clone());
}
// create atom enable
let lit = ast::Literal::from_term(ast::Sign::None, &self.atom);
// add atom enable to the rule body
let blit = ast::BodyLiteral::from_literal(ast::Sign::None, &lit);
extended_body.push(blit);
// initialize the rule
let head = rule.head();
|
let rule = ast::Rule::new(*head, &extended_body);
// initialize the statement
let stm2 = rule.ast_statement();
// add the rewritten statement to the program
builder
.add(&stm2)
.expect("Failed to add statement to ProgramBuilder.");
true
}
_ => {
builder
.add(stm)
.expect("Failed to add statement to ProgramBuilder.");
true
}
}
}
}
fn print_model(model: &Model) {
// retrieve the symbols in the model
let atoms = model
.symbols(ShowType::SHOWN)
.expect("Failed to retrieve symbols in the model.");
print!("Model:");
for atom in atoms {
// retrieve and print the symbol's string
print!(" {}", atom.to_string().unwrap());
}
println!();
}
fn solve(ctl: &mut Control) {
// get a solve handle
let mut handle = ctl
.solve(SolveMode::YIELD, &[])
.expect("Failed retrieving solve handle.");
// loop over all models
loop {
handle.resume().expect("Failed resume on solve handle.");
match handle.model() {
// print the model
Ok(Some(model)) => print_model(model),
// stop if there are no more models
Ok(None) => break,
Err(e) => panic!("Error: {}", e),
}
}
// close the solve handle
handle
.get()
.expect("Failed to get result from solve handle.");
handle.close().expect("Failed to close solve handle.");
}
fn main() {
// collect clingo options from the command line
let options = env::args().skip(1).collect();
let mut ctl = Control::new(options).expect("Failed creating Control.");
let sym = Symbol::create_id("enable", true).unwrap();
{
// initilize atom to add and the program builder
let mut data = OnStatementData {
atom: &ast::Term::from(sym),
control: &mut ctl,
};
// get the AST of the program
parse_program("a :- not b. b :- not a.", &mut data)
.expect("Failed to parse logic program.");
// add the external statement: #external enable. [false]
let ext = ast::External::new(ast::Term::from(sym), &[]);
let mut builder = ast::ProgramBuilder::from(&mut ctl).unwrap();
let stm = ext.ast_statement();
builder
.add(&stm)
.expect("Failed to add statement to ProgramBuilder.");
// finish building a program
builder.end().expect("Failed to finish building a program.");
}
// ground the base part
let part = Part::new("base", &[]).unwrap();
let parts = vec![part];
ctl.ground(&parts)
.expect("Failed to ground a logic program.");
// get the program literal corresponding to the external atom
let atoms = ctl.symbolic_atoms().unwrap();
let mut atm_it = atoms.iter().unwrap();
let item = atm_it.find(|e| e.symbol().unwrap() == sym).unwrap();
let atm = item.literal().unwrap();
// solve with external enable = false
println!("Solving with enable = false...");
solve(&mut ctl);
// solve with external enable = true
println!("Solving with enable = true...");
ctl.assign_external(atm, TruthValue::True)
.expect("Failed to assign #external enable true.");
solve(&mut ctl);
// solve with external enable = false
println!("Solving with enable = false...");
ctl.assign_external(atm, TruthValue::False)
.expect("Failed to assign #external enable false.");
solve(&mut ctl);
}
|
random_line_split
|
|
ast.rs
|
use clingo::*;
use std::env;
pub struct OnStatementData<'a, 'b> {
atom: &'b ast::Term<'b>,
control: &'a mut Control,
}
impl<'a, 'b> StatementHandler for OnStatementData<'a, 'b> {
// adds atom enable to all rule bodies
fn on_statement(&mut self, stm: &ast::Statement) -> bool {
// pass through all statements that are not rules
let mut builder = ast::ProgramBuilder::from(self.control).unwrap();
match stm.statement_type() {
ast::StatementType::Rule(rule) => {
let body = rule.body();
let mut extended_body = std::vec::Vec::with_capacity(body.len() + 1);
for e in body {
extended_body.push(e.clone());
}
// create atom enable
let lit = ast::Literal::from_term(ast::Sign::None, &self.atom);
// add atom enable to the rule body
let blit = ast::BodyLiteral::from_literal(ast::Sign::None, &lit);
extended_body.push(blit);
// initialize the rule
let head = rule.head();
let rule = ast::Rule::new(*head, &extended_body);
// initialize the statement
let stm2 = rule.ast_statement();
// add the rewritten statement to the program
builder
.add(&stm2)
.expect("Failed to add statement to ProgramBuilder.");
true
}
_ => {
builder
.add(stm)
.expect("Failed to add statement to ProgramBuilder.");
true
}
}
}
}
fn print_model(model: &Model) {
// retrieve the symbols in the model
let atoms = model
.symbols(ShowType::SHOWN)
.expect("Failed to retrieve symbols in the model.");
print!("Model:");
for atom in atoms {
// retrieve and print the symbol's string
print!(" {}", atom.to_string().unwrap());
}
println!();
}
fn solve(ctl: &mut Control) {
// get a solve handle
let mut handle = ctl
.solve(SolveMode::YIELD, &[])
.expect("Failed retrieving solve handle.");
// loop over all models
loop {
handle.resume().expect("Failed resume on solve handle.");
match handle.model() {
// print the model
Ok(Some(model)) => print_model(model),
// stop if there are no more models
Ok(None) => break,
Err(e) => panic!("Error: {}", e),
}
}
// close the solve handle
handle
.get()
.expect("Failed to get result from solve handle.");
handle.close().expect("Failed to close solve handle.");
}
fn main()
|
let ext = ast::External::new(ast::Term::from(sym), &[]);
let mut builder = ast::ProgramBuilder::from(&mut ctl).unwrap();
let stm = ext.ast_statement();
builder
.add(&stm)
.expect("Failed to add statement to ProgramBuilder.");
// finish building a program
builder.end().expect("Failed to finish building a program.");
}
// ground the base part
let part = Part::new("base", &[]).unwrap();
let parts = vec![part];
ctl.ground(&parts)
.expect("Failed to ground a logic program.");
// get the program literal corresponding to the external atom
let atoms = ctl.symbolic_atoms().unwrap();
let mut atm_it = atoms.iter().unwrap();
let item = atm_it.find(|e| e.symbol().unwrap() == sym).unwrap();
let atm = item.literal().unwrap();
// solve with external enable = false
println!("Solving with enable = false...");
solve(&mut ctl);
// solve with external enable = true
println!("Solving with enable = true...");
ctl.assign_external(atm, TruthValue::True)
.expect("Failed to assign #external enable true.");
solve(&mut ctl);
// solve with external enable = false
println!("Solving with enable = false...");
ctl.assign_external(atm, TruthValue::False)
.expect("Failed to assign #external enable false.");
solve(&mut ctl);
}
|
{
// collect clingo options from the command line
let options = env::args().skip(1).collect();
let mut ctl = Control::new(options).expect("Failed creating Control.");
let sym = Symbol::create_id("enable", true).unwrap();
{
// initilize atom to add and the program builder
let mut data = OnStatementData {
atom: &ast::Term::from(sym),
control: &mut ctl,
};
// get the AST of the program
parse_program("a :- not b. b :- not a.", &mut data)
.expect("Failed to parse logic program.");
// add the external statement: #external enable. [false]
|
identifier_body
|
main.rs
|
use std::io::Write;
use std::str::FromStr;
fn main()
|
fn gcd(mut n: u64, mut m: u64) -> u64 {
assert!(n!= 0 && m!= 0);
while m!= 0 {
if m < n {
let t = m;
m = n;
n = t;
}
m = m % n;
}
n
}
#[test]
fn test_gcd() {
assert_eq!(gcd(14, 15), 1);
assert_eq!(gcd(2 * 3 * 5 * 11 * 17,
3 * 7 * 11 * 13 * 19),
3 * 11);
}
|
{
let mut numbers = Vec::new();
for arg in std::env::args().skip(1) {
numbers.push(u64::from_str(&arg)
.expect("error parsing argument"));
}
if numbers.len() == 0 {
writeln!(std::io::stderr(), "Usage: gcd NUMBER ...").unwrap();
std::process::exit(1);
}
let mut d = numbers[0];
for m in &numbers[1..] {
d = gcd(d, *m);
}
println!("The greatest common divisor of {:?} is {}", numbers, d);
}
|
identifier_body
|
main.rs
|
use std::io::Write;
use std::str::FromStr;
fn main() {
let mut numbers = Vec::new();
for arg in std::env::args().skip(1) {
numbers.push(u64::from_str(&arg)
.expect("error parsing argument"));
|
std::process::exit(1);
}
let mut d = numbers[0];
for m in &numbers[1..] {
d = gcd(d, *m);
}
println!("The greatest common divisor of {:?} is {}", numbers, d);
}
fn gcd(mut n: u64, mut m: u64) -> u64 {
assert!(n!= 0 && m!= 0);
while m!= 0 {
if m < n {
let t = m;
m = n;
n = t;
}
m = m % n;
}
n
}
#[test]
fn test_gcd() {
assert_eq!(gcd(14, 15), 1);
assert_eq!(gcd(2 * 3 * 5 * 11 * 17,
3 * 7 * 11 * 13 * 19),
3 * 11);
}
|
}
if numbers.len() == 0 {
writeln!(std::io::stderr(), "Usage: gcd NUMBER ...").unwrap();
|
random_line_split
|
main.rs
|
use std::io::Write;
use std::str::FromStr;
fn main() {
let mut numbers = Vec::new();
for arg in std::env::args().skip(1) {
numbers.push(u64::from_str(&arg)
.expect("error parsing argument"));
}
if numbers.len() == 0 {
writeln!(std::io::stderr(), "Usage: gcd NUMBER...").unwrap();
std::process::exit(1);
}
let mut d = numbers[0];
for m in &numbers[1..] {
d = gcd(d, *m);
}
println!("The greatest common divisor of {:?} is {}", numbers, d);
}
fn
|
(mut n: u64, mut m: u64) -> u64 {
assert!(n!= 0 && m!= 0);
while m!= 0 {
if m < n {
let t = m;
m = n;
n = t;
}
m = m % n;
}
n
}
#[test]
fn test_gcd() {
assert_eq!(gcd(14, 15), 1);
assert_eq!(gcd(2 * 3 * 5 * 11 * 17,
3 * 7 * 11 * 13 * 19),
3 * 11);
}
|
gcd
|
identifier_name
|
main.rs
|
use std::io::Write;
use std::str::FromStr;
fn main() {
let mut numbers = Vec::new();
for arg in std::env::args().skip(1) {
numbers.push(u64::from_str(&arg)
.expect("error parsing argument"));
}
if numbers.len() == 0
|
let mut d = numbers[0];
for m in &numbers[1..] {
d = gcd(d, *m);
}
println!("The greatest common divisor of {:?} is {}", numbers, d);
}
fn gcd(mut n: u64, mut m: u64) -> u64 {
assert!(n!= 0 && m!= 0);
while m!= 0 {
if m < n {
let t = m;
m = n;
n = t;
}
m = m % n;
}
n
}
#[test]
fn test_gcd() {
assert_eq!(gcd(14, 15), 1);
assert_eq!(gcd(2 * 3 * 5 * 11 * 17,
3 * 7 * 11 * 13 * 19),
3 * 11);
}
|
{
writeln!(std::io::stderr(), "Usage: gcd NUMBER ...").unwrap();
std::process::exit(1);
}
|
conditional_block
|
tempfile.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Temporary files and directories
use std::os;
use std::rand::RngUtil;
use std::rand;
/// Attempts to make a temporary directory inside of `tmpdir` whose name will
/// have the suffix `suffix`. If no directory can be created, None is returned.
pub fn
|
(tmpdir: &Path, suffix: &str) -> Option<Path> {
let mut r = rand::rng();
for 1000.times {
let p = tmpdir.push(r.gen_str(16) + suffix);
if os::make_dir(&p, 0x1c0) { // 700
return Some(p);
}
}
None
}
#[cfg(test)]
mod tests {
use tempfile::mkdtemp;
use std::os;
#[test]
fn test_mkdtemp() {
let p = mkdtemp(&Path("."), "foobar").unwrap();
os::remove_dir(&p);
assert!(p.to_str().ends_with("foobar"));
}
// Ideally these would be in std::os but then core would need
// to depend on std
#[test]
fn recursive_mkdir_rel() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let root = mkdtemp(&os::tmpdir(), "recursive_mkdir_rel").
expect("recursive_mkdir_rel");
assert!(do os::change_dir_locked(&root) {
let path = Path("frob");
debug!("recursive_mkdir_rel: Making: %s in cwd %s [%?]", path.to_str(),
os::getcwd().to_str(),
os::path_exists(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
});
}
#[test]
fn recursive_mkdir_dot() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let dot = Path(".");
assert!(os::mkdir_recursive(&dot, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
let dotdot = Path("..");
assert!(os::mkdir_recursive(&dotdot, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
}
#[test]
fn recursive_mkdir_rel_2() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let root = mkdtemp(&os::tmpdir(), "recursive_mkdir_rel_2").
expect("recursive_mkdir_rel_2");
assert!(do os::change_dir_locked(&root) {
let path = Path("./frob/baz");
debug!("recursive_mkdir_rel_2: Making: %s in cwd %s [%?]", path.to_str(),
os::getcwd().to_str(), os::path_exists(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
assert!(os::path_is_dir(&path.pop()));
let path2 = Path("quux/blat");
debug!("recursive_mkdir_rel_2: Making: %s in cwd %s", path2.to_str(),
os::getcwd().to_str());
assert!(os::mkdir_recursive(&path2, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path2));
assert!(os::path_is_dir(&path2.pop()));
});
}
// Ideally this would be in core, but needs mkdtemp
#[test]
pub fn test_rmdir_recursive_ok() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let rwx = (S_IRUSR | S_IWUSR | S_IXUSR) as i32;
let tmpdir = mkdtemp(&os::tmpdir(), "test").expect("test_rmdir_recursive_ok: \
couldn't create temp dir");
let root = tmpdir.push("foo");
debug!("making %s", root.to_str());
assert!(os::make_dir(&root, rwx));
assert!(os::make_dir(&root.push("foo"), rwx));
assert!(os::make_dir(&root.push("foo").push("bar"), rwx));
assert!(os::make_dir(&root.push("foo").push("bar").push("blat"), rwx));
assert!(os::remove_dir_recursive(&root));
assert!(!os::path_exists(&root));
assert!(!os::path_exists(&root.push("bar")));
assert!(!os::path_exists(&root.push("bar").push("blat")));
}
}
|
mkdtemp
|
identifier_name
|
tempfile.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Temporary files and directories
use std::os;
use std::rand::RngUtil;
use std::rand;
/// Attempts to make a temporary directory inside of `tmpdir` whose name will
/// have the suffix `suffix`. If no directory can be created, None is returned.
pub fn mkdtemp(tmpdir: &Path, suffix: &str) -> Option<Path> {
let mut r = rand::rng();
for 1000.times {
let p = tmpdir.push(r.gen_str(16) + suffix);
if os::make_dir(&p, 0x1c0) { // 700
return Some(p);
}
}
None
}
#[cfg(test)]
mod tests {
use tempfile::mkdtemp;
use std::os;
#[test]
fn test_mkdtemp() {
let p = mkdtemp(&Path("."), "foobar").unwrap();
os::remove_dir(&p);
assert!(p.to_str().ends_with("foobar"));
}
// Ideally these would be in std::os but then core would need
// to depend on std
#[test]
fn recursive_mkdir_rel() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
|
let root = mkdtemp(&os::tmpdir(), "recursive_mkdir_rel").
expect("recursive_mkdir_rel");
assert!(do os::change_dir_locked(&root) {
let path = Path("frob");
debug!("recursive_mkdir_rel: Making: %s in cwd %s [%?]", path.to_str(),
os::getcwd().to_str(),
os::path_exists(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
});
}
#[test]
fn recursive_mkdir_dot() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let dot = Path(".");
assert!(os::mkdir_recursive(&dot, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
let dotdot = Path("..");
assert!(os::mkdir_recursive(&dotdot, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
}
#[test]
fn recursive_mkdir_rel_2() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let root = mkdtemp(&os::tmpdir(), "recursive_mkdir_rel_2").
expect("recursive_mkdir_rel_2");
assert!(do os::change_dir_locked(&root) {
let path = Path("./frob/baz");
debug!("recursive_mkdir_rel_2: Making: %s in cwd %s [%?]", path.to_str(),
os::getcwd().to_str(), os::path_exists(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
assert!(os::path_is_dir(&path.pop()));
let path2 = Path("quux/blat");
debug!("recursive_mkdir_rel_2: Making: %s in cwd %s", path2.to_str(),
os::getcwd().to_str());
assert!(os::mkdir_recursive(&path2, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path2));
assert!(os::path_is_dir(&path2.pop()));
});
}
// Ideally this would be in core, but needs mkdtemp
#[test]
pub fn test_rmdir_recursive_ok() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let rwx = (S_IRUSR | S_IWUSR | S_IXUSR) as i32;
let tmpdir = mkdtemp(&os::tmpdir(), "test").expect("test_rmdir_recursive_ok: \
couldn't create temp dir");
let root = tmpdir.push("foo");
debug!("making %s", root.to_str());
assert!(os::make_dir(&root, rwx));
assert!(os::make_dir(&root.push("foo"), rwx));
assert!(os::make_dir(&root.push("foo").push("bar"), rwx));
assert!(os::make_dir(&root.push("foo").push("bar").push("blat"), rwx));
assert!(os::remove_dir_recursive(&root));
assert!(!os::path_exists(&root));
assert!(!os::path_exists(&root.push("bar")));
assert!(!os::path_exists(&root.push("bar").push("blat")));
}
}
|
random_line_split
|
|
tempfile.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Temporary files and directories
use std::os;
use std::rand::RngUtil;
use std::rand;
/// Attempts to make a temporary directory inside of `tmpdir` whose name will
/// have the suffix `suffix`. If no directory can be created, None is returned.
pub fn mkdtemp(tmpdir: &Path, suffix: &str) -> Option<Path>
|
let p = mkdtemp(&Path("."), "foobar").unwrap();
os::remove_dir(&p);
assert!(p.to_str().ends_with("foobar"));
}
// Ideally these would be in std::os but then core would need
// to depend on std
#[test]
fn recursive_mkdir_rel() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let root = mkdtemp(&os::tmpdir(), "recursive_mkdir_rel").
expect("recursive_mkdir_rel");
assert!(do os::change_dir_locked(&root) {
let path = Path("frob");
debug!("recursive_mkdir_rel: Making: %s in cwd %s [%?]", path.to_str(),
os::getcwd().to_str(),
os::path_exists(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
});
}
#[test]
fn recursive_mkdir_dot() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let dot = Path(".");
assert!(os::mkdir_recursive(&dot, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
let dotdot = Path("..");
assert!(os::mkdir_recursive(&dotdot, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
}
#[test]
fn recursive_mkdir_rel_2() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let root = mkdtemp(&os::tmpdir(), "recursive_mkdir_rel_2").
expect("recursive_mkdir_rel_2");
assert!(do os::change_dir_locked(&root) {
let path = Path("./frob/baz");
debug!("recursive_mkdir_rel_2: Making: %s in cwd %s [%?]", path.to_str(),
os::getcwd().to_str(), os::path_exists(&path));
assert!(os::mkdir_recursive(&path, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path));
assert!(os::path_is_dir(&path.pop()));
let path2 = Path("quux/blat");
debug!("recursive_mkdir_rel_2: Making: %s in cwd %s", path2.to_str(),
os::getcwd().to_str());
assert!(os::mkdir_recursive(&path2, (S_IRUSR | S_IWUSR | S_IXUSR) as i32));
assert!(os::path_is_dir(&path2));
assert!(os::path_is_dir(&path2.pop()));
});
}
// Ideally this would be in core, but needs mkdtemp
#[test]
pub fn test_rmdir_recursive_ok() {
use std::libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
use std::os;
let rwx = (S_IRUSR | S_IWUSR | S_IXUSR) as i32;
let tmpdir = mkdtemp(&os::tmpdir(), "test").expect("test_rmdir_recursive_ok: \
couldn't create temp dir");
let root = tmpdir.push("foo");
debug!("making %s", root.to_str());
assert!(os::make_dir(&root, rwx));
assert!(os::make_dir(&root.push("foo"), rwx));
assert!(os::make_dir(&root.push("foo").push("bar"), rwx));
assert!(os::make_dir(&root.push("foo").push("bar").push("blat"), rwx));
assert!(os::remove_dir_recursive(&root));
assert!(!os::path_exists(&root));
assert!(!os::path_exists(&root.push("bar")));
assert!(!os::path_exists(&root.push("bar").push("blat")));
}
}
|
{
let mut r = rand::rng();
for 1000.times {
let p = tmpdir.push(r.gen_str(16) + suffix);
if os::make_dir(&p, 0x1c0) { // 700
return Some(p);
}
}
None
}
#[cfg(test)]
mod tests {
use tempfile::mkdtemp;
use std::os;
#[test]
fn test_mkdtemp() {
|
identifier_body
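The mkdtemp body filled in above loops up to 1000 times, appends a random 16-character name to the suffix, and treats a successful make_dir (mode 0x1c0, i.e. 0o700) as the atomic claim on that name. A minimal sketch of the same retry idea in current Rust, assuming SystemTime-derived entropy as a stand-in for rand::rng and relying on default directory permissions:

use std::fs;
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH};

fn mkdtemp(tmpdir: &Path, suffix: &str) -> Option<PathBuf> {
    for attempt in 0..1000u128 {
        let nanos = SystemTime::now().duration_since(UNIX_EPOCH).ok()?.as_nanos();
        let candidate = tmpdir.join(format!("{:x}{}", nanos ^ attempt, suffix));
        // create_dir fails if the name already exists, so success is an atomic
        // claim on the directory, just like os::make_dir in the snippet above.
        if fs::create_dir(&candidate).is_ok() {
            return Some(candidate);
        }
    }
    None
}

fn main() {
    if let Some(p) = mkdtemp(&std::env::temp_dir(), "foobar") {
        assert!(p.to_str().map_or(false, |s| s.ends_with("foobar")));
        let _ = fs::remove_dir(&p);
    }
}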
|
dns_resolver.rs
|
//! Asynchronous DNS resolver
use std::{
io::{self, ErrorKind},
net::SocketAddr,
};
use futures::Future;
use tokio;
use trust_dns_resolver::{config::ResolverConfig, AsyncResolver};
use crate::context::SharedContext;
pub fn
|
(dns: Option<ResolverConfig>) -> AsyncResolver {
let (resolver, bg) = {
// To make this independent, if targeting macOS, BSD, Linux, or Windows, we can use the system's configuration:
#[cfg(any(unix, windows))]
{
if let Some(conf) = dns {
use trust_dns_resolver::config::ResolverOpts;
AsyncResolver::new(conf, ResolverOpts::default())
} else {
use trust_dns_resolver::system_conf::read_system_conf;
// use the system resolver configuration
let (config, opts) = read_system_conf().expect("Failed to read global dns sysconf");
AsyncResolver::new(config, opts)
}
}
// For other operating systems, we can use one of the preconfigured definitions
#[cfg(not(any(unix, windows)))]
{
// Directly reference the config types
use trust_dns_resolver::config::{ResolverConfig, ResolverOpts};
if let Some(conf) = dns {
AsyncResolver::new(conf, ResolverOpts::default())
} else {
// Get a new resolver with the google nameservers as the upstream recursive resolvers
AsyncResolver::new(ResolverConfig::google(), ResolverOpts::default())
}
}
};
// NOTE: resolving will always be called inside a future.
tokio::spawn(bg);
resolver
}
fn inner_resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
// let owned_addr = addr.to_owned();
let cloned_context = context.clone();
context.dns_resolver().lookup_ip(addr).then(move |r| match r {
Err(err) => {
// error!("Failed to resolve {}, err: {}", owned_addr, err);
Err(io::Error::new(
io::ErrorKind::Other,
format!("dns resolve error: {}", err),
))
}
Ok(lookup_result) => {
let mut vaddr = Vec::new();
for ip in lookup_result.iter() {
if check_forbidden {
let forbidden_ip = &cloned_context.config().forbidden_ip;
if forbidden_ip.contains(&ip) {
// debug!("Resolved {} => {}, which is skipped by forbidden_ip", owned_addr, ip);
continue;
}
}
vaddr.push(SocketAddr::new(ip, port));
}
if vaddr.is_empty() {
let err = io::Error::new(
ErrorKind::Other,
// format!("resolved {} to empty address, all IPs are filtered", owned_addr),
"resolved to empty address, all IPs are filtered",
);
Err(err)
} else {
// debug!("Resolved {} => {:?}", owned_addr, vaddr);
Ok(vaddr)
}
}
})
}
/// Resolve address to IP
pub fn resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
inner_resolve(context, addr, port, check_forbidden)
}
|
create_resolver
|
identifier_name
|
dns_resolver.rs
|
//! Asynchronous DNS resolver
use std::{
io::{self, ErrorKind},
net::SocketAddr,
};
use futures::Future;
use tokio;
use trust_dns_resolver::{config::ResolverConfig, AsyncResolver};
use crate::context::SharedContext;
pub fn create_resolver(dns: Option<ResolverConfig>) -> AsyncResolver {
let (resolver, bg) = {
// To make this independent, if targeting macOS, BSD, Linux, or Windows, we can use the system's configuration:
#[cfg(any(unix, windows))]
{
if let Some(conf) = dns {
use trust_dns_resolver::config::ResolverOpts;
AsyncResolver::new(conf, ResolverOpts::default())
} else {
use trust_dns_resolver::system_conf::read_system_conf;
// use the system resolver configuration
let (config, opts) = read_system_conf().expect("Failed to read global dns sysconf");
AsyncResolver::new(config, opts)
}
}
// For other operating systems, we can use one of the preconfigured definitions
#[cfg(not(any(unix, windows)))]
{
// Directly reference the config types
use trust_dns_resolver::config::{ResolverConfig, ResolverOpts};
if let Some(conf) = dns {
AsyncResolver::new(conf, ResolverOpts::default())
} else {
// Get a new resolver with the google nameservers as the upstream recursive resolvers
AsyncResolver::new(ResolverConfig::google(), ResolverOpts::default())
}
}
};
// NOTE: resolving will always be called inside a future.
tokio::spawn(bg);
resolver
}
fn inner_resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
// let owned_addr = addr.to_owned();
let cloned_context = context.clone();
context.dns_resolver().lookup_ip(addr).then(move |r| match r {
Err(err) => {
// error!("Failed to resolve {}, err: {}", owned_addr, err);
Err(io::Error::new(
io::ErrorKind::Other,
format!("dns resolve error: {}", err),
))
}
Ok(lookup_result) => {
let mut vaddr = Vec::new();
for ip in lookup_result.iter() {
if check_forbidden {
let forbidden_ip = &cloned_context.config().forbidden_ip;
if forbidden_ip.contains(&ip) {
// debug!("Resolved {} => {}, which is skipped by forbidden_ip", owned_addr, ip);
continue;
}
}
vaddr.push(SocketAddr::new(ip, port));
}
if vaddr.is_empty() {
let err = io::Error::new(
ErrorKind::Other,
// format!("resolved {} to empty address, all IPs are filtered", owned_addr),
"resolved to empty address, all IPs are filtered",
);
Err(err)
} else {
// debug!("Resolved {} => {:?}", owned_addr, vaddr);
Ok(vaddr)
}
}
})
}
/// Resolve address to IP
pub fn resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
inner_resolve(context, addr, port, check_forbidden)
|
}
|
random_line_split
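create_resolver spawns the resolver's background future with tokio::spawn, so it has to be called from inside a running executor; resolve/inner_resolve then drive lookup_ip and filter the results against forbidden_ip. A hypothetical caller, assuming the same futures 0.1 / tokio 0.1 / trust-dns-resolver stack and that create_resolver is importable from this module (the import path below is made up), might look like:

use futures::Future;
use crate::dns_resolver::create_resolver; // hypothetical path to the module above

fn main() {
    tokio::run(futures::lazy(|| {
        // create_resolver calls tokio::spawn(bg) internally, so it must run
        // inside an executor; futures::lazy defers it until tokio::run starts one.
        let resolver = create_resolver(None);
        resolver
            .lookup_ip("example.com")
            .map(|ips| {
                for ip in ips.iter() {
                    println!("resolved: {}", ip);
                }
            })
            .map_err(|e| eprintln!("lookup failed: {}", e))
    }));
}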
|
|
dns_resolver.rs
|
//! Asynchronous DNS resolver
use std::{
io::{self, ErrorKind},
net::SocketAddr,
};
use futures::Future;
use tokio;
use trust_dns_resolver::{config::ResolverConfig, AsyncResolver};
use crate::context::SharedContext;
pub fn create_resolver(dns: Option<ResolverConfig>) -> AsyncResolver {
let (resolver, bg) = {
// To make this independent, if targeting macOS, BSD, Linux, or Windows, we can use the system's configuration:
#[cfg(any(unix, windows))]
{
if let Some(conf) = dns {
use trust_dns_resolver::config::ResolverOpts;
AsyncResolver::new(conf, ResolverOpts::default())
} else
|
}
// For other operating systems, we can use one of the preconfigured definitions
#[cfg(not(any(unix, windows)))]
{
// Directly reference the config types
use trust_dns_resolver::config::{ResolverConfig, ResolverOpts};
if let Some(conf) = dns {
AsyncResolver::new(conf, ResolverOpts::default())
} else {
// Get a new resolver with the google nameservers as the upstream recursive resolvers
AsyncResolver::new(ResolverConfig::google(), ResolverOpts::default())
}
}
};
// NOTE: resolving will always be called inside a future.
tokio::spawn(bg);
resolver
}
fn inner_resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
// let owned_addr = addr.to_owned();
let cloned_context = context.clone();
context.dns_resolver().lookup_ip(addr).then(move |r| match r {
Err(err) => {
// error!("Failed to resolve {}, err: {}", owned_addr, err);
Err(io::Error::new(
io::ErrorKind::Other,
format!("dns resolve error: {}", err),
))
}
Ok(lookup_result) => {
let mut vaddr = Vec::new();
for ip in lookup_result.iter() {
if check_forbidden {
let forbidden_ip = &cloned_context.config().forbidden_ip;
if forbidden_ip.contains(&ip) {
// debug!("Resolved {} => {}, which is skipped by forbidden_ip", owned_addr, ip);
continue;
}
}
vaddr.push(SocketAddr::new(ip, port));
}
if vaddr.is_empty() {
let err = io::Error::new(
ErrorKind::Other,
// format!("resolved {} to empty address, all IPs are filtered", owned_addr),
"resolved to empty address, all IPs are filtered",
);
Err(err)
} else {
// debug!("Resolved {} => {:?}", owned_addr, vaddr);
Ok(vaddr)
}
}
})
}
/// Resolve address to IP
pub fn resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
inner_resolve(context, addr, port, check_forbidden)
}
|
{
use trust_dns_resolver::system_conf::read_system_conf;
// use the system resolver configuration
let (config, opts) = read_system_conf().expect("Failed to read global dns sysconf");
AsyncResolver::new(config, opts)
}
|
conditional_block
|
dns_resolver.rs
|
//! Asynchronous DNS resolver
use std::{
io::{self, ErrorKind},
net::SocketAddr,
};
use futures::Future;
use tokio;
use trust_dns_resolver::{config::ResolverConfig, AsyncResolver};
use crate::context::SharedContext;
pub fn create_resolver(dns: Option<ResolverConfig>) -> AsyncResolver
|
use trust_dns_resolver::config::{ResolverConfig, ResolverOpts};
if let Some(conf) = dns {
AsyncResolver::new(conf, ResolverOpts::default())
} else {
// Get a new resolver with the google nameservers as the upstream recursive resolvers
AsyncResolver::new(ResolverConfig::google(), ResolverOpts::default())
}
}
};
// NOTE: resolving will always be called inside a future.
tokio::spawn(bg);
resolver
}
fn inner_resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
// let owned_addr = addr.to_owned();
let cloned_context = context.clone();
context.dns_resolver().lookup_ip(addr).then(move |r| match r {
Err(err) => {
// error!("Failed to resolve {}, err: {}", owned_addr, err);
Err(io::Error::new(
io::ErrorKind::Other,
format!("dns resolve error: {}", err),
))
}
Ok(lookup_result) => {
let mut vaddr = Vec::new();
for ip in lookup_result.iter() {
if check_forbidden {
let forbidden_ip = &cloned_context.config().forbidden_ip;
if forbidden_ip.contains(&ip) {
// debug!("Resolved {} => {}, which is skipped by forbidden_ip", owned_addr, ip);
continue;
}
}
vaddr.push(SocketAddr::new(ip, port));
}
if vaddr.is_empty() {
let err = io::Error::new(
ErrorKind::Other,
// format!("resolved {} to empty address, all IPs are filtered", owned_addr),
"resolved to empty address, all IPs are filtered",
);
Err(err)
} else {
// debug!("Resolved {} => {:?}", owned_addr, vaddr);
Ok(vaddr)
}
}
})
}
/// Resolve address to IP
pub fn resolve(
context: SharedContext,
addr: &str,
port: u16,
check_forbidden: bool,
) -> impl Future<Item = Vec<SocketAddr>, Error = io::Error> + Send {
inner_resolve(context, addr, port, check_forbidden)
}
|
{
let (resolver, bg) = {
// To make this independent, if targeting macOS, BSD, Linux, or Windows, we can use the system's configuration:
#[cfg(any(unix, windows))]
{
if let Some(conf) = dns {
use trust_dns_resolver::config::ResolverOpts;
AsyncResolver::new(conf, ResolverOpts::default())
} else {
use trust_dns_resolver::system_conf::read_system_conf;
// use the system resolver configuration
let (config, opts) = read_system_conf().expect("Failed to read global dns sysconf");
AsyncResolver::new(config, opts)
}
}
// For other operating systems, we can use one of the preconfigured definitions
#[cfg(not(any(unix, windows)))]
{
// Directly reference the config types
|
identifier_body
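The two #[cfg] blocks above are mutually exclusive, so after cfg-stripping exactly one of them remains as the value of the surrounding braces. The same exclusivity is easier to see with two cfg-gated functions; this standalone sketch is only an illustration of the pattern, not part of the module:

// Exactly one of these two definitions is compiled for any given target,
// mirroring the cfg(any(unix, windows)) / cfg(not(any(unix, windows))) split above.
#[cfg(any(unix, windows))]
fn default_dns_description() -> &'static str {
    "system resolver configuration"
}

#[cfg(not(any(unix, windows)))]
fn default_dns_description() -> &'static str {
    "preconfigured Google resolvers"
}

fn main() {
    println!("falling back to: {}", default_dns_description());
}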
|
dom_html_heading_element.rs
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::DOMElement;
use crate::DOMEventTarget;
use crate::DOMHTMLElement;
use crate::DOMNode;
use crate::DOMObject;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
pub struct DOMHTMLHeadingElement(Object<ffi::WebKitDOMHTMLHeadingElement, ffi::WebKitDOMHTMLHeadingElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget;
match fn {
type_ => || ffi::webkit_dom_html_heading_element_get_type(),
}
}
pub const NONE_DOMHTML_HEADING_ELEMENT: Option<&DOMHTMLHeadingElement> = None;
pub trait DOMHTMLHeadingElementExt:'static {
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_heading_element_get_align")]
fn align(&self) -> Option<glib::GString>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_heading_element_set_align")]
fn set_align(&self, value: &str);
fn connect_property_align_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<DOMHTMLHeadingElement>> DOMHTMLHeadingElementExt for O {
fn align(&self) -> Option<glib::GString> {
unsafe {
from_glib_full(ffi::webkit_dom_html_heading_element_get_align(
self.as_ref().to_glib_none().0,
))
}
}
fn set_align(&self, value: &str) {
unsafe {
ffi::webkit_dom_html_heading_element_set_align(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn connect_property_align_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_align_trampoline<P, F: Fn(&P) +'static>(
this: *mut ffi::WebKitDOMHTMLHeadingElement,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<DOMHTMLHeadingElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLHeadingElement::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::align\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_align_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for DOMHTMLHeadingElement {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
|
}
|
{
f.write_str("DOMHTMLHeadingElement")
}
|
identifier_body
|
dom_html_heading_element.rs
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::DOMElement;
use crate::DOMEventTarget;
use crate::DOMHTMLElement;
use crate::DOMNode;
use crate::DOMObject;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
pub struct DOMHTMLHeadingElement(Object<ffi::WebKitDOMHTMLHeadingElement, ffi::WebKitDOMHTMLHeadingElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget;
match fn {
type_ => || ffi::webkit_dom_html_heading_element_get_type(),
}
}
pub const NONE_DOMHTML_HEADING_ELEMENT: Option<&DOMHTMLHeadingElement> = None;
pub trait DOMHTMLHeadingElementExt:'static {
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_heading_element_get_align")]
fn align(&self) -> Option<glib::GString>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_heading_element_set_align")]
fn set_align(&self, value: &str);
fn connect_property_align_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<DOMHTMLHeadingElement>> DOMHTMLHeadingElementExt for O {
fn align(&self) -> Option<glib::GString> {
unsafe {
from_glib_full(ffi::webkit_dom_html_heading_element_get_align(
self.as_ref().to_glib_none().0,
))
}
}
fn set_align(&self, value: &str) {
unsafe {
ffi::webkit_dom_html_heading_element_set_align(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn connect_property_align_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn
|
<P, F: Fn(&P) +'static>(
this: *mut ffi::WebKitDOMHTMLHeadingElement,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<DOMHTMLHeadingElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLHeadingElement::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::align\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_align_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for DOMHTMLHeadingElement {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("DOMHTMLHeadingElement")
}
}
|
notify_align_trampoline
|
identifier_name
|
dom_html_heading_element.rs
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::DOMElement;
use crate::DOMEventTarget;
use crate::DOMHTMLElement;
use crate::DOMNode;
use crate::DOMObject;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
pub struct DOMHTMLHeadingElement(Object<ffi::WebKitDOMHTMLHeadingElement, ffi::WebKitDOMHTMLHeadingElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget;
match fn {
type_ => || ffi::webkit_dom_html_heading_element_get_type(),
}
}
pub const NONE_DOMHTML_HEADING_ELEMENT: Option<&DOMHTMLHeadingElement> = None;
pub trait DOMHTMLHeadingElementExt:'static {
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_heading_element_get_align")]
fn align(&self) -> Option<glib::GString>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_heading_element_set_align")]
fn set_align(&self, value: &str);
fn connect_property_align_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<DOMHTMLHeadingElement>> DOMHTMLHeadingElementExt for O {
fn align(&self) -> Option<glib::GString> {
unsafe {
from_glib_full(ffi::webkit_dom_html_heading_element_get_align(
self.as_ref().to_glib_none().0,
|
}
}
fn set_align(&self, value: &str) {
unsafe {
ffi::webkit_dom_html_heading_element_set_align(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn connect_property_align_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_align_trampoline<P, F: Fn(&P) +'static>(
this: *mut ffi::WebKitDOMHTMLHeadingElement,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<DOMHTMLHeadingElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLHeadingElement::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::align\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_align_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for DOMHTMLHeadingElement {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("DOMHTMLHeadingElement")
}
}
|
))
|
random_line_split
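notify_align_trampoline is the usual gtk-rs trampoline: the Rust closure is boxed, leaked through Box_::into_raw, carried across C as a gpointer, and re-borrowed inside an extern "C" shim before being called. A self-contained sketch of that mechanism with plain pointers (all names here are illustrative and not part of any real FFI):

use std::os::raw::c_void;

unsafe extern "C" fn trampoline<F: Fn(i32)>(value: i32, user_data: *mut c_void) {
    // Recover the closure without taking ownership, mirroring `&*(f as *const F)`.
    let f: &F = &*(user_data as *const F);
    f(value);
}

fn connect<F: Fn(i32) + 'static>(f: F) -> (unsafe extern "C" fn(i32, *mut c_void), *mut c_void) {
    // Box the closure and leak it, as Box_::into_raw does in the generated code.
    let boxed: Box<F> = Box::new(f);
    (trampoline::<F>, Box::into_raw(boxed) as *mut c_void)
}

fn main() {
    let (cb, data) = connect(|v| println!("notified with {}", v));
    // In real bindings the C side stores (cb, data) and invokes cb when the
    // property changes; here we call it directly. The box is intentionally
    // leaked, as in the bindings, where it lives until the handler is disconnected.
    unsafe { cb(7, data) };
}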
|
rsa.rs
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
//mod big;
//use big::BIG;
//mod dbig;
//use dbig::DBIG;
//mod ff;
use ff::FF;
//mod big;
//mod dbig;
//mod rom;
use rom;
//mod rand;
use rand::RAND;
//mod hash256;
use hash256::HASH256;
//mod hash384;
use hash384::HASH384;
//mod hash512;
use hash512::HASH512;
pub const RFS:usize =(rom::MODBYTES as usize)*rom::FFLEN;
pub const SHA256:usize=32;
pub const SHA384:usize=48;
pub const SHA512:usize=64;
pub const HASH_TYPE:usize=SHA256;
pub struct RsaPrivateKey {
p:FF,
q:FF,
dp:FF,
dq:FF,
c:FF
}
pub struct RsaPublicKey {
e: isize,
n: FF
}
pub fn new_private_key(n: usize) -> RsaPrivateKey {
RsaPrivateKey {p:FF::new_int(n),q:FF::new_int(n),dp:FF::new_int(n),dq:FF::new_int(n),c:FF::new_int(n)}
}
pub fn new_public_key(m: usize) -> RsaPublicKey {
RsaPublicKey {e:0,n:FF::new_int(m)}
}
fn hashit(sha: usize,a: Option<&[u8]>,n: isize,w: &mut [u8]) {
if sha==SHA256 {
let mut h=HASH256::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA384 {
let mut h=HASH384::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA512 {
let mut h=HASH512::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
}
pub fn key_pair(rng: &mut RAND,e: isize,prv: &mut RsaPrivateKey,pbc: &mut RsaPublicKey) { /* IEEE1363 A16.11/A16.12 more or less */
let n=pbc.n.getlen()/2;
let mut t=FF::new_int(n);
let mut p1=FF::new_int(n);
let mut q1=FF::new_int(n);
loop {
prv.p.random(rng);
while prv.p.lastbits(2)!=3 {prv.p.inc(1)}
while!FF::prime(&prv.p,rng) {
prv.p.inc(4);
}
p1.copy(&prv.p);
p1.dec(1);
if p1.cfactor(e) {continue}
break;
}
loop {
prv.q.random(rng);
while prv.q.lastbits(2)!=3 {prv.q.inc(1)}
while!FF::prime(&prv.q,rng) {
prv.q.inc(4);
}
q1.copy(&prv.q);
q1.dec(1);
if q1.cfactor(e) {continue}
break;
}
pbc.n=FF::mul(&prv.p,&prv.q);
pbc.e=e;
t.copy(&p1);
t.shr();
prv.dp.set(e);
prv.dp.invmodp(&t);
if prv.dp.parity()==0 {prv.dp.add(&t)}
prv.dp.norm();
t.copy(&q1);
t.shr();
prv.dq.set(e);
prv.dq.invmodp(&t);
if prv.dq.parity()==0 {prv.dq.add(&t)}
prv.dq.norm();
prv.c.copy(&prv.p);
prv.c.invmodp(&prv.q);
}
/* Mask Generation Function */
pub fn
|
(sha: usize,z: &[u8],olen: usize,k: &mut [u8]) {
let hlen=sha;
let mut j=0;
for i in 0..k.len() {k[i]=0}
let mut cthreshold=olen/hlen;
if olen%hlen!=0 {cthreshold+=1}
for counter in 0..cthreshold {
let mut b:[u8;64]=[0;64];
hashit(sha,Some(z),counter as isize,&mut b);
if j+hlen>olen {
for i in 0..(olen%hlen) {k[j]=b[i]; j+=1}
} else {
for i in 0..hlen {k[j]=b[i]; j+=1}
}
}
}
/* SHAXXX identifier strings */
const SHA256ID:[u8;19]= [0x30,0x31,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x01,0x05,0x00,0x04,0x20];
const SHA384ID:[u8;19]= [0x30,0x41,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x02,0x05,0x00,0x04,0x30];
const SHA512ID:[u8;19]= [0x30,0x51,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x03,0x05,0x00,0x04,0x40];
pub fn pkcs15(sha: usize,m: &[u8],w: &mut [u8]) -> bool {
let olen=rom::FF_BITS/8;
let hlen=sha;
let idlen=19;
let mut b:[u8;64]=[0;64]; /* Not good */
if olen<idlen+hlen+10 {return false}
hashit(sha,Some(m),-1,&mut b);
for i in 0..w.len() {w[i]=0}
let mut i=0;
w[i]=0; i+=1;
w[i]=1; i+=1;
for _ in 0..olen-idlen-hlen-3 {w[i]=0xff; i+=1}
w[i]=0; i+=1;
if hlen==SHA256 {
for j in 0..idlen {w[i]=SHA256ID[j]; i+=1}
}
if hlen==SHA384 {
for j in 0..idlen {w[i]=SHA384ID[j]; i+=1}
}
if hlen==SHA512 {
for j in 0..idlen {w[i]=SHA512ID[j]; i+=1}
}
for j in 0..hlen {w[i]=b[j]; i+=1}
return true;
}
/* OAEP Message Encoding for Encryption */
pub fn oaep_encode(sha: usize,m: &[u8],rng: &mut RAND,p: Option<&[u8]>,f: &mut [u8]) -> bool {
let olen=RFS-1;
let mlen=m.len();
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
if mlen>olen-hlen-seedlen-1 {return false}
let mut dbmask:[u8;RFS]=[0;RFS];
hashit(sha,p,-1,f);
let slen=olen-mlen-hlen-seedlen-1;
for i in 0..slen {f[hlen+i]=0}
f[hlen+slen]=1;
for i in 0..mlen {f[hlen+slen+1+i]=m[i]}
for i in 0..seedlen {seed[i]=rng.getbyte()}
mgf1(sha,&seed,olen-seedlen,&mut dbmask);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,f);
for i in 0..seedlen {f[i]^=seed[i]}
for i in 0..olen-seedlen {f[i+seedlen]=dbmask[i]}
/* pad to length RFS */
let d=1;
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
return true;
}
/* OAEP Message Decoding for Decryption */
pub fn oaep_decode(sha: usize,p: Option<&[u8]>,f: &mut [u8]) -> usize {
let olen=RFS-1;
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
let mut chash:[u8;64]=[0;64];
if olen<seedlen+hlen+1 {return 0}
let mut dbmask:[u8;RFS]=[0;RFS];
//for i in 0..olen-seedlen {dbmask[i]=0}
if f.len()<RFS {
let d=RFS-f.len();
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
}
hashit(sha,p,-1,&mut chash);
let x=f[0];
for i in seedlen..olen {
dbmask[i-seedlen]=f[i+1];
}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,&mut seed);
for i in 0..seedlen {seed[i]^=f[i+1]}
mgf1(sha,&seed,olen-seedlen,f);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
let mut comp=true;
for i in 0..hlen {
if chash[i]!=dbmask[i] {comp=false}
}
for i in 0..olen-seedlen-hlen {
dbmask[i]=dbmask[i+hlen]
}
for i in 0..hlen {
seed[i]=0; chash[i]=0
}
let mut k=0;
loop {
if k>=olen-seedlen-hlen {return 0}
if dbmask[k]!=0 {break}
k+=1;
}
let t=dbmask[k];
if!comp || x!=0 || t!=0x01 {
for i in 0..olen-seedlen {dbmask[i]=0}
return 0;
}
for i in 0..olen-seedlen-hlen-k-1 {
f[i]=dbmask[i+k+1];
}
for i in 0..olen-seedlen {dbmask[i]=0}
return olen-seedlen-hlen-k-1;
}
/* destroy the Private Key structure */
pub fn private_key_kill(prv: &mut RsaPrivateKey) {
prv.p.zero();
prv.q.zero();
prv.dp.zero();
prv.dq.zero();
prv.c.zero();
}
/* RSA encryption with the public key */
pub fn encrypt(pbc: &RsaPublicKey,f: &[u8],g: &mut [u8]) {
let m=pbc.n.getlen();
let mut r=FF::new_int(m);
FF::frombytes(&mut r,f);
r.power(pbc.e,&pbc.n);
r.tobytes(g);
}
/* RSA decryption with the private key */
pub fn decrypt(prv: &RsaPrivateKey,g: &[u8],f: &mut [u8]) {
let n=prv.p.getlen();
let mut r=FF::new_int(2*n);
FF::frombytes(&mut r,g);
let mut jp=r.dmod(&prv.p);
let mut jq=r.dmod(&prv.q);
jp.skpow(&prv.dp,&prv.p);
jq.skpow(&prv.dq,&prv.q);
r.zero();
r.dscopy(&jp);
jp.rmod(&prv.q);
if FF::comp(&jp,&jq)>0 {jq.add(&prv.q)}
jq.sub(&jp);
jq.norm();
let mut t=FF::mul(&prv.c,&jq);
jq=t.dmod(&prv.q);
t=FF::mul(&jq,&prv.p);
r.add(&t);
r.norm();
r.tobytes(f);
}
|
mgf1
|
identifier_name
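The identifier filled in here is mgf1, the MGF1 mask generation function from PKCS#1: the mask is H(z ‖ 0) ‖ H(z ‖ 1) ‖ … truncated to olen bytes, which is exactly what the counter loop above does. A toy restatement of that loop (DefaultHasher is a stand-in for SHA-2 purely to keep the sketch dependency-free; it is not a cryptographic hash):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn toy_hash(z: &[u8], counter: u32) -> [u8; 8] {
    let mut h = DefaultHasher::new();
    z.hash(&mut h);
    counter.hash(&mut h);
    h.finish().to_be_bytes()
}

fn mgf1_toy(z: &[u8], olen: usize) -> Vec<u8> {
    let hlen = 8; // block size of the stand-in hash
    let mut out = Vec::with_capacity(olen);
    let mut counter = 0u32;
    while out.len() < olen {
        let block = toy_hash(z, counter);
        let take = hlen.min(olen - out.len()); // the final block may be truncated
        out.extend_from_slice(&block[..take]);
        counter += 1;
    }
    out
}

fn main() {
    let mask = mgf1_toy(b"seed", 20);
    println!("{} mask bytes: {:02x?}", mask.len(), mask);
}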
|
rsa.rs
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
//mod big;
//use big::BIG;
//mod dbig;
//use dbig::DBIG;
//mod ff;
use ff::FF;
//mod big;
//mod dbig;
//mod rom;
use rom;
//mod rand;
use rand::RAND;
//mod hash256;
use hash256::HASH256;
//mod hash384;
use hash384::HASH384;
//mod hash512;
use hash512::HASH512;
pub const RFS:usize =(rom::MODBYTES as usize)*rom::FFLEN;
pub const SHA256:usize=32;
pub const SHA384:usize=48;
pub const SHA512:usize=64;
pub const HASH_TYPE:usize=SHA256;
pub struct RsaPrivateKey {
p:FF,
q:FF,
dp:FF,
dq:FF,
c:FF
}
pub struct RsaPublicKey {
e: isize,
n: FF
}
pub fn new_private_key(n: usize) -> RsaPrivateKey {
RsaPrivateKey {p:FF::new_int(n),q:FF::new_int(n),dp:FF::new_int(n),dq:FF::new_int(n),c:FF::new_int(n)}
}
pub fn new_public_key(m: usize) -> RsaPublicKey {
RsaPublicKey {e:0,n:FF::new_int(m)}
}
fn hashit(sha: usize,a: Option<&[u8]>,n: isize,w: &mut [u8]) {
if sha==SHA256 {
let mut h=HASH256::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA384 {
let mut h=HASH384::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA512 {
let mut h=HASH512::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
}
pub fn key_pair(rng: &mut RAND,e: isize,prv: &mut RsaPrivateKey,pbc: &mut RsaPublicKey) { /* IEEE1363 A16.11/A16.12 more or less */
let n=pbc.n.getlen()/2;
let mut t=FF::new_int(n);
let mut p1=FF::new_int(n);
let mut q1=FF::new_int(n);
loop {
prv.p.random(rng);
while prv.p.lastbits(2)!=3 {prv.p.inc(1)}
while!FF::prime(&prv.p,rng) {
prv.p.inc(4);
}
p1.copy(&prv.p);
p1.dec(1);
if p1.cfactor(e) {continue}
break;
}
loop {
prv.q.random(rng);
while prv.q.lastbits(2)!=3 {prv.q.inc(1)}
while!FF::prime(&prv.q,rng) {
prv.q.inc(4);
}
q1.copy(&prv.q);
q1.dec(1);
if q1.cfactor(e) {continue}
break;
}
pbc.n=FF::mul(&prv.p,&prv.q);
pbc.e=e;
t.copy(&p1);
t.shr();
prv.dp.set(e);
prv.dp.invmodp(&t);
if prv.dp.parity()==0 {prv.dp.add(&t)}
prv.dp.norm();
t.copy(&q1);
t.shr();
prv.dq.set(e);
prv.dq.invmodp(&t);
if prv.dq.parity()==0 {prv.dq.add(&t)}
prv.dq.norm();
prv.c.copy(&prv.p);
prv.c.invmodp(&prv.q);
}
/* Mask Generation Function */
pub fn mgf1(sha: usize,z: &[u8],olen: usize,k: &mut [u8])
|
/* SHAXXX identifier strings */
const SHA256ID:[u8;19]= [0x30,0x31,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x01,0x05,0x00,0x04,0x20];
const SHA384ID:[u8;19]= [0x30,0x41,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x02,0x05,0x00,0x04,0x30];
const SHA512ID:[u8;19]= [0x30,0x51,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x03,0x05,0x00,0x04,0x40];
pub fn pkcs15(sha: usize,m: &[u8],w: &mut [u8]) -> bool {
let olen=rom::FF_BITS/8;
let hlen=sha;
let idlen=19;
let mut b:[u8;64]=[0;64]; /* Not good */
if olen<idlen+hlen+10 {return false}
hashit(sha,Some(m),-1,&mut b);
for i in 0..w.len() {w[i]=0}
let mut i=0;
w[i]=0; i+=1;
w[i]=1; i+=1;
for _ in 0..olen-idlen-hlen-3 {w[i]=0xff; i+=1}
w[i]=0; i+=1;
if hlen==SHA256 {
for j in 0..idlen {w[i]=SHA256ID[j]; i+=1}
}
if hlen==SHA384 {
for j in 0..idlen {w[i]=SHA384ID[j]; i+=1}
}
if hlen==SHA512 {
for j in 0..idlen {w[i]=SHA512ID[j]; i+=1}
}
for j in 0..hlen {w[i]=b[j]; i+=1}
return true;
}
/* OAEP Message Encoding for Encryption */
pub fn oaep_encode(sha: usize,m: &[u8],rng: &mut RAND,p: Option<&[u8]>,f: &mut [u8]) -> bool {
let olen=RFS-1;
let mlen=m.len();
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
if mlen>olen-hlen-seedlen-1 {return false}
let mut dbmask:[u8;RFS]=[0;RFS];
hashit(sha,p,-1,f);
let slen=olen-mlen-hlen-seedlen-1;
for i in 0..slen {f[hlen+i]=0}
f[hlen+slen]=1;
for i in 0..mlen {f[hlen+slen+1+i]=m[i]}
for i in 0..seedlen {seed[i]=rng.getbyte()}
mgf1(sha,&seed,olen-seedlen,&mut dbmask);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,f);
for i in 0..seedlen {f[i]^=seed[i]}
for i in 0..olen-seedlen {f[i+seedlen]=dbmask[i]}
/* pad to length RFS */
let d=1;
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
return true;
}
/* OAEP Message Decoding for Decryption */
pub fn oaep_decode(sha: usize,p: Option<&[u8]>,f: &mut [u8]) -> usize {
let olen=RFS-1;
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
let mut chash:[u8;64]=[0;64];
if olen<seedlen+hlen+1 {return 0}
let mut dbmask:[u8;RFS]=[0;RFS];
//for i in 0..olen-seedlen {dbmask[i]=0}
if f.len()<RFS {
let d=RFS-f.len();
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
}
hashit(sha,p,-1,&mut chash);
let x=f[0];
for i in seedlen..olen {
dbmask[i-seedlen]=f[i+1];
}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,&mut seed);
for i in 0..seedlen {seed[i]^=f[i+1]}
mgf1(sha,&seed,olen-seedlen,f);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
let mut comp=true;
for i in 0..hlen {
if chash[i]!=dbmask[i] {comp=false}
}
for i in 0..olen-seedlen-hlen {
dbmask[i]=dbmask[i+hlen]
}
for i in 0..hlen {
seed[i]=0; chash[i]=0
}
let mut k=0;
loop {
if k>=olen-seedlen-hlen {return 0}
if dbmask[k]!=0 {break}
k+=1;
}
let t=dbmask[k];
if!comp || x!=0 || t!=0x01 {
for i in 0..olen-seedlen {dbmask[i]=0}
return 0;
}
for i in 0..olen-seedlen-hlen-k-1 {
f[i]=dbmask[i+k+1];
}
for i in 0..olen-seedlen {dbmask[i]=0}
return olen-seedlen-hlen-k-1;
}
/* destroy the Private Key structure */
pub fn private_key_kill(prv: &mut RsaPrivateKey) {
prv.p.zero();
prv.q.zero();
prv.dp.zero();
prv.dq.zero();
prv.c.zero();
}
/* RSA encryption with the public key */
pub fn encrypt(pbc: &RsaPublicKey,f: &[u8],g: &mut [u8]) {
let m=pbc.n.getlen();
let mut r=FF::new_int(m);
FF::frombytes(&mut r,f);
r.power(pbc.e,&pbc.n);
r.tobytes(g);
}
/* RSA decryption with the private key */
pub fn decrypt(prv: &RsaPrivateKey,g: &[u8],f: &mut [u8]) {
let n=prv.p.getlen();
let mut r=FF::new_int(2*n);
FF::frombytes(&mut r,g);
let mut jp=r.dmod(&prv.p);
let mut jq=r.dmod(&prv.q);
jp.skpow(&prv.dp,&prv.p);
jq.skpow(&prv.dq,&prv.q);
r.zero();
r.dscopy(&jp);
jp.rmod(&prv.q);
if FF::comp(&jp,&jq)>0 {jq.add(&prv.q)}
jq.sub(&jp);
jq.norm();
let mut t=FF::mul(&prv.c,&jq);
jq=t.dmod(&prv.q);
t=FF::mul(&jq,&prv.p);
r.add(&t);
r.norm();
r.tobytes(f);
}
|
{
let hlen=sha;
let mut j=0;
for i in 0..k.len() {k[i]=0}
let mut cthreshold=olen/hlen;
if olen%hlen!=0 {cthreshold+=1}
for counter in 0..cthreshold {
let mut b:[u8;64]=[0;64];
hashit(sha,Some(z),counter as isize,&mut b);
if j+hlen>olen {
for i in 0..(olen%hlen) {k[j]=b[i]; j+=1}
} else {
for i in 0..hlen {k[j]=b[i]; j+=1}
}
}
}
|
identifier_body
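Further down the same file, pkcs15 writes the EMSA-PKCS1-v1_5 block 0x00 0x01 ‖ 0xFF…0xFF ‖ 0x00 ‖ DigestInfo ‖ H(m) into w, padding with 0xFF until the total length equals olen = FF_BITS/8. A dependency-free restatement of just that layout (the DigestInfo bytes and the hash below are dummies chosen for illustration):

fn pkcs15_layout(olen: usize, digest_info: &[u8], hash: &[u8]) -> Option<Vec<u8>> {
    let (idlen, hlen) = (digest_info.len(), hash.len());
    if olen < idlen + hlen + 10 {
        return None; // same guard as the original
    }
    let mut w = Vec::with_capacity(olen);
    w.push(0x00);
    w.push(0x01);
    w.extend(std::iter::repeat(0xffu8).take(olen - idlen - hlen - 3));
    w.push(0x00);
    w.extend_from_slice(digest_info);
    w.extend_from_slice(hash);
    debug_assert_eq!(w.len(), olen);
    Some(w)
}

fn main() {
    // A 19-byte DigestInfo prefix and a dummy 32-byte hash, matching the SHA-256
    // sizes used in the module; both are placeholders here.
    let di = [0u8; 19];
    let h = [0xabu8; 32];
    let block = pkcs15_layout(128, &di, &h).unwrap();
    println!("first bytes: {:02x?}", &block[..4]);
}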
|
rsa.rs
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
//mod big;
//use big::BIG;
//mod dbig;
//use dbig::DBIG;
//mod ff;
use ff::FF;
//mod big;
//mod dbig;
//mod rom;
use rom;
//mod rand;
use rand::RAND;
//mod hash256;
use hash256::HASH256;
//mod hash384;
use hash384::HASH384;
//mod hash512;
use hash512::HASH512;
pub const RFS:usize =(rom::MODBYTES as usize)*rom::FFLEN;
pub const SHA256:usize=32;
pub const SHA384:usize=48;
pub const SHA512:usize=64;
pub const HASH_TYPE:usize=SHA256;
pub struct RsaPrivateKey {
p:FF,
q:FF,
dp:FF,
dq:FF,
c:FF
}
pub struct RsaPublicKey {
e: isize,
n: FF
}
pub fn new_private_key(n: usize) -> RsaPrivateKey {
RsaPrivateKey {p:FF::new_int(n),q:FF::new_int(n),dp:FF::new_int(n),dq:FF::new_int(n),c:FF::new_int(n)}
}
pub fn new_public_key(m: usize) -> RsaPublicKey {
RsaPublicKey {e:0,n:FF::new_int(m)}
}
fn hashit(sha: usize,a: Option<&[u8]>,n: isize,w: &mut [u8]) {
if sha==SHA256 {
let mut h=HASH256::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA384 {
let mut h=HASH384::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA512 {
let mut h=HASH512::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
}
pub fn key_pair(rng: &mut RAND,e: isize,prv: &mut RsaPrivateKey,pbc: &mut RsaPublicKey) { /* IEEE1363 A16.11/A16.12 more or less */
let n=pbc.n.getlen()/2;
let mut t=FF::new_int(n);
let mut p1=FF::new_int(n);
let mut q1=FF::new_int(n);
loop {
prv.p.random(rng);
while prv.p.lastbits(2)!=3 {prv.p.inc(1)}
while!FF::prime(&prv.p,rng) {
prv.p.inc(4);
}
p1.copy(&prv.p);
p1.dec(1);
if p1.cfactor(e) {continue}
break;
}
loop {
prv.q.random(rng);
while prv.q.lastbits(2)!=3 {prv.q.inc(1)}
while!FF::prime(&prv.q,rng) {
prv.q.inc(4);
}
q1.copy(&prv.q);
q1.dec(1);
if q1.cfactor(e) {continue}
break;
}
pbc.n=FF::mul(&prv.p,&prv.q);
pbc.e=e;
t.copy(&p1);
t.shr();
prv.dp.set(e);
prv.dp.invmodp(&t);
if prv.dp.parity()==0 {prv.dp.add(&t)}
prv.dp.norm();
t.copy(&q1);
t.shr();
prv.dq.set(e);
prv.dq.invmodp(&t);
if prv.dq.parity()==0 {prv.dq.add(&t)}
prv.dq.norm();
prv.c.copy(&prv.p);
prv.c.invmodp(&prv.q);
}
/* Mask Generation Function */
pub fn mgf1(sha: usize,z: &[u8],olen: usize,k: &mut [u8]) {
let hlen=sha;
let mut j=0;
for i in 0..k.len() {k[i]=0}
let mut cthreshold=olen/hlen;
if olen%hlen!=0 {cthreshold+=1}
for counter in 0..cthreshold {
let mut b:[u8;64]=[0;64];
hashit(sha,Some(z),counter as isize,&mut b);
if j+hlen>olen {
for i in 0..(olen%hlen) {k[j]=b[i]; j+=1}
} else {
for i in 0..hlen {k[j]=b[i]; j+=1}
}
}
}
/* SHAXXX identifier strings */
const SHA256ID:[u8;19]= [0x30,0x31,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x01,0x05,0x00,0x04,0x20];
const SHA384ID:[u8;19]= [0x30,0x41,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x02,0x05,0x00,0x04,0x30];
const SHA512ID:[u8;19]= [0x30,0x51,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x03,0x05,0x00,0x04,0x40];
pub fn pkcs15(sha: usize,m: &[u8],w: &mut [u8]) -> bool {
let olen=rom::FF_BITS/8;
let hlen=sha;
let idlen=19;
let mut b:[u8;64]=[0;64]; /* Not good */
if olen<idlen+hlen+10 {return false}
hashit(sha,Some(m),-1,&mut b);
for i in 0..w.len() {w[i]=0}
let mut i=0;
w[i]=0; i+=1;
w[i]=1; i+=1;
for _ in 0..olen-idlen-hlen-3 {w[i]=0xff; i+=1}
w[i]=0; i+=1;
if hlen==SHA256 {
for j in 0..idlen {w[i]=SHA256ID[j]; i+=1}
}
if hlen==SHA384 {
for j in 0..idlen {w[i]=SHA384ID[j]; i+=1}
}
if hlen==SHA512 {
for j in 0..idlen {w[i]=SHA512ID[j]; i+=1}
}
for j in 0..hlen {w[i]=b[j]; i+=1}
return true;
}
/* OAEP Message Encoding for Encryption */
pub fn oaep_encode(sha: usize,m: &[u8],rng: &mut RAND,p: Option<&[u8]>,f: &mut [u8]) -> bool {
let olen=RFS-1;
let mlen=m.len();
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
if mlen>olen-hlen-seedlen-1 {return false}
let mut dbmask:[u8;RFS]=[0;RFS];
hashit(sha,p,-1,f);
let slen=olen-mlen-hlen-seedlen-1;
for i in 0..slen {f[hlen+i]=0}
f[hlen+slen]=1;
for i in 0..mlen {f[hlen+slen+1+i]=m[i]}
for i in 0..seedlen {seed[i]=rng.getbyte()}
mgf1(sha,&seed,olen-seedlen,&mut dbmask);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,f);
for i in 0..seedlen {f[i]^=seed[i]}
for i in 0..olen-seedlen {f[i+seedlen]=dbmask[i]}
/* pad to length RFS */
let d=1;
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
return true;
}
/* OAEP Message Decoding for Decryption */
pub fn oaep_decode(sha: usize,p: Option<&[u8]>,f: &mut [u8]) -> usize {
let olen=RFS-1;
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
let mut chash:[u8;64]=[0;64];
if olen<seedlen+hlen+1 {return 0}
let mut dbmask:[u8;RFS]=[0;RFS];
//for i in 0..olen-seedlen {dbmask[i]=0}
if f.len()<RFS {
let d=RFS-f.len();
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
}
hashit(sha,p,-1,&mut chash);
let x=f[0];
for i in seedlen..olen {
dbmask[i-seedlen]=f[i+1];
}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,&mut seed);
for i in 0..seedlen {seed[i]^=f[i+1]}
mgf1(sha,&seed,olen-seedlen,f);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
let mut comp=true;
for i in 0..hlen {
if chash[i]!=dbmask[i] {comp=false}
}
for i in 0..olen-seedlen-hlen {
dbmask[i]=dbmask[i+hlen]
}
for i in 0..hlen {
seed[i]=0; chash[i]=0
}
let mut k=0;
loop {
if k>=olen-seedlen-hlen {return 0}
if dbmask[k]!=0 {break}
k+=1;
}
let t=dbmask[k];
if!comp || x!=0 || t!=0x01 {
for i in 0..olen-seedlen {dbmask[i]=0}
return 0;
}
for i in 0..olen-seedlen-hlen-k-1 {
f[i]=dbmask[i+k+1];
}
for i in 0..olen-seedlen {dbmask[i]=0}
return olen-seedlen-hlen-k-1;
}
/* destroy the Private Key structure */
pub fn private_key_kill(prv: &mut RsaPrivateKey) {
prv.p.zero();
prv.q.zero();
prv.dp.zero();
prv.dq.zero();
prv.c.zero();
}
/* RSA encryption with the public key */
pub fn encrypt(pbc: &RsaPublicKey,f: &[u8],g: &mut [u8]) {
let m=pbc.n.getlen();
let mut r=FF::new_int(m);
FF::frombytes(&mut r,f);
r.power(pbc.e,&pbc.n);
r.tobytes(g);
}
/* RSA decryption with the private key */
pub fn decrypt(prv: &RsaPrivateKey,g: &[u8],f: &mut [u8]) {
let n=prv.p.getlen();
let mut r=FF::new_int(2*n);
FF::frombytes(&mut r,g);
let mut jp=r.dmod(&prv.p);
let mut jq=r.dmod(&prv.q);
jp.skpow(&prv.dp,&prv.p);
jq.skpow(&prv.dq,&prv.q);
|
r.dscopy(&jp);
jp.rmod(&prv.q);
if FF::comp(&jp,&jq)>0 {jq.add(&prv.q)}
jq.sub(&jp);
jq.norm();
let mut t=FF::mul(&prv.c,&jq);
jq=t.dmod(&prv.q);
t=FF::mul(&jq,&prv.p);
r.add(&t);
r.norm();
r.tobytes(f);
}
|
r.zero();
|
random_line_split
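decrypt recombines the two half-size exponentiations jp = g^dp mod p and jq = g^dq mod q with Garner's method: t = c·(jq − jp) mod q, where c = p⁻¹ mod q, and then m = jp + t·p. A worked toy example of that recombination with p = 11, q = 13, e = 7, d = 103 (numbers chosen only for illustration; FF::skpow/dmod are replaced by plain modular exponentiation):

fn modpow(mut b: u64, mut e: u64, m: u64) -> u64 {
    let mut acc = 1 % m;
    b %= m;
    while e > 0 {
        if e & 1 == 1 {
            acc = acc * b % m;
        }
        b = b * b % m;
        e >>= 1;
    }
    acc
}

fn main() {
    let (p, q, e, d) = (11u64, 13u64, 7u64, 103u64);
    let n = p * q;
    let m = 42u64;
    let c = modpow(m, e, n); // encrypt: 42^7 mod 143 = 81

    // Private exponents reduced mod p-1 and q-1, and p^{-1} mod q = 6 (11 * 6 = 66 ≡ 1 mod 13).
    let (dp, dq, c_inv) = (d % (p - 1), d % (q - 1), 6u64);
    let jp = modpow(c, dp, p); // residue mod p
    let jq = modpow(c, dq, q); // residue mod q
    let t = c_inv * ((jq + q - jp % q) % q) % q; // Garner step, as in decrypt()
    let recovered = jp + t * p;
    assert_eq!(recovered, m);
    println!("c = {}, recovered m = {}", c, recovered);
}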
|
rsa.rs
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
//mod big;
//use big::BIG;
//mod dbig;
//use dbig::DBIG;
//mod ff;
use ff::FF;
//mod big;
//mod dbig;
//mod rom;
use rom;
//mod rand;
use rand::RAND;
//mod hash256;
use hash256::HASH256;
//mod hash384;
use hash384::HASH384;
//mod hash512;
use hash512::HASH512;
pub const RFS:usize =(rom::MODBYTES as usize)*rom::FFLEN;
pub const SHA256:usize=32;
pub const SHA384:usize=48;
pub const SHA512:usize=64;
pub const HASH_TYPE:usize=SHA256;
pub struct RsaPrivateKey {
p:FF,
q:FF,
dp:FF,
dq:FF,
c:FF
}
pub struct RsaPublicKey {
e: isize,
n: FF
}
pub fn new_private_key(n: usize) -> RsaPrivateKey {
RsaPrivateKey {p:FF::new_int(n),q:FF::new_int(n),dp:FF::new_int(n),dq:FF::new_int(n),c:FF::new_int(n)}
}
pub fn new_public_key(m: usize) -> RsaPublicKey {
RsaPublicKey {e:0,n:FF::new_int(m)}
}
fn hashit(sha: usize,a: Option<&[u8]>,n: isize,w: &mut [u8]) {
if sha==SHA256 {
let mut h=HASH256::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA384 {
let mut h=HASH384::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0 {h.process_num(n as i32)}
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
if sha==SHA512 {
let mut h=HASH512::new();
if let Some(x)=a {
h.process_array(x);
}
if n>=0
|
let hs=h.hash();
for i in 0..sha {w[i]=hs[i]}
}
}
pub fn key_pair(rng: &mut RAND,e: isize,prv: &mut RsaPrivateKey,pbc: &mut RsaPublicKey) { /* IEEE1363 A16.11/A16.12 more or less */
let n=pbc.n.getlen()/2;
let mut t=FF::new_int(n);
let mut p1=FF::new_int(n);
let mut q1=FF::new_int(n);
loop {
prv.p.random(rng);
while prv.p.lastbits(2)!=3 {prv.p.inc(1)}
while!FF::prime(&prv.p,rng) {
prv.p.inc(4);
}
p1.copy(&prv.p);
p1.dec(1);
if p1.cfactor(e) {continue}
break;
}
loop {
prv.q.random(rng);
while prv.q.lastbits(2)!=3 {prv.q.inc(1)}
while!FF::prime(&prv.q,rng) {
prv.q.inc(4);
}
q1.copy(&prv.q);
q1.dec(1);
if q1.cfactor(e) {continue}
break;
}
pbc.n=FF::mul(&prv.p,&prv.q);
pbc.e=e;
t.copy(&p1);
t.shr();
prv.dp.set(e);
prv.dp.invmodp(&t);
if prv.dp.parity()==0 {prv.dp.add(&t)}
prv.dp.norm();
t.copy(&q1);
t.shr();
prv.dq.set(e);
prv.dq.invmodp(&t);
if prv.dq.parity()==0 {prv.dq.add(&t)}
prv.dq.norm();
prv.c.copy(&prv.p);
prv.c.invmodp(&prv.q);
}
/* Mask Generation Function */
pub fn mgf1(sha: usize,z: &[u8],olen: usize,k: &mut [u8]) {
let hlen=sha;
let mut j=0;
for i in 0..k.len() {k[i]=0}
let mut cthreshold=olen/hlen;
if olen%hlen!=0 {cthreshold+=1}
for counter in 0..cthreshold {
let mut b:[u8;64]=[0;64];
hashit(sha,Some(z),counter as isize,&mut b);
if j+hlen>olen {
for i in 0..(olen%hlen) {k[j]=b[i]; j+=1}
} else {
for i in 0..hlen {k[j]=b[i]; j+=1}
}
}
}
/* SHAXXX identifier strings */
const SHA256ID:[u8;19]= [0x30,0x31,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x01,0x05,0x00,0x04,0x20];
const SHA384ID:[u8;19]= [0x30,0x41,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x02,0x05,0x00,0x04,0x30];
const SHA512ID:[u8;19]= [0x30,0x51,0x30,0x0d,0x06,0x09,0x60,0x86,0x48,0x01,0x65,0x03,0x04,0x02,0x03,0x05,0x00,0x04,0x40];
pub fn pkcs15(sha: usize,m: &[u8],w: &mut [u8]) -> bool {
let olen=rom::FF_BITS/8;
let hlen=sha;
let idlen=19;
let mut b:[u8;64]=[0;64]; /* Not good */
if olen<idlen+hlen+10 {return false}
hashit(sha,Some(m),-1,&mut b);
for i in 0..w.len() {w[i]=0}
let mut i=0;
w[i]=0; i+=1;
w[i]=1; i+=1;
for _ in 0..olen-idlen-hlen-3 {w[i]=0xff; i+=1}
w[i]=0; i+=1;
if hlen==SHA256 {
for j in 0..idlen {w[i]=SHA256ID[j]; i+=1}
}
if hlen==SHA384 {
for j in 0..idlen {w[i]=SHA384ID[j]; i+=1}
}
if hlen==SHA512 {
for j in 0..idlen {w[i]=SHA512ID[j]; i+=1}
}
for j in 0..hlen {w[i]=b[j]; i+=1}
return true;
}
/* OAEP Message Encoding for Encryption */
pub fn oaep_encode(sha: usize,m: &[u8],rng: &mut RAND,p: Option<&[u8]>,f: &mut [u8]) -> bool {
let olen=RFS-1;
let mlen=m.len();
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
if mlen>olen-hlen-seedlen-1 {return false}
let mut dbmask:[u8;RFS]=[0;RFS];
hashit(sha,p,-1,f);
let slen=olen-mlen-hlen-seedlen-1;
for i in 0..slen {f[hlen+i]=0}
f[hlen+slen]=1;
for i in 0..mlen {f[hlen+slen+1+i]=m[i]}
for i in 0..seedlen {seed[i]=rng.getbyte()}
mgf1(sha,&seed,olen-seedlen,&mut dbmask);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,f);
for i in 0..seedlen {f[i]^=seed[i]}
for i in 0..olen-seedlen {f[i+seedlen]=dbmask[i]}
/* pad to length RFS */
let d=1;
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
return true;
}
/* OAEP Message Decoding for Decryption */
pub fn oaep_decode(sha: usize,p: Option<&[u8]>,f: &mut [u8]) -> usize {
let olen=RFS-1;
let hlen=sha;
let mut seed:[u8;64]=[0;64];
let seedlen=hlen;
let mut chash:[u8;64]=[0;64];
if olen<seedlen+hlen+1 {return 0}
let mut dbmask:[u8;RFS]=[0;RFS];
//for i in 0..olen-seedlen {dbmask[i]=0}
if f.len()<RFS {
let d=RFS-f.len();
for i in (d..RFS).rev() {
f[i]=f[i-d];
}
for i in (0..d).rev() {
f[i]=0;
}
}
hashit(sha,p,-1,&mut chash);
let x=f[0];
for i in seedlen..olen {
dbmask[i-seedlen]=f[i+1];
}
mgf1(sha,&dbmask[0..olen-seedlen],seedlen,&mut seed);
for i in 0..seedlen {seed[i]^=f[i+1]}
mgf1(sha,&seed,olen-seedlen,f);
for i in 0..olen-seedlen {dbmask[i]^=f[i]}
let mut comp=true;
for i in 0..hlen {
if chash[i]!=dbmask[i] {comp=false}
}
for i in 0..olen-seedlen-hlen {
dbmask[i]=dbmask[i+hlen]
}
for i in 0..hlen {
seed[i]=0; chash[i]=0
}
let mut k=0;
loop {
if k>=olen-seedlen-hlen {return 0}
if dbmask[k]!=0 {break}
k+=1;
}
let t=dbmask[k];
if!comp || x!=0 || t!=0x01 {
for i in 0..olen-seedlen {dbmask[i]=0}
return 0;
}
for i in 0..olen-seedlen-hlen-k-1 {
f[i]=dbmask[i+k+1];
}
for i in 0..olen-seedlen {dbmask[i]=0}
return olen-seedlen-hlen-k-1;
}
/* destroy the Private Key structure */
pub fn private_key_kill(prv: &mut RsaPrivateKey) {
prv.p.zero();
prv.q.zero();
prv.dp.zero();
prv.dq.zero();
prv.c.zero();
}
/* RSA encryption with the public key */
pub fn encrypt(pbc: &RsaPublicKey,f: &[u8],g: &mut [u8]) {
let m=pbc.n.getlen();
let mut r=FF::new_int(m);
FF::frombytes(&mut r,f);
r.power(pbc.e,&pbc.n);
r.tobytes(g);
}
/* RSA decryption with the private key */
pub fn decrypt(prv: &RsaPrivateKey,g: &[u8],f: &mut [u8]) {
let n=prv.p.getlen();
let mut r=FF::new_int(2*n);
FF::frombytes(&mut r,g);
let mut jp=r.dmod(&prv.p);
let mut jq=r.dmod(&prv.q);
jp.skpow(&prv.dp,&prv.p);
jq.skpow(&prv.dq,&prv.q);
r.zero();
r.dscopy(&jp);
jp.rmod(&prv.q);
if FF::comp(&jp,&jq)>0 {jq.add(&prv.q)}
jq.sub(&jp);
jq.norm();
let mut t=FF::mul(&prv.c,&jq);
jq=t.dmod(&prv.q);
t=FF::mul(&jq,&prv.p);
r.add(&t);
r.norm();
r.tobytes(f);
}
|
{h.process_num(n as i32)}
|
conditional_block
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{self, Color, RGBA};
use euclid::num::Zero;
use num::ToPrimitive;
use std::ascii::AsciiExt;
use std::ops::Deref;
use string_cache::{Atom, Namespace};
use url::Url;
use util::str::{DOMString, LengthOrPercentageOrAuto, HTML_SPACE_CHARACTERS, WHITESPACE};
use util::str::{parse_length, read_numbers, split_html_space_chars, str_join};
use values::specified::{Length};
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(PartialEq, Clone, HeapSizeOf)]
pub enum AttrValue {
String(DOMString),
TokenList(DOMString, Vec<Atom>),
UInt(DOMString, u32),
Int(DOMString, i32),
Atom(Atom),
Length(DOMString, Option<Length>),
Color(DOMString, Option<RGBA>),
Dimension(DOMString, LengthOrPercentageOrAuto),
Url(DOMString, Option<Url>),
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i64> {
let mut input = input.skip_while(|c| {
HTML_SPACE_CHARACTERS.iter().any(|s| s == c)
}).peekable();
let sign = match input.peek() {
None => return None,
Some(&'-') => {
input.next();
-1
},
Some(&'+') => {
input.next();
1
},
Some(_) => 1,
};
let value = read_numbers(input);
value.and_then(|value| value.checked_mul(sign))
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i32> {
do_parse_integer(input).and_then(|result| {
result.to_i32()
})
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Option<u32> {
do_parse_integer(input).and_then(|result| {
result.to_u32()
})
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: DOMString) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): efficient conversion of Vec<Atom> to DOMString
let tokens = DOMString::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_i32(string: DOMString, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
AttrValue::Int(string, result)
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers
pub fn from_limited_i32(string: DOMString, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
if result < 0 {
AttrValue::Int(string, default)
} else {
AttrValue::Int(string, result)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
// FIXME(ajeffrey): convert directly from DOMString to Atom
let value = Atom::from(&*string);
AttrValue::Atom(value)
}
pub fn from_url(base: &Url, url: DOMString) -> AttrValue {
let joined = base.join(&url).ok();
AttrValue::Url(url, joined)
}
pub fn from_legacy_color(string: DOMString) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: DOMString) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
pub fn from_nonzero_dimension(string: DOMString) -> AttrValue {
let parsed = parse_nonzero_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length>
|
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Assumes the `AttrValue` is a `Url` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Url`
pub fn as_url(&self) -> Option<&Url> {
match *self {
AttrValue::Url(_, ref url) => url.as_ref(),
_ => panic!("Url not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
}
impl Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Int(ref value, _) |
AttrValue::Url(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
/// HTML5 § 2.4.4.5.
///
/// https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-zero-dimension-values
pub fn parse_nonzero_length(value: &str) -> LengthOrPercentageOrAuto {
match parse_length(value) {
LengthOrPercentageOrAuto::Length(x) if x == Au::zero() => LengthOrPercentageOrAuto::Auto,
LengthOrPercentageOrAuto::Percentage(0.) => LengthOrPercentageOrAuto::Auto,
x => x,
}
}
/// Parses a legacy color per HTML5 § 2.4.6. If unparseable, `Err` is returned.
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
// Steps 1 and 2.
if input.is_empty() {
return Err(())
}
// Step 3.
input = input.trim_matches(WHITESPACE);
// Step 4.
if input.eq_ignore_ascii_case("transparent") {
return Err(())
}
// Step 5.
if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) {
return Ok(rgba);
}
// Step 6.
if input.len() == 4 {
if let (b'#', Ok(r), Ok(g), Ok(b)) =
(input.as_bytes()[0],
hex(input.as_bytes()[1] as char),
hex(input.as_bytes()[2] as char),
hex(input.as_bytes()[3] as char)) {
return Ok(RGBA {
red: (r as f32) * 17.0 / 255.0,
green: (g as f32) * 17.0 / 255.0,
blue: (b as f32) * 17.0 / 255.0,
alpha: 1.0,
})
}
}
// Step 7.
let mut new_input = String::new();
for ch in input.chars() {
if ch as u32 > 0xffff {
new_input.push_str("00")
} else {
new_input.push(ch)
}
}
let mut input = &*new_input;
// Step 8.
for (char_count, (index, _)) in input.char_indices().enumerate() {
if char_count == 128 {
input = &input[..index];
break
}
}
// Step 9.
if input.as_bytes()[0] == b'#' {
input = &input[1..]
}
// Step 10.
let mut new_input = Vec::new();
for ch in input.chars() {
if hex(ch).is_ok() {
new_input.push(ch as u8)
} else {
new_input.push(b'0')
}
}
let mut input = new_input;
// Step 11.
while input.is_empty() || (input.len() % 3) != 0 {
input.push(b'0')
}
// Step 12.
let mut length = input.len() / 3;
let (mut red, mut green, mut blue) = (&input[..length],
&input[length..length * 2],
&input[length * 2..]);
// Step 13.
if length > 8 {
red = &red[length - 8..];
green = &green[length - 8..];
blue = &blue[length - 8..];
length = 8
}
// Step 14.
while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
red = &red[1..];
green = &green[1..];
blue = &blue[1..];
length -= 1
}
// Steps 15-20.
return Ok(RGBA {
red: hex_string(red).unwrap() as f32 / 255.0,
green: hex_string(green).unwrap() as f32 / 255.0,
blue: hex_string(blue).unwrap() as f32 / 255.0,
alpha: 1.0,
});
fn hex(ch: char) -> Result<u8, ()> {
match ch {
'0'...'9' => Ok((ch as u8) - b'0'),
'a'...'f' => Ok((ch as u8) - b'a' + 10),
'A'...'F' => Ok((ch as u8) - b'A' + 10),
_ => Err(()),
}
}
fn hex_string(string: &[u8]) -> Result<u8, ()> {
match string.len() {
0 => Err(()),
1 => hex(string[0] as char),
_ => {
let upper = try!(hex(string[0] as char));
let lower = try!(hex(string[1] as char));
Ok((upper << 4) | lower)
}
}
}
}
#[derive(Clone, HeapSizeOf, Debug)]
pub struct AttrIdentifier {
pub local_name: Atom,
pub name: Atom,
pub namespace: Namespace,
pub prefix: Option<Atom>,
}
|
{
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
|
identifier_body
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{self, Color, RGBA};
use euclid::num::Zero;
use num::ToPrimitive;
use std::ascii::AsciiExt;
use std::ops::Deref;
use string_cache::{Atom, Namespace};
use url::Url;
use util::str::{DOMString, LengthOrPercentageOrAuto, HTML_SPACE_CHARACTERS, WHITESPACE};
use util::str::{parse_length, read_numbers, split_html_space_chars, str_join};
use values::specified::{Length};
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(PartialEq, Clone, HeapSizeOf)]
pub enum AttrValue {
String(DOMString),
TokenList(DOMString, Vec<Atom>),
UInt(DOMString, u32),
Int(DOMString, i32),
Atom(Atom),
Length(DOMString, Option<Length>),
Color(DOMString, Option<RGBA>),
Dimension(DOMString, LengthOrPercentageOrAuto),
Url(DOMString, Option<Url>),
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i64> {
let mut input = input.skip_while(|c| {
HTML_SPACE_CHARACTERS.iter().any(|s| s == c)
}).peekable();
let sign = match input.peek() {
None => return None,
Some(&'-') => {
input.next();
-1
},
Some(&'+') => {
input.next();
1
},
Some(_) => 1,
};
let value = read_numbers(input);
value.and_then(|value| value.checked_mul(sign))
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i32> {
do_parse_integer(input).and_then(|result| {
result.to_i32()
})
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Option<u32> {
do_parse_integer(input).and_then(|result| {
result.to_u32()
})
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: DOMString) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if !acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): efficient conversion of Vec<Atom> to DOMString
let tokens = DOMString::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_i32(string: DOMString, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
AttrValue::Int(string, result)
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers
pub fn from_limited_i32(string: DOMString, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
if result < 0 {
AttrValue::Int(string, default)
} else {
AttrValue::Int(string, result)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
// FIXME(ajeffrey): convert directly from DOMString to Atom
let value = Atom::from(&*string);
AttrValue::Atom(value)
}
pub fn from_url(base: &Url, url: DOMString) -> AttrValue {
let joined = base.join(&url).ok();
AttrValue::Url(url, joined)
}
pub fn from_legacy_color(string: DOMString) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: DOMString) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
pub fn from_nonzero_dimension(string: DOMString) -> AttrValue {
let parsed = parse_nonzero_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length> {
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Assumes the `AttrValue` is a `Url` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Url`
pub fn as_url(&self) -> Option<&Url> {
match *self {
AttrValue::Url(_, ref url) => url.as_ref(),
_ => panic!("Url not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
}
impl Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
|
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Int(ref value, _) |
AttrValue::Url(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
/// HTML5 § 2.4.4.5.
///
/// https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-zero-dimension-values
pub fn parse_nonzero_length(value: &str) -> LengthOrPercentageOrAuto {
match parse_length(value) {
LengthOrPercentageOrAuto::Length(x) if x == Au::zero() => LengthOrPercentageOrAuto::Auto,
LengthOrPercentageOrAuto::Percentage(0.) => LengthOrPercentageOrAuto::Auto,
x => x,
}
}
/// Parses a legacy color per HTML5 § 2.4.6. If unparseable, `Err` is returned.
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
// Steps 1 and 2.
if input.is_empty() {
return Err(())
}
// Step 3.
input = input.trim_matches(WHITESPACE);
// Step 4.
if input.eq_ignore_ascii_case("transparent") {
return Err(())
}
// Step 5.
if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) {
return Ok(rgba);
}
// Step 6.
if input.len() == 4 {
if let (b'#', Ok(r), Ok(g), Ok(b)) =
(input.as_bytes()[0],
hex(input.as_bytes()[1] as char),
hex(input.as_bytes()[2] as char),
hex(input.as_bytes()[3] as char)) {
return Ok(RGBA {
red: (r as f32) * 17.0 / 255.0,
green: (g as f32) * 17.0 / 255.0,
blue: (b as f32) * 17.0 / 255.0,
alpha: 1.0,
})
}
}
// Step 7.
let mut new_input = String::new();
for ch in input.chars() {
if ch as u32 > 0xffff {
new_input.push_str("00")
} else {
new_input.push(ch)
}
}
let mut input = &*new_input;
// Step 8.
for (char_count, (index, _)) in input.char_indices().enumerate() {
if char_count == 128 {
input = &input[..index];
break
}
}
// Step 9.
if input.as_bytes()[0] == b'#' {
input = &input[1..]
}
// Step 10.
let mut new_input = Vec::new();
for ch in input.chars() {
if hex(ch).is_ok() {
new_input.push(ch as u8)
} else {
new_input.push(b'0')
}
}
let mut input = new_input;
// Step 11.
while input.is_empty() || (input.len() % 3) != 0 {
input.push(b'0')
}
// Step 12.
let mut length = input.len() / 3;
let (mut red, mut green, mut blue) = (&input[..length],
&input[length..length * 2],
&input[length * 2..]);
// Step 13.
if length > 8 {
red = &red[length - 8..];
green = &green[length - 8..];
blue = &blue[length - 8..];
length = 8
}
// Step 14.
while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
red = &red[1..];
green = &green[1..];
blue = &blue[1..];
length -= 1
}
// Steps 15-20.
return Ok(RGBA {
red: hex_string(red).unwrap() as f32 / 255.0,
green: hex_string(green).unwrap() as f32 / 255.0,
blue: hex_string(blue).unwrap() as f32 / 255.0,
alpha: 1.0,
});
fn hex(ch: char) -> Result<u8, ()> {
match ch {
'0'...'9' => Ok((ch as u8) - b'0'),
'a'...'f' => Ok((ch as u8) - b'a' + 10),
'A'...'F' => Ok((ch as u8) - b'A' + 10),
_ => Err(()),
}
}
fn hex_string(string: &[u8]) -> Result<u8, ()> {
match string.len() {
0 => Err(()),
1 => hex(string[0] as char),
_ => {
let upper = try!(hex(string[0] as char));
let lower = try!(hex(string[1] as char));
Ok((upper << 4) | lower)
}
}
}
}
#[derive(Clone, HeapSizeOf, Debug)]
pub struct AttrIdentifier {
pub local_name: Atom,
pub name: Atom,
pub namespace: Namespace,
pub prefix: Option<Atom>,
}
|
random_line_split
|
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{self, Color, RGBA};
use euclid::num::Zero;
use num::ToPrimitive;
use std::ascii::AsciiExt;
use std::ops::Deref;
use string_cache::{Atom, Namespace};
use url::Url;
use util::str::{DOMString, LengthOrPercentageOrAuto, HTML_SPACE_CHARACTERS, WHITESPACE};
use util::str::{parse_length, read_numbers, split_html_space_chars, str_join};
use values::specified::{Length};
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(PartialEq, Clone, HeapSizeOf)]
pub enum
|
{
String(DOMString),
TokenList(DOMString, Vec<Atom>),
UInt(DOMString, u32),
Int(DOMString, i32),
Atom(Atom),
Length(DOMString, Option<Length>),
Color(DOMString, Option<RGBA>),
Dimension(DOMString, LengthOrPercentageOrAuto),
Url(DOMString, Option<Url>),
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i64> {
let mut input = input.skip_while(|c| {
HTML_SPACE_CHARACTERS.iter().any(|s| s == c)
}).peekable();
let sign = match input.peek() {
None => return None,
Some(&'-') => {
input.next();
-1
},
Some(&'+') => {
input.next();
1
},
Some(_) => 1,
};
let value = read_numbers(input);
value.and_then(|value| value.checked_mul(sign))
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i32> {
do_parse_integer(input).and_then(|result| {
result.to_i32()
})
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Option<u32> {
do_parse_integer(input).and_then(|result| {
result.to_u32()
})
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: DOMString) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if !acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): efficient conversion of Vec<Atom> to DOMString
let tokens = DOMString::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_i32(string: DOMString, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
AttrValue::Int(string, result)
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers
pub fn from_limited_i32(string: DOMString, default: i32) -> AttrValue {
let result = parse_integer(string.chars()).unwrap_or(default);
if result < 0 {
AttrValue::Int(string, default)
} else {
AttrValue::Int(string, result)
}
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
// FIXME(ajeffrey): convert directly from DOMString to Atom
let value = Atom::from(&*string);
AttrValue::Atom(value)
}
pub fn from_url(base: &Url, url: DOMString) -> AttrValue {
let joined = base.join(&url).ok();
AttrValue::Url(url, joined)
}
pub fn from_legacy_color(string: DOMString) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: DOMString) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
pub fn from_nonzero_dimension(string: DOMString) -> AttrValue {
let parsed = parse_nonzero_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length> {
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Assumes the `AttrValue` is a `Url` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Url`
pub fn as_url(&self) -> Option<&Url> {
match *self {
AttrValue::Url(_, ref url) => url.as_ref(),
_ => panic!("Url not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
}
impl Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Int(ref value, _) |
AttrValue::Url(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
/// HTML5 § 2.4.4.5.
///
/// https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-zero-dimension-values
pub fn parse_nonzero_length(value: &str) -> LengthOrPercentageOrAuto {
match parse_length(value) {
LengthOrPercentageOrAuto::Length(x) if x == Au::zero() => LengthOrPercentageOrAuto::Auto,
LengthOrPercentageOrAuto::Percentage(0.) => LengthOrPercentageOrAuto::Auto,
x => x,
}
}
/// Parses a legacy color per HTML5 § 2.4.6. If unparseable, `Err` is returned.
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
// Steps 1 and 2.
if input.is_empty() {
return Err(())
}
// Step 3.
input = input.trim_matches(WHITESPACE);
// Step 4.
if input.eq_ignore_ascii_case("transparent") {
return Err(())
}
// Step 5.
if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) {
return Ok(rgba);
}
// Step 6.
if input.len() == 4 {
if let (b'#', Ok(r), Ok(g), Ok(b)) =
(input.as_bytes()[0],
hex(input.as_bytes()[1] as char),
hex(input.as_bytes()[2] as char),
hex(input.as_bytes()[3] as char)) {
return Ok(RGBA {
red: (r as f32) * 17.0 / 255.0,
green: (g as f32) * 17.0 / 255.0,
blue: (b as f32) * 17.0 / 255.0,
alpha: 1.0,
})
}
}
// Step 7.
let mut new_input = String::new();
for ch in input.chars() {
if ch as u32 > 0xffff {
new_input.push_str("00")
} else {
new_input.push(ch)
}
}
let mut input = &*new_input;
// Step 8.
for (char_count, (index, _)) in input.char_indices().enumerate() {
if char_count == 128 {
input = &input[..index];
break
}
}
// Step 9.
if input.as_bytes()[0] == b'#' {
input = &input[1..]
}
// Step 10.
let mut new_input = Vec::new();
for ch in input.chars() {
if hex(ch).is_ok() {
new_input.push(ch as u8)
} else {
new_input.push(b'0')
}
}
let mut input = new_input;
// Step 11.
while input.is_empty() || (input.len() % 3) != 0 {
input.push(b'0')
}
// Step 12.
let mut length = input.len() / 3;
let (mut red, mut green, mut blue) = (&input[..length],
&input[length..length * 2],
&input[length * 2..]);
// Step 13.
if length > 8 {
red = &red[length - 8..];
green = &green[length - 8..];
blue = &blue[length - 8..];
length = 8
}
// Step 14.
while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
red = &red[1..];
green = &green[1..];
blue = &blue[1..];
length -= 1
}
// Steps 15-20.
return Ok(RGBA {
red: hex_string(red).unwrap() as f32 / 255.0,
green: hex_string(green).unwrap() as f32 / 255.0,
blue: hex_string(blue).unwrap() as f32 / 255.0,
alpha: 1.0,
});
fn hex(ch: char) -> Result<u8, ()> {
match ch {
'0'...'9' => Ok((ch as u8) - b'0'),
'a'...'f' => Ok((ch as u8) - b'a' + 10),
'A'...'F' => Ok((ch as u8) - b'A' + 10),
_ => Err(()),
}
}
fn hex_string(string: &[u8]) -> Result<u8, ()> {
match string.len() {
0 => Err(()),
1 => hex(string[0] as char),
_ => {
let upper = try!(hex(string[0] as char));
let lower = try!(hex(string[1] as char));
Ok((upper << 4) | lower)
}
}
}
}
#[derive(Clone, HeapSizeOf, Debug)]
pub struct AttrIdentifier {
pub local_name: Atom,
pub name: Atom,
pub namespace: Namespace,
pub prefix: Option<Atom>,
}
|
AttrValue
|
identifier_name
|
deriving-eq-ord-boxed-slice.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(PartialEq, PartialOrd, Eq, Ord)]
struct Foo(Box<[u8]>);
pub fn main() {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let a = Foo(Box::new([0, 1, 2]));
let b = Foo(Box::new([0, 1, 2]));
assert!(a == b);
println!("{}", a != b);
println!("{}", a < b);
println!("{}", a <= b);
println!("{}", a == b);
println!("{}", a > b);
|
println!("{}", a >= b);
}
|
random_line_split
|
|
deriving-eq-ord-boxed-slice.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(PartialEq, PartialOrd, Eq, Ord)]
struct
|
(Box<[u8]>);
pub fn main() {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let a = Foo(Box::new([0, 1, 2]));
let b = Foo(Box::new([0, 1, 2]));
assert!(a == b);
println!("{}", a != b);
println!("{}", a < b);
println!("{}", a <= b);
println!("{}", a == b);
println!("{}", a > b);
println!("{}", a >= b);
}
|
Foo
|
identifier_name
|
qsearch.rs
|
//! Defines the `Qsearch` trait.
use uci::SetOption;
use value::*;
use depth::*;
use move_generator::MoveGenerator;
/// Parameters describing a quiescence search.
///
/// **Important note:** `lower_bound` and `upper_bound` fields
/// together give the interval within which an evaluation that is as
/// precise as possible is required. If during the search it is determined
/// that the exact evaluation is outside of this interval, the search may
/// return a value that is closer to the interval bounds than the
/// exact evaluation, but always staying on the correct side of the
/// interval (i.e. "fail-soft" semantics).
pub struct
|
<'a, T: MoveGenerator + 'a> {
/// A mutable reference to the root position for the search.
///
/// **Important note:** The search routine may use this reference
/// to do and undo moves, but when the search is finished, all
/// played moves must be taken back so that the board is restored
/// to its original state.
pub position: &'a mut T,
/// The requested search depth.
///
/// This is the depth at which the main search stops and the
/// quiescence search takes over. It should be between `DEPTH_MIN`
/// and `0`. The quiescence search implementation may decide to
/// perform less thorough analysis when `depth` is smaller than
/// zero.
pub depth: Depth,
/// The lower bound for the search.
///
/// Should be no less than `VALUE_MIN`.
pub lower_bound: Value,
/// The upper bound for the search.
///
/// Should be greater than `lower_bound`, but no greater than
/// `VALUE_MAX`.
pub upper_bound: Value,
/// Position's static evaluation, or `VALUE_UNKNOWN`.
///
/// Saves the re-calculation if position's static evaluation is
/// already available.
pub static_eval: Value,
}
/// A trait for quiescence searches' results.
pub trait QsearchResult: Clone {
/// Creates a new instance.
///
/// * `value` -- the calculated evaluation for the position. Must
/// be between `VALUE_EVAL_MIN` and `VALUE_EVAL_MAX`.
///
/// * `searched_nodes` -- the number of positions searched to
/// calculate the evaluation.
fn new(value: Value, searched_nodes: u64) -> Self;
/// Returns the calculated evaluation for the position.
///
/// Will always be between `VALUE_EVAL_MIN` and `VALUE_EVAL_MAX`.
fn value(&self) -> Value;
/// Returns the number of positions searched to calculate the evaluation.
fn searched_nodes(&self) -> u64;
}
/// A trait for performing quiescence searches.
///
/// Quiescence search is a restricted search which considers only a
/// limited set of moves (for example: winning captures, pawn
/// promotions to queen, check evasions). The goal is to statically
/// evaluate only "quiet" positions (positions where there are no
/// winning tactical moves to be made). Although this search can
/// cheaply and correctly resolve many simple tactical issues, it is
/// completely blind to the more complex ones. To implement your own
/// quiescence search routine, you must define a type that implements
/// the `Qsearch` trait.
pub trait Qsearch: SetOption + Send + 'static {
/// The type of move generator that the implementation works with.
type MoveGenerator: MoveGenerator;
/// The type of result object that the search produces.
type QsearchResult: QsearchResult;
/// Performs a quiescence search and returns a result object.
fn qsearch(params: QsearchParams<Self::MoveGenerator>) -> Self::QsearchResult;
}
|
QsearchParams
|
identifier_name
|
qsearch.rs
|
//! Defines the `Qsearch` trait.
use uci::SetOption;
use value::*;
use depth::*;
use move_generator::MoveGenerator;
/// Parameters describing a quiescence search.
///
/// **Important note:** `lower_bound` and `upper_bound` fields
/// together give the interval within which an evaluation that is as
/// precise as possible is required. If during the search it is determined
/// that the exact evaluation is outside of this interval, the search may
/// return a value that is closer to the interval bounds than the
/// exact evaluation, but always staying on the correct side of the
/// interval (i.e. "fail-soft" semantics).
pub struct QsearchParams<'a, T: MoveGenerator + 'a> {
/// A mutable reference to the root position for the search.
///
/// **Important note:** The search routine may use this reference
/// to do and undo moves, but when the search is finished, all
/// played moves must be taken back so that the board is restored
/// to its original state.
pub position: &'a mut T,
/// The requested search depth.
///
/// This is the depth at which the main search stops and the
/// quiescence search takes over. It should be between `DEPTH_MIN`
/// and `0`. The quiescence search implementation may decide to
/// perform less thorough analysis when `depth` is smaller than
/// zero.
pub depth: Depth,
/// The lower bound for the search.
///
/// Should be no less than `VALUE_MIN`.
pub lower_bound: Value,
/// The upper bound for the search.
///
/// Should be greater than `lower_bound`, but no greater than
/// `VALUE_MAX`.
pub upper_bound: Value,
/// Position's static evaluation, or `VALUE_UNKNOWN`.
///
/// Saves the re-calculation if position's static evaluation is
/// already available.
pub static_eval: Value,
}
/// A trait for quiescence searches' results.
pub trait QsearchResult: Clone {
/// Creates a new instance.
///
/// * `value` -- the calculated evaluation for the position. Must
/// be between `VALUE_EVAL_MIN` and `VALUE_EVAL_MAX`.
///
/// * `searched_nodes` -- the number of positions searched to
/// calculate the evaluation.
fn new(value: Value, searched_nodes: u64) -> Self;
|
/// Will always be between `VALUE_EVAL_MIN` and `VALUE_EVAL_MAX`.
fn value(&self) -> Value;
/// Returns the number of positions searched to calculate the evaluation.
fn searched_nodes(&self) -> u64;
}
/// A trait for performing quiescence searches.
///
/// Quiescence search is a restricted search which considers only a
/// limited set of moves (for example: winning captures, pawn
/// promotions to queen, check evasions). The goal is to statically
/// evaluate only "quiet" positions (positions where there are no
/// winning tactical moves to be made). Although this search can
/// cheaply and correctly resolve many simple tactical issues, it is
/// completely blind to the more complex ones. To implement your own
/// quiescence search routine, you must define a type that implements
/// the `Qsearch` trait.
pub trait Qsearch: SetOption + Send + 'static {
/// The type of move generator that the implementation works with.
type MoveGenerator: MoveGenerator;
/// The type of result object that the search produces.
type QsearchResult: QsearchResult;
/// Performs a quiescence search and returns a result object.
fn qsearch(params: QsearchParams<Self::MoveGenerator>) -> Self::QsearchResult;
}
|
/// Returns the calculated evaluation for the position.
///
|
random_line_split
|
dynamics.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
extern crate core;
use self::core::slice;
use self::core::fmt::{
Debug,
Error,
Formatter,
};
use context::encodable::{
HpackEncodable,
};
/// Header is a decoded header of &[u8] backed by a Headers buffer.
#[repr(C)]
pub struct Header {
name: (*const u8, usize),
value: (*const u8, usize),
compressable: bool,
}
impl Header {
#[inline(always)]
pub fn new(
name: &[u8],
value: &[u8],
compressable: bool,
) -> Header {
Header {
name: (name.as_ptr(), name.len()),
value: (value.as_ptr(), value.len()),
compressable: compressable,
}
}
}
impl HpackEncodable for Header {
#[inline(always)]
fn name(&self) -> &[u8] {
unsafe {
slice::from_raw_parts(self.name.0, self.name.1)
}
}
#[inline(always)]
fn value(&self) -> &[u8] {
unsafe {
slice::from_raw_parts(self.value.0, self.value.1)
}
}
#[inline(always)]
fn is_compressable(&self) -> bool {
self.compressable
}
}
impl Debug for Header {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(
formatter,
"Header ( name: {}, value: {} )",
String::from_utf8(self.name().to_vec()).unwrap(),
String::from_utf8(self.value().to_vec()).unwrap(),
)
}
}
#[derive(Clone)]
#[repr(C)]
pub struct DynamicHeader {
seg: usize,
buf: Vec<u8>,
}
impl DynamicHeader {
#[inline]
pub fn new(name: &[u8], value: &[u8]) -> DynamicHeader
|
}
impl HpackEncodable for DynamicHeader {
#[inline(always)]
fn name(&self) -> &[u8] {
&self.buf[0..self.seg]
}
#[inline(always)]
fn value(&self) -> &[u8] {
&self.buf[self.seg..self.buf.len()]
}
#[inline]
fn size(&self) -> u32 {
self.buf.len() as u32 + 32
}
#[inline]
fn is_compressable(&self) -> bool {
true
}
}
impl Debug for DynamicHeader {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(
formatter,
"DynamicHeader ( name: {}, value: {} )",
String::from_utf8(self.name().to_vec()).unwrap(),
String::from_utf8(self.value().to_vec()).unwrap(),
)
}
}
|
{
let mut buf = Vec::with_capacity(name.len() + value.len());
buf.extend(name);
buf.extend(value);
DynamicHeader {
seg: name.len(),
buf: buf,
}
}
|
identifier_body
|
dynamics.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
extern crate core;
use self::core::slice;
use self::core::fmt::{
Debug,
Error,
Formatter,
};
use context::encodable::{
|
/// Header is a decoded header of &[u8] backed by a Headers buffer.
#[repr(C)]
pub struct Header {
name: (*const u8, usize),
value: (*const u8, usize),
compressable: bool,
}
impl Header {
#[inline(always)]
pub fn new(
name: &[u8],
value: &[u8],
compressable: bool,
) -> Header {
Header {
name: (name.as_ptr(), name.len()),
value: (value.as_ptr(), value.len()),
compressable: compressable,
}
}
}
impl HpackEncodable for Header {
#[inline(always)]
fn name(&self) -> &[u8] {
unsafe {
slice::from_raw_parts(self.name.0, self.name.1)
}
}
#[inline(always)]
fn value(&self) -> &[u8] {
unsafe {
slice::from_raw_parts(self.value.0, self.value.1)
}
}
#[inline(always)]
fn is_compressable(&self) -> bool {
self.compressable
}
}
impl Debug for Header {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(
formatter,
"Header ( name: {}, value: {} )",
String::from_utf8(self.name().to_vec()).unwrap(),
String::from_utf8(self.value().to_vec()).unwrap(),
)
}
}
#[derive(Clone)]
#[repr(C)]
pub struct DynamicHeader {
seg: usize,
buf: Vec<u8>,
}
impl DynamicHeader {
#[inline]
pub fn new(name: &[u8], value: &[u8]) -> DynamicHeader {
let mut buf = Vec::with_capacity(name.len() + value.len());
buf.extend(name);
buf.extend(value);
DynamicHeader {
seg: name.len(),
buf: buf,
}
}
}
impl HpackEncodable for DynamicHeader {
#[inline(always)]
fn name(&self) -> &[u8] {
&self.buf[0..self.seg]
}
#[inline(always)]
fn value(&self) -> &[u8] {
&self.buf[self.seg..self.buf.len()]
}
#[inline]
fn size(&self) -> u32 {
self.buf.len() as u32 + 32
}
#[inline]
fn is_compressable(&self) -> bool {
true
}
}
impl Debug for DynamicHeader {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(
formatter,
"DynamicHeader ( name: {}, value: {} )",
String::from_utf8(self.name().to_vec()).unwrap(),
String::from_utf8(self.value().to_vec()).unwrap(),
)
}
}
|
HpackEncodable,
};
|
random_line_split
|
dynamics.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
extern crate core;
use self::core::slice;
use self::core::fmt::{
Debug,
Error,
Formatter,
};
use context::encodable::{
HpackEncodable,
};
/// Header is a decoded header of &[u8] backed by a Headers buffer.
#[repr(C)]
pub struct
|
{
name: (*const u8, usize),
value: (*const u8, usize),
compressable: bool,
}
impl Header {
#[inline(always)]
pub fn new(
name: &[u8],
value: &[u8],
compressable: bool,
) -> Header {
Header {
name: (name.as_ptr(), name.len()),
value: (value.as_ptr(), value.len()),
compressable: compressable,
}
}
}
impl HpackEncodable for Header {
#[inline(always)]
fn name(&self) -> &[u8] {
unsafe {
slice::from_raw_parts(self.name.0, self.name.1)
}
}
#[inline(always)]
fn value(&self) -> &[u8] {
unsafe {
slice::from_raw_parts(self.value.0, self.value.1)
}
}
#[inline(always)]
fn is_compressable(&self) -> bool {
self.compressable
}
}
impl Debug for Header {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(
formatter,
"Header ( name: {}, value: {} )",
String::from_utf8(self.name().to_vec()).unwrap(),
String::from_utf8(self.value().to_vec()).unwrap(),
)
}
}
#[derive(Clone)]
#[repr(C)]
pub struct DynamicHeader {
seg: usize,
buf: Vec<u8>,
}
impl DynamicHeader {
#[inline]
pub fn new(name: &[u8], value: &[u8]) -> DynamicHeader {
let mut buf = Vec::with_capacity(name.len() + value.len());
buf.extend(name);
buf.extend(value);
DynamicHeader {
seg: name.len(),
buf: buf,
}
}
}
impl HpackEncodable for DynamicHeader {
#[inline(always)]
fn name(&self) -> &[u8] {
&self.buf[0..self.seg]
}
#[inline(always)]
fn value(&self) -> &[u8] {
&self.buf[self.seg..self.buf.len()]
}
#[inline]
fn size(&self) -> u32 {
self.buf.len() as u32 + 32
}
#[inline]
fn is_compressable(&self) -> bool {
true
}
}
impl Debug for DynamicHeader {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(
formatter,
"DynamicHeader ( name: {}, value: {} )",
String::from_utf8(self.name().to_vec()).unwrap(),
String::from_utf8(self.value().to_vec()).unwrap(),
)
}
}
|
Header
|
identifier_name
|
security_checker.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::repo_handlers::RepoHandler;
use anyhow::{bail, Context, Error, Result};
use borrowed::borrowed;
use fbinit::FacebookInit;
use futures::future::try_join_all;
use metaconfig_types::{AllowlistEntry, CommonConfig};
use permission_checker::{
BoxMembershipChecker, BoxPermissionChecker, MembershipCheckerBuilder, MononokeIdentity,
MononokeIdentitySet, PermissionCheckerBuilder,
};
use slog::{warn, Logger};
use std::collections::HashMap;
pub struct ConnectionsSecurityChecker {
tier_permchecker: BoxPermissionChecker,
allowlisted_checker: BoxMembershipChecker,
repo_permcheckers: HashMap<String, BoxPermissionChecker>,
}
impl ConnectionsSecurityChecker {
pub async fn new(
fb: FacebookInit,
common_config: CommonConfig,
repo_handlers: &HashMap<String, RepoHandler>,
logger: &Logger,
) -> Result<Self> {
let mut allowlisted_identities = MononokeIdentitySet::new();
let mut tier_permchecker = None;
for allowlist_entry in common_config.security_config {
match allowlist_entry {
AllowlistEntry::HardcodedIdentity { ty, data } => {
allowlisted_identities.insert(MononokeIdentity::new(&ty, &data)?);
}
AllowlistEntry::Tier(tier) =>
|
}
}
let futures = repo_handlers.iter().map(|(reponame, repohandler)| {
borrowed!(allowlisted_identities);
async move {
if let Some(acl_name) = repohandler.repo.hipster_acl() {
let permchecker = PermissionCheckerBuilder::acl_for_repo(fb, acl_name)
.await
.with_context(|| {
format!("Failed to create PermissionChecker for {}", acl_name)
})?;
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
permchecker,
))
} else {
// If we don't have an ACL config here, we just use the allowlisted identities.
// Those are the identities we'd allow to impersonate anyone anyway. Note
// that this is not a setup we run in prod — it's just convenient for local
// repos.
warn!(
logger,
"No ACL set for repo {}, defaulting to allowlisted identities", reponame
);
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
PermissionCheckerBuilder::allowlist_checker(allowlisted_identities.clone()),
))
}
}
});
let repo_permcheckers: HashMap<_, _> = try_join_all(futures).await?.into_iter().collect();
Ok(Self {
tier_permchecker: tier_permchecker
.unwrap_or_else(|| PermissionCheckerBuilder::always_reject()),
allowlisted_checker: MembershipCheckerBuilder::allowlist_checker(
allowlisted_identities,
),
repo_permcheckers,
})
}
pub async fn check_if_trusted(&self, identities: &MononokeIdentitySet) -> Result<bool> {
let action = "trusted_parties";
Ok(self.allowlisted_checker.is_member(&identities).await?
|| self
.tier_permchecker
.check_set(&identities, &[action])
.await?)
}
pub async fn check_if_repo_access_allowed(
&self,
reponame: &str,
identities: &MononokeIdentitySet,
) -> Result<bool> {
match self.repo_permcheckers.get(reponame) {
Some(permchecker) => Ok(permchecker.check_set(&identities, &["read"]).await?),
None => Ok(false),
}
}
}
|
{
if tier_permchecker.is_some() {
bail!("invalid config: only one PermissionChecker for tier is allowed");
}
tier_permchecker =
Some(PermissionCheckerBuilder::acl_for_tier(fb, &tier).await?);
}
|
conditional_block
|
security_checker.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::repo_handlers::RepoHandler;
use anyhow::{bail, Context, Error, Result};
|
BoxMembershipChecker, BoxPermissionChecker, MembershipCheckerBuilder, MononokeIdentity,
MononokeIdentitySet, PermissionCheckerBuilder,
};
use slog::{warn, Logger};
use std::collections::HashMap;
pub struct ConnectionsSecurityChecker {
tier_permchecker: BoxPermissionChecker,
allowlisted_checker: BoxMembershipChecker,
repo_permcheckers: HashMap<String, BoxPermissionChecker>,
}
impl ConnectionsSecurityChecker {
pub async fn new(
fb: FacebookInit,
common_config: CommonConfig,
repo_handlers: &HashMap<String, RepoHandler>,
logger: &Logger,
) -> Result<Self> {
let mut allowlisted_identities = MononokeIdentitySet::new();
let mut tier_permchecker = None;
for allowlist_entry in common_config.security_config {
match allowlist_entry {
AllowlistEntry::HardcodedIdentity { ty, data } => {
allowlisted_identities.insert(MononokeIdentity::new(&ty, &data)?);
}
AllowlistEntry::Tier(tier) => {
if tier_permchecker.is_some() {
bail!("invalid config: only one PermissionChecker for tier is allowed");
}
tier_permchecker =
Some(PermissionCheckerBuilder::acl_for_tier(fb, &tier).await?);
}
}
}
let futures = repo_handlers.iter().map(|(reponame, repohandler)| {
borrowed!(allowlisted_identities);
async move {
if let Some(acl_name) = repohandler.repo.hipster_acl() {
let permchecker = PermissionCheckerBuilder::acl_for_repo(fb, acl_name)
.await
.with_context(|| {
format!("Failed to create PermissionChecker for {}", acl_name)
})?;
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
permchecker,
))
} else {
// If we don't have an ACL config here, we just use the allowlisted identities.
// Those are the identities we'd allow to impersonate anyone anyway. Note
// that this is not a setup we run in prod — it's just convenient for local
// repos.
warn!(
logger,
"No ACL set for repo {}, defaulting to allowlisted identities", reponame
);
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
PermissionCheckerBuilder::allowlist_checker(allowlisted_identities.clone()),
))
}
}
});
let repo_permcheckers: HashMap<_, _> = try_join_all(futures).await?.into_iter().collect();
Ok(Self {
tier_permchecker: tier_permchecker
.unwrap_or_else(|| PermissionCheckerBuilder::always_reject()),
allowlisted_checker: MembershipCheckerBuilder::allowlist_checker(
allowlisted_identities,
),
repo_permcheckers,
})
}
pub async fn check_if_trusted(&self, identities: &MononokeIdentitySet) -> Result<bool> {
let action = "trusted_parties";
Ok(self.allowlisted_checker.is_member(&identities).await?
|| self
.tier_permchecker
.check_set(&identities, &[action])
.await?)
}
pub async fn check_if_repo_access_allowed(
&self,
reponame: &str,
identities: &MononokeIdentitySet,
) -> Result<bool> {
match self.repo_permcheckers.get(reponame) {
Some(permchecker) => Ok(permchecker.check_set(&identities, &["read"]).await?),
None => Ok(false),
}
}
}
|
use borrowed::borrowed;
use fbinit::FacebookInit;
use futures::future::try_join_all;
use metaconfig_types::{AllowlistEntry, CommonConfig};
use permission_checker::{
|
random_line_split
|
security_checker.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::repo_handlers::RepoHandler;
use anyhow::{bail, Context, Error, Result};
use borrowed::borrowed;
use fbinit::FacebookInit;
use futures::future::try_join_all;
use metaconfig_types::{AllowlistEntry, CommonConfig};
use permission_checker::{
BoxMembershipChecker, BoxPermissionChecker, MembershipCheckerBuilder, MononokeIdentity,
MononokeIdentitySet, PermissionCheckerBuilder,
};
use slog::{warn, Logger};
use std::collections::HashMap;
pub struct ConnectionsSecurityChecker {
tier_permchecker: BoxPermissionChecker,
allowlisted_checker: BoxMembershipChecker,
repo_permcheckers: HashMap<String, BoxPermissionChecker>,
}
impl ConnectionsSecurityChecker {
pub async fn new(
fb: FacebookInit,
common_config: CommonConfig,
repo_handlers: &HashMap<String, RepoHandler>,
logger: &Logger,
) -> Result<Self> {
let mut allowlisted_identities = MononokeIdentitySet::new();
let mut tier_permchecker = None;
for allowlist_entry in common_config.security_config {
match allowlist_entry {
AllowlistEntry::HardcodedIdentity { ty, data } => {
allowlisted_identities.insert(MononokeIdentity::new(&ty, &data)?);
}
AllowlistEntry::Tier(tier) => {
if tier_permchecker.is_some() {
bail!("invalid config: only one PermissionChecker for tier is allowed");
}
tier_permchecker =
Some(PermissionCheckerBuilder::acl_for_tier(fb, &tier).await?);
}
}
}
let futures = repo_handlers.iter().map(|(reponame, repohandler)| {
borrowed!(allowlisted_identities);
async move {
if let Some(acl_name) = repohandler.repo.hipster_acl() {
let permchecker = PermissionCheckerBuilder::acl_for_repo(fb, acl_name)
.await
.with_context(|| {
format!("Failed to create PermissionChecker for {}", acl_name)
})?;
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
permchecker,
))
} else {
// If we don't have an ACL config here, we just use the allowlisted identities.
// Those are the identities we'd allow to impersonate anyone anyway. Note
// that this is not a setup we run in prod — it's just convenient for local
// repos.
warn!(
logger,
"No ACL set for repo {}, defaulting to allowlisted identities", reponame
);
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
PermissionCheckerBuilder::allowlist_checker(allowlisted_identities.clone()),
))
}
}
});
let repo_permcheckers: HashMap<_, _> = try_join_all(futures).await?.into_iter().collect();
Ok(Self {
tier_permchecker: tier_permchecker
.unwrap_or_else(|| PermissionCheckerBuilder::always_reject()),
allowlisted_checker: MembershipCheckerBuilder::allowlist_checker(
allowlisted_identities,
),
repo_permcheckers,
})
}
pub async fn check_if_trusted(&self, identities: &MononokeIdentitySet) -> Result<bool> {
|
pub async fn check_if_repo_access_allowed(
&self,
reponame: &str,
identities: &MononokeIdentitySet,
) -> Result<bool> {
match self.repo_permcheckers.get(reponame) {
Some(permchecker) => Ok(permchecker.check_set(&identities, &["read"]).await?),
None => Ok(false),
}
}
}
|
let action = "trusted_parties";
Ok(self.allowlisted_checker.is_member(&identities).await?
|| self
.tier_permchecker
.check_set(&identities, &[action])
.await?)
}
|
identifier_body
|
security_checker.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::repo_handlers::RepoHandler;
use anyhow::{bail, Context, Error, Result};
use borrowed::borrowed;
use fbinit::FacebookInit;
use futures::future::try_join_all;
use metaconfig_types::{AllowlistEntry, CommonConfig};
use permission_checker::{
BoxMembershipChecker, BoxPermissionChecker, MembershipCheckerBuilder, MononokeIdentity,
MononokeIdentitySet, PermissionCheckerBuilder,
};
use slog::{warn, Logger};
use std::collections::HashMap;
pub struct ConnectionsSecurityChecker {
tier_permchecker: BoxPermissionChecker,
allowlisted_checker: BoxMembershipChecker,
repo_permcheckers: HashMap<String, BoxPermissionChecker>,
}
impl ConnectionsSecurityChecker {
pub async fn
|
(
fb: FacebookInit,
common_config: CommonConfig,
repo_handlers: &HashMap<String, RepoHandler>,
logger: &Logger,
) -> Result<Self> {
let mut allowlisted_identities = MononokeIdentitySet::new();
let mut tier_permchecker = None;
for allowlist_entry in common_config.security_config {
match allowlist_entry {
AllowlistEntry::HardcodedIdentity { ty, data } => {
allowlisted_identities.insert(MononokeIdentity::new(&ty, &data)?);
}
AllowlistEntry::Tier(tier) => {
if tier_permchecker.is_some() {
bail!("invalid config: only one PermissionChecker for tier is allowed");
}
tier_permchecker =
Some(PermissionCheckerBuilder::acl_for_tier(fb, &tier).await?);
}
}
}
let futures = repo_handlers.iter().map(|(reponame, repohandler)| {
borrowed!(allowlisted_identities);
async move {
if let Some(acl_name) = repohandler.repo.hipster_acl() {
let permchecker = PermissionCheckerBuilder::acl_for_repo(fb, acl_name)
.await
.with_context(|| {
format!("Failed to create PermissionChecker for {}", acl_name)
})?;
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
permchecker,
))
} else {
// If we don't have an ACL config here, we just use the allowlisted identities.
// Those are the identities we'd allow to impersonate anyone anyway. Note
// that this is not a setup we run in prod — it's just convenient for local
// repos.
warn!(
logger,
"No ACL set for repo {}, defaulting to allowlisted identities", reponame
);
Result::<(String, BoxPermissionChecker), Error>::Ok((
reponame.clone(),
PermissionCheckerBuilder::allowlist_checker(allowlisted_identities.clone()),
))
}
}
});
let repo_permcheckers: HashMap<_, _> = try_join_all(futures).await?.into_iter().collect();
Ok(Self {
tier_permchecker: tier_permchecker
.unwrap_or_else(|| PermissionCheckerBuilder::always_reject()),
allowlisted_checker: MembershipCheckerBuilder::allowlist_checker(
allowlisted_identities,
),
repo_permcheckers,
})
}
pub async fn check_if_trusted(&self, identities: &MononokeIdentitySet) -> Result<bool> {
let action = "trusted_parties";
Ok(self.allowlisted_checker.is_member(&identities).await?
|| self
.tier_permchecker
.check_set(&identities, &[action])
.await?)
}
pub async fn check_if_repo_access_allowed(
&self,
reponame: &str,
identities: &MononokeIdentitySet,
) -> Result<bool> {
match self.repo_permcheckers.get(reponame) {
Some(permchecker) => Ok(permchecker.check_set(&identities, &["read"]).await?),
None => Ok(false),
}
}
}
|
new
|
identifier_name
|
idle.rs
|
use crate::{Event, Loop};
/// Idle arguments, such as expected idle time in seconds.
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug, Deserialize, Serialize)]
pub struct IdleArgs {
/// Expected idle time in seconds.
pub dt: f64,
}
/// When background tasks should be performed.
pub trait IdleEvent: Sized {
/// Creates an idle event.
fn from_idle_args(args: &IdleArgs, old_event: &Self) -> Option<Self>;
/// Creates an update event with delta time.
fn from_dt(dt: f64, old_event: &Self) -> Option<Self>
|
/// Calls closure if this is an idle event.
fn idle<U, F>(&self, f: F) -> Option<U>
where
F: FnMut(&IdleArgs) -> U;
/// Returns idle arguments.
fn idle_args(&self) -> Option<IdleArgs> {
self.idle(|args| *args)
}
}
impl IdleEvent for Event {
fn from_idle_args(args: &IdleArgs, _old_event: &Self) -> Option<Self> {
Some(Event::Loop(Loop::Idle(*args)))
}
fn idle<U, F>(&self, mut f: F) -> Option<U>
where
F: FnMut(&IdleArgs) -> U,
{
match *self {
Event::Loop(Loop::Idle(ref args)) => Some(f(args)),
_ => None,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_input_idle() {
use IdleArgs;
let e: Event = IdleArgs { dt: 1.0 }.into();
let x: Option<Event> = IdleEvent::from_idle_args(&IdleArgs { dt: 1.0 }, &e);
let y: Option<Event> = x
.clone()
.unwrap()
.idle(|args| IdleEvent::from_idle_args(args, x.as_ref().unwrap()))
.unwrap();
assert_eq!(x, y);
}
}
|
{
IdleEvent::from_idle_args(&IdleArgs { dt }, old_event)
}
|
identifier_body
|
idle.rs
|
use crate::{Event, Loop};
/// Idle arguments, such as expected idle time in seconds.
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug, Deserialize, Serialize)]
pub struct IdleArgs {
/// Expected idle time in seconds.
pub dt: f64,
}
/// When background tasks should be performed.
pub trait IdleEvent: Sized {
/// Creates an idle event.
fn from_idle_args(args: &IdleArgs, old_event: &Self) -> Option<Self>;
/// Creates an update event with delta time.
fn from_dt(dt: f64, old_event: &Self) -> Option<Self> {
IdleEvent::from_idle_args(&IdleArgs { dt }, old_event)
}
/// Calls closure if this is an idle event.
fn idle<U, F>(&self, f: F) -> Option<U>
where
F: FnMut(&IdleArgs) -> U;
/// Returns idle arguments.
fn idle_args(&self) -> Option<IdleArgs> {
self.idle(|args| *args)
}
}
impl IdleEvent for Event {
fn from_idle_args(args: &IdleArgs, _old_event: &Self) -> Option<Self> {
Some(Event::Loop(Loop::Idle(*args)))
}
|
where
F: FnMut(&IdleArgs) -> U,
{
match *self {
Event::Loop(Loop::Idle(ref args)) => Some(f(args)),
_ => None,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_input_idle() {
use IdleArgs;
let e: Event = IdleArgs { dt: 1.0 }.into();
let x: Option<Event> = IdleEvent::from_idle_args(&IdleArgs { dt: 1.0 }, &e);
let y: Option<Event> = x
.clone()
.unwrap()
.idle(|args| IdleEvent::from_idle_args(args, x.as_ref().unwrap()))
.unwrap();
assert_eq!(x, y);
}
}
|
fn idle<U, F>(&self, mut f: F) -> Option<U>
|
random_line_split
|
idle.rs
|
use crate::{Event, Loop};
/// Idle arguments, such as expected idle time in seconds.
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug, Deserialize, Serialize)]
pub struct IdleArgs {
/// Expected idle time in seconds.
pub dt: f64,
}
/// When background tasks should be performed.
pub trait IdleEvent: Sized {
/// Creates an idle event.
fn from_idle_args(args: &IdleArgs, old_event: &Self) -> Option<Self>;
/// Creates an update event with delta time.
fn from_dt(dt: f64, old_event: &Self) -> Option<Self> {
IdleEvent::from_idle_args(&IdleArgs { dt }, old_event)
}
/// Calls closure if this is an idle event.
fn idle<U, F>(&self, f: F) -> Option<U>
where
F: FnMut(&IdleArgs) -> U;
/// Returns idle arguments.
fn
|
(&self) -> Option<IdleArgs> {
self.idle(|args| *args)
}
}
impl IdleEvent for Event {
fn from_idle_args(args: &IdleArgs, _old_event: &Self) -> Option<Self> {
Some(Event::Loop(Loop::Idle(*args)))
}
fn idle<U, F>(&self, mut f: F) -> Option<U>
where
F: FnMut(&IdleArgs) -> U,
{
match *self {
Event::Loop(Loop::Idle(ref args)) => Some(f(args)),
_ => None,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_input_idle() {
use IdleArgs;
let e: Event = IdleArgs { dt: 1.0 }.into();
let x: Option<Event> = IdleEvent::from_idle_args(&IdleArgs { dt: 1.0 }, &e);
let y: Option<Event> = x
.clone()
.unwrap()
.idle(|args| IdleEvent::from_idle_args(args, x.as_ref().unwrap()))
.unwrap();
assert_eq!(x, y);
}
}
|
idle_args
|
identifier_name
|
main.rs
|
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::{stdout, BufWriter};
fn main() {
let out = b"Hello fellow Rustaceans!";
let width = 24;
let mut writer = BufWriter::new(stdout());
ferris_says::say(out, width, &mut writer).unwrap();
}
#[cfg(test)]
mod tests {
use super::*;
use indoc::indoc;
#[test]
fn
|
() {
let out = indoc! { b"
Hello fellow Rustaceans!
" };
let width = 24;
let mut writer = BufWriter::new(stdout());
ferris_says::say(out, width, &mut writer).unwrap();
}
}
|
test_dev_dependencies
|
identifier_name
|
main.rs
|
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::{stdout, BufWriter};
fn main()
|
#[cfg(test)]
mod tests {
use super::*;
use indoc::indoc;
#[test]
fn test_dev_dependencies() {
let out = indoc! { b"
Hello fellow Rustaceans!
" };
let width = 24;
let mut writer = BufWriter::new(stdout());
ferris_says::say(out, width, &mut writer).unwrap();
}
}
|
{
let out = b"Hello fellow Rustaceans!";
let width = 24;
let mut writer = BufWriter::new(stdout());
ferris_says::say(out, width, &mut writer).unwrap();
}
|
identifier_body
|
main.rs
|
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::{stdout, BufWriter};
fn main() {
let out = b"Hello fellow Rustaceans!";
let width = 24;
let mut writer = BufWriter::new(stdout());
ferris_says::say(out, width, &mut writer).unwrap();
}
#[cfg(test)]
mod tests {
use super::*;
use indoc::indoc;
#[test]
fn test_dev_dependencies() {
let out = indoc! { b"
Hello fellow Rustaceans!
" };
let width = 24;
let mut writer = BufWriter::new(stdout());
ferris_says::say(out, width, &mut writer).unwrap();
}
}
|
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
|
random_line_split
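The main.rs rows above send ferris_says output to stdout through a BufWriter; the hedged sketch below captures it in memory instead, reusing only the call shape shown above (the exact ferris_says::say signature is assumed, not verified).
use std::io::BufWriter;

fn main() {
    let out = b"Hello fellow Rustaceans!";
    let width = 24;
    // Vec<u8> implements Write, so the same BufWriter pattern works for an in-memory buffer.
    let mut writer = BufWriter::new(Vec::new());
    ferris_says::say(out, width, &mut writer).unwrap();
    let bytes = writer.into_inner().unwrap();
    println!("{}", String::from_utf8_lossy(&bytes));
}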
|
serde_types.rs
|
use hybrid_clocks::{Timestamp, WallT};
use potboiler_common::{enum_str, types::CRDT};
use serde_derive::{Deserialize, Serialize};
use std::{collections::HashMap, fmt};
enum_str!(Operation {
Set("set"),
Add("add"),
Remove("remove"),
Create("create"),
});
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Change {
pub table: String,
pub key: String,
pub op: Operation,
pub change: serde_json::Value,
}
|
pub struct LWWConfigOp {
pub crdt: CRDT,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct LWW {
pub when: Timestamp<WallT>,
pub data: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ORCreateOp {}
#[derive(Serialize, Deserialize, Debug)]
pub struct ORSetOp {
pub item: String,
pub key: String,
pub metadata: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ORSet {
pub adds: HashMap<String, String>,
pub removes: HashMap<String, String>,
}
|
#[derive(Serialize, Deserialize, Debug)]
|
random_line_split
|
serde_types.rs
|
use hybrid_clocks::{Timestamp, WallT};
use potboiler_common::{enum_str, types::CRDT};
use serde_derive::{Deserialize, Serialize};
use std::{collections::HashMap, fmt};
enum_str!(Operation {
Set("set"),
Add("add"),
Remove("remove"),
Create("create"),
});
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Change {
pub table: String,
pub key: String,
pub op: Operation,
pub change: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct LWWConfigOp {
pub crdt: CRDT,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct LWW {
pub when: Timestamp<WallT>,
pub data: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ORCreateOp {}
#[derive(Serialize, Deserialize, Debug)]
pub struct ORSetOp {
pub item: String,
pub key: String,
pub metadata: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct
|
{
pub adds: HashMap<String, String>,
pub removes: HashMap<String, String>,
}
|
ORSet
|
identifier_name
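The serde_types.rs rows above only declare the derives; as a hedged illustration of what they generate, the sketch below round-trips a locally redefined struct with the same fields as ORSetOp through serde_json (it is a stand-in for illustration, not the crate's own type).
use serde_derive::{Deserialize, Serialize};

// Local stand-in with the same fields as the ORSetOp shown above.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ORSetOp {
    item: String,
    key: String,
    metadata: serde_json::Value,
}

fn main() {
    let op = ORSetOp {
        item: "a".to_string(),
        key: "k1".to_string(),
        metadata: serde_json::json!({ "source": "node-1" }),
    };
    // The derives generate both directions of the conversion.
    let text = serde_json::to_string(&op).unwrap();
    let back: ORSetOp = serde_json::from_str(&text).unwrap();
    assert_eq!(op, back);
}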
|
ecs_pos_vel_spread.rs
|
#![feature(test)]
extern crate test;
extern crate froggy;
use test::Bencher;
use froggy::{Pointer, Storage};
|
// it has a custom Velocity component
struct Velocity {
pub dx: f32,
pub dy: f32,
}
struct Entity {
pos: Pointer<Position>,
vel: Option<Pointer<Velocity>>,
}
struct World {
pos: Storage<Position>,
vel: Storage<Velocity>,
entities: Vec<Entity>,
}
fn build() -> World {
let mut world = World {
pos: Storage::with_capacity(N_POS_VEL + N_POS),
vel: Storage::with_capacity(N_POS_VEL),
entities: Vec::with_capacity(N_POS_VEL + N_POS),
};
// setup entities
{
let pos_spread = (N_POS + N_POS_VEL) / N_POS_VEL;
        for fx in 1 .. (N_POS_VEL + N_POS + 1) {
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
vel: None,
});
if fx % pos_spread == 0 {
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
vel: Some(world.vel.create(Velocity { dx: 0.0, dy: 0.0 })),
});
}
}
}
world
}
#[bench]
fn bench_build(b: &mut Bencher) {
b.iter(build);
}
#[bench]
fn bench_update(b: &mut Bencher) {
let mut world = build();
b.iter(|| {
for e in &world.entities {
if let Some(ref vel) = e.vel {
let mut p = &mut world.pos[&e.pos];
let v = &world.vel[vel];
p.x += v.dx;
p.y += v.dy;
}
}
});
}
|
mod bench_setup;
use bench_setup::{Position, N_POS_VEL, N_POS};
// Since component linking is not used in this bench,
|
random_line_split
|
ecs_pos_vel_spread.rs
|
#![feature(test)]
extern crate test;
extern crate froggy;
use test::Bencher;
use froggy::{Pointer, Storage};
mod bench_setup;
use bench_setup::{Position, N_POS_VEL, N_POS};
// Since component linking is not used in this bench,
// it has a custom Velocity component
struct Velocity {
pub dx: f32,
pub dy: f32,
}
struct Entity {
pos: Pointer<Position>,
vel: Option<Pointer<Velocity>>,
}
struct World {
pos: Storage<Position>,
vel: Storage<Velocity>,
entities: Vec<Entity>,
}
fn build() -> World
|
vel: Some(world.vel.create(Velocity { dx: 0.0, dy: 0.0 })),
});
}
}
}
world
}
#[bench]
fn bench_build(b: &mut Bencher) {
b.iter(build);
}
#[bench]
fn bench_update(b: &mut Bencher) {
let mut world = build();
b.iter(|| {
for e in &world.entities {
if let Some(ref vel) = e.vel {
let mut p = &mut world.pos[&e.pos];
let v = &world.vel[vel];
p.x += v.dx;
p.y += v.dy;
}
}
});
}
|
{
let mut world = World {
pos: Storage::with_capacity(N_POS_VEL + N_POS),
vel: Storage::with_capacity(N_POS_VEL),
entities: Vec::with_capacity(N_POS_VEL + N_POS),
};
// setup entities
{
let pos_spread = (N_POS + N_POS_VEL) / N_POS_VEL;
for fx in 1 .. (N_POS_VEL + N_POS + 1) {
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
vel: None,
});
if fx % pos_spread == 0 {
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
|
identifier_body
|
ecs_pos_vel_spread.rs
|
#![feature(test)]
extern crate test;
extern crate froggy;
use test::Bencher;
use froggy::{Pointer, Storage};
mod bench_setup;
use bench_setup::{Position, N_POS_VEL, N_POS};
// Since component linking is not used in this bench,
// it has a custom Velocity component
struct Velocity {
pub dx: f32,
pub dy: f32,
}
struct Entity {
pos: Pointer<Position>,
vel: Option<Pointer<Velocity>>,
}
struct World {
pos: Storage<Position>,
vel: Storage<Velocity>,
entities: Vec<Entity>,
}
fn build() -> World {
let mut world = World {
pos: Storage::with_capacity(N_POS_VEL + N_POS),
vel: Storage::with_capacity(N_POS_VEL),
entities: Vec::with_capacity(N_POS_VEL + N_POS),
};
// setup entities
{
let pos_spread = (N_POS + N_POS_VEL) / N_POS_VEL;
        for fx in 1 .. (N_POS_VEL + N_POS + 1) {
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
vel: None,
});
if fx % pos_spread == 0 {
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
vel: Some(world.vel.create(Velocity { dx: 0.0, dy: 0.0 })),
});
}
}
}
world
}
#[bench]
fn
|
(b: &mut Bencher) {
b.iter(build);
}
#[bench]
fn bench_update(b: &mut Bencher) {
let mut world = build();
b.iter(|| {
for e in &world.entities {
if let Some(ref vel) = e.vel {
let mut p = &mut world.pos[&e.pos];
let v = &world.vel[vel];
p.x += v.dx;
p.y += v.dy;
}
}
});
}
|
bench_build
|
identifier_name
|
ecs_pos_vel_spread.rs
|
#![feature(test)]
extern crate test;
extern crate froggy;
use test::Bencher;
use froggy::{Pointer, Storage};
mod bench_setup;
use bench_setup::{Position, N_POS_VEL, N_POS};
// Since component linking is not used in this bench,
// it has a custom Velocity component
struct Velocity {
pub dx: f32,
pub dy: f32,
}
struct Entity {
pos: Pointer<Position>,
vel: Option<Pointer<Velocity>>,
}
struct World {
pos: Storage<Position>,
vel: Storage<Velocity>,
entities: Vec<Entity>,
}
fn build() -> World {
let mut world = World {
pos: Storage::with_capacity(N_POS_VEL + N_POS),
vel: Storage::with_capacity(N_POS_VEL),
entities: Vec::with_capacity(N_POS_VEL + N_POS),
};
// setup entities
{
let pos_spread = (N_POS + N_POS_VEL) / N_POS_VEL;
        for fx in 1 .. (N_POS_VEL + N_POS + 1) {
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
vel: None,
});
if fx % pos_spread == 0
|
}
}
world
}
#[bench]
fn bench_build(b: &mut Bencher) {
b.iter(build);
}
#[bench]
fn bench_update(b: &mut Bencher) {
let mut world = build();
b.iter(|| {
for e in &world.entities {
if let Some(ref vel) = e.vel {
let mut p = &mut world.pos[&e.pos];
let v = &world.vel[vel];
p.x += v.dx;
p.y += v.dy;
}
}
});
}
|
{
world.entities.push(Entity {
pos: world.pos.create(Position { x: 0.0, y: 0.0 }),
vel: Some(world.vel.create(Velocity { dx: 0.0, dy: 0.0 })),
});
}
|
conditional_block
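As a hedged reduction of the froggy calls used in the benchmark above (Storage::with_capacity, create, and indexing by Pointer), the sketch below advances a single entity once; it relies only on the API surface visible in that code.
extern crate froggy;
use froggy::{Pointer, Storage};

struct Position { x: f32, y: f32 }
struct Velocity { dx: f32, dy: f32 }

fn main() {
    let mut pos: Storage<Position> = Storage::with_capacity(1);
    let mut vel: Storage<Velocity> = Storage::with_capacity(1);
    let p: Pointer<Position> = pos.create(Position { x: 0.0, y: 0.0 });
    let v: Pointer<Velocity> = vel.create(Velocity { dx: 1.0, dy: 2.0 });
    // Same update step as bench_update above, for one entity.
    {
        let q = &mut pos[&p];
        let w = &vel[&v];
        q.x += w.dx;
        q.y += w.dy;
    }
    assert_eq!(pos[&p].x, 1.0);
}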
|
detect_format_change.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use generate_format::Corpus;
use serde_reflection::Registry;
use std::collections::{btree_map::Entry, BTreeMap};
#[test]
fn analyze_serde_formats() {
let mut all_corpuses = BTreeMap::new();
for corpus in Corpus::values() {
// Compute the Serde formats of this corpus by analyzing the codebase.
let registry = corpus.get_registry();
// If the corpus was recorded on disk, test that the formats have not changed since then.
if let Some(path) = corpus.output_file() {
let content = std::fs::read_to_string(path).unwrap();
let expected = serde_yaml::from_str::<Registry>(content.as_str()).unwrap();
assert_registry_has_not_changed(&corpus.to_string(), path, registry.clone(), expected);
}
// Test that the definitions in all corpus are unique and pass the linter.
for (key, value) in registry {
assert_eq!(
generate_format::lint_bcs_format(&value),
Ok(()),
"In corpus {}: lint error while analyzing {}",
corpus.to_string(),
key
);
match all_corpuses.entry(key.clone()) {
Entry::Vacant(e) => {
e.insert(value);
}
Entry::Occupied(e) => assert_eq!(
e.get(),
&value,
"Type {} in corpus {} differs with previous definition in another corpus: {:?} vs {:?}",
key,
corpus.to_string(),
e.get(),
&value,
),
}
}
}
}
fn message(name: &str) -> String {
format!(
r#"
You may run `cargo run -p generate-format -- --corpus {} --record` to refresh the records.
Please verify the changes to the recorded file(s) and consider tagging your pull-request as `breaking`."#,
name
)
}
fn assert_registry_has_not_changed(name: &str, path: &str, registry: Registry, expected: Registry) {
for (key, value) in expected.iter() {
assert_eq!(
Some(value),
registry.get(key),
r#"
----
The recorded format for type `{}` was removed or does not match the recorded value in {}.{}
----
"#,
key,
path,
message(name),
);
}
|
for key in registry.keys() {
assert!(
expected.contains_key(key),
r#"
----
Type `{}` was added and has no recorded format in {} yet.{}
----
"#,
key,
path,
message(name),
);
}
}
#[test]
fn test_we_can_detect_changes_in_yaml() {
let yaml1 = r#"---
Person:
ENUM:
0:
NickName:
NEWTYPE:
STR
"#;
let yaml2 = r#"---
Person:
ENUM:
0:
NickName:
NEWTYPE:
STR
1:
FullName: UNIT
"#;
let value1 = serde_yaml::from_str::<Registry>(yaml1).unwrap();
let value2 = serde_yaml::from_str::<Registry>(yaml2).unwrap();
assert_ne!(value1, value2);
assert_ne!(value1.get("Person").unwrap(), value2.get("Person").unwrap());
}
|
random_line_split
|
|
detect_format_change.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use generate_format::Corpus;
use serde_reflection::Registry;
use std::collections::{btree_map::Entry, BTreeMap};
#[test]
fn analyze_serde_formats() {
let mut all_corpuses = BTreeMap::new();
for corpus in Corpus::values() {
// Compute the Serde formats of this corpus by analyzing the codebase.
let registry = corpus.get_registry();
// If the corpus was recorded on disk, test that the formats have not changed since then.
if let Some(path) = corpus.output_file() {
let content = std::fs::read_to_string(path).unwrap();
let expected = serde_yaml::from_str::<Registry>(content.as_str()).unwrap();
assert_registry_has_not_changed(&corpus.to_string(), path, registry.clone(), expected);
}
// Test that the definitions in all corpus are unique and pass the linter.
for (key, value) in registry {
assert_eq!(
generate_format::lint_bcs_format(&value),
Ok(()),
"In corpus {}: lint error while analyzing {}",
corpus.to_string(),
key
);
match all_corpuses.entry(key.clone()) {
Entry::Vacant(e) => {
e.insert(value);
}
Entry::Occupied(e) => assert_eq!(
e.get(),
&value,
"Type {} in corpus {} differs with previous definition in another corpus: {:?} vs {:?}",
key,
corpus.to_string(),
e.get(),
&value,
),
}
}
}
}
fn message(name: &str) -> String {
format!(
r#"
You may run `cargo run -p generate-format -- --corpus {} --record` to refresh the records.
Please verify the changes to the recorded file(s) and consider tagging your pull-request as `breaking`."#,
name
)
}
fn assert_registry_has_not_changed(name: &str, path: &str, registry: Registry, expected: Registry) {
for (key, value) in expected.iter() {
assert_eq!(
Some(value),
registry.get(key),
r#"
----
The recorded format for type `{}` was removed or does not match the recorded value in {}.{}
----
"#,
key,
path,
message(name),
);
}
for key in registry.keys() {
assert!(
expected.contains_key(key),
r#"
----
Type `{}` was added and has no recorded format in {} yet.{}
----
"#,
key,
path,
message(name),
);
}
}
#[test]
fn
|
() {
let yaml1 = r#"---
Person:
ENUM:
0:
NickName:
NEWTYPE:
STR
"#;
let yaml2 = r#"---
Person:
ENUM:
0:
NickName:
NEWTYPE:
STR
1:
FullName: UNIT
"#;
let value1 = serde_yaml::from_str::<Registry>(yaml1).unwrap();
let value2 = serde_yaml::from_str::<Registry>(yaml2).unwrap();
assert_ne!(value1, value2);
assert_ne!(value1.get("Person").unwrap(), value2.get("Person").unwrap());
}
|
test_we_can_detect_changes_in_yaml
|
identifier_name
|
detect_format_change.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use generate_format::Corpus;
use serde_reflection::Registry;
use std::collections::{btree_map::Entry, BTreeMap};
#[test]
fn analyze_serde_formats() {
let mut all_corpuses = BTreeMap::new();
for corpus in Corpus::values() {
// Compute the Serde formats of this corpus by analyzing the codebase.
let registry = corpus.get_registry();
// If the corpus was recorded on disk, test that the formats have not changed since then.
if let Some(path) = corpus.output_file() {
let content = std::fs::read_to_string(path).unwrap();
let expected = serde_yaml::from_str::<Registry>(content.as_str()).unwrap();
assert_registry_has_not_changed(&corpus.to_string(), path, registry.clone(), expected);
}
// Test that the definitions in all corpus are unique and pass the linter.
for (key, value) in registry {
assert_eq!(
generate_format::lint_bcs_format(&value),
Ok(()),
"In corpus {}: lint error while analyzing {}",
corpus.to_string(),
key
);
match all_corpuses.entry(key.clone()) {
Entry::Vacant(e) => {
e.insert(value);
}
Entry::Occupied(e) => assert_eq!(
e.get(),
&value,
"Type {} in corpus {} differs with previous definition in another corpus: {:?} vs {:?}",
key,
corpus.to_string(),
e.get(),
&value,
),
}
}
}
}
fn message(name: &str) -> String
|
fn assert_registry_has_not_changed(name: &str, path: &str, registry: Registry, expected: Registry) {
for (key, value) in expected.iter() {
assert_eq!(
Some(value),
registry.get(key),
r#"
----
The recorded format for type `{}` was removed or does not match the recorded value in {}.{}
----
"#,
key,
path,
message(name),
);
}
for key in registry.keys() {
assert!(
expected.contains_key(key),
r#"
----
Type `{}` was added and has no recorded format in {} yet.{}
----
"#,
key,
path,
message(name),
);
}
}
#[test]
fn test_we_can_detect_changes_in_yaml() {
let yaml1 = r#"---
Person:
ENUM:
0:
NickName:
NEWTYPE:
STR
"#;
let yaml2 = r#"---
Person:
ENUM:
0:
NickName:
NEWTYPE:
STR
1:
FullName: UNIT
"#;
let value1 = serde_yaml::from_str::<Registry>(yaml1).unwrap();
let value2 = serde_yaml::from_str::<Registry>(yaml2).unwrap();
assert_ne!(value1, value2);
assert_ne!(value1.get("Person").unwrap(), value2.get("Person").unwrap());
}
|
{
format!(
r#"
You may run `cargo run -p generate-format -- --corpus {} --record` to refresh the records.
Please verify the changes to the recorded file(s) and consider tagging your pull-request as `breaking`."#,
name
)
}
|
identifier_body
|
build.rs
|
//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
// limitations under the License.
//
extern crate prost_build;
fn main() {
let file_paths = [
"oak_functions/proto/abi.proto",
"oak_functions/proto/lookup_data.proto",
"oak_functions/proto/invocation.proto",
];
prost_build::compile_protos(&file_paths, &["../.."]).expect("Proto compilation failed");
// Tell cargo to rerun this build script if the proto file has changed.
// https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath
for proto_path in file_paths.iter() {
let file_path = std::path::Path::new(proto_path);
println!("cargo:rerun-if-changed=../../{}", file_path.display());
}
}
|
// See the License for the specific language governing permissions and
|
random_line_split
|
build.rs
|
//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
extern crate prost_build;
fn main()
|
{
let file_paths = [
"oak_functions/proto/abi.proto",
"oak_functions/proto/lookup_data.proto",
"oak_functions/proto/invocation.proto",
];
prost_build::compile_protos(&file_paths, &["../.."]).expect("Proto compilation failed");
// Tell cargo to rerun this build script if the proto file has changed.
// https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath
for proto_path in file_paths.iter() {
let file_path = std::path::Path::new(proto_path);
println!("cargo:rerun-if-changed=../../{}", file_path.display());
}
}
|
identifier_body
|
|
build.rs
|
//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
extern crate prost_build;
fn
|
() {
let file_paths = [
"oak_functions/proto/abi.proto",
"oak_functions/proto/lookup_data.proto",
"oak_functions/proto/invocation.proto",
];
prost_build::compile_protos(&file_paths, &["../.."]).expect("Proto compilation failed");
// Tell cargo to rerun this build script if the proto file has changed.
// https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath
for proto_path in file_paths.iter() {
let file_path = std::path::Path::new(proto_path);
println!("cargo:rerun-if-changed=../../{}", file_path.display());
}
}
|
main
|
identifier_name
|
client.rs
|
#![deny(warnings)]
#![allow(non_snake_case)]
#[allow(unused_imports)]
use std::vec::Vec;
use object;
use rest::url;
use super::connector::Connector;
pub struct GoodDataClient {
pub connector: Connector,
pub token: Option<String>,
pub environment: Option<String>,
pub driver: Option<String>,
pub user: Option<object::AccountSetting>,
pub projects: Option<Vec<object::Project>>,
}
impl Drop for GoodDataClient {
fn
|
(&mut self) {
self.disconnect();
}
}
#[allow(dead_code)]
#[allow(unused_variables)]
#[allow(unreachable_code)]
impl GoodDataClient {
/// Create Instance of GoodData Client
pub fn new(connector: Connector,
token: Option<String>,
environment: Option<String>,
driver: Option<String>)
-> GoodDataClient {
GoodDataClient {
connector: connector,
token: token,
environment: environment,
driver: driver,
user: None,
projects: None,
}
}
/// Get Connector
pub fn connector(&self) -> &Connector {
&self.connector
}
/// Get Projects
pub fn projects(&self) -> &Option<Vec<object::Project>> {
// self.projects_fetch();
&self.projects
}
/// Get user
pub fn user(&self) -> &Option<object::AccountSetting> {
&self.user
}
pub fn projects_fetch_if_none(&mut self) -> &Vec<object::Project> {
match self.projects {
Some(ref projects) => projects,
None => {
self.projects_fetch();
self.projects().as_ref().unwrap()
}
}
}
pub fn create_project(&mut self, project_create: object::ProjectCreate) {
let project =
self.user.as_ref().unwrap().project_create(&mut self.connector, project_create);
match project {
Some(p) => self.projects.as_mut().unwrap().push(p),
None => {}
}
}
pub fn delete_project(&mut self, project_delete: object::Project) {
let res = self.user.as_ref().unwrap().project_delete(&mut self.connector, project_delete);
}
pub fn projects_fetch(&mut self) {
let projects = self.user.as_ref().unwrap().projects(&mut self.connector);
self.projects = match projects {
Some(p) => Some(p.projects),
None => None,
}
}
pub fn report_csv(&mut self, report_definition: String) -> String {
let payload = object::ReportReq {
report_req: object::ReportReqBody { reportDefinition: report_definition },
};
let uri = self.connector
.object_by_post::<object::ReportReq, object::Uri>(url::PROJECT_EXECUTE_RAW.to_string(),
payload);
let mut result = self.connector.get(uri.unwrap().uri);
self.connector.get_content(&mut result)
}
/// Login to GoodData platform
pub fn connect<S: Into<String>>(&mut self, username: S, password: S) {
let payload = object::PostUserLogin {
postUserLogin: object::PostUserLoginBody {
login: Some(username.into()),
password: Some(password.into()),
remember: Some("0".into()),
},
};
let user_login = self.connector
.object_by_post::<object::PostUserLogin, object::UserLogin>(url::LOGIN.to_string(),
payload);
let profile_link = user_login.unwrap().userLogin.profile;
self.connector.refresh_token();
let user = self.connector.object_by_get::<object::AccountSetting>(profile_link).unwrap();
self.user = Some(user);
// let csv = self.report_csv("/gdc/md/GoodSalesDemo/obj/30834".to_string());
// debug!("CSV: {}", csv);
}
pub fn disconnect(&mut self) {
info!("GoodDataClient::disconnect() - Disconnecting from GoodData Platform");
self.user = None;
self.projects = None;
}
}
|
drop
|
identifier_name
|
client.rs
|
#![deny(warnings)]
#![allow(non_snake_case)]
#[allow(unused_imports)]
use std::vec::Vec;
use object;
use rest::url;
use super::connector::Connector;
pub struct GoodDataClient {
pub connector: Connector,
pub token: Option<String>,
pub environment: Option<String>,
pub driver: Option<String>,
pub user: Option<object::AccountSetting>,
pub projects: Option<Vec<object::Project>>,
}
impl Drop for GoodDataClient {
fn drop(&mut self) {
self.disconnect();
}
}
#[allow(dead_code)]
#[allow(unused_variables)]
#[allow(unreachable_code)]
impl GoodDataClient {
/// Create Instance of GoodData Client
pub fn new(connector: Connector,
token: Option<String>,
environment: Option<String>,
driver: Option<String>)
-> GoodDataClient {
GoodDataClient {
connector: connector,
token: token,
environment: environment,
driver: driver,
user: None,
projects: None,
}
}
/// Get Connector
pub fn connector(&self) -> &Connector {
&self.connector
}
/// Get Projects
pub fn projects(&self) -> &Option<Vec<object::Project>> {
// self.projects_fetch();
&self.projects
}
/// Get user
pub fn user(&self) -> &Option<object::AccountSetting> {
&self.user
}
pub fn projects_fetch_if_none(&mut self) -> &Vec<object::Project> {
match self.projects {
Some(ref projects) => projects,
None => {
self.projects_fetch();
self.projects().as_ref().unwrap()
}
}
}
pub fn create_project(&mut self, project_create: object::ProjectCreate) {
let project =
self.user.as_ref().unwrap().project_create(&mut self.connector, project_create);
match project {
Some(p) => self.projects.as_mut().unwrap().push(p),
|
let res = self.user.as_ref().unwrap().project_delete(&mut self.connector, project_delete);
}
pub fn projects_fetch(&mut self) {
let projects = self.user.as_ref().unwrap().projects(&mut self.connector);
self.projects = match projects {
Some(p) => Some(p.projects),
None => None,
}
}
pub fn report_csv(&mut self, report_definition: String) -> String {
let payload = object::ReportReq {
report_req: object::ReportReqBody { reportDefinition: report_definition },
};
let uri = self.connector
.object_by_post::<object::ReportReq, object::Uri>(url::PROJECT_EXECUTE_RAW.to_string(),
payload);
let mut result = self.connector.get(uri.unwrap().uri);
self.connector.get_content(&mut result)
}
/// Login to GoodData platform
pub fn connect<S: Into<String>>(&mut self, username: S, password: S) {
let payload = object::PostUserLogin {
postUserLogin: object::PostUserLoginBody {
login: Some(username.into()),
password: Some(password.into()),
remember: Some("0".into()),
},
};
let user_login = self.connector
.object_by_post::<object::PostUserLogin, object::UserLogin>(url::LOGIN.to_string(),
payload);
let profile_link = user_login.unwrap().userLogin.profile;
self.connector.refresh_token();
let user = self.connector.object_by_get::<object::AccountSetting>(profile_link).unwrap();
self.user = Some(user);
// let csv = self.report_csv("/gdc/md/GoodSalesDemo/obj/30834".to_string());
// debug!("CSV: {}", csv);
}
pub fn disconnect(&mut self) {
info!("GoodDataClient::disconnect() - Disconnecting from GoodData Platform");
self.user = None;
self.projects = None;
}
}
|
None => {}
}
}
pub fn delete_project(&mut self, project_delete: object::Project) {
|
random_line_split
|
client.rs
|
#![deny(warnings)]
#![allow(non_snake_case)]
#[allow(unused_imports)]
use std::vec::Vec;
use object;
use rest::url;
use super::connector::Connector;
pub struct GoodDataClient {
pub connector: Connector,
pub token: Option<String>,
pub environment: Option<String>,
pub driver: Option<String>,
pub user: Option<object::AccountSetting>,
pub projects: Option<Vec<object::Project>>,
}
impl Drop for GoodDataClient {
fn drop(&mut self) {
self.disconnect();
}
}
#[allow(dead_code)]
#[allow(unused_variables)]
#[allow(unreachable_code)]
impl GoodDataClient {
/// Create Instance of GoodData Client
pub fn new(connector: Connector,
token: Option<String>,
environment: Option<String>,
driver: Option<String>)
-> GoodDataClient {
GoodDataClient {
connector: connector,
token: token,
environment: environment,
driver: driver,
user: None,
projects: None,
}
}
/// Get Connector
pub fn connector(&self) -> &Connector {
&self.connector
}
/// Get Projects
pub fn projects(&self) -> &Option<Vec<object::Project>> {
// self.projects_fetch();
&self.projects
}
/// Get user
pub fn user(&self) -> &Option<object::AccountSetting>
|
pub fn projects_fetch_if_none(&mut self) -> &Vec<object::Project> {
match self.projects {
Some(ref projects) => projects,
None => {
self.projects_fetch();
self.projects().as_ref().unwrap()
}
}
}
pub fn create_project(&mut self, project_create: object::ProjectCreate) {
let project =
self.user.as_ref().unwrap().project_create(&mut self.connector, project_create);
match project {
Some(p) => self.projects.as_mut().unwrap().push(p),
None => {}
}
}
pub fn delete_project(&mut self, project_delete: object::Project) {
let res = self.user.as_ref().unwrap().project_delete(&mut self.connector, project_delete);
}
pub fn projects_fetch(&mut self) {
let projects = self.user.as_ref().unwrap().projects(&mut self.connector);
self.projects = match projects {
Some(p) => Some(p.projects),
None => None,
}
}
pub fn report_csv(&mut self, report_definition: String) -> String {
let payload = object::ReportReq {
report_req: object::ReportReqBody { reportDefinition: report_definition },
};
let uri = self.connector
.object_by_post::<object::ReportReq, object::Uri>(url::PROJECT_EXECUTE_RAW.to_string(),
payload);
let mut result = self.connector.get(uri.unwrap().uri);
self.connector.get_content(&mut result)
}
/// Login to GoodData platform
pub fn connect<S: Into<String>>(&mut self, username: S, password: S) {
let payload = object::PostUserLogin {
postUserLogin: object::PostUserLoginBody {
login: Some(username.into()),
password: Some(password.into()),
remember: Some("0".into()),
},
};
let user_login = self.connector
.object_by_post::<object::PostUserLogin, object::UserLogin>(url::LOGIN.to_string(),
payload);
let profile_link = user_login.unwrap().userLogin.profile;
self.connector.refresh_token();
let user = self.connector.object_by_get::<object::AccountSetting>(profile_link).unwrap();
self.user = Some(user);
// let csv = self.report_csv("/gdc/md/GoodSalesDemo/obj/30834".to_string());
// debug!("CSV: {}", csv);
}
pub fn disconnect(&mut self) {
info!("GoodDataClient::disconnect() - Disconnecting from GoodData Platform");
self.user = None;
self.projects = None;
}
}
|
{
&self.user
}
|
identifier_body
|
packed-struct-size-xc.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:packed.rs
extern crate packed;
use std::mem;
macro_rules! check {
($t:ty, $align:expr, $size:expr) => ({
assert_eq!(mem::align_of::<$t>(), $align);
assert_eq!(mem::size_of::<$t>(), $size);
});
}
pub fn
|
() {
check!(packed::P1S5, 1, 5);
check!(packed::P2S6, 2, 6);
check!(packed::P2CS8, 2, 8);
}
|
main
|
identifier_name
|
packed-struct-size-xc.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:packed.rs
extern crate packed;
use std::mem;
macro_rules! check {
($t:ty, $align:expr, $size:expr) => ({
assert_eq!(mem::align_of::<$t>(), $align);
assert_eq!(mem::size_of::<$t>(), $size);
});
}
pub fn main() {
check!(packed::P1S5, 1, 5);
|
check!(packed::P2S6, 2, 6);
check!(packed::P2CS8, 2, 8);
}
|
random_line_split
|
|
packed-struct-size-xc.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:packed.rs
extern crate packed;
use std::mem;
macro_rules! check {
($t:ty, $align:expr, $size:expr) => ({
assert_eq!(mem::align_of::<$t>(), $align);
assert_eq!(mem::size_of::<$t>(), $size);
});
}
pub fn main()
|
{
check!(packed::P1S5, 1, 5);
check!(packed::P2S6, 2, 6);
check!(packed::P2CS8, 2, 8);
}
|
identifier_body
|
|
props.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use dbus::arg::IterAppend;
use dbus_tree::{MTSync, MethodErr, PropInfo};
use crate::{
dbus_api::{
api::shared::{self, get_manager_property},
types::TData,
},
engine::Engine,
stratis::VERSION,
};
pub fn
|
<E>(
i: &mut IterAppend<'_>,
_: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
i.append(VERSION);
Ok(())
}
pub fn get_locked_pools<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_manager_property(i, p, |e| Ok(shared::locked_pools_prop(e)))
}
|
get_version
|
identifier_name
|
props.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use dbus::arg::IterAppend;
use dbus_tree::{MTSync, MethodErr, PropInfo};
use crate::{
dbus_api::{
api::shared::{self, get_manager_property},
types::TData,
},
engine::Engine,
stratis::VERSION,
};
pub fn get_version<E>(
i: &mut IterAppend<'_>,
_: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
|
pub fn get_locked_pools<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_manager_property(i, p, |e| Ok(shared::locked_pools_prop(e)))
}
|
{
i.append(VERSION);
Ok(())
}
|
identifier_body
|
props.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use dbus::arg::IterAppend;
|
use crate::{
dbus_api::{
api::shared::{self, get_manager_property},
types::TData,
},
engine::Engine,
stratis::VERSION,
};
pub fn get_version<E>(
i: &mut IterAppend<'_>,
_: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
i.append(VERSION);
Ok(())
}
pub fn get_locked_pools<E>(
i: &mut IterAppend<'_>,
p: &PropInfo<'_, MTSync<TData<E>>, TData<E>>,
) -> Result<(), MethodErr>
where
E: Engine,
{
get_manager_property(i, p, |e| Ok(shared::locked_pools_prop(e)))
}
|
use dbus_tree::{MTSync, MethodErr, PropInfo};
|
random_line_split
|
sgx.rs
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::{
ffi::CString,
os::raw::{c_char, c_int},
};
pub use crate::threading::tvm_run_worker as run_worker;
use crate::{threading::sgx_join_threads, SystemLibModule, TVMArgValue, TVMRetValue};
use errors::SgxError;
use ffi::TVMValue;
#[macro_export]
macro_rules! tvm_ocall {
($func: expr) => {
match $func {
0 => Ok(()),
code => Err(SgxError { code }),
}
};
}
pub type SgxStatus = u32;
#[cfg(target_env = "sgx")]
extern "C" {
fn tvm_ocall_packed_func(
name: *const c_char,
arg_values: *const TVMValue,
type_codes: *const c_int,
num_args: c_int,
ret_val: *mut TVMValue,
ret_type_code: *mut c_int,
) -> SgxStatus;
}
pub fn ocall_packed_func<S: AsRef<str>>(
fn_name: S,
args: &[TVMArgValue],
) -> Result<TVMRetValue, SgxError> {
let mut ret_val = TVMValue { v_int64: 0 };
let ret_type_code = 0i64;
unsafe {
tvm_ocall!(tvm_ocall_packed_func(
CString::new(fn_name.as_ref()).unwrap().as_ptr(),
args.iter()
.map(|ref arg| arg.value)
.collect::<Vec<TVMValue>>()
.as_ptr(),
args.iter()
.map(|ref arg| arg.type_code as i32)
.collect::<Vec<i32>>()
.as_ptr() as *const i32,
args.len() as i32,
&mut ret_val as *mut TVMValue,
&mut (ret_type_code as i32) as *mut c_int,
))?;
}
Ok(TVMRetValue::from_tvm_value(ret_val, ret_type_code as i64))
}
#[macro_export]
macro_rules! ocall_packed {
($fn_name:expr, $($args:expr),+) => {
$crate::sgx::ocall_packed_func($fn_name, &[$($args.into(),)+])
.expect(concat!("Error calling `", $fn_name, "`"))
};
($fn_name:expr) => {
$crate::sgx::ocall_packed_func($fn_name, &Vec::new())
.expect(concat!("Error calling `", $fn_name, "`"))
}
}
pub fn shutdown() {
    if env!("TVM_NUM_THREADS") != "0"
|
}
impl Drop for SystemLibModule {
fn drop(&mut self) {
shutdown()
}
}
|
{
sgx_join_threads()
}
|
conditional_block
|
sgx.rs
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::{
ffi::CString,
os::raw::{c_char, c_int},
};
pub use crate::threading::tvm_run_worker as run_worker;
use crate::{threading::sgx_join_threads, SystemLibModule, TVMArgValue, TVMRetValue};
use errors::SgxError;
use ffi::TVMValue;
#[macro_export]
macro_rules! tvm_ocall {
($func: expr) => {
match $func {
0 => Ok(()),
code => Err(SgxError { code }),
}
};
}
pub type SgxStatus = u32;
#[cfg(target_env = "sgx")]
extern "C" {
fn tvm_ocall_packed_func(
name: *const c_char,
arg_values: *const TVMValue,
type_codes: *const c_int,
num_args: c_int,
ret_val: *mut TVMValue,
ret_type_code: *mut c_int,
) -> SgxStatus;
}
pub fn ocall_packed_func<S: AsRef<str>>(
fn_name: S,
args: &[TVMArgValue],
) -> Result<TVMRetValue, SgxError> {
let mut ret_val = TVMValue { v_int64: 0 };
let ret_type_code = 0i64;
unsafe {
tvm_ocall!(tvm_ocall_packed_func(
CString::new(fn_name.as_ref()).unwrap().as_ptr(),
args.iter()
.map(|ref arg| arg.value)
.collect::<Vec<TVMValue>>()
.as_ptr(),
args.iter()
.map(|ref arg| arg.type_code as i32)
.collect::<Vec<i32>>()
.as_ptr() as *const i32,
args.len() as i32,
&mut ret_val as *mut TVMValue,
&mut (ret_type_code as i32) as *mut c_int,
))?;
}
Ok(TVMRetValue::from_tvm_value(ret_val, ret_type_code as i64))
}
#[macro_export]
macro_rules! ocall_packed {
($fn_name:expr, $($args:expr),+) => {
$crate::sgx::ocall_packed_func($fn_name, &[$($args.into(),)+])
.expect(concat!("Error calling `", $fn_name, "`"))
};
($fn_name:expr) => {
$crate::sgx::ocall_packed_func($fn_name, &Vec::new())
.expect(concat!("Error calling `", $fn_name, "`"))
}
}
pub fn shutdown()
|
impl Drop for SystemLibModule {
fn drop(&mut self) {
shutdown()
}
}
|
{
if env!("TVM_NUM_THREADS") != "0" {
sgx_join_threads()
}
}
|
identifier_body
|
sgx.rs
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::{
ffi::CString,
os::raw::{c_char, c_int},
};
pub use crate::threading::tvm_run_worker as run_worker;
use crate::{threading::sgx_join_threads, SystemLibModule, TVMArgValue, TVMRetValue};
use errors::SgxError;
use ffi::TVMValue;
#[macro_export]
macro_rules! tvm_ocall {
($func: expr) => {
match $func {
0 => Ok(()),
code => Err(SgxError { code }),
}
};
}
pub type SgxStatus = u32;
#[cfg(target_env = "sgx")]
extern "C" {
fn tvm_ocall_packed_func(
name: *const c_char,
arg_values: *const TVMValue,
type_codes: *const c_int,
num_args: c_int,
ret_val: *mut TVMValue,
ret_type_code: *mut c_int,
) -> SgxStatus;
}
pub fn ocall_packed_func<S: AsRef<str>>(
fn_name: S,
args: &[TVMArgValue],
) -> Result<TVMRetValue, SgxError> {
let mut ret_val = TVMValue { v_int64: 0 };
let ret_type_code = 0i64;
unsafe {
tvm_ocall!(tvm_ocall_packed_func(
CString::new(fn_name.as_ref()).unwrap().as_ptr(),
args.iter()
.map(|ref arg| arg.value)
.collect::<Vec<TVMValue>>()
.as_ptr(),
args.iter()
.map(|ref arg| arg.type_code as i32)
.collect::<Vec<i32>>()
.as_ptr() as *const i32,
args.len() as i32,
&mut ret_val as *mut TVMValue,
&mut (ret_type_code as i32) as *mut c_int,
))?;
}
Ok(TVMRetValue::from_tvm_value(ret_val, ret_type_code as i64))
}
#[macro_export]
macro_rules! ocall_packed {
($fn_name:expr, $($args:expr),+) => {
$crate::sgx::ocall_packed_func($fn_name, &[$($args.into(),)+])
.expect(concat!("Error calling `", $fn_name, "`"))
};
($fn_name:expr) => {
$crate::sgx::ocall_packed_func($fn_name, &Vec::new())
.expect(concat!("Error calling `", $fn_name, "`"))
}
}
pub fn shutdown() {
    if env!("TVM_NUM_THREADS") != "0" {
sgx_join_threads()
}
}
impl Drop for SystemLibModule {
fn
|
(&mut self) {
shutdown()
}
}
|
drop
|
identifier_name
|
sgx.rs
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
|
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::{
ffi::CString,
os::raw::{c_char, c_int},
};
pub use crate::threading::tvm_run_worker as run_worker;
use crate::{threading::sgx_join_threads, SystemLibModule, TVMArgValue, TVMRetValue};
use errors::SgxError;
use ffi::TVMValue;
#[macro_export]
macro_rules! tvm_ocall {
($func: expr) => {
match $func {
0 => Ok(()),
code => Err(SgxError { code }),
}
};
}
pub type SgxStatus = u32;
#[cfg(target_env = "sgx")]
extern "C" {
fn tvm_ocall_packed_func(
name: *const c_char,
arg_values: *const TVMValue,
type_codes: *const c_int,
num_args: c_int,
ret_val: *mut TVMValue,
ret_type_code: *mut c_int,
) -> SgxStatus;
}
pub fn ocall_packed_func<S: AsRef<str>>(
fn_name: S,
args: &[TVMArgValue],
) -> Result<TVMRetValue, SgxError> {
let mut ret_val = TVMValue { v_int64: 0 };
let ret_type_code = 0i64;
unsafe {
tvm_ocall!(tvm_ocall_packed_func(
CString::new(fn_name.as_ref()).unwrap().as_ptr(),
args.iter()
.map(|ref arg| arg.value)
.collect::<Vec<TVMValue>>()
.as_ptr(),
args.iter()
.map(|ref arg| arg.type_code as i32)
.collect::<Vec<i32>>()
.as_ptr() as *const i32,
args.len() as i32,
&mut ret_val as *mut TVMValue,
&mut (ret_type_code as i32) as *mut c_int,
))?;
}
Ok(TVMRetValue::from_tvm_value(ret_val, ret_type_code as i64))
}
#[macro_export]
macro_rules! ocall_packed {
($fn_name:expr, $($args:expr),+) => {
$crate::sgx::ocall_packed_func($fn_name, &[$($args.into(),)+])
.expect(concat!("Error calling `", $fn_name, "`"))
};
($fn_name:expr) => {
$crate::sgx::ocall_packed_func($fn_name, &Vec::new())
.expect(concat!("Error calling `", $fn_name, "`"))
}
}
pub fn shutdown() {
    if env!("TVM_NUM_THREADS") != "0" {
sgx_join_threads()
}
}
impl Drop for SystemLibModule {
fn drop(&mut self) {
shutdown()
}
}
|
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
|
random_line_split
|
cstool.rs
|
//! Disassembles machine code
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::process::exit;
use std::str::FromStr;
use capstone::{self, prelude::*, Arch, Endian, EnumList, ExtraMode, Mode};
use clap::{App, Arg, ArgGroup};
use log::{debug, info};
use stderrlog;
const DEFAULT_CAPACITY: usize = 1024;
trait ExpectExit<T> {
fn expect_exit(self) -> T;
}
impl<T, E> ExpectExit<T> for Result<T, E>
where
E: Display,
{
fn expect_exit(self) -> T {
match self {
Ok(t) => t,
Err(e) => {
eprintln!("error: {}", e);
exit(1);
}
}
}
}
/// Print register names
fn reg_names<T, I>(cs: &Capstone, regs: T) -> String
where
T: Iterator<Item = I>,
I: Into<RegId>,
{
let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect();
names.join(", ")
}
/// Print instruction group names
fn group_names<T, I>(cs: &Capstone, regs: T) -> String
where
T: Iterator<Item = I>,
I: Into<InsnGroupId>,
{
let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect();
names.join(", ")
}
/// Select only hex bytes from input
fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> {
let mut output: Vec<u8> = Vec::new();
let mut curr_byte_str = String::with_capacity(2);
for b_u8 in input {
let b = char::from(b_u8);
if ('0' <= b && b <= '9') || ('a' <= b && b <= 'f') || ('A' <= b && b <= 'F') {
curr_byte_str.push(b);
}
if curr_byte_str.len() == 2 {
debug!(" curr_byte_str={:?}", curr_byte_str);
            let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpected hex parse error");
output.push(byte);
curr_byte_str.clear();
}
}
if log::max_level() >= log::LevelFilter::Info {
let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect();
info!("unhexed_output = {:?}", output_hex);
}
output
}
fn disasm<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
code: &[u8],
addr: u64,
show_detail: bool,
) {
info!("Got {} bytes", code.len());
let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit();
if show_detail {
cs.set_detail(true).expect("Failed to set detail");
}
let stdout = io::stdout();
let mut handle = stdout.lock();
for i in cs.disasm_all(code, addr).expect_exit().iter() {
let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect();
let bytes = bytes.join(" ");
let _ = writeln!(
&mut handle,
"{:-10x}: {:35} {:7} {}",
i.address(),
bytes,
i.mnemonic().unwrap(),
i.op_str().unwrap_or("")
)
.is_ok();
if show_detail {
let detail = cs.insn_detail(&i).expect("Failed to get insn detail");
let output: &[(&str, String)] = &[
("insn id:", format!("{:?}", i.id().0)),
("read regs:", reg_names(&cs, detail.regs_read())),
("write regs:", reg_names(&cs, detail.regs_write())),
("insn groups:", group_names(&cs, detail.groups())),
];
for &(ref name, ref message) in output.iter() {
let _ = writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok();
}
}
}
}
const FILE_ARG: &str = "file";
const STDIN_ARG: &str = "stdin";
const CODE_ARG: &str = "code";
const ADDRESS_ARG: &str = "address";
const VERBOSE_ARG: &str = "verbose";
const HEX_ARG: &str = "hex";
const DETAIL_ARG: &str = "detail";
const ARCH_ARG: &str = "arch";
const MODE_ARG: &str = "mode";
const EXTRA_MODE_ARG: &str = "extra";
const ENDIAN_ARG: &str = "endian";
const AFTER_HELP: &str = r#"
Example:
# Disassemble 32-bit X86 (non-hex characters are ignored)
cstool --arch x86 --mode mode32 --code "90 42 e812345678"
1000: 90 nop
1001: 42 inc edx
1002: e8 12 34 56 78 call 0x78564419
"#;
fn main() {
// Lowercase arches
let _arches: Vec<String> = Arch::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect();
// Lowercase modes
let _modes: Vec<String> = Mode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect();
// Lowercase extra modes
let _extra_modes: Vec<String> = ExtraMode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect();
let matches = App::new("capstone-rs disassembler tool")
.about("Disassembles binary file")
.after_help(AFTER_HELP)
.arg(
Arg::with_name(FILE_ARG)
.short("f")
.long(FILE_ARG)
.help("input file with binary instructions")
.takes_value(true),
)
.arg(
Arg::with_name(STDIN_ARG)
.short("s")
.long(STDIN_ARG)
.help("read binary instructions from stdin")
.takes_value(false),
)
.arg(
Arg::with_name(CODE_ARG)
.short("c")
.long(CODE_ARG)
.help("instruction bytes (implies --hex)")
.takes_value(true),
)
.arg(
Arg::with_name(ADDRESS_ARG)
.short("r")
.long("addr")
.help("address of code")
.takes_value(true),
)
.arg(
Arg::with_name(VERBOSE_ARG)
.short("v")
.multiple(true)
.help("Sets the level of verbosity"),
)
.arg(
Arg::with_name(HEX_ARG)
.short("x")
.long(HEX_ARG)
.help("Treat input has hex; only select characters that are [a-fA-F0-9]")
.takes_value(false),
)
.arg(
Arg::with_name(DETAIL_ARG)
.short("d")
.long(DETAIL_ARG)
.help("Print details about instructions")
.takes_value(false),
)
.arg(
Arg::with_name(ARCH_ARG)
.short("a")
.long(ARCH_ARG)
.help("Architecture")
.takes_value(true)
.required(true)
.possible_values(arches.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(MODE_ARG)
.short("m")
.long(MODE_ARG)
.help(MODE_ARG)
.takes_value(true)
.required(true)
.possible_values(modes.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(EXTRA_MODE_ARG)
.short("e")
.long(EXTRA_MODE_ARG)
.help("Extra Mode")
.takes_value(true)
.required(false)
.possible_values(extra_modes.as_slice())
.case_insensitive(true)
.multiple(true),
)
.arg(
Arg::with_name(ENDIAN_ARG)
.short("n")
.long(ENDIAN_ARG)
.help("Endianness")
.takes_value(true)
.required(false)
.possible_values(&["little", "big"])
.case_insensitive(true),
)
.group(
ArgGroup::with_name("input")
.arg(FILE_ARG)
.arg(STDIN_ARG)
.arg(CODE_ARG)
.required(true),
)
.get_matches();
let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of(FILE_ARG) {
let mut file = File::open(file_path).expect_exit();
let capacity = match file.metadata() {
Err(_) => DEFAULT_CAPACITY,
Ok(metadata) => metadata.len() as usize,
};
let mut buf = Vec::with_capacity(capacity as usize);
file.read_to_end(&mut buf).expect_exit();
buf
} else if let Some(code) = matches.value_of(CODE_ARG) {
code.as_bytes().iter().map(|x| *x).collect()
} else {
let mut buf = Vec::with_capacity(DEFAULT_CAPACITY);
let stdin = std::io::stdin();
stdin.lock().read_to_end(&mut buf).expect_exit();
buf
};
stderrlog::new()
.verbosity(matches.occurrences_of(VERBOSE_ARG) as usize)
.init()
|
let is_hex = matches.is_present(HEX_ARG) || matches.is_present(CODE_ARG);
info!("is_hex = {:?}", is_hex);
let show_detail = matches.is_present(DETAIL_ARG);
info!("show_detail = {:?}", show_detail);
let arch: Arch = Arch::from_str(matches.value_of(ARCH_ARG).unwrap())
.unwrap()
.into();
info!("Arch = {:?}", arch);
let mode: Mode = Mode::from_str(matches.value_of(MODE_ARG).unwrap())
.unwrap()
.into();
info!("Mode = {:?}", mode);
let extra_mode: Vec<_> = match matches.values_of(EXTRA_MODE_ARG) {
None => Vec::with_capacity(0),
Some(x) => x
.map(|x| ExtraMode::from(ExtraMode::from_str(x).unwrap()))
.collect(),
};
info!("ExtraMode = {:?}", extra_mode);
let endian: Option<Endian> = matches
.value_of(ENDIAN_ARG)
.map(|x| Endian::from_str(x).expect_exit());
info!("Endian = {:?}", endian);
let address =
u64::from_str_radix(matches.value_of(ADDRESS_ARG).unwrap_or("1000"), 16).expect_exit();
info!("Address = 0x{:x}", address);
let input_bytes = if is_hex {
unhexed_bytes(direct_input_bytes)
} else {
direct_input_bytes
};
disasm(
arch,
mode,
extra_mode.iter().map(|x| *x),
endian,
input_bytes.as_slice(),
address,
show_detail,
);
}
|
.unwrap();
|
random_line_split
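The cstool rows above drive Capstone through Capstone::new_raw and disasm_all; the hedged sketch below reduces that path to the fixed x86 byte string from the AFTER_HELP example, using only calls visible above (the Arch::X86 and Mode::Mode32 variant names are assumptions).
use capstone::{Arch, Capstone, ExtraMode, Mode};

fn main() {
    // Bytes from the "90 42 e812345678" example in AFTER_HELP.
    let code = [0x90u8, 0x42, 0xe8, 0x12, 0x34, 0x56, 0x78];
    let cs = Capstone::new_raw(Arch::X86, Mode::Mode32, std::iter::empty::<ExtraMode>(), None)
        .expect("failed to create Capstone handle");
    for i in cs.disasm_all(&code, 0x1000).expect("disassembly failed").iter() {
        println!("{:x}: {} {}", i.address(), i.mnemonic().unwrap(), i.op_str().unwrap_or(""));
    }
}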
|
cstool.rs
|
//! Disassembles machine code
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::process::exit;
use std::str::FromStr;
use capstone::{self, prelude::*, Arch, Endian, EnumList, ExtraMode, Mode};
use clap::{App, Arg, ArgGroup};
use log::{debug, info};
use stderrlog;
const DEFAULT_CAPACITY: usize = 1024;
trait ExpectExit<T> {
fn expect_exit(self) -> T;
}
impl<T, E> ExpectExit<T> for Result<T, E>
where
E: Display,
{
fn expect_exit(self) -> T
|
}
/// Print register names
fn reg_names<T, I>(cs: &Capstone, regs: T) -> String
where
T: Iterator<Item = I>,
I: Into<RegId>,
{
let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect();
names.join(", ")
}
/// Print instruction group names
fn group_names<T, I>(cs: &Capstone, regs: T) -> String
where
T: Iterator<Item = I>,
I: Into<InsnGroupId>,
{
let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect();
names.join(", ")
}
/// Select only hex bytes from input
fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> {
let mut output: Vec<u8> = Vec::new();
let mut curr_byte_str = String::with_capacity(2);
for b_u8 in input {
let b = char::from(b_u8);
if ('0' <= b && b <= '9') || ('a' <= b && b <= 'f') || ('A' <= b && b <= 'F') {
curr_byte_str.push(b);
}
if curr_byte_str.len() == 2 {
debug!(" curr_byte_str={:?}", curr_byte_str);
            let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpected hex parse error");
output.push(byte);
curr_byte_str.clear();
}
}
if log::max_level() >= log::LevelFilter::Info {
let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect();
info!("unhexed_output = {:?}", output_hex);
}
output
}
fn disasm<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
code: &[u8],
addr: u64,
show_detail: bool,
) {
info!("Got {} bytes", code.len());
let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit();
if show_detail {
cs.set_detail(true).expect("Failed to set detail");
}
let stdout = io::stdout();
let mut handle = stdout.lock();
for i in cs.disasm_all(code, addr).expect_exit().iter() {
let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect();
let bytes = bytes.join(" ");
let _ = writeln!(
&mut handle,
"{:-10x}: {:35} {:7} {}",
i.address(),
bytes,
i.mnemonic().unwrap(),
i.op_str().unwrap_or("")
)
.is_ok();
if show_detail {
let detail = cs.insn_detail(&i).expect("Failed to get insn detail");
let output: &[(&str, String)] = &[
("insn id:", format!("{:?}", i.id().0)),
("read regs:", reg_names(&cs, detail.regs_read())),
("write regs:", reg_names(&cs, detail.regs_write())),
("insn groups:", group_names(&cs, detail.groups())),
];
for &(ref name, ref message) in output.iter() {
let _ = writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok();
}
}
}
}
const FILE_ARG: &str = "file";
const STDIN_ARG: &str = "stdin";
const CODE_ARG: &str = "code";
const ADDRESS_ARG: &str = "address";
const VERBOSE_ARG: &str = "verbose";
const HEX_ARG: &str = "hex";
const DETAIL_ARG: &str = "detail";
const ARCH_ARG: &str = "arch";
const MODE_ARG: &str = "mode";
const EXTRA_MODE_ARG: &str = "extra";
const ENDIAN_ARG: &str = "endian";
const AFTER_HELP: &str = r#"
Example:
# Disassemble 32-bit X86 (non-hex characters are ignored)
cstool --arch x86 --mode mode32 --code "90 42 e812345678"
1000: 90 nop
1001: 42 inc edx
1002: e8 12 34 56 78 call 0x78564419
"#;
fn main() {
// Lowercase arches
let _arches: Vec<String> = Arch::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect();
// Lowercase modes
let _modes: Vec<String> = Mode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect();
// Lowercase extra modes
let _extra_modes: Vec<String> = ExtraMode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect();
let matches = App::new("capstone-rs disassembler tool")
.about("Disassembles binary file")
.after_help(AFTER_HELP)
.arg(
Arg::with_name(FILE_ARG)
.short("f")
.long(FILE_ARG)
.help("input file with binary instructions")
.takes_value(true),
)
.arg(
Arg::with_name(STDIN_ARG)
.short("s")
.long(STDIN_ARG)
.help("read binary instructions from stdin")
.takes_value(false),
)
.arg(
Arg::with_name(CODE_ARG)
.short("c")
.long(CODE_ARG)
.help("instruction bytes (implies --hex)")
.takes_value(true),
)
.arg(
Arg::with_name(ADDRESS_ARG)
.short("r")
.long("addr")
.help("address of code")
.takes_value(true),
)
.arg(
Arg::with_name(VERBOSE_ARG)
.short("v")
.multiple(true)
.help("Sets the level of verbosity"),
)
.arg(
Arg::with_name(HEX_ARG)
.short("x")
.long(HEX_ARG)
.help("Treat input has hex; only select characters that are [a-fA-F0-9]")
.takes_value(false),
)
.arg(
Arg::with_name(DETAIL_ARG)
.short("d")
.long(DETAIL_ARG)
.help("Print details about instructions")
.takes_value(false),
)
.arg(
Arg::with_name(ARCH_ARG)
.short("a")
.long(ARCH_ARG)
.help("Architecture")
.takes_value(true)
.required(true)
.possible_values(arches.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(MODE_ARG)
.short("m")
.long(MODE_ARG)
.help(MODE_ARG)
.takes_value(true)
.required(true)
.possible_values(modes.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(EXTRA_MODE_ARG)
.short("e")
.long(EXTRA_MODE_ARG)
.help("Extra Mode")
.takes_value(true)
.required(false)
.possible_values(extra_modes.as_slice())
.case_insensitive(true)
.multiple(true),
)
.arg(
Arg::with_name(ENDIAN_ARG)
.short("n")
.long(ENDIAN_ARG)
.help("Endianness")
.takes_value(true)
.required(false)
.possible_values(&["little", "big"])
.case_insensitive(true),
)
.group(
ArgGroup::with_name("input")
.arg(FILE_ARG)
.arg(STDIN_ARG)
.arg(CODE_ARG)
.required(true),
)
.get_matches();
let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of(FILE_ARG) {
let mut file = File::open(file_path).expect_exit();
let capacity = match file.metadata() {
Err(_) => DEFAULT_CAPACITY,
Ok(metadata) => metadata.len() as usize,
};
let mut buf = Vec::with_capacity(capacity as usize);
file.read_to_end(&mut buf).expect_exit();
buf
} else if let Some(code) = matches.value_of(CODE_ARG) {
code.as_bytes().iter().map(|x| *x).collect()
} else {
let mut buf = Vec::with_capacity(DEFAULT_CAPACITY);
let stdin = std::io::stdin();
stdin.lock().read_to_end(&mut buf).expect_exit();
buf
};
stderrlog::new()
.verbosity(matches.occurrences_of(VERBOSE_ARG) as usize)
.init()
.unwrap();
let is_hex = matches.is_present(HEX_ARG) || matches.is_present(CODE_ARG);
info!("is_hex = {:?}", is_hex);
let show_detail = matches.is_present(DETAIL_ARG);
info!("show_detail = {:?}", show_detail);
let arch: Arch = Arch::from_str(matches.value_of(ARCH_ARG).unwrap())
.unwrap()
.into();
info!("Arch = {:?}", arch);
let mode: Mode = Mode::from_str(matches.value_of(MODE_ARG).unwrap())
.unwrap()
.into();
info!("Mode = {:?}", mode);
let extra_mode: Vec<_> = match matches.values_of(EXTRA_MODE_ARG) {
None => Vec::with_capacity(0),
Some(x) => x
.map(|x| ExtraMode::from(ExtraMode::from_str(x).unwrap()))
.collect(),
};
info!("ExtraMode = {:?}", extra_mode);
let endian: Option<Endian> = matches
.value_of(ENDIAN_ARG)
.map(|x| Endian::from_str(x).expect_exit());
info!("Endian = {:?}", endian);
let address =
u64::from_str_radix(matches.value_of(ADDRESS_ARG).unwrap_or("1000"), 16).expect_exit();
info!("Address = 0x{:x}", address);
let input_bytes = if is_hex {
unhexed_bytes(direct_input_bytes)
} else {
direct_input_bytes
};
disasm(
arch,
mode,
extra_mode.iter().map(|x| *x),
endian,
input_bytes.as_slice(),
address,
show_detail,
);
}
|
{
match self {
Ok(t) => t,
Err(e) => {
eprintln!("error: {}", e);
exit(1);
}
}
}
|
identifier_body
|
cstool.rs
|
//! Disassembles machine code
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::process::exit;
use std::str::FromStr;
use capstone::{self, prelude::*, Arch, Endian, EnumList, ExtraMode, Mode};
use clap::{App, Arg, ArgGroup};
use log::{debug, info};
use stderrlog;
const DEFAULT_CAPACITY: usize = 1024;
trait ExpectExit<T> {
fn expect_exit(self) -> T;
}
impl<T, E> ExpectExit<T> for Result<T, E>
where
E: Display,
{
fn
|
(self) -> T {
match self {
Ok(t) => t,
Err(e) => {
eprintln!("error: {}", e);
exit(1);
}
}
}
}
/// Format register names as a comma-separated string
fn reg_names<T, I>(cs: &Capstone, regs: T) -> String
where
T: Iterator<Item = I>,
I: Into<RegId>,
{
let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect();
names.join(", ")
}
/// Format instruction group names as a comma-separated string
fn group_names<T, I>(cs: &Capstone, regs: T) -> String
where
T: Iterator<Item = I>,
I: Into<InsnGroupId>,
{
let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect();
names.join(", ")
}
/// Select only hex bytes from input
fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> {
let mut output: Vec<u8> = Vec::new();
let mut curr_byte_str = String::with_capacity(2);
for b_u8 in input {
let b = char::from(b_u8);
if ('0' <= b && b <= '9') || ('a' <= b && b <= 'f') || ('A' <= b && b <= 'F') {
curr_byte_str.push(b);
}
if curr_byte_str.len() == 2 {
debug!(" curr_byte_str={:?}", curr_byte_str);
let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpected hex parse error");
output.push(byte);
curr_byte_str.clear();
}
}
if log::max_level() >= log::LevelFilter::Info {
let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect();
info!("unhexed_output = {:?}", output_hex);
}
output
}
fn disasm<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
code: &[u8],
addr: u64,
show_detail: bool,
) {
info!("Got {} bytes", code.len());
let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit();
if show_detail {
cs.set_detail(true).expect("Failed to set detail");
}
let stdout = io::stdout();
let mut handle = stdout.lock();
for i in cs.disasm_all(code, addr).expect_exit().iter() {
let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect();
let bytes = bytes.join(" ");
let _ = writeln!(
&mut handle,
"{:-10x}: {:35} {:7} {}",
i.address(),
bytes,
i.mnemonic().unwrap(),
i.op_str().unwrap_or("")
)
.is_ok();
if show_detail {
let detail = cs.insn_detail(&i).expect("Failed to get insn detail");
let output: &[(&str, String)] = &[
("insn id:", format!("{:?}", i.id().0)),
("read regs:", reg_names(&cs, detail.regs_read())),
("write regs:", reg_names(&cs, detail.regs_write())),
("insn groups:", group_names(&cs, detail.groups())),
];
for &(ref name, ref message) in output.iter() {
let _ = writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok();
}
}
}
}
const FILE_ARG: &str = "file";
const STDIN_ARG: &str = "stdin";
const CODE_ARG: &str = "code";
const ADDRESS_ARG: &str = "address";
const VERBOSE_ARG: &str = "verbose";
const HEX_ARG: &str = "hex";
const DETAIL_ARG: &str = "detail";
const ARCH_ARG: &str = "arch";
const MODE_ARG: &str = "mode";
const EXTRA_MODE_ARG: &str = "extra";
const ENDIAN_ARG: &str = "endian";
const AFTER_HELP: &str = r#"
Example:
# Disassemble 32-bit X86 (non-hex characters are ignored)
cstool --arch x86 --mode mode32 --code "90 42 e812345678"
1000: 90 nop
1001: 42 inc edx
1002: e8 12 34 56 78 call 0x78564419
"#;
fn main() {
// Lowercase arches
let _arches: Vec<String> = Arch::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect();
// Lowercase modes
let _modes: Vec<String> = Mode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect();
// Lowercase extra modes
let _extra_modes: Vec<String> = ExtraMode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect();
let matches = App::new("capstone-rs disassembler tool")
.about("Disassembles binary file")
.after_help(AFTER_HELP)
.arg(
Arg::with_name(FILE_ARG)
.short("f")
.long(FILE_ARG)
.help("input file with binary instructions")
.takes_value(true),
)
.arg(
Arg::with_name(STDIN_ARG)
.short("s")
.long(STDIN_ARG)
.help("read binary instructions from stdin")
.takes_value(false),
)
.arg(
Arg::with_name(CODE_ARG)
.short("c")
.long(CODE_ARG)
.help("instruction bytes (implies --hex)")
.takes_value(true),
)
.arg(
Arg::with_name(ADDRESS_ARG)
.short("r")
.long("addr")
.help("address of code")
.takes_value(true),
)
.arg(
Arg::with_name(VERBOSE_ARG)
.short("v")
.multiple(true)
.help("Sets the level of verbosity"),
)
.arg(
Arg::with_name(HEX_ARG)
.short("x")
.long(HEX_ARG)
.help("Treat input has hex; only select characters that are [a-fA-F0-9]")
.takes_value(false),
)
.arg(
Arg::with_name(DETAIL_ARG)
.short("d")
.long(DETAIL_ARG)
.help("Print details about instructions")
.takes_value(false),
)
.arg(
Arg::with_name(ARCH_ARG)
.short("a")
.long(ARCH_ARG)
.help("Architecture")
.takes_value(true)
.required(true)
.possible_values(arches.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(MODE_ARG)
.short("m")
.long(MODE_ARG)
.help(MODE_ARG)
.takes_value(true)
.required(true)
.possible_values(modes.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(EXTRA_MODE_ARG)
.short("e")
.long(EXTRA_MODE_ARG)
.help("Extra Mode")
.takes_value(true)
.required(false)
.possible_values(extra_modes.as_slice())
.case_insensitive(true)
.multiple(true),
)
.arg(
Arg::with_name(ENDIAN_ARG)
.short("n")
.long(ENDIAN_ARG)
.help("Endianness")
.takes_value(true)
.required(false)
.possible_values(&["little", "big"])
.case_insensitive(true),
)
.group(
ArgGroup::with_name("input")
.arg(FILE_ARG)
.arg(STDIN_ARG)
.arg(CODE_ARG)
.required(true),
)
.get_matches();
let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of(FILE_ARG) {
let mut file = File::open(file_path).expect_exit();
let capacity = match file.metadata() {
Err(_) => DEFAULT_CAPACITY,
Ok(metadata) => metadata.len() as usize,
};
let mut buf = Vec::with_capacity(capacity as usize);
file.read_to_end(&mut buf).expect_exit();
buf
} else if let Some(code) = matches.value_of(CODE_ARG) {
code.as_bytes().iter().map(|x| *x).collect()
} else {
let mut buf = Vec::with_capacity(DEFAULT_CAPACITY);
let stdin = std::io::stdin();
stdin.lock().read_to_end(&mut buf).expect_exit();
buf
};
stderrlog::new()
.verbosity(matches.occurrences_of(VERBOSE_ARG) as usize)
.init()
.unwrap();
let is_hex = matches.is_present(HEX_ARG) || matches.is_present(CODE_ARG);
info!("is_hex = {:?}", is_hex);
let show_detail = matches.is_present(DETAIL_ARG);
info!("show_detail = {:?}", show_detail);
let arch: Arch = Arch::from_str(matches.value_of(ARCH_ARG).unwrap())
.unwrap()
.into();
info!("Arch = {:?}", arch);
let mode: Mode = Mode::from_str(matches.value_of(MODE_ARG).unwrap())
.unwrap()
.into();
info!("Mode = {:?}", mode);
let extra_mode: Vec<_> = match matches.values_of(EXTRA_MODE_ARG) {
None => Vec::with_capacity(0),
Some(x) => x
.map(|x| ExtraMode::from(ExtraMode::from_str(x).unwrap()))
.collect(),
};
info!("ExtraMode = {:?}", extra_mode);
let endian: Option<Endian> = matches
.value_of(ENDIAN_ARG)
.map(|x| Endian::from_str(x).expect_exit());
info!("Endian = {:?}", endian);
let address =
u64::from_str_radix(matches.value_of(ADDRESS_ARG).unwrap_or("1000"), 16).expect_exit();
info!("Address = 0x{:x}", address);
let input_bytes = if is_hex {
unhexed_bytes(direct_input_bytes)
} else {
direct_input_bytes
};
disasm(
arch,
mode,
extra_mode.iter().map(|x| *x),
endian,
input_bytes.as_slice(),
address,
show_detail,
);
}
|
expect_exit
|
identifier_name
|
attribute-spans-preserved.rs
|
// force-host
// no-prefer-dynamic
#![crate_type = "proc-macro"]
extern crate proc_macro;
use proc_macro::*;
#[proc_macro_attribute]
pub fn
|
(attr: TokenStream, f: TokenStream) -> TokenStream {
let mut tokens = f.into_iter();
assert_eq!(tokens.next().unwrap().to_string(), "#");
let next_attr = match tokens.next().unwrap() {
TokenTree::Group(g) => g,
_ => panic!(),
};
let fn_tok = tokens.next().unwrap();
let ident_tok = tokens.next().unwrap();
let args_tok = tokens.next().unwrap();
let body = tokens.next().unwrap();
let new_body = attr.into_iter()
.chain(next_attr.stream().into_iter().skip(1));
let tokens = vec![
fn_tok,
ident_tok,
args_tok,
Group::new(Delimiter::Brace, new_body.collect()).into(),
].into_iter().collect::<TokenStream>();
println!("{}", tokens);
return tokens
}
|
foo
|
identifier_name
|
attribute-spans-preserved.rs
|
// force-host
// no-prefer-dynamic
#![crate_type = "proc-macro"]
extern crate proc_macro;
use proc_macro::*;
#[proc_macro_attribute]
pub fn foo(attr: TokenStream, f: TokenStream) -> TokenStream
|
Group::new(Delimiter::Brace, new_body.collect()).into(),
].into_iter().collect::<TokenStream>();
println!("{}", tokens);
return tokens
}
|
{
let mut tokens = f.into_iter();
assert_eq!(tokens.next().unwrap().to_string(), "#");
let next_attr = match tokens.next().unwrap() {
TokenTree::Group(g) => g,
_ => panic!(),
};
let fn_tok = tokens.next().unwrap();
let ident_tok = tokens.next().unwrap();
let args_tok = tokens.next().unwrap();
let body = tokens.next().unwrap();
let new_body = attr.into_iter()
.chain(next_attr.stream().into_iter().skip(1));
let tokens = vec![
fn_tok,
ident_tok,
args_tok,
|
identifier_body
|
attribute-spans-preserved.rs
|
// force-host
// no-prefer-dynamic
|
#![crate_type = "proc-macro"]
extern crate proc_macro;
use proc_macro::*;
#[proc_macro_attribute]
pub fn foo(attr: TokenStream, f: TokenStream) -> TokenStream {
let mut tokens = f.into_iter();
assert_eq!(tokens.next().unwrap().to_string(), "#");
let next_attr = match tokens.next().unwrap() {
TokenTree::Group(g) => g,
_ => panic!(),
};
let fn_tok = tokens.next().unwrap();
let ident_tok = tokens.next().unwrap();
let args_tok = tokens.next().unwrap();
let body = tokens.next().unwrap();
let new_body = attr.into_iter()
.chain(next_attr.stream().into_iter().skip(1));
let tokens = vec![
fn_tok,
ident_tok,
args_tok,
Group::new(Delimiter::Brace, new_body.collect()).into(),
].into_iter().collect::<TokenStream>();
println!("{}", tokens);
return tokens
}
|
random_line_split
|
|
cargo.rs
|
extern crate cargo;
extern crate env_logger;
extern crate git2_curl;
extern crate rustc_serialize;
extern crate toml;
#[macro_use] extern crate log;
use std::collections::BTreeSet;
use std::env;
use std::fs;
use std::io;
use std::path::{PathBuf, Path};
use std::process::Command;
use cargo::{execute_main_without_stdin, handle_error, shell};
use cargo::core::MultiShell;
use cargo::util::{CliError, CliResult, lev_distance, Config};
#[derive(RustcDecodable)]
struct Flags {
flag_list: bool,
flag_verbose: bool,
flag_quiet: bool,
flag_color: Option<String>,
arg_command: String,
arg_args: Vec<String>,
}
const USAGE: &'static str = "
Rust's package manager
Usage:
cargo <command> [<args>...]
cargo [options]
Options:
-h, --help Display this message
-V, --version Print version info and exit
--list List installed commands
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
Some common cargo commands are:
build Compile the current project
clean Remove the target directory
doc Build this project's and its dependencies' documentation
new Create a new cargo project
run Build and execute src/main.rs
test Run the tests
bench Run the benchmarks
update Update dependencies listed in Cargo.lock
search Search registry for crates
See 'cargo help <command>' for more information on a specific command.
";
fn main()
|
macro_rules! each_subcommand{ ($mac:ident) => ({
$mac!(bench);
$mac!(build);
$mac!(clean);
$mac!(doc);
$mac!(fetch);
$mac!(generate_lockfile);
$mac!(git_checkout);
$mac!(help);
$mac!(install);
$mac!(locate_project);
$mac!(login);
$mac!(new);
$mac!(owner);
$mac!(package);
$mac!(pkgid);
$mac!(publish);
$mac!(read_manifest);
$mac!(run);
$mac!(rustc);
$mac!(search);
$mac!(test);
$mac!(uninstall);
$mac!(update);
$mac!(verify_project);
$mac!(version);
$mac!(yank);
}) }
/**
The top-level `cargo` command handles configuration and project location
because they are fundamental (and intertwined). Other commands can rely
on this top-level information.
*/
fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
try!(config.shell().set_verbosity(flags.flag_verbose, flags.flag_quiet));
try!(config.shell().set_color_config(flags.flag_color.as_ref().map(|s| &s[..])));
init_git_transports(config);
if flags.flag_list {
println!("Installed Commands:");
for command in list_commands().into_iter() {
println!(" {}", command);
};
return Ok(None)
}
let args = match &flags.arg_command[..] {
// For the commands `cargo` and `cargo help`, re-execute ourselves as
// `cargo -h` so we can go through the normal process of printing the
// help message.
"" | "help" if flags.arg_args.is_empty() => {
config.shell().set_verbose(true);
let args = &["cargo".to_string(), "-h".to_string()];
let r = cargo::call_main_without_stdin(execute, config, USAGE, args,
false);
cargo::process_executed(r, &mut config.shell());
return Ok(None)
}
// For `cargo help -h` and `cargo help --help`, print out the help
// message for `cargo help`
"help" if flags.arg_args[0] == "-h" ||
flags.arg_args[0] == "--help" => {
vec!["cargo".to_string(), "help".to_string(), "-h".to_string()]
}
// For `cargo help foo`, print out the usage message for the specified
// subcommand by executing the command with the `-h` flag.
"help" => {
vec!["cargo".to_string(), flags.arg_args[0].clone(),
"-h".to_string()]
}
// For all other invocations, we're of the form `cargo foo args...`. We
// use the exact environment arguments to preserve tokens like `--` for
// example.
_ => env::args().collect(),
};
macro_rules! cmd{ ($name:ident) => (
if args[1] == stringify!($name).replace("_", "-") {
mod $name;
config.shell().set_verbose(true);
let r = cargo::call_main_without_stdin($name::execute, config,
$name::USAGE,
&args,
false);
cargo::process_executed(r, &mut config.shell());
return Ok(None)
}
) }
each_subcommand!(cmd);
execute_subcommand(&args[1], &args, &mut config.shell());
Ok(None)
}
fn find_closest(cmd: &str) -> Option<String> {
let cmds = list_commands();
// Only consider candidates with a lev_distance of 3 or less so we don't
// suggest out-of-the-blue options.
let mut filtered = cmds.iter().map(|c| (lev_distance(&c, cmd), c))
.filter(|&(d, _)| d < 4)
.collect::<Vec<_>>();
filtered.sort_by(|a, b| a.0.cmp(&b.0));
if filtered.len() == 0 {
None
} else {
Some(filtered[0].1.to_string())
}
}
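// Illustrative note (added, not in the original source): with the built-in
// subcommands registered by each_subcommand! above, a hypothetical call such
// as find_closest("biuld") would compute lev_distance against every known
// command, keep only candidates within distance 3, and return the nearest
// one -- presumably Some("build") here.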
fn execute_subcommand(cmd: &str, args: &[String], shell: &mut MultiShell) {
let command = match find_command(cmd) {
Some(command) => command,
None => {
let msg = match find_closest(cmd) {
Some(closest) => format!("No such subcommand\n\n\t\
Did you mean `{}`?\n", closest),
None => "No such subcommand".to_string()
};
return handle_error(CliError::new(&msg, 127), shell)
}
};
match Command::new(&command).args(&args[1..]).status() {
Ok(ref status) if status.success() => {}
Ok(ref status) => {
match status.code() {
Some(code) => handle_error(CliError::new("", code), shell),
None => {
let msg = format!("subcommand failed with: {}", status);
handle_error(CliError::new(&msg, 101), shell)
}
}
}
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
handle_error(CliError::new("No such subcommand", 127), shell)
}
Err(err) => {
let msg = format!("Subcommand failed to run: {}", err);
handle_error(CliError::new(&msg, 127), shell)
}
}
}
/// List all runnable commands. find_command should always succeed
/// if given one of the returned commands.
fn list_commands() -> BTreeSet<String> {
let command_prefix = "cargo-";
let mut commands = BTreeSet::new();
for dir in list_command_directory().iter() {
let entries = match fs::read_dir(dir) {
Ok(entries) => entries,
_ => continue
};
for entry in entries {
let entry = match entry { Ok(e) => e, Err(..) => continue };
let entry = entry.path();
let filename = match entry.file_name().and_then(|s| s.to_str()) {
Some(filename) => filename,
_ => continue
};
if filename.starts_with(command_prefix) &&
filename.ends_with(env::consts::EXE_SUFFIX) &&
is_executable(&entry) {
let command = &filename[
command_prefix.len()..
filename.len() - env::consts::EXE_SUFFIX.len()];
commands.insert(command.to_string());
}
}
}
macro_rules! add_cmd{ ($cmd:ident) => ({
commands.insert(stringify!($cmd).replace("_", "-"));
}) }
each_subcommand!(add_cmd);
commands
}
#[cfg(unix)]
fn is_executable(path: &Path) -> bool {
use std::os::unix::prelude::*;
fs::metadata(path).map(|m| {
m.permissions().mode() & 0o001 == 0o001
}).unwrap_or(false)
}
#[cfg(windows)]
fn is_executable(path: &Path) -> bool {
fs::metadata(path).map(|m| m.is_file()).unwrap_or(false)
}
/// Get `Command` to run the given command.
fn find_command(cmd: &str) -> Option<PathBuf> {
let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
let dirs = list_command_directory();
let mut command_paths = dirs.iter().map(|dir| dir.join(&command_exe));
command_paths.find(|path| fs::metadata(&path).is_ok())
}
/// List candidate locations where subcommands might be installed.
fn list_command_directory() -> Vec<PathBuf> {
let mut dirs = vec![];
if let Ok(mut path) = env::current_exe() {
path.pop();
dirs.push(path.join("../lib/cargo"));
dirs.push(path);
}
if let Some(val) = env::var_os("PATH") {
dirs.extend(env::split_paths(&val));
}
dirs
}
fn init_git_transports(config: &Config) {
// Only use a custom transport if a proxy is configured; right now libgit2
// doesn't support proxies, so we have to use a custom transport in this
// case. The custom transport, however, is not as well battle-tested.
match cargo::ops::http_proxy_exists(config) {
Ok(true) => {}
_ => return
}
let handle = match cargo::ops::http_handle(config) {
Ok(handle) => handle,
Err(..) => return,
};
// The unsafety of the registration function derives from two aspects:
//
// 1. This call must be synchronized with all other registration calls as
// well as construction of new transports.
// 2. The argument is leaked.
//
// We're clear on point (1) because this is only called at the start of this
// binary (we know what the state of the world looks like) and we're mostly
// clear on point (2) because we'd only free it after everything is done
// anyway
unsafe {
git2_curl::register(handle);
}
}
|
{
env_logger::init().unwrap();
execute_main_without_stdin(execute, true, USAGE)
}
|
identifier_body
|
cargo.rs
|
extern crate cargo;
extern crate env_logger;
extern crate git2_curl;
extern crate rustc_serialize;
extern crate toml;
#[macro_use] extern crate log;
use std::collections::BTreeSet;
use std::env;
use std::fs;
use std::io;
use std::path::{PathBuf, Path};
use std::process::Command;
use cargo::{execute_main_without_stdin, handle_error, shell};
use cargo::core::MultiShell;
use cargo::util::{CliError, CliResult, lev_distance, Config};
#[derive(RustcDecodable)]
struct Flags {
flag_list: bool,
flag_verbose: bool,
flag_quiet: bool,
flag_color: Option<String>,
arg_command: String,
arg_args: Vec<String>,
}
const USAGE: &'static str = "
Rust's package manager
Usage:
cargo <command> [<args>...]
cargo [options]
Options:
-h, --help Display this message
-V, --version Print version info and exit
--list List installed commands
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
Some common cargo commands are:
build Compile the current project
clean Remove the target directory
doc Build this project's and its dependencies' documentation
new Create a new cargo project
run Build and execute src/main.rs
test Run the tests
bench Run the benchmarks
update Update dependencies listed in Cargo.lock
search Search registry for crates
See 'cargo help <command>' for more information on a specific command.
";
fn main() {
env_logger::init().unwrap();
execute_main_without_stdin(execute, true, USAGE)
}
macro_rules! each_subcommand{ ($mac:ident) => ({
$mac!(bench);
$mac!(build);
$mac!(clean);
$mac!(doc);
$mac!(fetch);
$mac!(generate_lockfile);
$mac!(git_checkout);
$mac!(help);
$mac!(install);
$mac!(locate_project);
$mac!(login);
$mac!(new);
$mac!(owner);
$mac!(package);
$mac!(pkgid);
$mac!(publish);
$mac!(read_manifest);
$mac!(run);
$mac!(rustc);
$mac!(search);
$mac!(test);
$mac!(uninstall);
$mac!(update);
$mac!(verify_project);
$mac!(version);
$mac!(yank);
}) }
/**
The top-level `cargo` command handles configuration and project location
because they are fundamental (and intertwined). Other commands can rely
on this top-level information.
*/
fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
try!(config.shell().set_verbosity(flags.flag_verbose, flags.flag_quiet));
try!(config.shell().set_color_config(flags.flag_color.as_ref().map(|s| &s[..])));
init_git_transports(config);
if flags.flag_list {
println!("Installed Commands:");
for command in list_commands().into_iter() {
println!(" {}", command);
};
return Ok(None)
}
let args = match &flags.arg_command[..] {
// For the commands `cargo` and `cargo help`, re-execute ourselves as
// `cargo -h` so we can go through the normal process of printing the
// help message.
"" | "help" if flags.arg_args.is_empty() => {
config.shell().set_verbose(true);
let args = &["cargo".to_string(), "-h".to_string()];
let r = cargo::call_main_without_stdin(execute, config, USAGE, args,
false);
cargo::process_executed(r, &mut config.shell());
return Ok(None)
}
// For `cargo help -h` and `cargo help --help`, print out the help
// message for `cargo help`
"help" if flags.arg_args[0] == "-h" ||
flags.arg_args[0] == "--help" => {
vec!["cargo".to_string(), "help".to_string(), "-h".to_string()]
}
// For `cargo help foo`, print out the usage message for the specified
// subcommand by executing the command with the `-h` flag.
"help" => {
vec!["cargo".to_string(), flags.arg_args[0].clone(),
"-h".to_string()]
}
// For all other invocations, we're of the form `cargo foo args...`. We
// use the exact environment arguments to preserve tokens like `--` for
// example.
_ => env::args().collect(),
};
macro_rules! cmd{ ($name:ident) => (
if args[1] == stringify!($name).replace("_", "-") {
mod $name;
config.shell().set_verbose(true);
let r = cargo::call_main_without_stdin($name::execute, config,
$name::USAGE,
&args,
false);
cargo::process_executed(r, &mut config.shell());
return Ok(None)
}
) }
each_subcommand!(cmd);
execute_subcommand(&args[1], &args, &mut config.shell());
Ok(None)
}
fn find_closest(cmd: &str) -> Option<String> {
let cmds = list_commands();
// Only consider candidates with a lev_distance of 3 or less so we don't
// suggest out-of-the-blue options.
let mut filtered = cmds.iter().map(|c| (lev_distance(&c, cmd), c))
.filter(|&(d, _)| d < 4)
.collect::<Vec<_>>();
filtered.sort_by(|a, b| a.0.cmp(&b.0));
if filtered.len() == 0 {
None
} else {
Some(filtered[0].1.to_string())
}
}
fn execute_subcommand(cmd: &str, args: &[String], shell: &mut MultiShell) {
let command = match find_command(cmd) {
Some(command) => command,
None => {
let msg = match find_closest(cmd) {
Some(closest) => format!("No such subcommand\n\n\t\
Did you mean `{}`?\n", closest),
None => "No such subcommand".to_string()
};
return handle_error(CliError::new(&msg, 127), shell)
}
};
match Command::new(&command).args(&args[1..]).status() {
Ok(ref status) if status.success() => {}
Ok(ref status) => {
match status.code() {
Some(code) => handle_error(CliError::new("", code), shell),
None => {
let msg = format!("subcommand failed with: {}", status);
handle_error(CliError::new(&msg, 101), shell)
}
}
}
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
handle_error(CliError::new("No such subcommand", 127), shell)
}
Err(err) => {
let msg = format!("Subcommand failed to run: {}", err);
handle_error(CliError::new(&msg, 127), shell)
}
}
}
/// List all runnable commands. find_command should always succeed
/// if given one of the returned commands.
fn list_commands() -> BTreeSet<String> {
let command_prefix = "cargo-";
let mut commands = BTreeSet::new();
for dir in list_command_directory().iter() {
let entries = match fs::read_dir(dir) {
Ok(entries) => entries,
_ => continue
};
for entry in entries {
let entry = match entry { Ok(e) => e, Err(..) => continue };
let entry = entry.path();
let filename = match entry.file_name().and_then(|s| s.to_str()) {
Some(filename) => filename,
_ => continue
};
if filename.starts_with(command_prefix) &&
filename.ends_with(env::consts::EXE_SUFFIX) &&
is_executable(&entry) {
let command = &filename[
command_prefix.len()..
filename.len() - env::consts::EXE_SUFFIX.len()];
commands.insert(command.to_string());
}
}
}
macro_rules! add_cmd{ ($cmd:ident) => ({
commands.insert(stringify!($cmd).replace("_", "-"));
}) }
each_subcommand!(add_cmd);
commands
}
#[cfg(unix)]
fn
|
(path: &Path) -> bool {
use std::os::unix::prelude::*;
fs::metadata(path).map(|m| {
m.permissions().mode() & 0o001 == 0o001
}).unwrap_or(false)
}
#[cfg(windows)]
fn is_executable(path: &Path) -> bool {
fs::metadata(path).map(|m| m.is_file()).unwrap_or(false)
}
/// Get `Command` to run the given command.
fn find_command(cmd: &str) -> Option<PathBuf> {
let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
let dirs = list_command_directory();
let mut command_paths = dirs.iter().map(|dir| dir.join(&command_exe));
command_paths.find(|path| fs::metadata(&path).is_ok())
}
/// List candidate locations where subcommands might be installed.
fn list_command_directory() -> Vec<PathBuf> {
let mut dirs = vec![];
if let Ok(mut path) = env::current_exe() {
path.pop();
dirs.push(path.join("../lib/cargo"));
dirs.push(path);
}
if let Some(val) = env::var_os("PATH") {
dirs.extend(env::split_paths(&val));
}
dirs
}
fn init_git_transports(config: &Config) {
// Only use a custom transport if a proxy is configured; right now libgit2
// doesn't support proxies, so we have to use a custom transport in this
// case. The custom transport, however, is not as well battle-tested.
match cargo::ops::http_proxy_exists(config) {
Ok(true) => {}
_ => return
}
let handle = match cargo::ops::http_handle(config) {
Ok(handle) => handle,
Err(..) => return,
};
// The unsafety of the registration function derives from two aspects:
//
// 1. This call must be synchronized with all other registration calls as
// well as construction of new transports.
// 2. The argument is leaked.
//
// We're clear on point (1) because this is only called at the start of this
// binary (we know what the state of the world looks like) and we're mostly
// clear on point (2) because we'd only free it after everything is done
// anyway
unsafe {
git2_curl::register(handle);
}
}
|
is_executable
|
identifier_name
|
cargo.rs
|
extern crate cargo;
extern crate env_logger;
extern crate git2_curl;
extern crate rustc_serialize;
extern crate toml;
#[macro_use] extern crate log;
use std::collections::BTreeSet;
use std::env;
use std::fs;
use std::io;
use std::path::{PathBuf, Path};
use std::process::Command;
use cargo::{execute_main_without_stdin, handle_error, shell};
use cargo::core::MultiShell;
use cargo::util::{CliError, CliResult, lev_distance, Config};
#[derive(RustcDecodable)]
struct Flags {
flag_list: bool,
flag_verbose: bool,
flag_quiet: bool,
flag_color: Option<String>,
arg_command: String,
arg_args: Vec<String>,
}
const USAGE: &'static str = "
Rust's package manager
Usage:
cargo <command> [<args>...]
cargo [options]
Options:
-h, --help Display this message
-V, --version Print version info and exit
--list List installed commands
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
Some common cargo commands are:
build Compile the current project
clean Remove the target directory
doc Build this project's and its dependencies' documentation
new Create a new cargo project
run Build and execute src/main.rs
test Run the tests
bench Run the benchmarks
update Update dependencies listed in Cargo.lock
search Search registry for crates
See 'cargo help <command>' for more information on a specific command.
";
fn main() {
env_logger::init().unwrap();
execute_main_without_stdin(execute, true, USAGE)
}
macro_rules! each_subcommand{ ($mac:ident) => ({
$mac!(bench);
$mac!(build);
$mac!(clean);
$mac!(doc);
$mac!(fetch);
$mac!(generate_lockfile);
$mac!(git_checkout);
$mac!(help);
$mac!(install);
$mac!(locate_project);
$mac!(login);
$mac!(new);
$mac!(owner);
$mac!(package);
$mac!(pkgid);
$mac!(publish);
$mac!(read_manifest);
$mac!(run);
$mac!(rustc);
$mac!(search);
$mac!(test);
$mac!(uninstall);
$mac!(update);
$mac!(verify_project);
$mac!(version);
$mac!(yank);
}) }
/**
The top-level `cargo` command handles configuration and project location
because they are fundamental (and intertwined). Other commands can rely
on this top-level information.
*/
fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
try!(config.shell().set_verbosity(flags.flag_verbose, flags.flag_quiet));
try!(config.shell().set_color_config(flags.flag_color.as_ref().map(|s| &s[..])));
init_git_transports(config);
if flags.flag_list {
println!("Installed Commands:");
for command in list_commands().into_iter() {
println!(" {}", command);
};
return Ok(None)
}
let args = match &flags.arg_command[..] {
// For the commands `cargo` and `cargo help`, re-execute ourselves as
// `cargo -h` so we can go through the normal process of printing the
// help message.
"" | "help" if flags.arg_args.is_empty() => {
config.shell().set_verbose(true);
let args = &["cargo".to_string(), "-h".to_string()];
let r = cargo::call_main_without_stdin(execute, config, USAGE, args,
false);
cargo::process_executed(r, &mut config.shell());
return Ok(None)
}
// For `cargo help -h` and `cargo help --help`, print out the help
// message for `cargo help`
"help" if flags.arg_args[0] == "-h" ||
flags.arg_args[0] == "--help" => {
vec!["cargo".to_string(), "help".to_string(), "-h".to_string()]
}
// For `cargo help foo`, print out the usage message for the specified
// subcommand by executing the command with the `-h` flag.
"help" => {
vec!["cargo".to_string(), flags.arg_args[0].clone(),
"-h".to_string()]
}
// For all other invocations, we're of the form `cargo foo args...`. We
// use the exact environment arguments to preserve tokens like `--` for
// example.
_ => env::args().collect(),
};
macro_rules! cmd{ ($name:ident) => (
if args[1] == stringify!($name).replace("_", "-") {
mod $name;
config.shell().set_verbose(true);
let r = cargo::call_main_without_stdin($name::execute, config,
$name::USAGE,
&args,
false);
cargo::process_executed(r, &mut config.shell());
return Ok(None)
}
) }
each_subcommand!(cmd);
execute_subcommand(&args[1], &args, &mut config.shell());
Ok(None)
}
fn find_closest(cmd: &str) -> Option<String> {
let cmds = list_commands();
// Only consider candidates with a lev_distance of 3 or less so we don't
// suggest out-of-the-blue options.
let mut filtered = cmds.iter().map(|c| (lev_distance(&c, cmd), c))
.filter(|&(d, _)| d < 4)
.collect::<Vec<_>>();
filtered.sort_by(|a, b| a.0.cmp(&b.0));
if filtered.len() == 0 {
None
} else {
Some(filtered[0].1.to_string())
}
}
fn execute_subcommand(cmd: &str, args: &[String], shell: &mut MultiShell) {
let command = match find_command(cmd) {
Some(command) => command,
None => {
let msg = match find_closest(cmd) {
Some(closest) => format!("No such subcommand\n\n\t\
Did you mean `{}`?\n", closest),
None => "No such subcommand".to_string()
};
return handle_error(CliError::new(&msg, 127), shell)
}
};
match Command::new(&command).args(&args[1..]).status() {
Ok(ref status) if status.success() => {}
Ok(ref status) => {
match status.code() {
Some(code) => handle_error(CliError::new("", code), shell),
None => {
let msg = format!("subcommand failed with: {}", status);
handle_error(CliError::new(&msg, 101), shell)
}
}
}
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
handle_error(CliError::new("No such subcommand", 127), shell)
}
Err(err) => {
let msg = format!("Subcommand failed to run: {}", err);
handle_error(CliError::new(&msg, 127), shell)
}
}
}
/// List all runnable commands. find_command should always succeed
/// if given one of the returned commands.
fn list_commands() -> BTreeSet<String> {
let command_prefix = "cargo-";
let mut commands = BTreeSet::new();
for dir in list_command_directory().iter() {
let entries = match fs::read_dir(dir) {
Ok(entries) => entries,
_ => continue
|
let entry = entry.path();
let filename = match entry.file_name().and_then(|s| s.to_str()) {
Some(filename) => filename,
_ => continue
};
if filename.starts_with(command_prefix) &&
filename.ends_with(env::consts::EXE_SUFFIX) &&
is_executable(&entry) {
let command = &filename[
command_prefix.len()..
filename.len() - env::consts::EXE_SUFFIX.len()];
commands.insert(command.to_string());
}
}
}
macro_rules! add_cmd{ ($cmd:ident) => ({
commands.insert(stringify!($cmd).replace("_", "-"));
}) }
each_subcommand!(add_cmd);
commands
}
#[cfg(unix)]
fn is_executable(path: &Path) -> bool {
use std::os::unix::prelude::*;
fs::metadata(path).map(|m| {
m.permissions().mode() & 0o001 == 0o001
}).unwrap_or(false)
}
#[cfg(windows)]
fn is_executable(path: &Path) -> bool {
fs::metadata(path).map(|m| m.is_file()).unwrap_or(false)
}
/// Get `Command` to run the given command.
fn find_command(cmd: &str) -> Option<PathBuf> {
let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
let dirs = list_command_directory();
let mut command_paths = dirs.iter().map(|dir| dir.join(&command_exe));
command_paths.find(|path| fs::metadata(&path).is_ok())
}
/// List candidate locations where subcommands might be installed.
fn list_command_directory() -> Vec<PathBuf> {
let mut dirs = vec![];
if let Ok(mut path) = env::current_exe() {
path.pop();
dirs.push(path.join("../lib/cargo"));
dirs.push(path);
}
if let Some(val) = env::var_os("PATH") {
dirs.extend(env::split_paths(&val));
}
dirs
}
fn init_git_transports(config: &Config) {
// Only use a custom transport if a proxy is configured; right now libgit2
// doesn't support proxies, so we have to use a custom transport in this
// case. The custom transport, however, is not as well battle-tested.
match cargo::ops::http_proxy_exists(config) {
Ok(true) => {}
_ => return
}
let handle = match cargo::ops::http_handle(config) {
Ok(handle) => handle,
Err(..) => return,
};
// The unsafety of the registration function derives from two aspects:
//
// 1. This call must be synchronized with all other registration calls as
// well as construction of new transports.
// 2. The argument is leaked.
//
// We're clear on point (1) because this is only called at the start of this
// binary (we know what the state of the world looks like) and we're mostly
// clear on point (2) because we'd only free it after everything is done
// anyway
unsafe {
git2_curl::register(handle);
}
}
|
};
for entry in entries {
let entry = match entry { Ok(e) => e, Err(..) => continue };
|
random_line_split
|
decode.rs
|
const GREEK_FROM_BETA: [char; 26] = [
'\u{03b1}', // A => alpha
'\u{03b2}', // B => beta
'\u{03be}', // C => xi
'\u{03b4}', // D => delta
'\u{03b5}', // E => epsilon
'\u{03c6}', // F => phi
'\u{03b3}', // G => gamma
'\u{03b7}', // H => eta
'\u{03b9}', // I => iota
'\u{03c2}', // J => final sigma
'\u{03ba}', // K => kappa
'\u{03bb}', // L => lambda
'\u{03bc}', // M => mu
'\u{03bd}', // N => nu
'\u{03bf}', // O => omicron
'\u{03c0}', // P => pi
'\u{03b8}', // Q => theta
'\u{03c1}', // R => rho
'\u{03c3}', // S => sigma (medial by default)
'\u{03c4}', // T => tau
'\u{03c5}', // U => upsilon
'\u{03dd}', // V => digamma (wau)
'\u{03c9}', // W => omega
'\u{03c7}', // X => chi
'\u{03c8}', // Y => psi
'\u{03b6}', // Z => zeta
];
const MEDIAL_SIGMA: char = '\u{03c3}';
const FINAL_SIGMA: char = '\u{03c2}';
const QUESTION_MARK: char = ';'; // normal semicolon is preferred
const APOSTROPHE: char = '\u{2019}'; // right single quotation mark
const HYPHEN: char = '\u{2010}'; // TLG says to use this instead of '-'
const DASH: char = '\u{2014}'; // em dash
const MIDDLE_DOT: char = '\u{00B7}';
const NUMERAL_SIGN: char = '\u{0374}';
const SMOOTH_BREATHING: char = '\u{0313}';
const ROUGH_BREATHING: char = '\u{0314}';
const ACUTE_ACCENT: char = '\u{0301}';
const CIRCUMFLEX_ACCENT: char = '\u{0342}';
const GRAVE_ACCENT: char = '\u{0300}';
const DIAERESIS: char = '\u{0308}';
const IOTA_SUBSCRIPT: char = '\u{0345}';
pub fn beta_decode(c: char) -> char {
match c {
'a'... 'z' => {
const LITTLE_A: usize = 'a' as usize;
let index = (c as usize) - LITTLE_A;
GREEK_FROM_BETA[index]
},
'A'... 'Z' => {
const BIG_A: usize = 'A' as usize;
let index = (c as usize) - BIG_A;
GREEK_FROM_BETA[index]
}
')' => SMOOTH_BREATHING,
'(' => ROUGH_BREATHING,
'/' => ACUTE_ACCENT,
'=' => CIRCUMFLEX_ACCENT,
'\\'=> GRAVE_ACCENT,
'+' => DIAERESIS,
'|' => IOTA_SUBSCRIPT,
';' => QUESTION_MARK,
'\''=> APOSTROPHE,
'-' => HYPHEN,
'_' => DASH,
':' => MIDDLE_DOT,
'#' => NUMERAL_SIGN,
_ => c,
}
}
pub struct BetaDecoding<I: Iterator<Item=char>> {
betacode: I,
lookahead: Option<char>,
breathing: Option<char>,
accent: Option<char>,
awaiting_uppercase: bool,
}
impl<I: Iterator<Item=char>> BetaDecoding<I> {
pub fn new(betacode: I) -> BetaDecoding<I> {
BetaDecoding {
betacode: betacode,
lookahead: None,
breathing: None,
accent: None,
awaiting_uppercase: false,
}
}
fn next_helper(&mut self) -> Option<char> {
// get_greek_char does most of the work; we just have to determine the correct sigma
let greek_char = if let Some(_) = self.lookahead {
self.lookahead
} else {
self.get_greek_char()
};
self.lookahead = None;
match greek_char {
Some(MEDIAL_SIGMA) => {
self.lookahead = self.get_greek_char();
match self.lookahead {
Some(c) if c.is_alphabetic() => Some(MEDIAL_SIGMA),
_ => Some(FINAL_SIGMA),
}
},
_ => greek_char,
}
}
fn get_greek_char(&mut self) -> Option<char> {
if let Some(breathing) = self.breathing {
self.breathing = None;
Some(breathing)
} else if let Some(accent) = self.accent {
self.accent = None;
Some(accent)
} else {
match self.betacode.next() {
Some('`') => {
// escape character
self.betacode.next()
},
Some('*') => {
self.awaiting_uppercase = true;
self.next()
},
Some(')') if self.awaiting_uppercase => {
let next = self.next();
self.breathing = Some(SMOOTH_BREATHING);
next
},
Some('(') if self.awaiting_uppercase => {
let next = self.next();
self.breathing = Some(ROUGH_BREATHING);
next
},
Some('/') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(ACUTE_ACCENT);
next
},
Some('=') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(CIRCUMFLEX_ACCENT);
next
},
Some('\\') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(GRAVE_ACCENT);
next
},
Some(c) => {
let greek = beta_decode(c);
if self.awaiting_uppercase {
self.awaiting_uppercase = false;
greek.to_uppercase().next()
} else {
Some(greek)
}
},
None => None,
}
}
}
}
impl<I: Iterator<Item=char>> Iterator for BetaDecoding<I> {
type Item = char;
#[inline]
fn
|
(&mut self) -> Option<char> {
self.next_helper()
}
}
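// A minimal usage sketch (added for illustration; the word "logos" is an
// assumed example, not taken from the original file). It shows the plain
// table lookup in beta_decode plus the sigma lookahead in BetaDecoding:
// a trailing 's' is emitted as final sigma.
#[cfg(test)]
mod beta_decoding_sketch {
use super::{beta_decode, BetaDecoding};
#[test]
fn decodes_letters_and_final_sigma() {
// 'a' maps straight through GREEK_FROM_BETA to alpha.
assert_eq!(beta_decode('a'), '\u{03b1}');
// "logos": l->lambda, o->omicron, g->gamma, o->omicron, final 's' -> final sigma.
let decoded: String = BetaDecoding::new("logos".chars()).collect();
assert_eq!(decoded, "\u{03bb}\u{03bf}\u{03b3}\u{03bf}\u{03c2}");
}
}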
|
next
|
identifier_name
|
decode.rs
|
const GREEK_FROM_BETA: [char; 26] = [
'\u{03b1}', // A => alpha
'\u{03b2}', // B => beta
'\u{03be}', // C => xi
'\u{03b4}', // D => delta
'\u{03b5}', // E => epsilon
'\u{03c6}', // F => phi
'\u{03b3}', // G => gamma
'\u{03b7}', // H => eta
'\u{03b9}', // I => iota
'\u{03c2}', // J => final sigma
'\u{03ba}', // K => kappa
'\u{03bb}', // L => lambda
'\u{03bc}', // M => mu
'\u{03bd}', // N => nu
'\u{03bf}', // O => omicron
'\u{03c0}', // P => pi
'\u{03b8}', // Q => theta
'\u{03c1}', // R => rho
'\u{03c3}', // S => sigma (medial by default)
'\u{03c4}', // T => tau
'\u{03c5}', // U => upsilon
'\u{03dd}', // V => digamma (wau)
'\u{03c9}', // W => omega
'\u{03c7}', // X => chi
'\u{03c8}', // Y => psi
'\u{03b6}', // Z => zeta
];
const MEDIAL_SIGMA: char = '\u{03c3}';
const FINAL_SIGMA: char = '\u{03c2}';
const QUESTION_MARK: char = ';'; // normal semicolon is preferred
const APOSTROPHE: char = '\u{2019}'; // right single quotation mark
const HYPHEN: char = '\u{2010}'; // TLG says to use this instead of '-'
const DASH: char = '\u{2014}'; // em dash
const MIDDLE_DOT: char = '\u{00B7}';
const NUMERAL_SIGN: char = '\u{0374}';
const SMOOTH_BREATHING: char = '\u{0313}';
const ROUGH_BREATHING: char = '\u{0314}';
const ACUTE_ACCENT: char = '\u{0301}';
const CIRCUMFLEX_ACCENT: char = '\u{0342}';
const GRAVE_ACCENT: char = '\u{0300}';
const DIAERESIS: char = '\u{0308}';
const IOTA_SUBSCRIPT: char = '\u{0345}';
pub fn beta_decode(c: char) -> char
|
'\''=> APOSTROPHE,
'-' => HYPHEN,
'_' => DASH,
':' => MIDDLE_DOT,
'#' => NUMERAL_SIGN,
_ => c,
}
}
pub struct BetaDecoding<I: Iterator<Item=char>> {
betacode: I,
lookahead: Option<char>,
breathing: Option<char>,
accent: Option<char>,
awaiting_uppercase: bool,
}
impl<I: Iterator<Item=char>> BetaDecoding<I> {
pub fn new(betacode: I) -> BetaDecoding<I> {
BetaDecoding {
betacode: betacode,
lookahead: None,
breathing: None,
accent: None,
awaiting_uppercase: false,
}
}
fn next_helper(&mut self) -> Option<char> {
// get_greek_char does most of the work; we just have to determine the correct sigma
let greek_char = if let Some(_) = self.lookahead {
self.lookahead
} else {
self.get_greek_char()
};
self.lookahead = None;
match greek_char {
Some(MEDIAL_SIGMA) => {
self.lookahead = self.get_greek_char();
match self.lookahead {
Some(c) if c.is_alphabetic() => Some(MEDIAL_SIGMA),
_ => Some(FINAL_SIGMA),
}
},
_ => greek_char,
}
}
fn get_greek_char(&mut self) -> Option<char> {
if let Some(breathing) = self.breathing {
self.breathing = None;
Some(breathing)
} else if let Some(accent) = self.accent {
self.accent = None;
Some(accent)
} else {
match self.betacode.next() {
Some('`') => {
// escape character
self.betacode.next()
},
Some('*') => {
self.awaiting_uppercase = true;
self.next()
},
Some(')') if self.awaiting_uppercase => {
let next = self.next();
self.breathing = Some(SMOOTH_BREATHING);
next
},
Some('(') if self.awaiting_uppercase => {
let next = self.next();
self.breathing = Some(ROUGH_BREATHING);
next
},
Some('/') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(ACUTE_ACCENT);
next
},
Some('=') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(CIRCUMFLEX_ACCENT);
next
},
Some('\\') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(GRAVE_ACCENT);
next
},
Some(c) => {
let greek = beta_decode(c);
if self.awaiting_uppercase {
self.awaiting_uppercase = false;
greek.to_uppercase().next()
} else {
Some(greek)
}
},
None => None,
}
}
}
}
impl<I: Iterator<Item=char>> Iterator for BetaDecoding<I> {
type Item = char;
#[inline]
fn next (&mut self) -> Option<char> {
self.next_helper()
}
}
|
{
match c {
'a' ... 'z' => {
const LITTLE_A: usize = 'a' as usize;
let index = (c as usize) - LITTLE_A;
GREEK_FROM_BETA[index]
},
'A' ... 'Z' => {
const BIG_A: usize = 'A' as usize;
let index = (c as usize) - BIG_A;
GREEK_FROM_BETA[index]
}
')' => SMOOTH_BREATHING,
'(' => ROUGH_BREATHING,
'/' => ACUTE_ACCENT,
'=' => CIRCUMFLEX_ACCENT,
'\\'=> GRAVE_ACCENT,
'+' => DIAERESIS,
'|' => IOTA_SUBSCRIPT,
';' => QUESTION_MARK,
|
identifier_body
|
decode.rs
|
const GREEK_FROM_BETA: [char; 26] = [
'\u{03b1}', // A => alpha
'\u{03b2}', // B => beta
'\u{03be}', // C => xi
'\u{03b4}', // D => delta
'\u{03b5}', // E => epsilon
'\u{03c6}', // F => phi
'\u{03b3}', // G => gamma
'\u{03b7}', // H => eta
'\u{03b9}', // I => iota
'\u{03c2}', // J => final sigma
'\u{03ba}', // K => kappa
'\u{03bb}', // L => lambda
'\u{03bc}', // M => mu
'\u{03bd}', // N => nu
'\u{03bf}', // O => omicron
'\u{03c0}', // P => pi
'\u{03b8}', // Q => theta
'\u{03c1}', // R => rho
'\u{03c3}', // S => sigma (medial by default)
'\u{03c4}', // T => tau
'\u{03c5}', // U => upsilon
'\u{03dd}', // V => digamma (wau)
'\u{03c9}', // W => omega
'\u{03c7}', // X => chi
'\u{03c8}', // Y => psi
'\u{03b6}', // Z => zeta
|
const MEDIAL_SIGMA: char = '\u{03c3}';
const FINAL_SIGMA: char = '\u{03c2}';
const QUESTION_MARK: char = ';'; // normal semicolon is preferred
const APOSTROPHE: char = '\u{2019}'; // right single quotation mark
const HYPHEN: char = '\u{2010}'; // TLG says to use this instead of '-'
const DASH: char = '\u{2014}'; // em dash
const MIDDLE_DOT: char = '\u{00B7}';
const NUMERAL_SIGN: char = '\u{0374}';
const SMOOTH_BREATHING: char = '\u{0313}';
const ROUGH_BREATHING: char = '\u{0314}';
const ACUTE_ACCENT: char = '\u{0301}';
const CIRCUMFLEX_ACCENT: char = '\u{0342}';
const GRAVE_ACCENT: char = '\u{0300}';
const DIAERESIS: char = '\u{0308}';
const IOTA_SUBSCRIPT: char = '\u{0345}';
pub fn beta_decode(c: char) -> char {
match c {
'a'... 'z' => {
const LITTLE_A: usize = 'a' as usize;
let index = (c as usize) - LITTLE_A;
GREEK_FROM_BETA[index]
},
'A'... 'Z' => {
const BIG_A: usize = 'A' as usize;
let index = (c as usize) - BIG_A;
GREEK_FROM_BETA[index]
}
')' => SMOOTH_BREATHING,
'(' => ROUGH_BREATHING,
'/' => ACUTE_ACCENT,
'=' => CIRCUMFLEX_ACCENT,
'\\'=> GRAVE_ACCENT,
'+' => DIAERESIS,
'|' => IOTA_SUBSCRIPT,
';' => QUESTION_MARK,
'\''=> APOSTROPHE,
'-' => HYPHEN,
'_' => DASH,
':' => MIDDLE_DOT,
'#' => NUMERAL_SIGN,
_ => c,
}
}
pub struct BetaDecoding<I: Iterator<Item=char>> {
betacode: I,
lookahead: Option<char>,
breathing: Option<char>,
accent: Option<char>,
awaiting_uppercase: bool,
}
impl<I: Iterator<Item=char>> BetaDecoding<I> {
pub fn new(betacode: I) -> BetaDecoding<I> {
BetaDecoding {
betacode: betacode,
lookahead: None,
breathing: None,
accent: None,
awaiting_uppercase: false,
}
}
fn next_helper(&mut self) -> Option<char> {
// get_greek_char does most of the work; we just have to determine the correct sigma
let greek_char = if let Some(_) = self.lookahead {
self.lookahead
} else {
self.get_greek_char()
};
self.lookahead = None;
match greek_char {
Some(MEDIAL_SIGMA) => {
self.lookahead = self.get_greek_char();
match self.lookahead {
Some(c) if c.is_alphabetic() => Some(MEDIAL_SIGMA),
_ => Some(FINAL_SIGMA),
}
},
_ => greek_char,
}
}
fn get_greek_char(&mut self) -> Option<char> {
if let Some(breathing) = self.breathing {
self.breathing = None;
Some(breathing)
} else if let Some(accent) = self.accent {
self.accent = None;
Some(accent)
} else {
match self.betacode.next() {
Some('`') => {
// escape character
self.betacode.next()
},
Some('*') => {
self.awaiting_uppercase = true;
self.next()
},
Some(')') if self.awaiting_uppercase => {
let next = self.next();
self.breathing = Some(SMOOTH_BREATHING);
next
},
Some('(') if self.awaiting_uppercase => {
let next = self.next();
self.breathing = Some(ROUGH_BREATHING);
next
},
Some('/') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(ACUTE_ACCENT);
next
},
Some('=') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(CIRCUMFLEX_ACCENT);
next
},
Some('\\') if self.awaiting_uppercase => {
let next = self.next();
self.accent = Some(GRAVE_ACCENT);
next
},
Some(c) => {
let greek = beta_decode(c);
if self.awaiting_uppercase {
self.awaiting_uppercase = false;
greek.to_uppercase().next()
} else {
Some(greek)
}
},
None => None,
}
}
}
}
impl<I: Iterator<Item=char>> Iterator for BetaDecoding<I> {
type Item = char;
#[inline]
fn next (&mut self) -> Option<char> {
self.next_helper()
}
}
|
];
|
random_line_split
|
opt_vec.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
*
* Defines a type OptVec<T> that can be used in place of ~[T].
* OptVec avoids the need for allocation for empty vectors.
* OptVec implements the iterable interface as well as
* other useful things like `push()` and `len()`.
*/
use core::prelude::*;
use core::iter;
use core::iter::BaseIter;
#[auto_encode]
#[auto_decode]
pub enum OptVec<T> {
Empty,
Vec(~[T])
}
pub fn with<T>(+t: T) -> OptVec<T> {
Vec(~[t])
}
pub fn from<T>(+t: ~[T]) -> OptVec<T> {
if t.len() == 0 {
Empty
} else {
Vec(t)
}
}
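// Illustrative note (added): from(~[]) returns Empty because the input
// length is 0, while from(~[x]) and with(x) both return Vec(~[x]); the
// whole point is that the empty case allocates nothing.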
impl<T> OptVec<T> {
fn push(&mut self, +t: T) {
match *self {
Vec(ref mut v) => {
v.push(t);
return;
}
Empty => {}
}
// FIXME(#5074): flow insensitivity means we can't move the
// assignment inside `match`
*self = Vec(~[t]);
}
fn map<U>(&self, op: &fn(&T) -> U) -> OptVec<U> {
match *self {
Empty => Empty,
Vec(ref v) => Vec(v.map(op))
}
}
fn get(&self, i: uint) -> &'self T {
match *self {
Empty => fail!(fmt!("Invalid index %u", i)),
Vec(ref v) => &v[i]
}
}
fn is_empty(&self) -> bool {
self.len() == 0
}
fn len(&self) -> uint {
match *self {
Empty => 0,
Vec(ref v) => v.len()
}
}
}
pub fn take_vec<T>(+v: OptVec<T>) -> ~[T] {
match v {
Empty => ~[],
Vec(v) => v
}
}
impl<T:Copy> OptVec<T> {
fn prepend(&self, +t: T) -> OptVec<T> {
let mut v0 = ~[t];
match *self {
Empty =>
|
Vec(ref v1) => { v0.push_all(*v1); }
}
return Vec(v0);
}
fn push_all<I: BaseIter<T>>(&mut self, from: &I) {
for from.each |e| {
self.push(copy *e);
}
}
}
impl<A:Eq> Eq for OptVec<A> {
fn eq(&self, other: &OptVec<A>) -> bool {
// Note: cannot use #[deriving(Eq)] here because
// (Empty, Vec(~[])) ought to be equal.
match (self, other) {
(&Empty, &Empty) => true,
(&Empty, &Vec(ref v)) => v.is_empty(),
(&Vec(ref v), &Empty) => v.is_empty(),
(&Vec(ref v1), &Vec(ref v2)) => *v1 == *v2
}
}
fn ne(&self, other: &OptVec<A>) -> bool {
!self.eq(other)
}
}
impl<A> BaseIter<A> for OptVec<A> {
fn each(&self, blk: &fn(v: &A) -> bool) {
match *self {
Empty => {}
Vec(ref v) => v.each(blk)
}
}
fn size_hint(&self) -> Option<uint> {
Some(self.len())
}
}
impl<A> iter::ExtendedIter<A> for OptVec<A> {
#[inline(always)]
fn eachi(&self, blk: &fn(+v: uint, v: &A) -> bool) {
iter::eachi(self, blk)
}
#[inline(always)]
fn all(&self, blk: &fn(&A) -> bool) -> bool {
iter::all(self, blk)
}
#[inline(always)]
fn any(&self, blk: &fn(&A) -> bool) -> bool {
iter::any(self, blk)
}
#[inline(always)]
fn foldl<B>(&self, +b0: B, blk: &fn(&B, &A) -> B) -> B {
iter::foldl(self, b0, blk)
}
#[inline(always)]
fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
iter::position(self, f)
}
#[inline(always)]
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
iter::map_to_vec(self, op)
}
#[inline(always)]
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
iter::flat_map_to_vec(self, op)
}
}
impl<A: Eq> iter::EqIter<A> for OptVec<A> {
#[inline(always)]
fn contains(&self, x: &A) -> bool { iter::contains(self, x) }
#[inline(always)]
fn count(&self, x: &A) -> uint { iter::count(self, x) }
}
impl<A: Copy> iter::CopyableIter<A> for OptVec<A> {
#[inline(always)]
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
iter::filter_to_vec(self, pred)
}
#[inline(always)]
fn to_vec(&self) -> ~[A] { iter::to_vec(self) }
#[inline(always)]
fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
iter::find(self, f)
}
}
impl<A: Copy+Ord> iter::CopyableOrderedIter<A> for OptVec<A> {
#[inline(always)]
fn min(&self) -> A { iter::min(self) }
#[inline(always)]
fn max(&self) -> A { iter::max(self) }
}
|
{}
|
conditional_block
|
opt_vec.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
*
* Defines a type OptVec<T> that can be used in place of ~[T].
* OptVec avoids the need for allocation for empty vectors.
* OptVec implements the iterable interface as well as
* other useful things like `push()` and `len()`.
*/
use core::prelude::*;
use core::iter;
use core::iter::BaseIter;
#[auto_encode]
#[auto_decode]
pub enum OptVec<T> {
Empty,
Vec(~[T])
}
pub fn with<T>(+t: T) -> OptVec<T> {
Vec(~[t])
}
pub fn from<T>(+t: ~[T]) -> OptVec<T> {
if t.len() == 0 {
Empty
} else {
Vec(t)
}
}
impl<T> OptVec<T> {
fn push(&mut self, +t: T) {
match *self {
Vec(ref mut v) => {
v.push(t);
return;
}
Empty => {}
}
// FIXME(#5074): flow insensitive means we can't move
// assignment inside `match`
*self = Vec(~[t]);
}
fn map<U>(&self, op: &fn(&T) -> U) -> OptVec<U> {
match *self {
Empty => Empty,
Vec(ref v) => Vec(v.map(op))
}
}
fn get(&self, i: uint) -> &'self T {
match *self {
Empty => fail!(fmt!("Invalid index %u", i)),
Vec(ref v) => &v[i]
}
}
fn is_empty(&self) -> bool {
self.len() == 0
}
fn len(&self) -> uint {
match *self {
Empty => 0,
Vec(ref v) => v.len()
}
}
}
pub fn take_vec<T>(+v: OptVec<T>) -> ~[T] {
match v {
Empty => ~[],
Vec(v) => v
}
}
impl<T:Copy> OptVec<T> {
fn prepend(&self, +t: T) -> OptVec<T> {
let mut v0 = ~[t];
match *self {
Empty => {}
Vec(ref v1) => { v0.push_all(*v1); }
}
return Vec(v0);
}
fn push_all<I: BaseIter<T>>(&mut self, from: &I) {
for from.each |e| {
self.push(copy *e);
}
}
}
impl<A:Eq> Eq for OptVec<A> {
fn eq(&self, other: &OptVec<A>) -> bool
|
fn ne(&self, other: &OptVec<A>) -> bool {
!self.eq(other)
}
}
impl<A> BaseIter<A> for OptVec<A> {
fn each(&self, blk: &fn(v: &A) -> bool) {
match *self {
Empty => {}
Vec(ref v) => v.each(blk)
}
}
fn size_hint(&self) -> Option<uint> {
Some(self.len())
}
}
impl<A> iter::ExtendedIter<A> for OptVec<A> {
#[inline(always)]
fn eachi(&self, blk: &fn(+v: uint, v: &A) -> bool) {
iter::eachi(self, blk)
}
#[inline(always)]
fn all(&self, blk: &fn(&A) -> bool) -> bool {
iter::all(self, blk)
}
#[inline(always)]
fn any(&self, blk: &fn(&A) -> bool) -> bool {
iter::any(self, blk)
}
#[inline(always)]
fn foldl<B>(&self, +b0: B, blk: &fn(&B, &A) -> B) -> B {
iter::foldl(self, b0, blk)
}
#[inline(always)]
fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
iter::position(self, f)
}
#[inline(always)]
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
iter::map_to_vec(self, op)
}
#[inline(always)]
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
iter::flat_map_to_vec(self, op)
}
}
impl<A: Eq> iter::EqIter<A> for OptVec<A> {
#[inline(always)]
fn contains(&self, x: &A) -> bool { iter::contains(self, x) }
#[inline(always)]
fn count(&self, x: &A) -> uint { iter::count(self, x) }
}
impl<A: Copy> iter::CopyableIter<A> for OptVec<A> {
#[inline(always)]
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
iter::filter_to_vec(self, pred)
}
#[inline(always)]
fn to_vec(&self) -> ~[A] { iter::to_vec(self) }
#[inline(always)]
fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
iter::find(self, f)
}
}
impl<A: Copy+Ord> iter::CopyableOrderedIter<A> for OptVec<A> {
#[inline(always)]
fn min(&self) -> A { iter::min(self) }
#[inline(always)]
fn max(&self) -> A { iter::max(self) }
}
|
{
// Note: cannot use #[deriving(Eq)] here because
// (Empty, Vec(~[])) ought to be equal.
match (self, other) {
(&Empty, &Empty) => true,
(&Empty, &Vec(ref v)) => v.is_empty(),
(&Vec(ref v), &Empty) => v.is_empty(),
(&Vec(ref v1), &Vec(ref v2)) => *v1 == *v2
}
}
|
identifier_body
|
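Note: the middle column of the row above is the hand-written eq body; as its comment says, a derived equality would treat Empty and Vec(~[]) as unequal, so the comparison checks emptiness explicitly. A modern-Rust sketch of the same hand-written comparison, purely illustrative:
enum OptVec<T> {
    Empty,
    Vec(Vec<T>),
}

impl<T: PartialEq> PartialEq for OptVec<T> {
    fn eq(&self, other: &OptVec<T>) -> bool {
        match (self, other) {
            // Empty and an empty Vec are deliberately considered equal.
            (OptVec::Empty, OptVec::Empty) => true,
            (OptVec::Empty, OptVec::Vec(v)) | (OptVec::Vec(v), OptVec::Empty) => v.is_empty(),
            (OptVec::Vec(a), OptVec::Vec(b)) => a == b,
        }
    }
}

fn main() {
    assert!(OptVec::Empty == OptVec::Vec(Vec::<i32>::new()));
    assert!(OptVec::Vec(vec![1]) != OptVec::Empty);
}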
opt_vec.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
*
* Defines a type OptVec<T> that can be used in place of ~[T].
* OptVec avoids the need for allocation for empty vectors.
* OptVec implements the iterable interface as well as
* other useful things like `push()` and `len()`.
*/
use core::prelude::*;
use core::iter;
use core::iter::BaseIter;
#[auto_encode]
#[auto_decode]
pub enum OptVec<T> {
Empty,
Vec(~[T])
}
pub fn with<T>(+t: T) -> OptVec<T> {
Vec(~[t])
}
pub fn from<T>(+t: ~[T]) -> OptVec<T> {
if t.len() == 0 {
Empty
} else {
Vec(t)
}
}
impl<T> OptVec<T> {
fn push(&mut self, +t: T) {
match *self {
Vec(ref mut v) => {
v.push(t);
return;
}
Empty => {}
}
// FIXME(#5074): flow insensitive means we can't move
// assignment inside `match`
*self = Vec(~[t]);
}
fn map<U>(&self, op: &fn(&T) -> U) -> OptVec<U> {
match *self {
Empty => Empty,
Vec(ref v) => Vec(v.map(op))
}
}
fn get(&self, i: uint) -> &'self T {
match *self {
Empty => fail!(fmt!("Invalid index %u", i)),
Vec(ref v) => &v[i]
}
}
fn is_empty(&self) -> bool {
self.len() == 0
}
fn len(&self) -> uint {
match *self {
Empty => 0,
Vec(ref v) => v.len()
}
}
}
pub fn take_vec<T>(+v: OptVec<T>) -> ~[T] {
match v {
Empty => ~[],
Vec(v) => v
}
}
impl<T:Copy> OptVec<T> {
fn prepend(&self, +t: T) -> OptVec<T> {
let mut v0 = ~[t];
match *self {
Empty => {}
Vec(ref v1) => { v0.push_all(*v1); }
}
return Vec(v0);
}
fn
|
<I: BaseIter<T>>(&mut self, from: &I) {
for from.each |e| {
self.push(copy *e);
}
}
}
impl<A:Eq> Eq for OptVec<A> {
fn eq(&self, other: &OptVec<A>) -> bool {
// Note: cannot use #[deriving(Eq)] here because
// (Empty, Vec(~[])) ought to be equal.
match (self, other) {
(&Empty, &Empty) => true,
(&Empty, &Vec(ref v)) => v.is_empty(),
(&Vec(ref v), &Empty) => v.is_empty(),
(&Vec(ref v1), &Vec(ref v2)) => *v1 == *v2
}
}
fn ne(&self, other: &OptVec<A>) -> bool {
!self.eq(other)
}
}
impl<A> BaseIter<A> for OptVec<A> {
fn each(&self, blk: &fn(v: &A) -> bool) {
match *self {
Empty => {}
Vec(ref v) => v.each(blk)
}
}
fn size_hint(&self) -> Option<uint> {
Some(self.len())
}
}
impl<A> iter::ExtendedIter<A> for OptVec<A> {
#[inline(always)]
fn eachi(&self, blk: &fn(+v: uint, v: &A) -> bool) {
iter::eachi(self, blk)
}
#[inline(always)]
fn all(&self, blk: &fn(&A) -> bool) -> bool {
iter::all(self, blk)
}
#[inline(always)]
fn any(&self, blk: &fn(&A) -> bool) -> bool {
iter::any(self, blk)
}
#[inline(always)]
fn foldl<B>(&self, +b0: B, blk: &fn(&B, &A) -> B) -> B {
iter::foldl(self, b0, blk)
}
#[inline(always)]
fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
iter::position(self, f)
}
#[inline(always)]
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
iter::map_to_vec(self, op)
}
#[inline(always)]
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
iter::flat_map_to_vec(self, op)
}
}
impl<A: Eq> iter::EqIter<A> for OptVec<A> {
#[inline(always)]
fn contains(&self, x: &A) -> bool { iter::contains(self, x) }
#[inline(always)]
fn count(&self, x: &A) -> uint { iter::count(self, x) }
}
impl<A: Copy> iter::CopyableIter<A> for OptVec<A> {
#[inline(always)]
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
iter::filter_to_vec(self, pred)
}
#[inline(always)]
fn to_vec(&self) -> ~[A] { iter::to_vec(self) }
#[inline(always)]
fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
iter::find(self, f)
}
}
impl<A: Copy+Ord> iter::CopyableOrderedIter<A> for OptVec<A> {
#[inline(always)]
fn min(&self) -> A { iter::min(self) }
#[inline(always)]
fn max(&self) -> A { iter::max(self) }
}
|
push_all
|
identifier_name
|
opt_vec.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
*
* Defines a type OptVec<T> that can be used in place of ~[T].
* OptVec avoids the need for allocation for empty vectors.
* OptVec implements the iterable interface as well as
* other useful things like `push()` and `len()`.
*/
use core::prelude::*;
use core::iter;
use core::iter::BaseIter;
#[auto_encode]
#[auto_decode]
pub enum OptVec<T> {
Empty,
Vec(~[T])
}
pub fn with<T>(+t: T) -> OptVec<T> {
Vec(~[t])
}
pub fn from<T>(+t: ~[T]) -> OptVec<T> {
if t.len() == 0 {
Empty
} else {
Vec(t)
}
}
impl<T> OptVec<T> {
fn push(&mut self, +t: T) {
match *self {
Vec(ref mut v) => {
v.push(t);
return;
}
Empty => {}
}
// FIXME(#5074): flow insensitive means we can't move
// assignment inside `match`
*self = Vec(~[t]);
}
fn map<U>(&self, op: &fn(&T) -> U) -> OptVec<U> {
match *self {
Empty => Empty,
Vec(ref v) => Vec(v.map(op))
}
}
fn get(&self, i: uint) -> &'self T {
match *self {
Empty => fail!(fmt!("Invalid index %u", i)),
Vec(ref v) => &v[i]
}
}
fn is_empty(&self) -> bool {
self.len() == 0
}
fn len(&self) -> uint {
match *self {
Empty => 0,
Vec(ref v) => v.len()
}
}
}
pub fn take_vec<T>(+v: OptVec<T>) -> ~[T] {
match v {
Empty => ~[],
Vec(v) => v
}
|
impl<T:Copy> OptVec<T> {
fn prepend(&self, +t: T) -> OptVec<T> {
let mut v0 = ~[t];
match *self {
Empty => {}
Vec(ref v1) => { v0.push_all(*v1); }
}
return Vec(v0);
}
fn push_all<I: BaseIter<T>>(&mut self, from: &I) {
for from.each |e| {
self.push(copy *e);
}
}
}
impl<A:Eq> Eq for OptVec<A> {
fn eq(&self, other: &OptVec<A>) -> bool {
// Note: cannot use #[deriving(Eq)] here because
// (Empty, Vec(~[])) ought to be equal.
match (self, other) {
(&Empty, &Empty) => true,
(&Empty, &Vec(ref v)) => v.is_empty(),
(&Vec(ref v), &Empty) => v.is_empty(),
(&Vec(ref v1), &Vec(ref v2)) => *v1 == *v2
}
}
fn ne(&self, other: &OptVec<A>) -> bool {
!self.eq(other)
}
}
impl<A> BaseIter<A> for OptVec<A> {
fn each(&self, blk: &fn(v: &A) -> bool) {
match *self {
Empty => {}
Vec(ref v) => v.each(blk)
}
}
fn size_hint(&self) -> Option<uint> {
Some(self.len())
}
}
impl<A> iter::ExtendedIter<A> for OptVec<A> {
#[inline(always)]
fn eachi(&self, blk: &fn(+v: uint, v: &A) -> bool) {
iter::eachi(self, blk)
}
#[inline(always)]
fn all(&self, blk: &fn(&A) -> bool) -> bool {
iter::all(self, blk)
}
#[inline(always)]
fn any(&self, blk: &fn(&A) -> bool) -> bool {
iter::any(self, blk)
}
#[inline(always)]
fn foldl<B>(&self, +b0: B, blk: &fn(&B, &A) -> B) -> B {
iter::foldl(self, b0, blk)
}
#[inline(always)]
fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
iter::position(self, f)
}
#[inline(always)]
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
iter::map_to_vec(self, op)
}
#[inline(always)]
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
iter::flat_map_to_vec(self, op)
}
}
impl<A: Eq> iter::EqIter<A> for OptVec<A> {
#[inline(always)]
fn contains(&self, x: &A) -> bool { iter::contains(self, x) }
#[inline(always)]
fn count(&self, x: &A) -> uint { iter::count(self, x) }
}
impl<A: Copy> iter::CopyableIter<A> for OptVec<A> {
#[inline(always)]
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
iter::filter_to_vec(self, pred)
}
#[inline(always)]
fn to_vec(&self) -> ~[A] { iter::to_vec(self) }
#[inline(always)]
fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
iter::find(self, f)
}
}
impl<A: Copy+Ord> iter::CopyableOrderedIter<A> for OptVec<A> {
#[inline(always)]
fn min(&self) -> A { iter::min(self) }
#[inline(always)]
fn max(&self) -> A { iter::max(self) }
}
|
}
|
random_line_split
|
font_context.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use fnv::FnvHasher;
use font::{Font, FontGroup, FontHandleMethods};
use font_cache_thread::FontCacheThread;
use font_template::FontTemplateDescriptor;
use heapsize::HeapSizeOf;
use platform::font::FontHandle;
use platform::font_context::FontContextHandle;
use platform::font_template::FontTemplateData;
use servo_arc::Arc as ServoArc;
use smallvec::SmallVec;
use std::cell::RefCell;
use std::collections::HashMap;
use std::default::Default;
use std::hash::{BuildHasherDefault, Hash, Hasher};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use style::computed_values::{font_style, font_variant_caps};
use style::properties::style_structs;
use webrender_api;
static SMALL_CAPS_SCALE_FACTOR: f32 = 0.8; // Matches FireFox (see gfxFont.h)
#[derive(Debug)]
struct LayoutFontCacheEntry {
family: String,
font: Option<Rc<RefCell<Font>>>,
}
#[derive(Debug)]
struct FallbackFontCacheEntry {
font: Rc<RefCell<Font>>,
}
/// An epoch for the font context cache. The cache is flushed if the current epoch does not match
/// this one.
static FONT_CACHE_EPOCH: AtomicUsize = ATOMIC_USIZE_INIT;
/// The FontContext represents the per-thread/thread state necessary for
/// working with fonts. It is the public API used by the layout and
/// paint code. It talks directly to the font cache thread where
/// required.
#[derive(Debug)]
pub struct FontContext {
platform_handle: FontContextHandle,
font_cache_thread: FontCacheThread,
/// TODO: See bug https://github.com/servo/servo/issues/3300.
layout_font_cache: Vec<LayoutFontCacheEntry>,
fallback_font_cache: Vec<FallbackFontCacheEntry>,
layout_font_group_cache:
HashMap<LayoutFontGroupCacheKey, Rc<FontGroup>, BuildHasherDefault<FnvHasher>>,
epoch: usize,
}
impl FontContext {
pub fn new(font_cache_thread: FontCacheThread) -> FontContext {
let handle = FontContextHandle::new();
FontContext {
platform_handle: handle,
font_cache_thread: font_cache_thread,
layout_font_cache: vec!(),
fallback_font_cache: vec!(),
layout_font_group_cache: HashMap::with_hasher(Default::default()),
epoch: 0,
}
}
/// Create a font for use in layout calculations.
fn create_layout_font(&self,
template: Arc<FontTemplateData>,
descriptor: FontTemplateDescriptor,
pt_size: Au,
variant: font_variant_caps::T,
font_key: webrender_api::FontKey) -> Result<Font, ()> {
// TODO: (Bug #3463): Currently we only support fake small-caps
|
};
let handle = FontHandle::new_from_template(&self.platform_handle,
template,
Some(actual_pt_size))?;
Ok(Font::new(handle, variant, descriptor, pt_size, actual_pt_size, font_key))
}
fn expire_font_caches_if_necessary(&mut self) {
let current_epoch = FONT_CACHE_EPOCH.load(Ordering::SeqCst);
if current_epoch == self.epoch {
return
}
self.layout_font_cache.clear();
self.fallback_font_cache.clear();
self.layout_font_group_cache.clear();
self.epoch = current_epoch
}
/// Create a group of fonts for use in layout calculations. May return
/// a cached font if this font instance has already been used by
/// this context.
pub fn layout_font_group_for_style(&mut self, style: ServoArc<style_structs::Font>)
-> Rc<FontGroup> {
self.expire_font_caches_if_necessary();
let layout_font_group_cache_key = LayoutFontGroupCacheKey {
pointer: style.clone(),
size: style.font_size.0,
};
if let Some(ref cached_font_group) = self.layout_font_group_cache.get(
&layout_font_group_cache_key) {
return (*cached_font_group).clone()
}
// TODO: The font context holds a strong ref to the cached fonts
// so they will never be released. Find out a good time to drop them.
let desc = FontTemplateDescriptor::new(style.font_weight,
style.font_stretch,
style.font_style == font_style::T::italic ||
style.font_style == font_style::T::oblique);
let mut fonts: SmallVec<[Rc<RefCell<Font>>; 8]> = SmallVec::new();
for family in &style.font_family.0 {
// GWTODO: Check on real pages if this is faster as Vec() or HashMap().
let mut cache_hit = false;
for cached_font_entry in &self.layout_font_cache {
if cached_font_entry.family == family.name() {
match cached_font_entry.font {
None => {
cache_hit = true;
break;
}
Some(ref cached_font_ref) => {
let cached_font = (*cached_font_ref).borrow();
if cached_font.descriptor == desc &&
cached_font.requested_pt_size == style.font_size.0 &&
cached_font.variant == style.font_variant_caps {
fonts.push((*cached_font_ref).clone());
cache_hit = true;
break;
}
}
}
}
}
if !cache_hit {
if !cache_hit {
let template_info = self.font_cache_thread.find_font_template(family.clone(),
desc.clone());
match template_info {
Some(template_info) => {
let layout_font = self.create_layout_font(template_info.font_template,
desc.clone(),
style.font_size.0,
style.font_variant_caps,
template_info.font_key
.expect("No font key present!"));
let font = match layout_font {
Ok(layout_font) => {
let layout_font = Rc::new(RefCell::new(layout_font));
fonts.push(layout_font.clone());
Some(layout_font)
}
Err(_) => None
};
self.layout_font_cache.push(LayoutFontCacheEntry {
family: family.name().to_owned(),
font: font
});
}
None => {
self.layout_font_cache.push(LayoutFontCacheEntry {
family: family.name().to_owned(),
font: None,
});
}
}
}
}
// Add a last resort font as a fallback option.
let mut cache_hit = false;
for cached_font_entry in &self.fallback_font_cache {
let cached_font = cached_font_entry.font.borrow();
if cached_font.descriptor == desc &&
cached_font.requested_pt_size == style.font_size.0 &&
cached_font.variant == style.font_variant_caps {
fonts.push(cached_font_entry.font.clone());
cache_hit = true;
break;
}
}
if !cache_hit {
let template_info = self.font_cache_thread.last_resort_font_template(desc.clone());
let layout_font = self.create_layout_font(template_info.font_template,
desc.clone(),
style.font_size.0,
style.font_variant_caps,
template_info.font_key.expect("No font key present!"));
match layout_font {
Ok(layout_font) => {
let layout_font = Rc::new(RefCell::new(layout_font));
self.fallback_font_cache.push(FallbackFontCacheEntry {
font: layout_font.clone(),
});
fonts.push(layout_font);
}
Err(_) => debug!("Failed to create fallback layout font!")
}
}
let font_group = Rc::new(FontGroup::new(fonts));
self.layout_font_group_cache.insert(layout_font_group_cache_key, font_group.clone());
font_group
}
}
impl HeapSizeOf for FontContext {
fn heap_size_of_children(&self) -> usize {
// FIXME(njn): Measure other fields eventually.
self.platform_handle.heap_size_of_children()
}
}
#[derive(Debug)]
struct LayoutFontGroupCacheKey {
pointer: ServoArc<style_structs::Font>,
size: Au,
}
impl PartialEq for LayoutFontGroupCacheKey {
fn eq(&self, other: &LayoutFontGroupCacheKey) -> bool {
self.pointer == other.pointer && self.size == other.size
}
}
impl Eq for LayoutFontGroupCacheKey {}
impl Hash for LayoutFontGroupCacheKey {
fn hash<H>(&self, hasher: &mut H) where H: Hasher {
self.pointer.hash.hash(hasher)
}
}
#[inline]
pub fn invalidate_font_caches() {
FONT_CACHE_EPOCH.fetch_add(1, Ordering::SeqCst);
}
|
// painting. We should also support true small-caps (where the
// font supports it) in the future.
let actual_pt_size = match variant {
font_variant_caps::T::small_caps => pt_size.scale_by(SMALL_CAPS_SCALE_FACTOR),
font_variant_caps::T::normal => pt_size,
|
random_line_split
|
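Note: the font_context.rs rows use a global atomic epoch to flush per-thread caches lazily: invalidate_font_caches bumps FONT_CACHE_EPOCH, and each FontContext clears its caches the next time it notices the mismatch. A self-contained modern-Rust sketch of that pattern follows; the names here are illustrative, not Servo's API.
use std::collections::HashMap;
use std::sync::atomic::{AtomicUsize, Ordering};

// Bumped by whoever wants every cache to be rebuilt.
static CACHE_EPOCH: AtomicUsize = AtomicUsize::new(0);

struct Cache {
    entries: HashMap<String, u32>,
    epoch: usize,
}

impl Cache {
    fn new() -> Cache {
        Cache { entries: HashMap::new(), epoch: CACHE_EPOCH.load(Ordering::SeqCst) }
    }
    // Clear lazily when the global epoch has moved on since the last access.
    fn expire_if_necessary(&mut self) {
        let current = CACHE_EPOCH.load(Ordering::SeqCst);
        if current != self.epoch {
            self.entries.clear();
            self.epoch = current;
        }
    }
    fn get_or_insert(&mut self, key: &str, compute: impl FnOnce() -> u32) -> u32 {
        self.expire_if_necessary();
        *self.entries.entry(key.to_owned()).or_insert_with(compute)
    }
}

fn invalidate_caches() {
    CACHE_EPOCH.fetch_add(1, Ordering::SeqCst);
}

fn main() {
    let mut cache = Cache::new();
    assert_eq!(cache.get_or_insert("a", || 1), 1);
    invalidate_caches(); // next access flushes the cache
    assert_eq!(cache.get_or_insert("a", || 2), 2);
}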
font_context.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use fnv::FnvHasher;
use font::{Font, FontGroup, FontHandleMethods};
use font_cache_thread::FontCacheThread;
use font_template::FontTemplateDescriptor;
use heapsize::HeapSizeOf;
use platform::font::FontHandle;
use platform::font_context::FontContextHandle;
use platform::font_template::FontTemplateData;
use servo_arc::Arc as ServoArc;
use smallvec::SmallVec;
use std::cell::RefCell;
use std::collections::HashMap;
use std::default::Default;
use std::hash::{BuildHasherDefault, Hash, Hasher};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use style::computed_values::{font_style, font_variant_caps};
use style::properties::style_structs;
use webrender_api;
static SMALL_CAPS_SCALE_FACTOR: f32 = 0.8; // Matches FireFox (see gfxFont.h)
#[derive(Debug)]
struct LayoutFontCacheEntry {
family: String,
font: Option<Rc<RefCell<Font>>>,
}
#[derive(Debug)]
struct FallbackFontCacheEntry {
font: Rc<RefCell<Font>>,
}
/// An epoch for the font context cache. The cache is flushed if the current epoch does not match
/// this one.
static FONT_CACHE_EPOCH: AtomicUsize = ATOMIC_USIZE_INIT;
/// The FontContext represents the per-thread/thread state necessary for
/// working with fonts. It is the public API used by the layout and
/// paint code. It talks directly to the font cache thread where
/// required.
#[derive(Debug)]
pub struct FontContext {
platform_handle: FontContextHandle,
font_cache_thread: FontCacheThread,
/// TODO: See bug https://github.com/servo/servo/issues/3300.
layout_font_cache: Vec<LayoutFontCacheEntry>,
fallback_font_cache: Vec<FallbackFontCacheEntry>,
layout_font_group_cache:
HashMap<LayoutFontGroupCacheKey, Rc<FontGroup>, BuildHasherDefault<FnvHasher>>,
epoch: usize,
}
impl FontContext {
pub fn new(font_cache_thread: FontCacheThread) -> FontContext {
let handle = FontContextHandle::new();
FontContext {
platform_handle: handle,
font_cache_thread: font_cache_thread,
layout_font_cache: vec!(),
fallback_font_cache: vec!(),
layout_font_group_cache: HashMap::with_hasher(Default::default()),
epoch: 0,
}
}
/// Create a font for use in layout calculations.
fn create_layout_font(&self,
template: Arc<FontTemplateData>,
descriptor: FontTemplateDescriptor,
pt_size: Au,
variant: font_variant_caps::T,
font_key: webrender_api::FontKey) -> Result<Font, ()> {
// TODO: (Bug #3463): Currently we only support fake small-caps
// painting. We should also support true small-caps (where the
// font supports it) in the future.
let actual_pt_size = match variant {
font_variant_caps::T::small_caps => pt_size.scale_by(SMALL_CAPS_SCALE_FACTOR),
font_variant_caps::T::normal => pt_size,
};
let handle = FontHandle::new_from_template(&self.platform_handle,
template,
Some(actual_pt_size))?;
Ok(Font::new(handle, variant, descriptor, pt_size, actual_pt_size, font_key))
}
fn expire_font_caches_if_necessary(&mut self) {
let current_epoch = FONT_CACHE_EPOCH.load(Ordering::SeqCst);
if current_epoch == self.epoch {
return
}
self.layout_font_cache.clear();
self.fallback_font_cache.clear();
self.layout_font_group_cache.clear();
self.epoch = current_epoch
}
/// Create a group of fonts for use in layout calculations. May return
/// a cached font if this font instance has already been used by
/// this context.
pub fn layout_font_group_for_style(&mut self, style: ServoArc<style_structs::Font>)
-> Rc<FontGroup> {
self.expire_font_caches_if_necessary();
let layout_font_group_cache_key = LayoutFontGroupCacheKey {
pointer: style.clone(),
size: style.font_size.0,
};
if let Some(ref cached_font_group) = self.layout_font_group_cache.get(
&layout_font_group_cache_key) {
return (*cached_font_group).clone()
}
// TODO: The font context holds a strong ref to the cached fonts
// so they will never be released. Find out a good time to drop them.
let desc = FontTemplateDescriptor::new(style.font_weight,
style.font_stretch,
style.font_style == font_style::T::italic ||
style.font_style == font_style::T::oblique);
let mut fonts: SmallVec<[Rc<RefCell<Font>>; 8]> = SmallVec::new();
for family in &style.font_family.0 {
// GWTODO: Check on real pages if this is faster as Vec() or HashMap().
let mut cache_hit = false;
for cached_font_entry in &self.layout_font_cache {
if cached_font_entry.family == family.name() {
match cached_font_entry.font {
None => {
cache_hit = true;
break;
}
Some(ref cached_font_ref) => {
let cached_font = (*cached_font_ref).borrow();
if cached_font.descriptor == desc &&
cached_font.requested_pt_size == style.font_size.0 &&
cached_font.variant == style.font_variant_caps {
fonts.push((*cached_font_ref).clone());
cache_hit = true;
break;
}
}
}
}
}
if !cache_hit {
let template_info = self.font_cache_thread.find_font_template(family.clone(),
desc.clone());
match template_info {
Some(template_info) => {
let layout_font = self.create_layout_font(template_info.font_template,
desc.clone(),
style.font_size.0,
style.font_variant_caps,
template_info.font_key
.expect("No font key present!"));
let font = match layout_font {
Ok(layout_font) => {
let layout_font = Rc::new(RefCell::new(layout_font));
fonts.push(layout_font.clone());
Some(layout_font)
}
Err(_) => None
};
self.layout_font_cache.push(LayoutFontCacheEntry {
family: family.name().to_owned(),
font: font
});
}
None => {
self.layout_font_cache.push(LayoutFontCacheEntry {
family: family.name().to_owned(),
font: None,
});
}
}
}
}
// Add a last resort font as a fallback option.
let mut cache_hit = false;
for cached_font_entry in &self.fallback_font_cache {
let cached_font = cached_font_entry.font.borrow();
if cached_font.descriptor == desc &&
cached_font.requested_pt_size == style.font_size.0 &&
cached_font.variant == style.font_variant_caps {
fonts.push(cached_font_entry.font.clone());
cache_hit = true;
break;
}
}
if !cache_hit {
let template_info = self.font_cache_thread.last_resort_font_template(desc.clone());
let layout_font = self.create_layout_font(template_info.font_template,
desc.clone(),
style.font_size.0,
style.font_variant_caps,
template_info.font_key.expect("No font key present!"));
match layout_font {
Ok(layout_font) => {
let layout_font = Rc::new(RefCell::new(layout_font));
self.fallback_font_cache.push(FallbackFontCacheEntry {
font: layout_font.clone(),
});
fonts.push(layout_font);
}
Err(_) => debug!("Failed to create fallback layout font!")
}
}
let font_group = Rc::new(FontGroup::new(fonts));
self.layout_font_group_cache.insert(layout_font_group_cache_key, font_group.clone());
font_group
}
}
impl HeapSizeOf for FontContext {
fn heap_size_of_children(&self) -> usize
|
}
#[derive(Debug)]
struct LayoutFontGroupCacheKey {
pointer: ServoArc<style_structs::Font>,
size: Au,
}
impl PartialEq for LayoutFontGroupCacheKey {
fn eq(&self, other: &LayoutFontGroupCacheKey) -> bool {
self.pointer == other.pointer && self.size == other.size
}
}
impl Eq for LayoutFontGroupCacheKey {}
impl Hash for LayoutFontGroupCacheKey {
fn hash<H>(&self, hasher: &mut H) where H: Hasher {
self.pointer.hash.hash(hasher)
}
}
#[inline]
pub fn invalidate_font_caches() {
FONT_CACHE_EPOCH.fetch_add(1, Ordering::SeqCst);
}
|
{
// FIXME(njn): Measure other fields eventually.
self.platform_handle.heap_size_of_children()
}
|
identifier_body
|
font_context.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use fnv::FnvHasher;
use font::{Font, FontGroup, FontHandleMethods};
use font_cache_thread::FontCacheThread;
use font_template::FontTemplateDescriptor;
use heapsize::HeapSizeOf;
use platform::font::FontHandle;
use platform::font_context::FontContextHandle;
use platform::font_template::FontTemplateData;
use servo_arc::Arc as ServoArc;
use smallvec::SmallVec;
use std::cell::RefCell;
use std::collections::HashMap;
use std::default::Default;
use std::hash::{BuildHasherDefault, Hash, Hasher};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use style::computed_values::{font_style, font_variant_caps};
use style::properties::style_structs;
use webrender_api;
static SMALL_CAPS_SCALE_FACTOR: f32 = 0.8; // Matches FireFox (see gfxFont.h)
#[derive(Debug)]
struct LayoutFontCacheEntry {
family: String,
font: Option<Rc<RefCell<Font>>>,
}
#[derive(Debug)]
struct FallbackFontCacheEntry {
font: Rc<RefCell<Font>>,
}
/// An epoch for the font context cache. The cache is flushed if the current epoch does not match
/// this one.
static FONT_CACHE_EPOCH: AtomicUsize = ATOMIC_USIZE_INIT;
/// The FontContext represents the per-thread/thread state necessary for
/// working with fonts. It is the public API used by the layout and
/// paint code. It talks directly to the font cache thread where
/// required.
#[derive(Debug)]
pub struct FontContext {
platform_handle: FontContextHandle,
font_cache_thread: FontCacheThread,
/// TODO: See bug https://github.com/servo/servo/issues/3300.
layout_font_cache: Vec<LayoutFontCacheEntry>,
fallback_font_cache: Vec<FallbackFontCacheEntry>,
layout_font_group_cache:
HashMap<LayoutFontGroupCacheKey, Rc<FontGroup>, BuildHasherDefault<FnvHasher>>,
epoch: usize,
}
impl FontContext {
pub fn new(font_cache_thread: FontCacheThread) -> FontContext {
let handle = FontContextHandle::new();
FontContext {
platform_handle: handle,
font_cache_thread: font_cache_thread,
layout_font_cache: vec!(),
fallback_font_cache: vec!(),
layout_font_group_cache: HashMap::with_hasher(Default::default()),
epoch: 0,
}
}
/// Create a font for use in layout calculations.
fn create_layout_font(&self,
template: Arc<FontTemplateData>,
descriptor: FontTemplateDescriptor,
pt_size: Au,
variant: font_variant_caps::T,
font_key: webrender_api::FontKey) -> Result<Font, ()> {
// TODO: (Bug #3463): Currently we only support fake small-caps
// painting. We should also support true small-caps (where the
// font supports it) in the future.
let actual_pt_size = match variant {
font_variant_caps::T::small_caps => pt_size.scale_by(SMALL_CAPS_SCALE_FACTOR),
font_variant_caps::T::normal => pt_size,
};
let handle = FontHandle::new_from_template(&self.platform_handle,
template,
Some(actual_pt_size))?;
Ok(Font::new(handle, variant, descriptor, pt_size, actual_pt_size, font_key))
}
fn expire_font_caches_if_necessary(&mut self) {
let current_epoch = FONT_CACHE_EPOCH.load(Ordering::SeqCst);
if current_epoch == self.epoch {
return
}
self.layout_font_cache.clear();
self.fallback_font_cache.clear();
self.layout_font_group_cache.clear();
self.epoch = current_epoch
}
/// Create a group of fonts for use in layout calculations. May return
/// a cached font if this font instance has already been used by
/// this context.
pub fn layout_font_group_for_style(&mut self, style: ServoArc<style_structs::Font>)
-> Rc<FontGroup> {
self.expire_font_caches_if_necessary();
let layout_font_group_cache_key = LayoutFontGroupCacheKey {
pointer: style.clone(),
size: style.font_size.0,
};
if let Some(ref cached_font_group) = self.layout_font_group_cache.get(
&layout_font_group_cache_key) {
return (*cached_font_group).clone()
}
// TODO: The font context holds a strong ref to the cached fonts
// so they will never be released. Find out a good time to drop them.
let desc = FontTemplateDescriptor::new(style.font_weight,
style.font_stretch,
style.font_style == font_style::T::italic ||
style.font_style == font_style::T::oblique);
let mut fonts: SmallVec<[Rc<RefCell<Font>>; 8]> = SmallVec::new();
for family in &style.font_family.0 {
// GWTODO: Check on real pages if this is faster as Vec() or HashMap().
let mut cache_hit = false;
for cached_font_entry in &self.layout_font_cache {
if cached_font_entry.family == family.name() {
match cached_font_entry.font {
None => {
cache_hit = true;
break;
}
Some(ref cached_font_ref) => {
let cached_font = (*cached_font_ref).borrow();
if cached_font.descriptor == desc &&
cached_font.requested_pt_size == style.font_size.0 &&
cached_font.variant == style.font_variant_caps {
fonts.push((*cached_font_ref).clone());
cache_hit = true;
break;
}
}
}
}
}
if !cache_hit {
let template_info = self.font_cache_thread.find_font_template(family.clone(),
desc.clone());
match template_info {
Some(template_info) => {
let layout_font = self.create_layout_font(template_info.font_template,
desc.clone(),
style.font_size.0,
style.font_variant_caps,
template_info.font_key
.expect("No font key present!"));
let font = match layout_font {
Ok(layout_font) => {
let layout_font = Rc::new(RefCell::new(layout_font));
fonts.push(layout_font.clone());
Some(layout_font)
}
Err(_) => None
};
self.layout_font_cache.push(LayoutFontCacheEntry {
family: family.name().to_owned(),
font: font
});
}
None => {
self.layout_font_cache.push(LayoutFontCacheEntry {
family: family.name().to_owned(),
font: None,
});
}
}
}
}
// Add a last resort font as a fallback option.
let mut cache_hit = false;
for cached_font_entry in &self.fallback_font_cache {
let cached_font = cached_font_entry.font.borrow();
if cached_font.descriptor == desc &&
cached_font.requested_pt_size == style.font_size.0 &&
cached_font.variant == style.font_variant_caps {
fonts.push(cached_font_entry.font.clone());
cache_hit = true;
break;
}
}
if !cache_hit {
let template_info = self.font_cache_thread.last_resort_font_template(desc.clone());
let layout_font = self.create_layout_font(template_info.font_template,
desc.clone(),
style.font_size.0,
style.font_variant_caps,
template_info.font_key.expect("No font key present!"));
match layout_font {
Ok(layout_font) => {
let layout_font = Rc::new(RefCell::new(layout_font));
self.fallback_font_cache.push(FallbackFontCacheEntry {
font: layout_font.clone(),
});
fonts.push(layout_font);
}
Err(_) => debug!("Failed to create fallback layout font!")
}
}
let font_group = Rc::new(FontGroup::new(fonts));
self.layout_font_group_cache.insert(layout_font_group_cache_key, font_group.clone());
font_group
}
}
impl HeapSizeOf for FontContext {
fn heap_size_of_children(&self) -> usize {
// FIXME(njn): Measure other fields eventually.
self.platform_handle.heap_size_of_children()
}
}
#[derive(Debug)]
struct LayoutFontGroupCacheKey {
pointer: ServoArc<style_structs::Font>,
size: Au,
}
impl PartialEq for LayoutFontGroupCacheKey {
fn
|
(&self, other: &LayoutFontGroupCacheKey) -> bool {
self.pointer == other.pointer && self.size == other.size
}
}
impl Eq for LayoutFontGroupCacheKey {}
impl Hash for LayoutFontGroupCacheKey {
fn hash<H>(&self, hasher: &mut H) where H: Hasher {
self.pointer.hash.hash(hasher)
}
}
#[inline]
pub fn invalidate_font_caches() {
FONT_CACHE_EPOCH.fetch_add(1, Ordering::SeqCst);
}
|
eq
|
identifier_name
|
mem.rs
|
use core::intrinsics;
use core::mem;
use core::ptr;
/// This replaces the value behind the `v` unique reference by calling the
/// relevant function.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[allow(dead_code)] // keep as illustration and for future use
#[inline]
pub fn take_mut<T>(v: &mut T, change: impl FnOnce(T) -> T) {
replace(v, |value| (change(value), ()))
}
/// This replaces the value behind the `v` unique reference by calling the
/// relevant function, and returns a result obtained along the way.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[inline]
pub fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
struct PanicGuard;
impl Drop for PanicGuard {
fn drop(&mut self)
|
}
let guard = PanicGuard;
let value = unsafe { ptr::read(v) };
let (new_value, ret) = change(value);
unsafe {
ptr::write(v, new_value);
}
mem::forget(guard);
ret
}
|
{
intrinsics::abort()
}
|
identifier_body
|
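Note: the mem.rs rows define take_mut/replace, which move the value out from behind a &mut T, hand it to a closure by value, and abort the process if the closure panics so the reference never observes a moved-out slot. A hedged usage sketch of that by-move update style follows, expressed with std::mem::replace because the helpers in the row are private to their crate; the State enum is illustrative only.
enum State {
    Idle,
    Running(String),
}

// Morally take_mut(v, |old| ...): the old value is consumed by move and a new
// one is written back before any caller can observe the gap.
fn start(v: &mut State) {
    let old = std::mem::replace(v, State::Idle);
    *v = match old {
        State::Idle => State::Running("job-0".to_owned()),
        running => running, // already running: keep the existing value
    };
}

fn main() {
    let mut s = State::Idle;
    start(&mut s);
    assert!(matches!(s, State::Running(_)));
}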
mem.rs
|
use core::intrinsics;
use core::mem;
use core::ptr;
/// This replaces the value behind the `v` unique reference by calling the
/// relevant function.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[allow(dead_code)] // keep as illustration and for future use
#[inline]
pub fn take_mut<T>(v: &mut T, change: impl FnOnce(T) -> T) {
replace(v, |value| (change(value), ()))
}
/// This replaces the value behind the `v` unique reference by calling the
/// relevant function, and returns a result obtained along the way.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[inline]
pub fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
struct PanicGuard;
impl Drop for PanicGuard {
fn drop(&mut self) {
intrinsics::abort()
}
}
let guard = PanicGuard;
let value = unsafe { ptr::read(v) };
let (new_value, ret) = change(value);
unsafe {
ptr::write(v, new_value);
}
mem::forget(guard);
ret
|
}
|
random_line_split
|
|
mem.rs
|
use core::intrinsics;
use core::mem;
use core::ptr;
/// This replaces the value behind the `v` unique reference by calling the
/// relevant function.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[allow(dead_code)] // keep as illustration and for future use
#[inline]
pub fn take_mut<T>(v: &mut T, change: impl FnOnce(T) -> T) {
replace(v, |value| (change(value), ()))
}
/// This replaces the value behind the `v` unique reference by calling the
/// relevant function, and returns a result obtained along the way.
///
/// If a panic occurs in the `change` closure, the entire process will be aborted.
#[inline]
pub fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
struct PanicGuard;
impl Drop for PanicGuard {
fn
|
(&mut self) {
intrinsics::abort()
}
}
let guard = PanicGuard;
let value = unsafe { ptr::read(v) };
let (new_value, ret) = change(value);
unsafe {
ptr::write(v, new_value);
}
mem::forget(guard);
ret
}
|
drop
|
identifier_name
|
MoTSA.rs
|
// pub fn find_median_sorted_arrays(nums1: Vec<i32>, nums2: Vec<i32>) -> f64 {
// let (l1, l2) = (nums1.len() as f32, nums2.len() as f32);
// let l = ((l1 + l2) / 2_f32).floor() as usize;
// dbg!(l);
// if l == 0 {
// if l1 > l2 {
// return nums1[0] as f64;
// } else {
// return nums2[0] as f64;
// }
// }
// let (mut n1, mut n2) = (nums1.iter(), nums2.iter());
// let (mut a, mut b) = (n1.next(), n2.next());
// let mut i = 0;
// while i < l - 1 {
// match (a, b) {
// (Some(aa), Some(bb)) => {
// if aa >= bb {
// b = n2.next();
// } else {
// a = n1.next();
// }
// i += 1;
// }
// (None, Some(_)) => {
// a = n2.next();
// }
// (Some(_), None) => {
// b = n1.next();
// }
// _ => {}
// }
// }
// match (a, b) {
// (None, Some(_)) => {
// a = n2.next();
// }
// (Some(_), None) => {
// b = n1.next();
// }
// _ => {}
// }
// dbg!(a);
// dbg!(b);
// if (l1 + l2) % 2_f32 == 0_f32 {
// return (*a.unwrap() as f64 + *b.unwrap() as f64) / 2_f64;
// } else {
// (*a.unwrap() as f64).max(*b.unwrap() as f64)
// }
// }
pub fn find_median_sorted_arrays(mut nums1: Vec<i32>, mut nums2: Vec<i32>) -> f64 {
let (l1, l2) = (nums1.len() as f32, nums2.len() as f32);
let l = ((l1 + l2) / 2_f32).floor() as usize;
nums1.append(&mut nums2);
nums1.sort();
let mut n = nums1.iter();
if (l1 + l2) % 2_f32 == 0_f32 {
(*n.nth(l - 1).unwrap() as f64 + *n.next().unwrap() as f64) / 2_f64
} else {
*n.nth(l).unwrap() as f64
}
}
fn main()
|
{
assert_eq!(find_median_sorted_arrays(vec![1, 3], vec![2]), 2.0);
assert_eq!(find_median_sorted_arrays(vec![1, 2], vec![3, 4]), 2.5);
assert_eq!(find_median_sorted_arrays(vec![0, 0], vec![0, 0]), 0.0);
assert_eq!(find_median_sorted_arrays(vec![], vec![1]), 1.0);
assert_eq!(find_median_sorted_arrays(vec![], vec![1, 2]), 1.5);
assert_eq!(find_median_sorted_arrays(vec![2], vec![]), 2.0);
assert_eq!(find_median_sorted_arrays(vec![3], vec![-2, -1]), -1.0);
}
|
identifier_body
|
|
MoTSA.rs
|
// pub fn find_median_sorted_arrays(nums1: Vec<i32>, nums2: Vec<i32>) -> f64 {
// let (l1, l2) = (nums1.len() as f32, nums2.len() as f32);
// let l = ((l1 + l2) / 2_f32).floor() as usize;
// dbg!(l);
// if l == 0 {
// if l1 > l2 {
// return nums1[0] as f64;
// } else {
// return nums2[0] as f64;
// }
// }
// let (mut n1, mut n2) = (nums1.iter(), nums2.iter());
// let (mut a, mut b) = (n1.next(), n2.next());
// let mut i = 0;
// while i < l - 1 {
// match (a, b) {
// (Some(aa), Some(bb)) => {
// if aa >= bb {
// b = n2.next();
// } else {
// a = n1.next();
// }
// i += 1;
// }
// (None, Some(_)) => {
// a = n2.next();
// }
// (Some(_), None) => {
// b = n1.next();
// }
// _ => {}
// }
// }
// match (a, b) {
// (None, Some(_)) => {
// a = n2.next();
// }
// (Some(_), None) => {
// b = n1.next();
// }
// _ => {}
// }
// dbg!(a);
// dbg!(b);
// if (l1 + l2) % 2_f32 == 0_f32 {
// return (*a.unwrap() as f64 + *b.unwrap() as f64) / 2_f64;
// } else {
// (*a.unwrap() as f64).max(*b.unwrap() as f64)
// }
// }
pub fn find_median_sorted_arrays(mut nums1: Vec<i32>, mut nums2: Vec<i32>) -> f64 {
let (l1, l2) = (nums1.len() as f32, nums2.len() as f32);
let l = ((l1 + l2) / 2_f32).floor() as usize;
nums1.append(&mut nums2);
nums1.sort();
let mut n = nums1.iter();
if (l1 + l2) % 2_f32 == 0_f32
|
else {
*n.nth(l).unwrap() as f64
}
}
fn main() {
assert_eq!(find_median_sorted_arrays(vec![1, 3], vec![2]), 2.0);
assert_eq!(find_median_sorted_arrays(vec![1, 2], vec![3, 4]), 2.5);
assert_eq!(find_median_sorted_arrays(vec![0, 0], vec![0, 0]), 0.0);
assert_eq!(find_median_sorted_arrays(vec![], vec![1]), 1.0);
assert_eq!(find_median_sorted_arrays(vec![], vec![1, 2]), 1.5);
assert_eq!(find_median_sorted_arrays(vec![2], vec![]), 2.0);
assert_eq!(find_median_sorted_arrays(vec![3], vec![-2, -1]), -1.0);
}
|
{
(*n.nth(l - 1).unwrap() as f64 + *n.next().unwrap() as f64) / 2_f64
}
|
conditional_block
|
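Note: the accepted MoTSA.rs solution concatenates and re-sorts, which is O((m+n) log(m+n)); the commented-out attempt in the same rows was reaching for a linear merge walk. A hedged sketch of that O(m+n) walk follows; the function name and the panic-on-empty behaviour are illustrative assumptions, not the original code.
pub fn median_by_merge(a: &[i32], b: &[i32]) -> f64 {
    let total = a.len() + b.len();
    assert!(total > 0, "at least one element is required");
    let (mut i, mut j) = (0usize, 0usize);
    let (mut prev, mut curr) = (0i32, 0i32);
    // Walk total/2 + 1 steps so `prev` and `curr` straddle the middle.
    for _ in 0..=total / 2 {
        prev = curr;
        curr = if i < a.len() && (j >= b.len() || a[i] <= b[j]) {
            let v = a[i];
            i += 1;
            v
        } else {
            let v = b[j];
            j += 1;
            v
        };
    }
    if total % 2 == 0 {
        (prev as f64 + curr as f64) / 2.0
    } else {
        curr as f64
    }
}

fn main() {
    assert_eq!(median_by_merge(&[1, 3], &[2]), 2.0);
    assert_eq!(median_by_merge(&[1, 2], &[3, 4]), 2.5);
    assert_eq!(median_by_merge(&[], &[1]), 1.0);
    assert_eq!(median_by_merge(&[3], &[-2, -1]), -1.0);
}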