| Column    | Type         | Value statistics    |
|-----------|--------------|---------------------|
| file_name | large_string | lengths 4 – 69      |
| prefix    | large_string | lengths 0 – 26.7k   |
| suffix    | large_string | lengths 0 – 24.8k   |
| middle    | large_string | lengths 0 – 2.12k   |
| fim_type  | large_string | 4 distinct values   |
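The table above appears to be the column schema of a fill-in-the-middle (FIM) code dataset: each row splits one Rust source file into a `prefix`, a held-out `middle`, and a `suffix`, and `fim_type` labels what kind of span was held out (the rows below use the four values `identifier_name`, `identifier_body`, `random_line_split`, and `conditional_block`). The sketch below shows how the columns fit together; the `FimRecord` struct, the `reconstruct` helper, and the sample values are illustrative assumptions made for this example, not part of the dataset itself.

```rust
/// One row of the dataset, mirroring the columns in the schema above.
/// The struct and field names are assumptions made for this sketch.
struct FimRecord {
    file_name: String,
    prefix: String,
    suffix: String,
    /// Observed values: "identifier_name", "identifier_body",
    /// "random_line_split", "conditional_block".
    fim_type: String,
    middle: String,
}

/// The `middle` span is the ground-truth completion that belongs between
/// `prefix` and `suffix`, so concatenating the three recovers the file text.
fn reconstruct(record: &FimRecord) -> String {
    let mut source = String::with_capacity(
        record.prefix.len() + record.middle.len() + record.suffix.len(),
    );
    source.push_str(&record.prefix);
    source.push_str(&record.middle);
    source.push_str(&record.suffix);
    source
}

fn main() {
    // Hypothetical row modelled on the first sample ("person.rs"), where the
    // prefix ends at `pub fn`, the held-out middle is the identifier
    // `last_50`, and the suffix carries the rest of the function.
    let record = FimRecord {
        file_name: "person.rs".to_string(),
        prefix: "pub fn ".to_string(),
        suffix: "(es: elasticsearch::Client) -> ~[Person] { /* ... */ }".to_string(),
        fim_type: "identifier_name".to_string(),
        middle: "last_50".to_string(),
    };
    println!("{}: {}", record.file_name, reconstruct(&record));
}
```

In the samples that follow, each record is listed in the same column order as the schema — file name, prefix, suffix, middle, and finally fim_type — so the short value shown fourth is the held-out text that belongs between the prefix and suffix listed before it.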
person.rs
use std::time; use time::Tm; use mre::model::{Model, Error}; // Create a class to act as our model. Unfortunately Rust's classes aren't // finished yet, and are missing a couple features that would help clean up // models. First, there's no way to mix in implementation of functions, so we // need to duplicate some code that's common across all models. Second, there // is no way to have multiple constructors or static functions, so we need to // move Error handling out into a wrapper function. So to keep the api clean, // we cheat and hide the class so we can make a function that acts like what we // want. pub struct Person { model: Model, } pub impl Person { fn id(&self) -> &self/~str { &self.model._id } fn timestamp(&self) -> ~str { self.model.get_str(&~"timestamp") } fn set_timestamp(&mut self, timestamp: ~str) -> bool { self.model.set_str(~"timestamp", timestamp) } fn name(&self) -> ~str { self.model.get_str(&~"name") } fn set_name(&mut self, name: ~str) -> bool { self.model.set_str(~"name", name) } fn create(&self) -> Result<(~str, uint), Error> { self.model.create() } fn save(&self) -> Result<(~str, uint), Error> { self.model.save() } fn delete(&self) { self.model.delete() } } // Create a new person model. pub fn Person(es: elasticsearch::Client, name: ~str) -> Person { // Create a person. We'll store the model in the ES index named // "helloeveryone", under the type "person". We'd like ES to make the index // for us, so we leave the id blank. let mut person = Person { model: Model(es, ~"helloeveryone", ~"person", ~"") }; person.set_name(name); person.set_timestamp(time::now().rfc3339()); person } // Return the last 50 people we have said hello to. pub fn
(es: elasticsearch::Client) -> ~[Person] { // This query can be a little complicated for those who have never used // elasticsearch. All it says is that we want to fetch 50 documents on the // index "helloeveryone" and the type "person", sorted by time. do mre::model::search(es) |bld| { bld .set_indices(~[~"helloeveryone"]) .set_types(~[~"person"]) .set_source(JsonObjectBuilder() .insert(~"size", 50.0) .insert_list(~"sort", |bld| { bld.push_object(|bld| { bld.insert(~"timestamp", ~"desc"); }); }) .object.take() ); }.map(|model| // Construct a person model from the raw model data. Person { model: *model } ) }
last_50
identifier_name
reflector.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use syntax::ext::base::ExtCtxt; use syntax::codemap::Span; use syntax::ptr::P; use syntax::ast::{Item, MetaItem}; use syntax::ast; use utils::match_ty_unwrap; pub fn expand_reflector(cx: &mut ExtCtxt, span: Span, _: &MetaItem, item: &Item, push: |P<Item>|)
let impl_item = quote_item!(cx, impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector { self.$field_name.reflector() } } ); impl_item.map(|it| push(it)) } }; } else { cx.span_err(span, "#[dom_struct] seems to have been applied to a non-struct"); } }
{ if let ast::ItemStruct(ref def, _) = item.node { let struct_name = item.ident; // This path has to be hardcoded, unfortunately, since we can't resolve paths at expansion time match def.fields.iter().find(|f| match_ty_unwrap(&*f.node.ty, &["dom", "bindings", "utils", "Reflector"]).is_some()) { // If it has a field that is a Reflector, use that Some(f) => { let field_name = f.node.ident(); let impl_item = quote_item!(cx, impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector { &self.$field_name } } ); impl_item.map(|it| push(it)) }, // Or just call it on the first field (supertype). None => { let field_name = def.fields[0].node.ident();
identifier_body
reflector.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use syntax::ext::base::ExtCtxt; use syntax::codemap::Span; use syntax::ptr::P; use syntax::ast::{Item, MetaItem}; use syntax::ast; use utils::match_ty_unwrap; pub fn
(cx: &mut ExtCtxt, span: Span, _: &MetaItem, item: &Item, push: |P<Item>|) { if let ast::ItemStruct(ref def, _) = item.node { let struct_name = item.ident; // This path has to be hardcoded, unfortunately, since we can't resolve paths at expansion time match def.fields.iter().find(|f| match_ty_unwrap(&*f.node.ty, &["dom", "bindings", "utils", "Reflector"]).is_some()) { // If it has a field that is a Reflector, use that Some(f) => { let field_name = f.node.ident(); let impl_item = quote_item!(cx, impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector { &self.$field_name } } ); impl_item.map(|it| push(it)) }, // Or just call it on the first field (supertype). None => { let field_name = def.fields[0].node.ident(); let impl_item = quote_item!(cx, impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector { self.$field_name.reflector() } } ); impl_item.map(|it| push(it)) } }; } else { cx.span_err(span, "#[dom_struct] seems to have been applied to a non-struct"); } }
expand_reflector
identifier_name
reflector.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use syntax::ext::base::ExtCtxt; use syntax::codemap::Span; use syntax::ptr::P; use syntax::ast::{Item, MetaItem}; use syntax::ast; use utils::match_ty_unwrap; pub fn expand_reflector(cx: &mut ExtCtxt, span: Span, _: &MetaItem, item: &Item, push: |P<Item>|) { if let ast::ItemStruct(ref def, _) = item.node { let struct_name = item.ident; // This path has to be hardcoded, unfortunately, since we can't resolve paths at expansion time match def.fields.iter().find(|f| match_ty_unwrap(&*f.node.ty, &["dom", "bindings", "utils", "Reflector"]).is_some()) { // If it has a field that is a Reflector, use that Some(f) => { let field_name = f.node.ident(); let impl_item = quote_item!(cx, impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector { &self.$field_name } } ); impl_item.map(|it| push(it)) }, // Or just call it on the first field (supertype). None => { let field_name = def.fields[0].node.ident(); let impl_item = quote_item!(cx, impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector {
} } ); impl_item.map(|it| push(it)) } }; } else { cx.span_err(span, "#[dom_struct] seems to have been applied to a non-struct"); } }
self.$field_name.reflector()
random_line_split
reflector.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use syntax::ext::base::ExtCtxt; use syntax::codemap::Span; use syntax::ptr::P; use syntax::ast::{Item, MetaItem}; use syntax::ast; use utils::match_ty_unwrap; pub fn expand_reflector(cx: &mut ExtCtxt, span: Span, _: &MetaItem, item: &Item, push: |P<Item>|) { if let ast::ItemStruct(ref def, _) = item.node
impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector { self.$field_name.reflector() } } ); impl_item.map(|it| push(it)) } }; } else { cx.span_err(span, "#[dom_struct] seems to have been applied to a non-struct"); } }
{ let struct_name = item.ident; // This path has to be hardcoded, unfortunately, since we can't resolve paths at expansion time match def.fields.iter().find(|f| match_ty_unwrap(&*f.node.ty, &["dom", "bindings", "utils", "Reflector"]).is_some()) { // If it has a field that is a Reflector, use that Some(f) => { let field_name = f.node.ident(); let impl_item = quote_item!(cx, impl ::dom::bindings::utils::Reflectable for $struct_name { fn reflector<'a>(&'a self) -> &'a ::dom::bindings::utils::Reflector { &self.$field_name } } ); impl_item.map(|it| push(it)) }, // Or just call it on the first field (supertype). None => { let field_name = def.fields[0].node.ident(); let impl_item = quote_item!(cx,
conditional_block
angle.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use glutin; use glutin::{WindowBuilder, ContextBuilder, EventsLoop, Window, CreationError}; #[cfg(not(windows))] pub enum Context {} #[cfg(windows)] pub use ::egl::Context; impl Context { #[cfg(not(windows))] pub fn with_window( _: WindowBuilder, _: ContextBuilder, _: &EventsLoop, ) -> Result<(Window, Self), CreationError>
#[cfg(windows)] pub fn with_window( window_builder: WindowBuilder, context_builder: ContextBuilder, events_loop: &EventsLoop, ) -> Result<(Window, Self), CreationError> { use glutin::os::windows::WindowExt; // FIXME: &context_builder.pf_reqs https://github.com/tomaka/glutin/pull/1002 let pf_reqs = &glutin::PixelFormatRequirements::default(); let gl_attr = &context_builder.gl_attr.map_sharing(|_| unimplemented!()); let window = window_builder.build(events_loop)?; Self::new(pf_reqs, gl_attr) .and_then(|p| p.finish(window.get_hwnd() as _)) .map(|context| (window, context)) } } #[cfg(not(windows))] impl glutin::GlContext for Context { unsafe fn make_current(&self) -> Result<(), glutin::ContextError> { match *self {} } fn is_current(&self) -> bool { match *self {} } fn get_proc_address(&self, _: &str) -> *const () { match *self {} } fn swap_buffers(&self) -> Result<(), glutin::ContextError> { match *self {} } fn get_api(&self) -> glutin::Api { match *self {} } fn get_pixel_format(&self) -> glutin::PixelFormat { match *self {} } fn resize(&self, _: u32, _: u32) { match *self {} } }
{ Err(CreationError::PlatformSpecific("ANGLE rendering is only supported on Windows".into())) }
identifier_body
angle.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use glutin; use glutin::{WindowBuilder, ContextBuilder, EventsLoop, Window, CreationError}; #[cfg(not(windows))] pub enum Context {} #[cfg(windows)] pub use ::egl::Context; impl Context { #[cfg(not(windows))] pub fn
( _: WindowBuilder, _: ContextBuilder, _: &EventsLoop, ) -> Result<(Window, Self), CreationError> { Err(CreationError::PlatformSpecific("ANGLE rendering is only supported on Windows".into())) } #[cfg(windows)] pub fn with_window( window_builder: WindowBuilder, context_builder: ContextBuilder, events_loop: &EventsLoop, ) -> Result<(Window, Self), CreationError> { use glutin::os::windows::WindowExt; // FIXME: &context_builder.pf_reqs https://github.com/tomaka/glutin/pull/1002 let pf_reqs = &glutin::PixelFormatRequirements::default(); let gl_attr = &context_builder.gl_attr.map_sharing(|_| unimplemented!()); let window = window_builder.build(events_loop)?; Self::new(pf_reqs, gl_attr) .and_then(|p| p.finish(window.get_hwnd() as _)) .map(|context| (window, context)) } } #[cfg(not(windows))] impl glutin::GlContext for Context { unsafe fn make_current(&self) -> Result<(), glutin::ContextError> { match *self {} } fn is_current(&self) -> bool { match *self {} } fn get_proc_address(&self, _: &str) -> *const () { match *self {} } fn swap_buffers(&self) -> Result<(), glutin::ContextError> { match *self {} } fn get_api(&self) -> glutin::Api { match *self {} } fn get_pixel_format(&self) -> glutin::PixelFormat { match *self {} } fn resize(&self, _: u32, _: u32) { match *self {} } }
with_window
identifier_name
angle.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use glutin; use glutin::{WindowBuilder, ContextBuilder, EventsLoop, Window, CreationError}; #[cfg(not(windows))] pub enum Context {} #[cfg(windows)] pub use ::egl::Context; impl Context { #[cfg(not(windows))] pub fn with_window( _: WindowBuilder, _: ContextBuilder, _: &EventsLoop, ) -> Result<(Window, Self), CreationError> { Err(CreationError::PlatformSpecific("ANGLE rendering is only supported on Windows".into())) } #[cfg(windows)]
events_loop: &EventsLoop, ) -> Result<(Window, Self), CreationError> { use glutin::os::windows::WindowExt; // FIXME: &context_builder.pf_reqs https://github.com/tomaka/glutin/pull/1002 let pf_reqs = &glutin::PixelFormatRequirements::default(); let gl_attr = &context_builder.gl_attr.map_sharing(|_| unimplemented!()); let window = window_builder.build(events_loop)?; Self::new(pf_reqs, gl_attr) .and_then(|p| p.finish(window.get_hwnd() as _)) .map(|context| (window, context)) } } #[cfg(not(windows))] impl glutin::GlContext for Context { unsafe fn make_current(&self) -> Result<(), glutin::ContextError> { match *self {} } fn is_current(&self) -> bool { match *self {} } fn get_proc_address(&self, _: &str) -> *const () { match *self {} } fn swap_buffers(&self) -> Result<(), glutin::ContextError> { match *self {} } fn get_api(&self) -> glutin::Api { match *self {} } fn get_pixel_format(&self) -> glutin::PixelFormat { match *self {} } fn resize(&self, _: u32, _: u32) { match *self {} } }
pub fn with_window( window_builder: WindowBuilder, context_builder: ContextBuilder,
random_line_split
errors.rs
// Copyright (c) 2020 Google LLC All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. use { proc_macro2::{Span, TokenStream}, quote::ToTokens, std::cell::RefCell, }; /// A type for collecting procedural macro errors. #[derive(Default)] pub struct Errors { errors: RefCell<Vec<syn::Error>>, } /// Produce functions to expect particular variants of `syn::Lit` macro_rules! expect_lit_fn { ($(($fn_name:ident, $syn_type:ident, $variant:ident, $lit_name:literal),)*) => { $( pub fn $fn_name<'a>(&self, lit: &'a syn::Lit) -> Option<&'a syn::$syn_type> { if let syn::Lit::$variant(inner) = lit { Some(inner) } else { self.unexpected_lit($lit_name, lit); None } } )* } } /// Produce functions to expect particular variants of `syn::Meta` macro_rules! expect_meta_fn { ($(($fn_name:ident, $syn_type:ident, $variant:ident, $meta_name:literal),)*) => { $( pub fn $fn_name<'a>(&self, meta: &'a syn::Meta) -> Option<&'a syn::$syn_type> { if let syn::Meta::$variant(inner) = meta { Some(inner) } else { self.unexpected_meta($meta_name, meta); None } } )* } } impl Errors { /// Issue an error like: /// /// Duplicate foo attribute /// First foo attribute here pub fn duplicate_attrs( &self, attr_kind: &str, first: &impl syn::spanned::Spanned, second: &impl syn::spanned::Spanned, ) { self.duplicate_attrs_inner(attr_kind, first.span(), second.span()) } fn duplicate_attrs_inner(&self, attr_kind: &str, first: Span, second: Span)
/// Error on literals, expecting attribute syntax. pub fn expect_nested_meta<'a>(&self, nm: &'a syn::NestedMeta) -> Option<&'a syn::Meta> { match nm { syn::NestedMeta::Lit(l) => { self.err(l, "Unexpected literal"); None } syn::NestedMeta::Meta(m) => Some(m), } } /// Error on attribute syntax, expecting literals pub fn expect_nested_lit<'a>(&self, nm: &'a syn::NestedMeta) -> Option<&'a syn::Lit> { match nm { syn::NestedMeta::Meta(m) => { self.err(m, "Expected literal"); None } syn::NestedMeta::Lit(l) => Some(l), } } expect_lit_fn![ (expect_lit_str, LitStr, Str, "string"), (expect_lit_char, LitChar, Char, "character"), (expect_lit_int, LitInt, Int, "integer"), ]; expect_meta_fn![ (expect_meta_word, Path, Path, "path"), (expect_meta_list, MetaList, List, "list"), (expect_meta_name_value, MetaNameValue, NameValue, "name-value pair"), ]; fn unexpected_lit(&self, expected: &str, found: &syn::Lit) { fn lit_kind(lit: &syn::Lit) -> &'static str { use syn::Lit::{Bool, Byte, ByteStr, Char, Float, Int, Str, Verbatim}; match lit { Str(_) => "string", ByteStr(_) => "bytestring", Byte(_) => "byte", Char(_) => "character", Int(_) => "integer", Float(_) => "float", Bool(_) => "boolean", Verbatim(_) => "unknown (possibly extra-large integer)", } } self.err( found, &["Expected ", expected, " literal, found ", lit_kind(found), " literal"].concat(), ) } fn unexpected_meta(&self, expected: &str, found: &syn::Meta) { fn meta_kind(meta: &syn::Meta) -> &'static str { use syn::Meta::{List, NameValue, Path}; match meta { Path(_) => "path", List(_) => "list", NameValue(_) => "name-value pair", } } self.err( found, &["Expected ", expected, " attribute, found ", meta_kind(found), " attribute"].concat(), ) } /// Issue an error relating to a particular `Spanned` structure. pub fn err(&self, spanned: &impl syn::spanned::Spanned, msg: &str) { self.err_span(spanned.span(), msg); } /// Issue an error relating to a particular `Span`. pub fn err_span(&self, span: Span, msg: &str) { self.push(syn::Error::new(span, msg)); } /// Push a `syn::Error` onto the list of errors to issue. pub fn push(&self, err: syn::Error) { self.errors.borrow_mut().push(err); } } impl ToTokens for Errors { /// Convert the errors into tokens that, when emit, will cause /// the user of the macro to receive compiler errors. fn to_tokens(&self, tokens: &mut TokenStream) { tokens.extend(self.errors.borrow().iter().map(|e| e.to_compile_error())); } }
{ self.err_span(second, &["Duplicate ", attr_kind, " attribute"].concat()); self.err_span(first, &["First ", attr_kind, " attribute here"].concat()); }
identifier_body
errors.rs
// Copyright (c) 2020 Google LLC All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. use { proc_macro2::{Span, TokenStream}, quote::ToTokens, std::cell::RefCell, }; /// A type for collecting procedural macro errors. #[derive(Default)] pub struct Errors { errors: RefCell<Vec<syn::Error>>, } /// Produce functions to expect particular variants of `syn::Lit` macro_rules! expect_lit_fn { ($(($fn_name:ident, $syn_type:ident, $variant:ident, $lit_name:literal),)*) => { $( pub fn $fn_name<'a>(&self, lit: &'a syn::Lit) -> Option<&'a syn::$syn_type> { if let syn::Lit::$variant(inner) = lit { Some(inner) } else { self.unexpected_lit($lit_name, lit); None } } )* } } /// Produce functions to expect particular variants of `syn::Meta` macro_rules! expect_meta_fn { ($(($fn_name:ident, $syn_type:ident, $variant:ident, $meta_name:literal),)*) => { $( pub fn $fn_name<'a>(&self, meta: &'a syn::Meta) -> Option<&'a syn::$syn_type> { if let syn::Meta::$variant(inner) = meta { Some(inner) } else { self.unexpected_meta($meta_name, meta); None } } )* } } impl Errors { /// Issue an error like: /// /// Duplicate foo attribute /// First foo attribute here pub fn duplicate_attrs( &self, attr_kind: &str, first: &impl syn::spanned::Spanned, second: &impl syn::spanned::Spanned, ) { self.duplicate_attrs_inner(attr_kind, first.span(), second.span()) } fn duplicate_attrs_inner(&self, attr_kind: &str, first: Span, second: Span) { self.err_span(second, &["Duplicate ", attr_kind, " attribute"].concat()); self.err_span(first, &["First ", attr_kind, " attribute here"].concat()); } /// Error on literals, expecting attribute syntax. pub fn expect_nested_meta<'a>(&self, nm: &'a syn::NestedMeta) -> Option<&'a syn::Meta> { match nm { syn::NestedMeta::Lit(l) => { self.err(l, "Unexpected literal"); None } syn::NestedMeta::Meta(m) => Some(m), } } /// Error on attribute syntax, expecting literals pub fn expect_nested_lit<'a>(&self, nm: &'a syn::NestedMeta) -> Option<&'a syn::Lit> { match nm { syn::NestedMeta::Meta(m) => { self.err(m, "Expected literal"); None } syn::NestedMeta::Lit(l) => Some(l), } } expect_lit_fn![ (expect_lit_str, LitStr, Str, "string"), (expect_lit_char, LitChar, Char, "character"), (expect_lit_int, LitInt, Int, "integer"), ]; expect_meta_fn![ (expect_meta_word, Path, Path, "path"), (expect_meta_list, MetaList, List, "list"), (expect_meta_name_value, MetaNameValue, NameValue, "name-value pair"), ]; fn unexpected_lit(&self, expected: &str, found: &syn::Lit) { fn lit_kind(lit: &syn::Lit) -> &'static str { use syn::Lit::{Bool, Byte, ByteStr, Char, Float, Int, Str, Verbatim}; match lit { Str(_) => "string", ByteStr(_) => "bytestring", Byte(_) => "byte", Char(_) => "character", Int(_) => "integer", Float(_) => "float", Bool(_) => "boolean", Verbatim(_) => "unknown (possibly extra-large integer)", } } self.err( found, &["Expected ", expected, " literal, found ", lit_kind(found), " literal"].concat(), ) } fn unexpected_meta(&self, expected: &str, found: &syn::Meta) { fn meta_kind(meta: &syn::Meta) -> &'static str { use syn::Meta::{List, NameValue, Path}; match meta { Path(_) => "path", List(_) => "list", NameValue(_) => "name-value pair", } } self.err( found, &["Expected ", expected, " attribute, found ", meta_kind(found), " attribute"].concat(), ) } /// Issue an error relating to a particular `Spanned` structure. 
pub fn err(&self, spanned: &impl syn::spanned::Spanned, msg: &str) { self.err_span(spanned.span(), msg); } /// Issue an error relating to a particular `Span`. pub fn
(&self, span: Span, msg: &str) { self.push(syn::Error::new(span, msg)); } /// Push a `syn::Error` onto the list of errors to issue. pub fn push(&self, err: syn::Error) { self.errors.borrow_mut().push(err); } } impl ToTokens for Errors { /// Convert the errors into tokens that, when emit, will cause /// the user of the macro to receive compiler errors. fn to_tokens(&self, tokens: &mut TokenStream) { tokens.extend(self.errors.borrow().iter().map(|e| e.to_compile_error())); } }
err_span
identifier_name
errors.rs
// Copyright (c) 2020 Google LLC All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. use { proc_macro2::{Span, TokenStream}, quote::ToTokens, std::cell::RefCell, }; /// A type for collecting procedural macro errors. #[derive(Default)] pub struct Errors { errors: RefCell<Vec<syn::Error>>, } /// Produce functions to expect particular variants of `syn::Lit` macro_rules! expect_lit_fn { ($(($fn_name:ident, $syn_type:ident, $variant:ident, $lit_name:literal),)*) => { $( pub fn $fn_name<'a>(&self, lit: &'a syn::Lit) -> Option<&'a syn::$syn_type> { if let syn::Lit::$variant(inner) = lit { Some(inner) } else { self.unexpected_lit($lit_name, lit); None } } )* } } /// Produce functions to expect particular variants of `syn::Meta` macro_rules! expect_meta_fn { ($(($fn_name:ident, $syn_type:ident, $variant:ident, $meta_name:literal),)*) => { $( pub fn $fn_name<'a>(&self, meta: &'a syn::Meta) -> Option<&'a syn::$syn_type> { if let syn::Meta::$variant(inner) = meta { Some(inner) } else { self.unexpected_meta($meta_name, meta); None } } )* } } impl Errors { /// Issue an error like: /// /// Duplicate foo attribute /// First foo attribute here pub fn duplicate_attrs( &self, attr_kind: &str, first: &impl syn::spanned::Spanned, second: &impl syn::spanned::Spanned, ) { self.duplicate_attrs_inner(attr_kind, first.span(), second.span()) } fn duplicate_attrs_inner(&self, attr_kind: &str, first: Span, second: Span) { self.err_span(second, &["Duplicate ", attr_kind, " attribute"].concat()); self.err_span(first, &["First ", attr_kind, " attribute here"].concat()); } /// Error on literals, expecting attribute syntax. pub fn expect_nested_meta<'a>(&self, nm: &'a syn::NestedMeta) -> Option<&'a syn::Meta> { match nm { syn::NestedMeta::Lit(l) => { self.err(l, "Unexpected literal"); None } syn::NestedMeta::Meta(m) => Some(m), } } /// Error on attribute syntax, expecting literals pub fn expect_nested_lit<'a>(&self, nm: &'a syn::NestedMeta) -> Option<&'a syn::Lit> { match nm { syn::NestedMeta::Meta(m) => { self.err(m, "Expected literal"); None } syn::NestedMeta::Lit(l) => Some(l), } } expect_lit_fn![ (expect_lit_str, LitStr, Str, "string"), (expect_lit_char, LitChar, Char, "character"), (expect_lit_int, LitInt, Int, "integer"), ]; expect_meta_fn![ (expect_meta_word, Path, Path, "path"), (expect_meta_list, MetaList, List, "list"), (expect_meta_name_value, MetaNameValue, NameValue, "name-value pair"), ]; fn unexpected_lit(&self, expected: &str, found: &syn::Lit) { fn lit_kind(lit: &syn::Lit) -> &'static str { use syn::Lit::{Bool, Byte, ByteStr, Char, Float, Int, Str, Verbatim}; match lit { Str(_) => "string", ByteStr(_) => "bytestring", Byte(_) => "byte", Char(_) => "character", Int(_) => "integer", Float(_) => "float", Bool(_) => "boolean", Verbatim(_) => "unknown (possibly extra-large integer)", } } self.err( found, &["Expected ", expected, " literal, found ", lit_kind(found), " literal"].concat(), ) } fn unexpected_meta(&self, expected: &str, found: &syn::Meta) { fn meta_kind(meta: &syn::Meta) -> &'static str { use syn::Meta::{List, NameValue, Path}; match meta { Path(_) => "path", List(_) => "list", NameValue(_) => "name-value pair", } } self.err( found, &["Expected ", expected, " attribute, found ", meta_kind(found), " attribute"].concat(), ) } /// Issue an error relating to a particular `Spanned` structure. 
pub fn err(&self, spanned: &impl syn::spanned::Spanned, msg: &str) { self.err_span(spanned.span(), msg); } /// Issue an error relating to a particular `Span`.
/// Push a `syn::Error` onto the list of errors to issue. pub fn push(&self, err: syn::Error) { self.errors.borrow_mut().push(err); } } impl ToTokens for Errors { /// Convert the errors into tokens that, when emit, will cause /// the user of the macro to receive compiler errors. fn to_tokens(&self, tokens: &mut TokenStream) { tokens.extend(self.errors.borrow().iter().map(|e| e.to_compile_error())); } }
pub fn err_span(&self, span: Span, msg: &str) { self.push(syn::Error::new(span, msg)); }
random_line_split
dead_code.rs
use ast::cfg::{Block, BlockEnd, BlockId, BlockLink, Function, Instruction, Program}; use plank_syntax::position::Spanned; use std::collections::{HashSet, VecDeque}; use CompileCtx; fn function_block_chain(f: &Function) -> VecDeque<BlockId> { let mut blocks = VecDeque::new(); let mut current = if let Some(block) = f.start_block { block } else { return blocks; }; loop { blocks.push_back(current); match f.blocks[&current].link { BlockLink::Strong(next) | BlockLink::Weak(next) => current = next, BlockLink::None => break, } } blocks } fn report_unreachable(block: &Block, ctx: &mut CompileCtx) { let mut span = Spanned::span(&block.ops[0]); for i in &block.ops { match **i { Instruction::StartStatement => { span = Spanned::span(i); break; } _ => { span = span.merge(Spanned::span(i)); } }
.span(span) .build(); } fn can_be_dead(block: &Block) -> bool { for op in &block.ops { match **op { Instruction::Drop(_) => {} _ => return true, } } false } fn analyze_function(f: &mut Function, ctx: &mut CompileCtx) { let mut blocks = function_block_chain(f); debug_assert_eq!(blocks.len(), f.blocks.len()); let mut reachable = HashSet::new(); let mut strong_reachable = None; let mut queue = VecDeque::new(); if let Some(block) = f.start_block { queue.push_back(block); } let mut follow_strong = false; while!queue.is_empty() { while let Some(block) = queue.pop_front() { if!reachable.contains(&block) { reachable.insert(block); let block = &f.blocks[&block]; match block.end { BlockEnd::Branch(_, a, b) => { queue.push_back(a); queue.push_back(b); } BlockEnd::Jump(next) => { queue.push_back(next); } BlockEnd::Return(_) | BlockEnd::Error => {} } if follow_strong { if let BlockLink::Strong(next) = block.link { queue.push_back(next); } } } } if strong_reachable.is_none() { strong_reachable = Some(reachable.clone()); } follow_strong = true; while let Some(block) = blocks.pop_front() { if!reachable.contains(&block) && can_be_dead(&f.blocks[&block]) { report_unreachable(&f.blocks[&block], ctx); queue.push_back(block); break; } } } let strong_reachable = strong_reachable.unwrap_or_default(); f.blocks.retain(|k, _| strong_reachable.contains(k)); } pub(crate) fn remove_dead_code(program: &mut Program, ctx: &mut CompileCtx) { for f in program.functions.values_mut() { analyze_function(f, ctx); } }
} ctx.reporter .warning("dead code detected", span)
random_line_split
dead_code.rs
use ast::cfg::{Block, BlockEnd, BlockId, BlockLink, Function, Instruction, Program}; use plank_syntax::position::Spanned; use std::collections::{HashSet, VecDeque}; use CompileCtx; fn function_block_chain(f: &Function) -> VecDeque<BlockId> { let mut blocks = VecDeque::new(); let mut current = if let Some(block) = f.start_block { block } else { return blocks; }; loop { blocks.push_back(current); match f.blocks[&current].link { BlockLink::Strong(next) | BlockLink::Weak(next) => current = next, BlockLink::None => break, } } blocks } fn report_unreachable(block: &Block, ctx: &mut CompileCtx)
fn can_be_dead(block: &Block) -> bool { for op in &block.ops { match **op { Instruction::Drop(_) => {} _ => return true, } } false } fn analyze_function(f: &mut Function, ctx: &mut CompileCtx) { let mut blocks = function_block_chain(f); debug_assert_eq!(blocks.len(), f.blocks.len()); let mut reachable = HashSet::new(); let mut strong_reachable = None; let mut queue = VecDeque::new(); if let Some(block) = f.start_block { queue.push_back(block); } let mut follow_strong = false; while!queue.is_empty() { while let Some(block) = queue.pop_front() { if!reachable.contains(&block) { reachable.insert(block); let block = &f.blocks[&block]; match block.end { BlockEnd::Branch(_, a, b) => { queue.push_back(a); queue.push_back(b); } BlockEnd::Jump(next) => { queue.push_back(next); } BlockEnd::Return(_) | BlockEnd::Error => {} } if follow_strong { if let BlockLink::Strong(next) = block.link { queue.push_back(next); } } } } if strong_reachable.is_none() { strong_reachable = Some(reachable.clone()); } follow_strong = true; while let Some(block) = blocks.pop_front() { if!reachable.contains(&block) && can_be_dead(&f.blocks[&block]) { report_unreachable(&f.blocks[&block], ctx); queue.push_back(block); break; } } } let strong_reachable = strong_reachable.unwrap_or_default(); f.blocks.retain(|k, _| strong_reachable.contains(k)); } pub(crate) fn remove_dead_code(program: &mut Program, ctx: &mut CompileCtx) { for f in program.functions.values_mut() { analyze_function(f, ctx); } }
{ let mut span = Spanned::span(&block.ops[0]); for i in &block.ops { match **i { Instruction::StartStatement => { span = Spanned::span(i); break; } _ => { span = span.merge(Spanned::span(i)); } } } ctx.reporter .warning("dead code detected", span) .span(span) .build(); }
identifier_body
dead_code.rs
use ast::cfg::{Block, BlockEnd, BlockId, BlockLink, Function, Instruction, Program}; use plank_syntax::position::Spanned; use std::collections::{HashSet, VecDeque}; use CompileCtx; fn function_block_chain(f: &Function) -> VecDeque<BlockId> { let mut blocks = VecDeque::new(); let mut current = if let Some(block) = f.start_block { block } else { return blocks; }; loop { blocks.push_back(current); match f.blocks[&current].link { BlockLink::Strong(next) | BlockLink::Weak(next) => current = next, BlockLink::None => break, } } blocks } fn
(block: &Block, ctx: &mut CompileCtx) { let mut span = Spanned::span(&block.ops[0]); for i in &block.ops { match **i { Instruction::StartStatement => { span = Spanned::span(i); break; } _ => { span = span.merge(Spanned::span(i)); } } } ctx.reporter .warning("dead code detected", span) .span(span) .build(); } fn can_be_dead(block: &Block) -> bool { for op in &block.ops { match **op { Instruction::Drop(_) => {} _ => return true, } } false } fn analyze_function(f: &mut Function, ctx: &mut CompileCtx) { let mut blocks = function_block_chain(f); debug_assert_eq!(blocks.len(), f.blocks.len()); let mut reachable = HashSet::new(); let mut strong_reachable = None; let mut queue = VecDeque::new(); if let Some(block) = f.start_block { queue.push_back(block); } let mut follow_strong = false; while!queue.is_empty() { while let Some(block) = queue.pop_front() { if!reachable.contains(&block) { reachable.insert(block); let block = &f.blocks[&block]; match block.end { BlockEnd::Branch(_, a, b) => { queue.push_back(a); queue.push_back(b); } BlockEnd::Jump(next) => { queue.push_back(next); } BlockEnd::Return(_) | BlockEnd::Error => {} } if follow_strong { if let BlockLink::Strong(next) = block.link { queue.push_back(next); } } } } if strong_reachable.is_none() { strong_reachable = Some(reachable.clone()); } follow_strong = true; while let Some(block) = blocks.pop_front() { if!reachable.contains(&block) && can_be_dead(&f.blocks[&block]) { report_unreachable(&f.blocks[&block], ctx); queue.push_back(block); break; } } } let strong_reachable = strong_reachable.unwrap_or_default(); f.blocks.retain(|k, _| strong_reachable.contains(k)); } pub(crate) fn remove_dead_code(program: &mut Program, ctx: &mut CompileCtx) { for f in program.functions.values_mut() { analyze_function(f, ctx); } }
report_unreachable
identifier_name
dead_code.rs
use ast::cfg::{Block, BlockEnd, BlockId, BlockLink, Function, Instruction, Program}; use plank_syntax::position::Spanned; use std::collections::{HashSet, VecDeque}; use CompileCtx; fn function_block_chain(f: &Function) -> VecDeque<BlockId> { let mut blocks = VecDeque::new(); let mut current = if let Some(block) = f.start_block { block } else { return blocks; }; loop { blocks.push_back(current); match f.blocks[&current].link { BlockLink::Strong(next) | BlockLink::Weak(next) => current = next, BlockLink::None => break, } } blocks } fn report_unreachable(block: &Block, ctx: &mut CompileCtx) { let mut span = Spanned::span(&block.ops[0]); for i in &block.ops { match **i { Instruction::StartStatement => { span = Spanned::span(i); break; } _ => { span = span.merge(Spanned::span(i)); } } } ctx.reporter .warning("dead code detected", span) .span(span) .build(); } fn can_be_dead(block: &Block) -> bool { for op in &block.ops { match **op { Instruction::Drop(_) => {} _ => return true, } } false } fn analyze_function(f: &mut Function, ctx: &mut CompileCtx) { let mut blocks = function_block_chain(f); debug_assert_eq!(blocks.len(), f.blocks.len()); let mut reachable = HashSet::new(); let mut strong_reachable = None; let mut queue = VecDeque::new(); if let Some(block) = f.start_block { queue.push_back(block); } let mut follow_strong = false; while!queue.is_empty() { while let Some(block) = queue.pop_front() { if!reachable.contains(&block)
} if strong_reachable.is_none() { strong_reachable = Some(reachable.clone()); } follow_strong = true; while let Some(block) = blocks.pop_front() { if!reachable.contains(&block) && can_be_dead(&f.blocks[&block]) { report_unreachable(&f.blocks[&block], ctx); queue.push_back(block); break; } } } let strong_reachable = strong_reachable.unwrap_or_default(); f.blocks.retain(|k, _| strong_reachable.contains(k)); } pub(crate) fn remove_dead_code(program: &mut Program, ctx: &mut CompileCtx) { for f in program.functions.values_mut() { analyze_function(f, ctx); } }
{ reachable.insert(block); let block = &f.blocks[&block]; match block.end { BlockEnd::Branch(_, a, b) => { queue.push_back(a); queue.push_back(b); } BlockEnd::Jump(next) => { queue.push_back(next); } BlockEnd::Return(_) | BlockEnd::Error => {} } if follow_strong { if let BlockLink::Strong(next) = block.link { queue.push_back(next); } } }
conditional_block
hex.rs
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Hex binary-to-text encoding pub use self::FromHexError::*; use std::fmt; use std::error; /// A trait for converting a value to hexadecimal encoding pub trait ToHex { /// Converts the value of `self` to a hex value, returning the owned /// string. fn to_hex(&self) -> String; } const CHARS: &[u8] = b"0123456789abcdef"; impl ToHex for [u8] { /// Turn a vector of `u8` bytes into a hexadecimal string. /// /// # Examples /// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::ToHex; /// /// fn main () { /// let str = [52,32].to_hex(); /// println!("{}", str); /// } /// ``` fn to_hex(&self) -> String { let mut v = Vec::with_capacity(self.len() * 2); for &byte in self { v.push(CHARS[(byte >> 4) as usize]); v.push(CHARS[(byte & 0xf) as usize]); } unsafe { String::from_utf8_unchecked(v) } } } /// A trait for converting hexadecimal encoded values pub trait FromHex { /// Converts the value of `self`, interpreted as hexadecimal encoded data, /// into an owned vector of bytes, returning the vector. fn from_hex(&self) -> Result<Vec<u8>, FromHexError>; } /// Errors that can occur when decoding a hex encoded string #[derive(Copy, Clone, Debug)] pub enum FromHexError { /// The input contained a character not part of the hex format InvalidHexCharacter(char, usize), /// The input had an invalid length InvalidHexLength, } impl fmt::Display for FromHexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { InvalidHexCharacter(ch, idx) => write!(f, "Invalid character '{}' at position {}", ch, idx), InvalidHexLength => write!(f, "Invalid input length"), } } } impl error::Error for FromHexError { fn description(&self) -> &str { match *self { InvalidHexCharacter(..) => "invalid character", InvalidHexLength => "invalid length", } } } impl FromHex for str { /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`) /// to the byte values it encodes. /// /// You can use the `String::from_utf8` function to turn a /// `Vec<u8>` into a string with characters corresponding to those values. /// /// # Examples /// /// This converts a string literal to hexadecimal and back. 
/// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::{FromHex, ToHex}; /// /// fn main () { /// let hello_str = "Hello, World".as_bytes().to_hex(); /// println!("{}", hello_str); /// let bytes = hello_str.from_hex().unwrap(); /// println!("{:?}", bytes); /// let result_str = String::from_utf8(bytes).unwrap(); /// println!("{}", result_str); /// } /// ``` fn from_hex(&self) -> Result<Vec<u8>, FromHexError> { // This may be an overestimate if there is any whitespace let mut b = Vec::with_capacity(self.len() / 2); let mut modulus = 0; let mut buf = 0; for (idx, byte) in self.bytes().enumerate() { buf <<= 4; match byte { b'A'..=b'F' => buf |= byte - b'A' + 10, b'a'..=b'f' => buf |= byte - b'a' + 10, b'0'..=b'9' => buf |= byte - b'0', b' '|b'\r'|b'\n'|b'\t' => { buf >>= 4; continue } _ => { let ch = self[idx..].chars().next().unwrap(); return Err(InvalidHexCharacter(ch, idx)) } } modulus += 1; if modulus == 2 { modulus = 0; b.push(buf); } } match modulus { 0 => Ok(b), _ => Err(InvalidHexLength), } } } #[cfg(test)] mod tests { extern crate test; use self::test::Bencher; use hex::{FromHex, ToHex}; #[test] pub fn test_to_hex() { assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172"); } #[test] pub fn test_from_hex_okay() { assert_eq!("666f6f626172".from_hex().unwrap(), b"foobar"); assert_eq!("666F6F626172".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_from_hex_odd_len() { assert!("666".from_hex().is_err()); assert!("66 6".from_hex().is_err()); }
#[test] pub fn test_from_hex_invalid_char() { assert!("66y6".from_hex().is_err()); } #[test] pub fn test_from_hex_ignores_whitespace() { assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_to_hex_all_bytes() { for i in 0..256 { assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize)); } } #[test] pub fn test_from_hex_all_bytes() { for i in 0..256 { let ii: &[u8] = &[i as u8]; assert_eq!(format!("{:02x}", i as usize).from_hex() .unwrap(), ii); assert_eq!(format!("{:02X}", i as usize).from_hex() .unwrap(), ii); } } #[bench] pub fn bench_to_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; b.iter(|| { s.as_bytes().to_hex(); }); b.bytes = s.len() as u64; } #[bench] pub fn bench_from_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; let sb = s.as_bytes().to_hex(); b.iter(|| { sb.from_hex().unwrap(); }); b.bytes = sb.len() as u64; } }
random_line_split
hex.rs
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Hex binary-to-text encoding pub use self::FromHexError::*; use std::fmt; use std::error; /// A trait for converting a value to hexadecimal encoding pub trait ToHex { /// Converts the value of `self` to a hex value, returning the owned /// string. fn to_hex(&self) -> String; } const CHARS: &[u8] = b"0123456789abcdef"; impl ToHex for [u8] { /// Turn a vector of `u8` bytes into a hexadecimal string. /// /// # Examples /// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::ToHex; /// /// fn main () { /// let str = [52,32].to_hex(); /// println!("{}", str); /// } /// ``` fn to_hex(&self) -> String { let mut v = Vec::with_capacity(self.len() * 2); for &byte in self { v.push(CHARS[(byte >> 4) as usize]); v.push(CHARS[(byte & 0xf) as usize]); } unsafe { String::from_utf8_unchecked(v) } } } /// A trait for converting hexadecimal encoded values pub trait FromHex { /// Converts the value of `self`, interpreted as hexadecimal encoded data, /// into an owned vector of bytes, returning the vector. fn from_hex(&self) -> Result<Vec<u8>, FromHexError>; } /// Errors that can occur when decoding a hex encoded string #[derive(Copy, Clone, Debug)] pub enum FromHexError { /// The input contained a character not part of the hex format InvalidHexCharacter(char, usize), /// The input had an invalid length InvalidHexLength, } impl fmt::Display for FromHexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { InvalidHexCharacter(ch, idx) => write!(f, "Invalid character '{}' at position {}", ch, idx), InvalidHexLength => write!(f, "Invalid input length"), } } } impl error::Error for FromHexError { fn
(&self) -> &str { match *self { InvalidHexCharacter(..) => "invalid character", InvalidHexLength => "invalid length", } } } impl FromHex for str { /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`) /// to the byte values it encodes. /// /// You can use the `String::from_utf8` function to turn a /// `Vec<u8>` into a string with characters corresponding to those values. /// /// # Examples /// /// This converts a string literal to hexadecimal and back. /// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::{FromHex, ToHex}; /// /// fn main () { /// let hello_str = "Hello, World".as_bytes().to_hex(); /// println!("{}", hello_str); /// let bytes = hello_str.from_hex().unwrap(); /// println!("{:?}", bytes); /// let result_str = String::from_utf8(bytes).unwrap(); /// println!("{}", result_str); /// } /// ``` fn from_hex(&self) -> Result<Vec<u8>, FromHexError> { // This may be an overestimate if there is any whitespace let mut b = Vec::with_capacity(self.len() / 2); let mut modulus = 0; let mut buf = 0; for (idx, byte) in self.bytes().enumerate() { buf <<= 4; match byte { b'A'..=b'F' => buf |= byte - b'A' + 10, b'a'..=b'f' => buf |= byte - b'a' + 10, b'0'..=b'9' => buf |= byte - b'0', b' '|b'\r'|b'\n'|b'\t' => { buf >>= 4; continue } _ => { let ch = self[idx..].chars().next().unwrap(); return Err(InvalidHexCharacter(ch, idx)) } } modulus += 1; if modulus == 2 { modulus = 0; b.push(buf); } } match modulus { 0 => Ok(b), _ => Err(InvalidHexLength), } } } #[cfg(test)] mod tests { extern crate test; use self::test::Bencher; use hex::{FromHex, ToHex}; #[test] pub fn test_to_hex() { assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172"); } #[test] pub fn test_from_hex_okay() { assert_eq!("666f6f626172".from_hex().unwrap(), b"foobar"); assert_eq!("666F6F626172".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_from_hex_odd_len() { assert!("666".from_hex().is_err()); assert!("66 6".from_hex().is_err()); } #[test] pub fn test_from_hex_invalid_char() { assert!("66y6".from_hex().is_err()); } #[test] pub fn test_from_hex_ignores_whitespace() { assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_to_hex_all_bytes() { for i in 0..256 { assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize)); } } #[test] pub fn test_from_hex_all_bytes() { for i in 0..256 { let ii: &[u8] = &[i as u8]; assert_eq!(format!("{:02x}", i as usize).from_hex() .unwrap(), ii); assert_eq!(format!("{:02X}", i as usize).from_hex() .unwrap(), ii); } } #[bench] pub fn bench_to_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; b.iter(|| { s.as_bytes().to_hex(); }); b.bytes = s.len() as u64; } #[bench] pub fn bench_from_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; let sb = s.as_bytes().to_hex(); b.iter(|| { sb.from_hex().unwrap(); }); b.bytes = sb.len() as u64; } }
description
identifier_name
hex.rs
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Hex binary-to-text encoding pub use self::FromHexError::*; use std::fmt; use std::error; /// A trait for converting a value to hexadecimal encoding pub trait ToHex { /// Converts the value of `self` to a hex value, returning the owned /// string. fn to_hex(&self) -> String; } const CHARS: &[u8] = b"0123456789abcdef"; impl ToHex for [u8] { /// Turn a vector of `u8` bytes into a hexadecimal string. /// /// # Examples /// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::ToHex; /// /// fn main () { /// let str = [52,32].to_hex(); /// println!("{}", str); /// } /// ``` fn to_hex(&self) -> String { let mut v = Vec::with_capacity(self.len() * 2); for &byte in self { v.push(CHARS[(byte >> 4) as usize]); v.push(CHARS[(byte & 0xf) as usize]); } unsafe { String::from_utf8_unchecked(v) } } } /// A trait for converting hexadecimal encoded values pub trait FromHex { /// Converts the value of `self`, interpreted as hexadecimal encoded data, /// into an owned vector of bytes, returning the vector. fn from_hex(&self) -> Result<Vec<u8>, FromHexError>; } /// Errors that can occur when decoding a hex encoded string #[derive(Copy, Clone, Debug)] pub enum FromHexError { /// The input contained a character not part of the hex format InvalidHexCharacter(char, usize), /// The input had an invalid length InvalidHexLength, } impl fmt::Display for FromHexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { InvalidHexCharacter(ch, idx) => write!(f, "Invalid character '{}' at position {}", ch, idx), InvalidHexLength => write!(f, "Invalid input length"), } } } impl error::Error for FromHexError { fn description(&self) -> &str { match *self { InvalidHexCharacter(..) => "invalid character", InvalidHexLength => "invalid length", } } } impl FromHex for str { /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`) /// to the byte values it encodes. /// /// You can use the `String::from_utf8` function to turn a /// `Vec<u8>` into a string with characters corresponding to those values. /// /// # Examples /// /// This converts a string literal to hexadecimal and back. 
/// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::{FromHex, ToHex}; /// /// fn main () { /// let hello_str = "Hello, World".as_bytes().to_hex(); /// println!("{}", hello_str); /// let bytes = hello_str.from_hex().unwrap(); /// println!("{:?}", bytes); /// let result_str = String::from_utf8(bytes).unwrap(); /// println!("{}", result_str); /// } /// ``` fn from_hex(&self) -> Result<Vec<u8>, FromHexError> { // This may be an overestimate if there is any whitespace let mut b = Vec::with_capacity(self.len() / 2); let mut modulus = 0; let mut buf = 0; for (idx, byte) in self.bytes().enumerate() { buf <<= 4; match byte { b'A'..=b'F' => buf |= byte - b'A' + 10, b'a'..=b'f' => buf |= byte - b'a' + 10, b'0'..=b'9' => buf |= byte - b'0', b' '|b'\r'|b'\n'|b'\t' => { buf >>= 4; continue } _ => { let ch = self[idx..].chars().next().unwrap(); return Err(InvalidHexCharacter(ch, idx)) } } modulus += 1; if modulus == 2 { modulus = 0; b.push(buf); } } match modulus { 0 => Ok(b), _ => Err(InvalidHexLength), } } } #[cfg(test)] mod tests { extern crate test; use self::test::Bencher; use hex::{FromHex, ToHex}; #[test] pub fn test_to_hex() { assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172"); } #[test] pub fn test_from_hex_okay() { assert_eq!("666f6f626172".from_hex().unwrap(), b"foobar"); assert_eq!("666F6F626172".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_from_hex_odd_len() { assert!("666".from_hex().is_err()); assert!("66 6".from_hex().is_err()); } #[test] pub fn test_from_hex_invalid_char() { assert!("66y6".from_hex().is_err()); } #[test] pub fn test_from_hex_ignores_whitespace() { assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_to_hex_all_bytes() { for i in 0..256 { assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize)); } } #[test] pub fn test_from_hex_all_bytes() { for i in 0..256 { let ii: &[u8] = &[i as u8]; assert_eq!(format!("{:02x}", i as usize).from_hex() .unwrap(), ii); assert_eq!(format!("{:02X}", i as usize).from_hex() .unwrap(), ii); } } #[bench] pub fn bench_to_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; b.iter(|| { s.as_bytes().to_hex(); }); b.bytes = s.len() as u64; } #[bench] pub fn bench_from_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
let sb = s.as_bytes().to_hex(); b.iter(|| { sb.from_hex().unwrap(); }); b.bytes = sb.len() as u64; } }
identifier_body
lib.register_restriction.rs
// This file was generated by `cargo dev update_lints`. // Use that command to update this file and do not edit by hand. // Manual edits will be overwritten. store.register_group(true, "clippy::restriction", Some("clippy_restriction"), vec![ LintId::of(arithmetic::FLOAT_ARITHMETIC), LintId::of(arithmetic::INTEGER_ARITHMETIC), LintId::of(as_conversions::AS_CONVERSIONS), LintId::of(asm_syntax::INLINE_ASM_X86_ATT_SYNTAX), LintId::of(asm_syntax::INLINE_ASM_X86_INTEL_SYNTAX), LintId::of(casts::FN_TO_NUMERIC_CAST_ANY), LintId::of(create_dir::CREATE_DIR), LintId::of(dbg_macro::DBG_MACRO), LintId::of(default_numeric_fallback::DEFAULT_NUMERIC_FALLBACK), LintId::of(disallowed_script_idents::DISALLOWED_SCRIPT_IDENTS), LintId::of(else_if_without_else::ELSE_IF_WITHOUT_ELSE), LintId::of(exhaustive_items::EXHAUSTIVE_ENUMS), LintId::of(exhaustive_items::EXHAUSTIVE_STRUCTS), LintId::of(exit::EXIT), LintId::of(float_literal::LOSSY_FLOAT_LITERAL), LintId::of(if_then_some_else_none::IF_THEN_SOME_ELSE_NONE), LintId::of(implicit_return::IMPLICIT_RETURN), LintId::of(indexing_slicing::INDEXING_SLICING), LintId::of(inherent_impl::MULTIPLE_INHERENT_IMPL), LintId::of(integer_division::INTEGER_DIVISION), LintId::of(let_underscore::LET_UNDERSCORE_MUST_USE), LintId::of(literal_representation::DECIMAL_LITERAL_REPRESENTATION), LintId::of(map_err_ignore::MAP_ERR_IGNORE), LintId::of(matches::REST_PAT_IN_FULLY_BOUND_STRUCTS), LintId::of(matches::WILDCARD_ENUM_MATCH_ARM),
LintId::of(methods::CLONE_ON_REF_PTR), LintId::of(methods::EXPECT_USED), LintId::of(methods::FILETYPE_IS_FILE), LintId::of(methods::GET_UNWRAP), LintId::of(methods::UNWRAP_USED), LintId::of(misc::FLOAT_CMP_CONST), LintId::of(misc_early::SEPARATED_LITERAL_SUFFIX), LintId::of(misc_early::UNNEEDED_FIELD_PATTERN), LintId::of(misc_early::UNSEPARATED_LITERAL_SUFFIX), LintId::of(missing_doc::MISSING_DOCS_IN_PRIVATE_ITEMS), LintId::of(missing_enforced_import_rename::MISSING_ENFORCED_IMPORT_RENAMES), LintId::of(missing_inline::MISSING_INLINE_IN_PUBLIC_ITEMS), LintId::of(module_style::MOD_MODULE_FILES), LintId::of(module_style::SELF_NAMED_MODULE_FILES), LintId::of(modulo_arithmetic::MODULO_ARITHMETIC), LintId::of(panic_in_result_fn::PANIC_IN_RESULT_FN), LintId::of(panic_unimplemented::PANIC), LintId::of(panic_unimplemented::TODO), LintId::of(panic_unimplemented::UNIMPLEMENTED), LintId::of(panic_unimplemented::UNREACHABLE), LintId::of(pattern_type_mismatch::PATTERN_TYPE_MISMATCH), LintId::of(same_name_method::SAME_NAME_METHOD), LintId::of(shadow::SHADOW_REUSE), LintId::of(shadow::SHADOW_SAME), LintId::of(shadow::SHADOW_UNRELATED), LintId::of(strings::STRING_ADD), LintId::of(strings::STRING_SLICE), LintId::of(strings::STRING_TO_STRING), LintId::of(strings::STR_TO_STRING), LintId::of(types::RC_BUFFER), LintId::of(types::RC_MUTEX), LintId::of(undocumented_unsafe_blocks::UNDOCUMENTED_UNSAFE_BLOCKS), LintId::of(unicode::NON_ASCII_LITERAL), LintId::of(unnecessary_self_imports::UNNECESSARY_SELF_IMPORTS), LintId::of(unwrap_in_result::UNWRAP_IN_RESULT), LintId::of(verbose_file_reads::VERBOSE_FILE_READS), LintId::of(write::PRINT_STDERR), LintId::of(write::PRINT_STDOUT), LintId::of(write::USE_DEBUG), ])
LintId::of(mem_forget::MEM_FORGET),
random_line_split
cholesky.rs
#[macro_use] extern crate linxal; extern crate ndarray; extern crate num_traits; extern crate rand; use ndarray::{Array, ArrayBase, Data, Ix2}; use rand::thread_rng; use linxal::types::{LinxalScalar, LinxalMatrix, Symmetric, c32, c64}; use linxal::types::error::{ CholeskyError}; use linxal::generate::{RandomSemiPositive}; fn check_cholesky<T, D1, D2>(mat: &ArrayBase<D1, Ix2>, chol: &ArrayBase<D2, Ix2>, uplo: Symmetric) where T: LinxalScalar, D1: Data<Elem=T>, D2: Data<Elem=T>
} fn cholesky_identity_generic<T: LinxalScalar>() { for n in 1..11 { let m: Array<T, Ix2> = Array::eye(n); let l = m.cholesky(Symmetric::Upper).ok().unwrap(); assert_eq_within_tol!(l, m, 1e-5.into()); } } fn cholesky_generate_generic<T: LinxalScalar>(uplo: Symmetric) { let mut rng = thread_rng(); for n in 1..11 { let m: Array<T, Ix2> = RandomSemiPositive::new(n, &mut rng).generate().unwrap(); let res = m.cholesky(uplo); let chol = res.ok().unwrap(); check_cholesky(&m, &chol, uplo); } } fn cholesky_fail_zero_ev<T: LinxalScalar>() { let mut rng = thread_rng(); for n in 4..11 { let mut gen: RandomSemiPositive<T> = RandomSemiPositive::new(n, &mut rng); let r = gen.rank(0).generate_with_sv(); let m = r.ok().unwrap(); let res = m.0.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotPositiveDefinite); } } #[test] fn cholesky_identity() { cholesky_identity_generic::<f32>(); cholesky_identity_generic::<c32>(); cholesky_identity_generic::<f64>(); cholesky_identity_generic::<c64>(); } #[test] fn cholesky_generate() { cholesky_generate_generic::<f32>(Symmetric::Upper); cholesky_generate_generic::<f32>(Symmetric::Lower); cholesky_generate_generic::<f64>(Symmetric::Upper); cholesky_generate_generic::<f64>(Symmetric::Lower); cholesky_generate_generic::<c32>(Symmetric::Upper); cholesky_generate_generic::<c32>(Symmetric::Lower); cholesky_generate_generic::<c64>(Symmetric::Upper); cholesky_generate_generic::<c64>(Symmetric::Lower); } #[test] fn cholesky_zero() { cholesky_fail_zero_ev::<f32>(); } #[test] fn cholesky_fail_not_square() { for r in 1..11 { for c in 1..11 { if r == c { continue; } let m: Array<f32, Ix2> = Array::linspace(1.0, 2.0, r*c).into_shape((r, c)).unwrap(); let res = m.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotSquare); } } }
{ // Check the dimension assert_eq!(mat.dim(), chol.dim()); // The matrix must be triangular assert!(chol.is_triangular(uplo, None)); // The fatorization must match the original matrix. match uplo { Symmetric::Lower => { let u = chol.conj_t(); assert_eq_within_tol!(chol.dot(&u), mat, 1e-4.into()); }, Symmetric::Upper => { let l = chol.conj_t(); println!("{:?} {:?} {:?} {:?}", chol, l, l.dot(chol), mat); assert_eq_within_tol!(l.dot(chol), mat, 1e-4.into()); } }
identifier_body
cholesky.rs
#[macro_use] extern crate linxal; extern crate ndarray; extern crate num_traits; extern crate rand; use ndarray::{Array, ArrayBase, Data, Ix2}; use rand::thread_rng; use linxal::types::{LinxalScalar, LinxalMatrix, Symmetric, c32, c64}; use linxal::types::error::{ CholeskyError}; use linxal::generate::{RandomSemiPositive}; fn check_cholesky<T, D1, D2>(mat: &ArrayBase<D1, Ix2>, chol: &ArrayBase<D2, Ix2>, uplo: Symmetric) where T: LinxalScalar, D1: Data<Elem=T>, D2: Data<Elem=T> { // Check the dimension assert_eq!(mat.dim(), chol.dim()); // The matrix must be triangular assert!(chol.is_triangular(uplo, None)); // The fatorization must match the original matrix. match uplo { Symmetric::Lower => { let u = chol.conj_t(); assert_eq_within_tol!(chol.dot(&u), mat, 1e-4.into()); }, Symmetric::Upper => { let l = chol.conj_t(); println!("{:?} {:?} {:?} {:?}", chol, l, l.dot(chol), mat); assert_eq_within_tol!(l.dot(chol), mat, 1e-4.into()); } } } fn cholesky_identity_generic<T: LinxalScalar>() { for n in 1..11 { let m: Array<T, Ix2> = Array::eye(n);
assert_eq_within_tol!(l, m, 1e-5.into()); } } fn cholesky_generate_generic<T: LinxalScalar>(uplo: Symmetric) { let mut rng = thread_rng(); for n in 1..11 { let m: Array<T, Ix2> = RandomSemiPositive::new(n, &mut rng).generate().unwrap(); let res = m.cholesky(uplo); let chol = res.ok().unwrap(); check_cholesky(&m, &chol, uplo); } } fn cholesky_fail_zero_ev<T: LinxalScalar>() { let mut rng = thread_rng(); for n in 4..11 { let mut gen: RandomSemiPositive<T> = RandomSemiPositive::new(n, &mut rng); let r = gen.rank(0).generate_with_sv(); let m = r.ok().unwrap(); let res = m.0.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotPositiveDefinite); } } #[test] fn cholesky_identity() { cholesky_identity_generic::<f32>(); cholesky_identity_generic::<c32>(); cholesky_identity_generic::<f64>(); cholesky_identity_generic::<c64>(); } #[test] fn cholesky_generate() { cholesky_generate_generic::<f32>(Symmetric::Upper); cholesky_generate_generic::<f32>(Symmetric::Lower); cholesky_generate_generic::<f64>(Symmetric::Upper); cholesky_generate_generic::<f64>(Symmetric::Lower); cholesky_generate_generic::<c32>(Symmetric::Upper); cholesky_generate_generic::<c32>(Symmetric::Lower); cholesky_generate_generic::<c64>(Symmetric::Upper); cholesky_generate_generic::<c64>(Symmetric::Lower); } #[test] fn cholesky_zero() { cholesky_fail_zero_ev::<f32>(); } #[test] fn cholesky_fail_not_square() { for r in 1..11 { for c in 1..11 { if r == c { continue; } let m: Array<f32, Ix2> = Array::linspace(1.0, 2.0, r*c).into_shape((r, c)).unwrap(); let res = m.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotSquare); } } }
let l = m.cholesky(Symmetric::Upper).ok().unwrap();
random_line_split
cholesky.rs
#[macro_use] extern crate linxal; extern crate ndarray; extern crate num_traits; extern crate rand; use ndarray::{Array, ArrayBase, Data, Ix2}; use rand::thread_rng; use linxal::types::{LinxalScalar, LinxalMatrix, Symmetric, c32, c64}; use linxal::types::error::{ CholeskyError}; use linxal::generate::{RandomSemiPositive}; fn check_cholesky<T, D1, D2>(mat: &ArrayBase<D1, Ix2>, chol: &ArrayBase<D2, Ix2>, uplo: Symmetric) where T: LinxalScalar, D1: Data<Elem=T>, D2: Data<Elem=T> { // Check the dimension assert_eq!(mat.dim(), chol.dim()); // The matrix must be triangular assert!(chol.is_triangular(uplo, None)); // The fatorization must match the original matrix. match uplo { Symmetric::Lower => { let u = chol.conj_t(); assert_eq_within_tol!(chol.dot(&u), mat, 1e-4.into()); }, Symmetric::Upper => { let l = chol.conj_t(); println!("{:?} {:?} {:?} {:?}", chol, l, l.dot(chol), mat); assert_eq_within_tol!(l.dot(chol), mat, 1e-4.into()); } } } fn
<T: LinxalScalar>() { for n in 1..11 { let m: Array<T, Ix2> = Array::eye(n); let l = m.cholesky(Symmetric::Upper).ok().unwrap(); assert_eq_within_tol!(l, m, 1e-5.into()); } } fn cholesky_generate_generic<T: LinxalScalar>(uplo: Symmetric) { let mut rng = thread_rng(); for n in 1..11 { let m: Array<T, Ix2> = RandomSemiPositive::new(n, &mut rng).generate().unwrap(); let res = m.cholesky(uplo); let chol = res.ok().unwrap(); check_cholesky(&m, &chol, uplo); } } fn cholesky_fail_zero_ev<T: LinxalScalar>() { let mut rng = thread_rng(); for n in 4..11 { let mut gen: RandomSemiPositive<T> = RandomSemiPositive::new(n, &mut rng); let r = gen.rank(0).generate_with_sv(); let m = r.ok().unwrap(); let res = m.0.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotPositiveDefinite); } } #[test] fn cholesky_identity() { cholesky_identity_generic::<f32>(); cholesky_identity_generic::<c32>(); cholesky_identity_generic::<f64>(); cholesky_identity_generic::<c64>(); } #[test] fn cholesky_generate() { cholesky_generate_generic::<f32>(Symmetric::Upper); cholesky_generate_generic::<f32>(Symmetric::Lower); cholesky_generate_generic::<f64>(Symmetric::Upper); cholesky_generate_generic::<f64>(Symmetric::Lower); cholesky_generate_generic::<c32>(Symmetric::Upper); cholesky_generate_generic::<c32>(Symmetric::Lower); cholesky_generate_generic::<c64>(Symmetric::Upper); cholesky_generate_generic::<c64>(Symmetric::Lower); } #[test] fn cholesky_zero() { cholesky_fail_zero_ev::<f32>(); } #[test] fn cholesky_fail_not_square() { for r in 1..11 { for c in 1..11 { if r == c { continue; } let m: Array<f32, Ix2> = Array::linspace(1.0, 2.0, r*c).into_shape((r, c)).unwrap(); let res = m.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotSquare); } } }
cholesky_identity_generic
identifier_name
cholesky.rs
#[macro_use] extern crate linxal; extern crate ndarray; extern crate num_traits; extern crate rand; use ndarray::{Array, ArrayBase, Data, Ix2}; use rand::thread_rng; use linxal::types::{LinxalScalar, LinxalMatrix, Symmetric, c32, c64}; use linxal::types::error::{ CholeskyError}; use linxal::generate::{RandomSemiPositive}; fn check_cholesky<T, D1, D2>(mat: &ArrayBase<D1, Ix2>, chol: &ArrayBase<D2, Ix2>, uplo: Symmetric) where T: LinxalScalar, D1: Data<Elem=T>, D2: Data<Elem=T> { // Check the dimension assert_eq!(mat.dim(), chol.dim()); // The matrix must be triangular assert!(chol.is_triangular(uplo, None)); // The fatorization must match the original matrix. match uplo { Symmetric::Lower => { let u = chol.conj_t(); assert_eq_within_tol!(chol.dot(&u), mat, 1e-4.into()); }, Symmetric::Upper =>
} } fn cholesky_identity_generic<T: LinxalScalar>() { for n in 1..11 { let m: Array<T, Ix2> = Array::eye(n); let l = m.cholesky(Symmetric::Upper).ok().unwrap(); assert_eq_within_tol!(l, m, 1e-5.into()); } } fn cholesky_generate_generic<T: LinxalScalar>(uplo: Symmetric) { let mut rng = thread_rng(); for n in 1..11 { let m: Array<T, Ix2> = RandomSemiPositive::new(n, &mut rng).generate().unwrap(); let res = m.cholesky(uplo); let chol = res.ok().unwrap(); check_cholesky(&m, &chol, uplo); } } fn cholesky_fail_zero_ev<T: LinxalScalar>() { let mut rng = thread_rng(); for n in 4..11 { let mut gen: RandomSemiPositive<T> = RandomSemiPositive::new(n, &mut rng); let r = gen.rank(0).generate_with_sv(); let m = r.ok().unwrap(); let res = m.0.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotPositiveDefinite); } } #[test] fn cholesky_identity() { cholesky_identity_generic::<f32>(); cholesky_identity_generic::<c32>(); cholesky_identity_generic::<f64>(); cholesky_identity_generic::<c64>(); } #[test] fn cholesky_generate() { cholesky_generate_generic::<f32>(Symmetric::Upper); cholesky_generate_generic::<f32>(Symmetric::Lower); cholesky_generate_generic::<f64>(Symmetric::Upper); cholesky_generate_generic::<f64>(Symmetric::Lower); cholesky_generate_generic::<c32>(Symmetric::Upper); cholesky_generate_generic::<c32>(Symmetric::Lower); cholesky_generate_generic::<c64>(Symmetric::Upper); cholesky_generate_generic::<c64>(Symmetric::Lower); } #[test] fn cholesky_zero() { cholesky_fail_zero_ev::<f32>(); } #[test] fn cholesky_fail_not_square() { for r in 1..11 { for c in 1..11 { if r == c { continue; } let m: Array<f32, Ix2> = Array::linspace(1.0, 2.0, r*c).into_shape((r, c)).unwrap(); let res = m.cholesky(Symmetric::Upper); assert_eq!(res.err().unwrap(), CholeskyError::NotSquare); } } }
{ let l = chol.conj_t(); println!("{:?} {:?} {:?} {:?}", chol, l, l.dot(chol), mat); assert_eq_within_tol!(l.dot(chol), mat, 1e-4.into()); }
conditional_block
utils.rs
#![allow(dead_code)] use std::{cmp, io}; use bytes::{BufMut, BytesMut}; pub const SIZE: usize = 31; #[derive(Serialize, Deserialize)] pub struct Message { pub message: &'static str, } pub struct Writer<'a>(pub &'a mut BytesMut); impl<'a> io::Write for Writer<'a> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.0.put_slice(buf); Ok(buf.len()) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } pub fn get_query_param(query: &str) -> u16 { let q = if let Some(pos) = query.find("q") { query.split_at(pos + 2).1.parse::<u16>().ok().unwrap_or(1) } else { 1 }; cmp::min(500, cmp::max(1, q)) } fn escapable(b: u8) -> bool { match b { b'<' | b'>' | b'&' | b'"' | b'\'' | b'/' => true, _ => false, } } pub fn escape(writer: &mut Writer, s: String)
} b'\'' => { let _ = writer.0.put_slice(b"&#x27;"); } b'/' => { let _ = writer.0.put_slice(b"&#x2f;"); } _ => panic!("incorrect indexing"), } } } if last_pos < bytes.len() - 1 { let _ = writer.0.put_slice(&bytes[last_pos..]); } }
{ let bytes = s.as_bytes(); let mut last_pos = 0; for (idx, b) in s.as_bytes().iter().enumerate() { if escapable(*b) { let _ = writer.0.put_slice(&bytes[last_pos..idx]); last_pos = idx + 1; match *b { b'<' => { let _ = writer.0.put_slice(b"&lt;"); } b'>' => { let _ = writer.0.put_slice(b"&gt;"); } b'&' => { let _ = writer.0.put_slice(b"&amp;"); } b'"' => { let _ = writer.0.put_slice(b"&quot;");
identifier_body
utils.rs
#![allow(dead_code)] use std::{cmp, io}; use bytes::{BufMut, BytesMut}; pub const SIZE: usize = 31; #[derive(Serialize, Deserialize)] pub struct Message {
} pub struct Writer<'a>(pub &'a mut BytesMut); impl<'a> io::Write for Writer<'a> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.0.put_slice(buf); Ok(buf.len()) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } pub fn get_query_param(query: &str) -> u16 { let q = if let Some(pos) = query.find("q") { query.split_at(pos + 2).1.parse::<u16>().ok().unwrap_or(1) } else { 1 }; cmp::min(500, cmp::max(1, q)) } fn escapable(b: u8) -> bool { match b { b'<' | b'>' | b'&' | b'"' | b'\'' | b'/' => true, _ => false, } } pub fn escape(writer: &mut Writer, s: String) { let bytes = s.as_bytes(); let mut last_pos = 0; for (idx, b) in s.as_bytes().iter().enumerate() { if escapable(*b) { let _ = writer.0.put_slice(&bytes[last_pos..idx]); last_pos = idx + 1; match *b { b'<' => { let _ = writer.0.put_slice(b"&lt;"); } b'>' => { let _ = writer.0.put_slice(b"&gt;"); } b'&' => { let _ = writer.0.put_slice(b"&amp;"); } b'"' => { let _ = writer.0.put_slice(b"&quot;"); } b'\'' => { let _ = writer.0.put_slice(b"&#x27;"); } b'/' => { let _ = writer.0.put_slice(b"&#x2f;"); } _ => panic!("incorrect indexing"), } } } if last_pos < bytes.len() - 1 { let _ = writer.0.put_slice(&bytes[last_pos..]); } }
pub message: &'static str,
random_line_split
utils.rs
#![allow(dead_code)] use std::{cmp, io}; use bytes::{BufMut, BytesMut}; pub const SIZE: usize = 31; #[derive(Serialize, Deserialize)] pub struct Message { pub message: &'static str, } pub struct Writer<'a>(pub &'a mut BytesMut); impl<'a> io::Write for Writer<'a> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.0.put_slice(buf); Ok(buf.len()) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } pub fn get_query_param(query: &str) -> u16 { let q = if let Some(pos) = query.find("q") { query.split_at(pos + 2).1.parse::<u16>().ok().unwrap_or(1) } else { 1 }; cmp::min(500, cmp::max(1, q)) } fn escapable(b: u8) -> bool { match b { b'<' | b'>' | b'&' | b'"' | b'\'' | b'/' => true, _ => false, } } pub fn escape(writer: &mut Writer, s: String) { let bytes = s.as_bytes(); let mut last_pos = 0; for (idx, b) in s.as_bytes().iter().enumerate() { if escapable(*b) { let _ = writer.0.put_slice(&bytes[last_pos..idx]); last_pos = idx + 1; match *b { b'<' => { let _ = writer.0.put_slice(b"&lt;"); } b'>' =>
b'&' => { let _ = writer.0.put_slice(b"&amp;"); } b'"' => { let _ = writer.0.put_slice(b"&quot;"); } b'\'' => { let _ = writer.0.put_slice(b"&#x27;"); } b'/' => { let _ = writer.0.put_slice(b"&#x2f;"); } _ => panic!("incorrect indexing"), } } } if last_pos < bytes.len() - 1 { let _ = writer.0.put_slice(&bytes[last_pos..]); } }
{ let _ = writer.0.put_slice(b"&gt;"); }
conditional_block
utils.rs
#![allow(dead_code)] use std::{cmp, io}; use bytes::{BufMut, BytesMut}; pub const SIZE: usize = 31; #[derive(Serialize, Deserialize)] pub struct Message { pub message: &'static str, } pub struct Writer<'a>(pub &'a mut BytesMut); impl<'a> io::Write for Writer<'a> { fn
(&mut self, buf: &[u8]) -> io::Result<usize> { self.0.put_slice(buf); Ok(buf.len()) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } pub fn get_query_param(query: &str) -> u16 { let q = if let Some(pos) = query.find("q") { query.split_at(pos + 2).1.parse::<u16>().ok().unwrap_or(1) } else { 1 }; cmp::min(500, cmp::max(1, q)) } fn escapable(b: u8) -> bool { match b { b'<' | b'>' | b'&' | b'"' | b'\'' | b'/' => true, _ => false, } } pub fn escape(writer: &mut Writer, s: String) { let bytes = s.as_bytes(); let mut last_pos = 0; for (idx, b) in s.as_bytes().iter().enumerate() { if escapable(*b) { let _ = writer.0.put_slice(&bytes[last_pos..idx]); last_pos = idx + 1; match *b { b'<' => { let _ = writer.0.put_slice(b"&lt;"); } b'>' => { let _ = writer.0.put_slice(b"&gt;"); } b'&' => { let _ = writer.0.put_slice(b"&amp;"); } b'"' => { let _ = writer.0.put_slice(b"&quot;"); } b'\'' => { let _ = writer.0.put_slice(b"&#x27;"); } b'/' => { let _ = writer.0.put_slice(b"&#x2f;"); } _ => panic!("incorrect indexing"), } } } if last_pos < bytes.len() - 1 { let _ = writer.0.put_slice(&bytes[last_pos..]); } }
write
identifier_name
is_eq.rs
// Compares two token tree lists. Expands to the token tree list inside the first block if the // lists are equivalent, and the second if they are not. Expensive - generates a new macro for // every invocation! This macro is only valid in item position, due to its use of macro_rules. macro_rules! is_eq { ( if ($($thingA:tt)*) == ($($thingB:tt)*) { $($if_true:tt)* } else { $($if_false:tt)* } ) => ( macro_rules! is_eq_test { ($($thingA)*, $($thingA)*) => ($($if_true)*); ($($thingA)*, $($thingB)*) => ($($if_false)*); } is_eq_test!($($thingA)*, $($thingB)*); ); } is_eq! { if (a bunch of tokens!) == (another bunch of tokens!) { fn eq_test_1() { println!("They're equal!"); } } else { fn eq_test_1() { println!("As expected, they're not equal."); } } } is_eq! { if (a pile of equivalent tokens.) == (a pile of equivalent tokens.) { fn eq_test_2() { println!("As expected, they're equal."); } } else { fn eq_test_2() { println!("They're not equal!"); } } } fn
() { eq_test_1(); eq_test_2(); }
main
identifier_name
is_eq.rs
// Compares two token tree lists. Expands to the token tree list inside the first block if the // lists are equivalent, and the second if they are not. Expensive - generates a new macro for // every invocation! This macro is only valid in item position, due to its use of macro_rules. macro_rules! is_eq { ( if ($($thingA:tt)*) == ($($thingB:tt)*) { $($if_true:tt)* } else { $($if_false:tt)* } ) => ( macro_rules! is_eq_test { ($($thingA)*, $($thingA)*) => ($($if_true)*); ($($thingA)*, $($thingB)*) => ($($if_false)*); } is_eq_test!($($thingA)*, $($thingB)*); ); } is_eq! { if (a bunch of tokens!) == (another bunch of tokens!) { fn eq_test_1() { println!("They're equal!"); } } else { fn eq_test_1() { println!("As expected, they're not equal."); } } } is_eq! { if (a pile of equivalent tokens.) == (a pile of equivalent tokens.) { fn eq_test_2() { println!("As expected, they're equal."); } } else { fn eq_test_2() { println!("They're not equal!"); } } } fn main()
{ eq_test_1(); eq_test_2(); }
identifier_body
is_eq.rs
// Compares two token tree lists. Expands to the token tree list inside the first block if the // lists are equivalent, and the second if they are not. Expensive - generates a new macro for // every invocation! This macro is only valid in item position, due to its use of macro_rules. macro_rules! is_eq { ( if ($($thingA:tt)*) == ($($thingB:tt)*) { $($if_true:tt)* } else { $($if_false:tt)* } ) => ( macro_rules! is_eq_test { ($($thingA)*, $($thingA)*) => ($($if_true)*); ($($thingA)*, $($thingB)*) => ($($if_false)*);
is_eq_test!($($thingA)*, $($thingB)*); ); } is_eq! { if (a bunch of tokens!) == (another bunch of tokens!) { fn eq_test_1() { println!("They're equal!"); } } else { fn eq_test_1() { println!("As expected, they're not equal."); } } } is_eq! { if (a pile of equivalent tokens.) == (a pile of equivalent tokens.) { fn eq_test_2() { println!("As expected, they're equal."); } } else { fn eq_test_2() { println!("They're not equal!"); } } } fn main() { eq_test_1(); eq_test_2(); }
}
random_line_split
exhaustive_ascii_chars.rs
use malachite_base::chars::exhaustive::exhaustive_ascii_chars; #[test] fn
() { assert_eq!( exhaustive_ascii_chars().collect::<String>(), "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 !\"#$%&\'()*+,-./:;<=>?@[\\\ ]^_`{|}~\u{0}\u{1}\u{2}\u{3}\u{4}\u{5}\u{6}\u{7}\u{8}\t\n\u{b}\u{c}\r\u{e}\u{f}\u{10}\u{11}\ \u{12}\u{13}\u{14}\u{15}\u{16}\u{17}\u{18}\u{19}\u{1a}\u{1b}\u{1c}\u{1d}\u{1e}\u{1f}\u{7f}" ); assert_eq!(exhaustive_ascii_chars().count(), 1 << 7); }
test_exhaustive_ascii_chars
identifier_name
exhaustive_ascii_chars.rs
use malachite_base::chars::exhaustive::exhaustive_ascii_chars; #[test] fn test_exhaustive_ascii_chars() { assert_eq!( exhaustive_ascii_chars().collect::<String>(), "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 !\"#$%&\'()*+,-./:;<=>?@[\\\ ]^_`{|}~\u{0}\u{1}\u{2}\u{3}\u{4}\u{5}\u{6}\u{7}\u{8}\t\n\u{b}\u{c}\r\u{e}\u{f}\u{10}\u{11}\ \u{12}\u{13}\u{14}\u{15}\u{16}\u{17}\u{18}\u{19}\u{1a}\u{1b}\u{1c}\u{1d}\u{1e}\u{1f}\u{7f}" );
assert_eq!(exhaustive_ascii_chars().count(), 1 << 7); }
random_line_split
exhaustive_ascii_chars.rs
use malachite_base::chars::exhaustive::exhaustive_ascii_chars; #[test] fn test_exhaustive_ascii_chars()
{ assert_eq!( exhaustive_ascii_chars().collect::<String>(), "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 !\"#$%&\'()*+,-./:;<=>?@[\\\ ]^_`{|}~\u{0}\u{1}\u{2}\u{3}\u{4}\u{5}\u{6}\u{7}\u{8}\t\n\u{b}\u{c}\r\u{e}\u{f}\u{10}\u{11}\ \u{12}\u{13}\u{14}\u{15}\u{16}\u{17}\u{18}\u{19}\u{1a}\u{1b}\u{1c}\u{1d}\u{1e}\u{1f}\u{7f}" ); assert_eq!(exhaustive_ascii_chars().count(), 1 << 7); }
identifier_body
tests.rs
#[test] fn get_index() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .reply(&crate::index()); assert_eq!(res.status(), 200); assert_eq!(res.headers()["content-type"], "text/html; charset=utf-8"); } #[test] fn get_badge() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .path("/warp") .reply(&crate::badge()); assert_eq!(res.status(), 302); assert_eq!(res.headers()["expires"], "Sun, 01 Jan 1990 00:00:00 GMT"); assert_eq!(res.headers()["pragma"], "no-cache"); assert_eq!( res.headers()["cache-control"], "no-cache, no-store, max-age=0, must-revalidate" ); assert_eq!( res.headers()["location"], "https://img.shields.io/badge/crates.io-v0.1.9-orange.svg" ); } #[test] fn get_badge_404() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .path("/this-crate-should-never-exist-amirite") .reply(&crate::badge()); assert_eq!(res.status(), 404); } #[test] fn badge_style() { let _ = pretty_env_logger::try_init(); let style = warp::test::request() .path("/warp?style=flat-square") .filter(&crate::style()) .expect("filter flat-square"); assert_eq!(style, Some(crate::Style::FlatSquare)); let style = warp::test::request() .path("/warp?style=flat-square&warp=speed") .filter(&crate::style()) .expect("filter with unknown query param"); assert_eq!(style, Some(crate::Style::FlatSquare)); assert!( !warp::test::request() .path("/warp?style=new-phone-who-dis") .matches(&crate::style()), "unknown style query param should reject", ); let style = warp::test::request() .path("/warp") .filter(&crate::style())
.expect("filter no query"); assert_eq!(style, None); }
random_line_split
tests.rs
#[test] fn get_index() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .reply(&crate::index()); assert_eq!(res.status(), 200); assert_eq!(res.headers()["content-type"], "text/html; charset=utf-8"); } #[test] fn get_badge() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .path("/warp") .reply(&crate::badge()); assert_eq!(res.status(), 302); assert_eq!(res.headers()["expires"], "Sun, 01 Jan 1990 00:00:00 GMT"); assert_eq!(res.headers()["pragma"], "no-cache"); assert_eq!( res.headers()["cache-control"], "no-cache, no-store, max-age=0, must-revalidate" ); assert_eq!( res.headers()["location"], "https://img.shields.io/badge/crates.io-v0.1.9-orange.svg" ); } #[test] fn get_badge_404() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .path("/this-crate-should-never-exist-amirite") .reply(&crate::badge()); assert_eq!(res.status(), 404); } #[test] fn
() { let _ = pretty_env_logger::try_init(); let style = warp::test::request() .path("/warp?style=flat-square") .filter(&crate::style()) .expect("filter flat-square"); assert_eq!(style, Some(crate::Style::FlatSquare)); let style = warp::test::request() .path("/warp?style=flat-square&warp=speed") .filter(&crate::style()) .expect("filter with unknown query param"); assert_eq!(style, Some(crate::Style::FlatSquare)); assert!( !warp::test::request() .path("/warp?style=new-phone-who-dis") .matches(&crate::style()), "unknown style query param should reject", ); let style = warp::test::request() .path("/warp") .filter(&crate::style()) .expect("filter no query"); assert_eq!(style, None); }
badge_style
identifier_name
tests.rs
#[test] fn get_index() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .reply(&crate::index()); assert_eq!(res.status(), 200); assert_eq!(res.headers()["content-type"], "text/html; charset=utf-8"); } #[test] fn get_badge()
#[test] fn get_badge_404() { let _ = pretty_env_logger::try_init(); let res = warp::test::request() .path("/this-crate-should-never-exist-amirite") .reply(&crate::badge()); assert_eq!(res.status(), 404); } #[test] fn badge_style() { let _ = pretty_env_logger::try_init(); let style = warp::test::request() .path("/warp?style=flat-square") .filter(&crate::style()) .expect("filter flat-square"); assert_eq!(style, Some(crate::Style::FlatSquare)); let style = warp::test::request() .path("/warp?style=flat-square&warp=speed") .filter(&crate::style()) .expect("filter with unknown query param"); assert_eq!(style, Some(crate::Style::FlatSquare)); assert!( !warp::test::request() .path("/warp?style=new-phone-who-dis") .matches(&crate::style()), "unknown style query param should reject", ); let style = warp::test::request() .path("/warp") .filter(&crate::style()) .expect("filter no query"); assert_eq!(style, None); }
{ let _ = pretty_env_logger::try_init(); let res = warp::test::request() .path("/warp") .reply(&crate::badge()); assert_eq!(res.status(), 302); assert_eq!(res.headers()["expires"], "Sun, 01 Jan 1990 00:00:00 GMT"); assert_eq!(res.headers()["pragma"], "no-cache"); assert_eq!( res.headers()["cache-control"], "no-cache, no-store, max-age=0, must-revalidate" ); assert_eq!( res.headers()["location"], "https://img.shields.io/badge/crates.io-v0.1.9-orange.svg" ); }
identifier_body
instr_cmpss.rs
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn cmpss_1() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM4)), operand3: Some(Literal8(94)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 196, 94], OperandSize::Dword)
fn cmpss_2() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectDisplaced(ECX, 712415121, Some(OperandSize::Dword), None)), operand3: Some(Literal8(19)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 185, 145, 151, 118, 42, 19], OperandSize::Dword) } #[test] fn cmpss_3() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM7)), operand3: Some(Literal8(11)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 231, 11], OperandSize::Qword) } #[test] fn cmpss_4() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectDisplaced(RAX, 1640090676, Some(OperandSize::Dword), None)), operand3: Some(Literal8(26)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 184, 52, 204, 193, 97, 26], OperandSize::Qword) }
} #[test]
random_line_split
instr_cmpss.rs
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn
() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM4)), operand3: Some(Literal8(94)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 196, 94], OperandSize::Dword) } #[test] fn cmpss_2() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectDisplaced(ECX, 712415121, Some(OperandSize::Dword), None)), operand3: Some(Literal8(19)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 185, 145, 151, 118, 42, 19], OperandSize::Dword) } #[test] fn cmpss_3() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM7)), operand3: Some(Literal8(11)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 231, 11], OperandSize::Qword) } #[test] fn cmpss_4() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectDisplaced(RAX, 1640090676, Some(OperandSize::Dword), None)), operand3: Some(Literal8(26)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 184, 52, 204, 193, 97, 26], OperandSize::Qword) }
cmpss_1
identifier_name
instr_cmpss.rs
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn cmpss_1() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM4)), operand3: Some(Literal8(94)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 196, 94], OperandSize::Dword) } #[test] fn cmpss_2() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectDisplaced(ECX, 712415121, Some(OperandSize::Dword), None)), operand3: Some(Literal8(19)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 185, 145, 151, 118, 42, 19], OperandSize::Dword) } #[test] fn cmpss_3() { run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM7)), operand3: Some(Literal8(11)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 231, 11], OperandSize::Qword) } #[test] fn cmpss_4()
{ run_test(&Instruction { mnemonic: Mnemonic::CMPSS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectDisplaced(RAX, 1640090676, Some(OperandSize::Dword), None)), operand3: Some(Literal8(26)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[243, 15, 194, 184, 52, 204, 193, 97, 26], OperandSize::Qword) }
identifier_body
sinf64.rs
#![feature(core, core_intrinsics, core_float)] extern crate core; #[cfg(test)] mod tests { use core::intrinsics::sinf64; use core::num::Float; use core::f64; use core::f64::consts::PI; // pub fn sinf64(x: f64) -> f64; #[test] fn sinf64_test1() { let x: f64 = f64::nan(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test2() { let x: f64 = f64::infinity(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test3() { let x: f64 = f64::neg_infinity(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test4() { let x: f64 = 0.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 0.0); } #[test] fn sinf64_test5() { let x: f64 = 45.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 0.7071067811865475); } #[test] fn
() { let x: f64 = 90.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 1.0); } }
sinf64_test6
identifier_name
sinf64.rs
#![feature(core, core_intrinsics, core_float)] extern crate core; #[cfg(test)] mod tests { use core::intrinsics::sinf64; use core::num::Float; use core::f64; use core::f64::consts::PI; // pub fn sinf64(x: f64) -> f64; #[test] fn sinf64_test1() { let x: f64 = f64::nan(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test2() { let x: f64 = f64::infinity(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test3() { let x: f64 = f64::neg_infinity(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test4()
#[test] fn sinf64_test5() { let x: f64 = 45.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 0.7071067811865475); } #[test] fn sinf64_test6() { let x: f64 = 90.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 1.0); } }
{ let x: f64 = 0.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 0.0); }
identifier_body
sinf64.rs
#![feature(core, core_intrinsics, core_float)] extern crate core; #[cfg(test)] mod tests { use core::intrinsics::sinf64; use core::num::Float; use core::f64; use core::f64::consts::PI; // pub fn sinf64(x: f64) -> f64; #[test] fn sinf64_test1() { let x: f64 = f64::nan(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test2() { let x: f64 = f64::infinity(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test3() { let x: f64 = f64::neg_infinity(); let result: f64 = unsafe { sinf64(x) }; assert_eq!(result.is_nan(), true); } #[test] fn sinf64_test4() { let x: f64 = 0.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 0.0); } #[test] fn sinf64_test5() { let x: f64 = 45.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) }; assert_eq!(result, 0.7071067811865475); } #[test] fn sinf64_test6() { let x: f64 = 90.0 * PI / 180.0; let result: f64 = unsafe { sinf64(x) };
} }
assert_eq!(result, 1.0);
random_line_split
focusevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::FocusEventBinding; use dom::bindings::codegen::Bindings::FocusEventBinding::FocusEventMethods; use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods; use dom::bindings::error::Fallible; use dom::bindings::global::GlobalRef; use dom::bindings::inheritance::Castable; use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference}; use dom::bindings::reflector::reflect_dom_object; use dom::event::{EventBubbles, EventCancelable}; use dom::eventtarget::EventTarget; use dom::uievent::UIEvent; use dom::window::Window; use std::default::Default; use util::str::DOMString; #[dom_struct] pub struct FocusEvent { uievent: UIEvent, related_target: MutNullableHeap<JS<EventTarget>>, } impl FocusEvent { fn new_inherited() -> FocusEvent { FocusEvent { uievent: UIEvent::new_inherited(), related_target: Default::default(), } } pub fn new(window: &Window, type_: DOMString, can_bubble: EventBubbles, cancelable: EventCancelable, view: Option<&Window>, detail: i32, related_target: Option<&EventTarget>) -> Root<FocusEvent> { let event = box FocusEvent::new_inherited(); let ev = reflect_dom_object(event, GlobalRef::Window(window), FocusEventBinding::Wrap); ev.upcast::<UIEvent>().InitUIEvent(type_, can_bubble == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable, view, detail); ev.related_target.set(related_target); ev } pub fn Constructor(global: GlobalRef, type_: DOMString, init: &FocusEventBinding::FocusEventInit) -> Fallible<Root<FocusEvent>> { let bubbles = if init.parent.parent.bubbles
else { EventBubbles::DoesNotBubble }; let cancelable = if init.parent.parent.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable }; let event = FocusEvent::new(global.as_window(), type_, bubbles, cancelable, init.parent.view.r(), init.parent.detail, init.relatedTarget.r()); Ok(event) } } impl FocusEventMethods for FocusEvent { // https://w3c.github.io/uievents/#widl-FocusEvent-relatedTarget fn GetRelatedTarget(&self) -> Option<Root<EventTarget>> { self.related_target.get() } // https://dom.spec.whatwg.org/#dom-event-istrusted fn IsTrusted(&self) -> bool { self.uievent.IsTrusted() } }
{ EventBubbles::Bubbles }
conditional_block
focusevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::FocusEventBinding; use dom::bindings::codegen::Bindings::FocusEventBinding::FocusEventMethods; use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods; use dom::bindings::error::Fallible; use dom::bindings::global::GlobalRef; use dom::bindings::inheritance::Castable; use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference}; use dom::bindings::reflector::reflect_dom_object; use dom::event::{EventBubbles, EventCancelable}; use dom::eventtarget::EventTarget; use dom::uievent::UIEvent; use dom::window::Window; use std::default::Default; use util::str::DOMString; #[dom_struct] pub struct FocusEvent { uievent: UIEvent, related_target: MutNullableHeap<JS<EventTarget>>, } impl FocusEvent { fn
() -> FocusEvent { FocusEvent { uievent: UIEvent::new_inherited(), related_target: Default::default(), } } pub fn new(window: &Window, type_: DOMString, can_bubble: EventBubbles, cancelable: EventCancelable, view: Option<&Window>, detail: i32, related_target: Option<&EventTarget>) -> Root<FocusEvent> { let event = box FocusEvent::new_inherited(); let ev = reflect_dom_object(event, GlobalRef::Window(window), FocusEventBinding::Wrap); ev.upcast::<UIEvent>().InitUIEvent(type_, can_bubble == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable, view, detail); ev.related_target.set(related_target); ev } pub fn Constructor(global: GlobalRef, type_: DOMString, init: &FocusEventBinding::FocusEventInit) -> Fallible<Root<FocusEvent>> { let bubbles = if init.parent.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble }; let cancelable = if init.parent.parent.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable }; let event = FocusEvent::new(global.as_window(), type_, bubbles, cancelable, init.parent.view.r(), init.parent.detail, init.relatedTarget.r()); Ok(event) } } impl FocusEventMethods for FocusEvent { // https://w3c.github.io/uievents/#widl-FocusEvent-relatedTarget fn GetRelatedTarget(&self) -> Option<Root<EventTarget>> { self.related_target.get() } // https://dom.spec.whatwg.org/#dom-event-istrusted fn IsTrusted(&self) -> bool { self.uievent.IsTrusted() } }
new_inherited
identifier_name
focusevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::FocusEventBinding;
use dom::bindings::error::Fallible; use dom::bindings::global::GlobalRef; use dom::bindings::inheritance::Castable; use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference}; use dom::bindings::reflector::reflect_dom_object; use dom::event::{EventBubbles, EventCancelable}; use dom::eventtarget::EventTarget; use dom::uievent::UIEvent; use dom::window::Window; use std::default::Default; use util::str::DOMString; #[dom_struct] pub struct FocusEvent { uievent: UIEvent, related_target: MutNullableHeap<JS<EventTarget>>, } impl FocusEvent { fn new_inherited() -> FocusEvent { FocusEvent { uievent: UIEvent::new_inherited(), related_target: Default::default(), } } pub fn new(window: &Window, type_: DOMString, can_bubble: EventBubbles, cancelable: EventCancelable, view: Option<&Window>, detail: i32, related_target: Option<&EventTarget>) -> Root<FocusEvent> { let event = box FocusEvent::new_inherited(); let ev = reflect_dom_object(event, GlobalRef::Window(window), FocusEventBinding::Wrap); ev.upcast::<UIEvent>().InitUIEvent(type_, can_bubble == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable, view, detail); ev.related_target.set(related_target); ev } pub fn Constructor(global: GlobalRef, type_: DOMString, init: &FocusEventBinding::FocusEventInit) -> Fallible<Root<FocusEvent>> { let bubbles = if init.parent.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble }; let cancelable = if init.parent.parent.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable }; let event = FocusEvent::new(global.as_window(), type_, bubbles, cancelable, init.parent.view.r(), init.parent.detail, init.relatedTarget.r()); Ok(event) } } impl FocusEventMethods for FocusEvent { // https://w3c.github.io/uievents/#widl-FocusEvent-relatedTarget fn GetRelatedTarget(&self) -> Option<Root<EventTarget>> { self.related_target.get() } // https://dom.spec.whatwg.org/#dom-event-istrusted fn IsTrusted(&self) -> bool { self.uievent.IsTrusted() } }
use dom::bindings::codegen::Bindings::FocusEventBinding::FocusEventMethods; use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods;
random_line_split
focusevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::FocusEventBinding; use dom::bindings::codegen::Bindings::FocusEventBinding::FocusEventMethods; use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods; use dom::bindings::error::Fallible; use dom::bindings::global::GlobalRef; use dom::bindings::inheritance::Castable; use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference}; use dom::bindings::reflector::reflect_dom_object; use dom::event::{EventBubbles, EventCancelable}; use dom::eventtarget::EventTarget; use dom::uievent::UIEvent; use dom::window::Window; use std::default::Default; use util::str::DOMString; #[dom_struct] pub struct FocusEvent { uievent: UIEvent, related_target: MutNullableHeap<JS<EventTarget>>, } impl FocusEvent { fn new_inherited() -> FocusEvent { FocusEvent { uievent: UIEvent::new_inherited(), related_target: Default::default(), } } pub fn new(window: &Window, type_: DOMString, can_bubble: EventBubbles, cancelable: EventCancelable, view: Option<&Window>, detail: i32, related_target: Option<&EventTarget>) -> Root<FocusEvent> { let event = box FocusEvent::new_inherited(); let ev = reflect_dom_object(event, GlobalRef::Window(window), FocusEventBinding::Wrap); ev.upcast::<UIEvent>().InitUIEvent(type_, can_bubble == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable, view, detail); ev.related_target.set(related_target); ev } pub fn Constructor(global: GlobalRef, type_: DOMString, init: &FocusEventBinding::FocusEventInit) -> Fallible<Root<FocusEvent>> { let bubbles = if init.parent.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble }; let cancelable = if init.parent.parent.cancelable { EventCancelable::Cancelable } else { EventCancelable::NotCancelable }; let event = FocusEvent::new(global.as_window(), type_, bubbles, cancelable, init.parent.view.r(), init.parent.detail, init.relatedTarget.r()); Ok(event) } } impl FocusEventMethods for FocusEvent { // https://w3c.github.io/uievents/#widl-FocusEvent-relatedTarget fn GetRelatedTarget(&self) -> Option<Root<EventTarget>>
// https://dom.spec.whatwg.org/#dom-event-istrusted fn IsTrusted(&self) -> bool { self.uievent.IsTrusted() } }
{ self.related_target.get() }
identifier_body
index.rs
// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. use serde::{Deserialize, Deserializer}; use serde::de::{Error, Visitor}; use std::fmt; /// Represents usize. #[derive(Debug, PartialEq)] pub struct Index(pub usize); impl Index { /// Convert to usize pub fn value(&self) -> usize { self.0 } } impl<'a> Deserialize<'a> for Index { fn deserialize<D>(deserializer: D) -> Result<Index, D::Error> where D: Deserializer<'a>, { deserializer.deserialize_any(IndexVisitor) } } struct IndexVisitor; impl<'a> Visitor<'a> for IndexVisitor { type Value = Index; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "a hex-encoded or decimal index") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error, { match value { _ if value.starts_with("0x") => usize::from_str_radix(&value[2..], 16).map(Index).map_err(|_| Error::custom("invalid index")), _ => value.parse::<usize>().map(Index).map_err(|_| Error::custom("invalid index")), } } fn
<E>(self, value: String) -> Result<Self::Value, E> where E: Error, { self.visit_str(value.as_ref()) } } #[cfg(test)] mod tests { use super::*; use serde_json; #[test] fn block_number_deserialization() { let s = r#"["0xa", "10"]"#; let deserialized: Vec<Index> = serde_json::from_str(s).unwrap(); assert_eq!(deserialized, vec![Index(10), Index(10)]); } }
visit_string
identifier_name
index.rs
// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. use serde::{Deserialize, Deserializer}; use serde::de::{Error, Visitor}; use std::fmt; /// Represents usize. #[derive(Debug, PartialEq)] pub struct Index(pub usize); impl Index { /// Convert to usize pub fn value(&self) -> usize { self.0 } } impl<'a> Deserialize<'a> for Index { fn deserialize<D>(deserializer: D) -> Result<Index, D::Error> where D: Deserializer<'a>, { deserializer.deserialize_any(IndexVisitor) } } struct IndexVisitor; impl<'a> Visitor<'a> for IndexVisitor { type Value = Index; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "a hex-encoded or decimal index") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error,
fn visit_string<E>(self, value: String) -> Result<Self::Value, E> where E: Error, { self.visit_str(value.as_ref()) } } #[cfg(test)] mod tests { use super::*; use serde_json; #[test] fn block_number_deserialization() { let s = r#"["0xa", "10"]"#; let deserialized: Vec<Index> = serde_json::from_str(s).unwrap(); assert_eq!(deserialized, vec![Index(10), Index(10)]); } }
{ match value { _ if value.starts_with("0x") => usize::from_str_radix(&value[2..], 16).map(Index).map_err(|_| Error::custom("invalid index")), _ => value.parse::<usize>().map(Index).map_err(|_| Error::custom("invalid index")), } }
identifier_body
index.rs
// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. use serde::{Deserialize, Deserializer}; use serde::de::{Error, Visitor}; use std::fmt; /// Represents usize. #[derive(Debug, PartialEq)] pub struct Index(pub usize); impl Index { /// Convert to usize pub fn value(&self) -> usize { self.0 } } impl<'a> Deserialize<'a> for Index { fn deserialize<D>(deserializer: D) -> Result<Index, D::Error> where D: Deserializer<'a>, { deserializer.deserialize_any(IndexVisitor) } } struct IndexVisitor; impl<'a> Visitor<'a> for IndexVisitor { type Value = Index; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "a hex-encoded or decimal index") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error, { match value { _ if value.starts_with("0x") => usize::from_str_radix(&value[2..], 16).map(Index).map_err(|_| Error::custom("invalid index")), _ => value.parse::<usize>().map(Index).map_err(|_| Error::custom("invalid index")), } } fn visit_string<E>(self, value: String) -> Result<Self::Value, E>
where E: Error, { self.visit_str(value.as_ref()) } } #[cfg(test)] mod tests { use super::*; use serde_json; #[test] fn block_number_deserialization() { let s = r#"["0xa", "10"]"#; let deserialized: Vec<Index> = serde_json::from_str(s).unwrap(); assert_eq!(deserialized, vec![Index(10), Index(10)]); } }
random_line_split
snapshot.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! A gecko snapshot, that stores the element attributes and state before they //! change in order to properly calculate restyle hints. use WeakAtom; use dom::TElement; use element_state::ElementState; use gecko::snapshot_helpers; use gecko::wrapper::{GeckoElement, NamespaceConstraintHelpers}; use gecko_bindings::bindings; use gecko_bindings::structs::ServoElementSnapshot; use gecko_bindings::structs::ServoElementSnapshotFlags as Flags; use gecko_bindings::structs::ServoElementSnapshotTable; use invalidation::element::element_wrapper::ElementSnapshot; use selectors::attr::{AttrSelectorOperation, AttrSelectorOperator}; use selectors::attr::{CaseSensitivity, NamespaceConstraint}; use string_cache::{Atom, Namespace}; /// A snapshot of a Gecko element. pub type GeckoElementSnapshot = ServoElementSnapshot; /// A map from elements to snapshots for Gecko's style back-end. pub type SnapshotMap = ServoElementSnapshotTable; impl SnapshotMap { /// Gets the snapshot for this element, if any. /// /// FIXME(emilio): The transmute() business we do here is kind of nasty, but /// it's a consequence of the map being a OpaqueNode -> Snapshot table in /// Servo and an Element -> Snapshot table in Gecko. /// /// We should be able to make this a more type-safe with type annotations by /// making SnapshotMap a trait and moving the implementations outside, but /// that's a pain because it implies parameterizing SharedStyleContext. pub fn get<E: TElement>(&self, element: &E) -> Option<&GeckoElementSnapshot> { debug_assert!(element.has_snapshot()); unsafe { let element = ::std::mem::transmute::<&E, &GeckoElement>(element); bindings::Gecko_GetElementSnapshot(self, element.0).as_ref() } } } impl GeckoElementSnapshot { #[inline] fn has_any(&self, flags: Flags) -> bool { (self.mContains as u8 & flags as u8)!= 0 } /// Returns true if the snapshot has stored state for pseudo-classes /// that depend on things other than `ElementState`. #[inline] pub fn has_other_pseudo_class_state(&self) -> bool { self.has_any(Flags::OtherPseudoClassState) } /// Returns true if the snapshot recorded an id change. #[inline] pub fn id_changed(&self) -> bool { self.mIdAttributeChanged() } /// Returns true if the snapshot recorded a class attribute change. #[inline] pub fn class_changed(&self) -> bool { self.mClassAttributeChanged() } /// Returns true if the snapshot recorded an attribute change which isn't a /// class or id change. 
#[inline] pub fn other_attr_changed(&self) -> bool { self.mOtherAttributeChanged() } /// selectors::Element::attr_matches pub fn attr_matches( &self, ns: &NamespaceConstraint<&Namespace>, local_name: &Atom, operation: &AttrSelectorOperation<&Atom>, ) -> bool { unsafe { match *operation { AttrSelectorOperation::Exists => { bindings::Gecko_SnapshotHasAttr(self, ns.atom_or_null(), local_name.as_ptr()) }, AttrSelectorOperation::WithValue { operator, case_sensitivity, expected_value, } => { let ignore_case = match case_sensitivity { CaseSensitivity::CaseSensitive => false, CaseSensitivity::AsciiCaseInsensitive => true, }; // FIXME: case sensitivity for operators other than Equal match operator { AttrSelectorOperator::Equal => bindings::Gecko_SnapshotAttrEquals( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Includes => bindings::Gecko_SnapshotAttrIncludes( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::DashMatch => bindings::Gecko_SnapshotAttrDashEquals( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Prefix => bindings::Gecko_SnapshotAttrHasPrefix( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Suffix => bindings::Gecko_SnapshotAttrHasSuffix( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Substring => { bindings::Gecko_SnapshotAttrHasSubstring( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ) }, } }, } } } } impl ElementSnapshot for GeckoElementSnapshot { fn
(&self) -> String { use nsstring::nsCString; let mut string = nsCString::new(); unsafe { bindings::Gecko_Snapshot_DebugListAttributes(self, &mut string); } String::from_utf8_lossy(&*string).into_owned() } fn state(&self) -> Option<ElementState> { if self.has_any(Flags::State) { Some(ElementState::from_bits_truncate(self.mState)) } else { None } } #[inline] fn has_attrs(&self) -> bool { self.has_any(Flags::Attributes) } #[inline] fn id_attr(&self) -> Option<&WeakAtom> { if!self.has_any(Flags::Id) { return None; } snapshot_helpers::get_id(&*self.mAttrs) } #[inline] fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool { if!self.has_any(Flags::MaybeClass) { return false; } snapshot_helpers::has_class(name, case_sensitivity, &self.mClass) } #[inline] fn each_class<F>(&self, callback: F) where F: FnMut(&Atom), { if!self.has_any(Flags::MaybeClass) { return; } snapshot_helpers::each_class(&self.mClass, callback) } #[inline] fn lang_attr(&self) -> Option<Atom> { let ptr = unsafe { bindings::Gecko_SnapshotLangValue(self) }; if ptr.is_null() { None } else { Some(unsafe { Atom::from_addrefed(ptr) }) } } }
debug_list_attributes
identifier_name
snapshot.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! A gecko snapshot, that stores the element attributes and state before they //! change in order to properly calculate restyle hints. use WeakAtom; use dom::TElement; use element_state::ElementState; use gecko::snapshot_helpers; use gecko::wrapper::{GeckoElement, NamespaceConstraintHelpers}; use gecko_bindings::bindings; use gecko_bindings::structs::ServoElementSnapshot; use gecko_bindings::structs::ServoElementSnapshotFlags as Flags; use gecko_bindings::structs::ServoElementSnapshotTable; use invalidation::element::element_wrapper::ElementSnapshot; use selectors::attr::{AttrSelectorOperation, AttrSelectorOperator}; use selectors::attr::{CaseSensitivity, NamespaceConstraint}; use string_cache::{Atom, Namespace}; /// A snapshot of a Gecko element. pub type GeckoElementSnapshot = ServoElementSnapshot; /// A map from elements to snapshots for Gecko's style back-end. pub type SnapshotMap = ServoElementSnapshotTable; impl SnapshotMap { /// Gets the snapshot for this element, if any. /// /// FIXME(emilio): The transmute() business we do here is kind of nasty, but /// it's a consequence of the map being a OpaqueNode -> Snapshot table in /// Servo and an Element -> Snapshot table in Gecko. /// /// We should be able to make this a more type-safe with type annotations by /// making SnapshotMap a trait and moving the implementations outside, but /// that's a pain because it implies parameterizing SharedStyleContext. pub fn get<E: TElement>(&self, element: &E) -> Option<&GeckoElementSnapshot> { debug_assert!(element.has_snapshot()); unsafe { let element = ::std::mem::transmute::<&E, &GeckoElement>(element); bindings::Gecko_GetElementSnapshot(self, element.0).as_ref() } } } impl GeckoElementSnapshot { #[inline] fn has_any(&self, flags: Flags) -> bool { (self.mContains as u8 & flags as u8)!= 0 } /// Returns true if the snapshot has stored state for pseudo-classes /// that depend on things other than `ElementState`. #[inline] pub fn has_other_pseudo_class_state(&self) -> bool { self.has_any(Flags::OtherPseudoClassState) } /// Returns true if the snapshot recorded an id change. #[inline] pub fn id_changed(&self) -> bool { self.mIdAttributeChanged() } /// Returns true if the snapshot recorded a class attribute change. #[inline] pub fn class_changed(&self) -> bool { self.mClassAttributeChanged() } /// Returns true if the snapshot recorded an attribute change which isn't a /// class or id change. #[inline] pub fn other_attr_changed(&self) -> bool { self.mOtherAttributeChanged() } /// selectors::Element::attr_matches pub fn attr_matches( &self, ns: &NamespaceConstraint<&Namespace>, local_name: &Atom, operation: &AttrSelectorOperation<&Atom>, ) -> bool { unsafe { match *operation { AttrSelectorOperation::Exists => { bindings::Gecko_SnapshotHasAttr(self, ns.atom_or_null(), local_name.as_ptr()) }, AttrSelectorOperation::WithValue { operator, case_sensitivity, expected_value, } => { let ignore_case = match case_sensitivity {
CaseSensitivity::CaseSensitive => false, CaseSensitivity::AsciiCaseInsensitive => true, }; // FIXME: case sensitivity for operators other than Equal match operator { AttrSelectorOperator::Equal => bindings::Gecko_SnapshotAttrEquals( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Includes => bindings::Gecko_SnapshotAttrIncludes( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::DashMatch => bindings::Gecko_SnapshotAttrDashEquals( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Prefix => bindings::Gecko_SnapshotAttrHasPrefix( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Suffix => bindings::Gecko_SnapshotAttrHasSuffix( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Substring => { bindings::Gecko_SnapshotAttrHasSubstring( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ) }, } }, } } } } impl ElementSnapshot for GeckoElementSnapshot { fn debug_list_attributes(&self) -> String { use nsstring::nsCString; let mut string = nsCString::new(); unsafe { bindings::Gecko_Snapshot_DebugListAttributes(self, &mut string); } String::from_utf8_lossy(&*string).into_owned() } fn state(&self) -> Option<ElementState> { if self.has_any(Flags::State) { Some(ElementState::from_bits_truncate(self.mState)) } else { None } } #[inline] fn has_attrs(&self) -> bool { self.has_any(Flags::Attributes) } #[inline] fn id_attr(&self) -> Option<&WeakAtom> { if!self.has_any(Flags::Id) { return None; } snapshot_helpers::get_id(&*self.mAttrs) } #[inline] fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool { if!self.has_any(Flags::MaybeClass) { return false; } snapshot_helpers::has_class(name, case_sensitivity, &self.mClass) } #[inline] fn each_class<F>(&self, callback: F) where F: FnMut(&Atom), { if!self.has_any(Flags::MaybeClass) { return; } snapshot_helpers::each_class(&self.mClass, callback) } #[inline] fn lang_attr(&self) -> Option<Atom> { let ptr = unsafe { bindings::Gecko_SnapshotLangValue(self) }; if ptr.is_null() { None } else { Some(unsafe { Atom::from_addrefed(ptr) }) } } }
random_line_split
snapshot.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! A gecko snapshot, that stores the element attributes and state before they //! change in order to properly calculate restyle hints. use WeakAtom; use dom::TElement; use element_state::ElementState; use gecko::snapshot_helpers; use gecko::wrapper::{GeckoElement, NamespaceConstraintHelpers}; use gecko_bindings::bindings; use gecko_bindings::structs::ServoElementSnapshot; use gecko_bindings::structs::ServoElementSnapshotFlags as Flags; use gecko_bindings::structs::ServoElementSnapshotTable; use invalidation::element::element_wrapper::ElementSnapshot; use selectors::attr::{AttrSelectorOperation, AttrSelectorOperator}; use selectors::attr::{CaseSensitivity, NamespaceConstraint}; use string_cache::{Atom, Namespace}; /// A snapshot of a Gecko element. pub type GeckoElementSnapshot = ServoElementSnapshot; /// A map from elements to snapshots for Gecko's style back-end. pub type SnapshotMap = ServoElementSnapshotTable; impl SnapshotMap { /// Gets the snapshot for this element, if any. /// /// FIXME(emilio): The transmute() business we do here is kind of nasty, but /// it's a consequence of the map being a OpaqueNode -> Snapshot table in /// Servo and an Element -> Snapshot table in Gecko. /// /// We should be able to make this a more type-safe with type annotations by /// making SnapshotMap a trait and moving the implementations outside, but /// that's a pain because it implies parameterizing SharedStyleContext. pub fn get<E: TElement>(&self, element: &E) -> Option<&GeckoElementSnapshot> { debug_assert!(element.has_snapshot()); unsafe { let element = ::std::mem::transmute::<&E, &GeckoElement>(element); bindings::Gecko_GetElementSnapshot(self, element.0).as_ref() } } } impl GeckoElementSnapshot { #[inline] fn has_any(&self, flags: Flags) -> bool { (self.mContains as u8 & flags as u8)!= 0 } /// Returns true if the snapshot has stored state for pseudo-classes /// that depend on things other than `ElementState`. #[inline] pub fn has_other_pseudo_class_state(&self) -> bool { self.has_any(Flags::OtherPseudoClassState) } /// Returns true if the snapshot recorded an id change. #[inline] pub fn id_changed(&self) -> bool { self.mIdAttributeChanged() } /// Returns true if the snapshot recorded a class attribute change. #[inline] pub fn class_changed(&self) -> bool { self.mClassAttributeChanged() } /// Returns true if the snapshot recorded an attribute change which isn't a /// class or id change. #[inline] pub fn other_attr_changed(&self) -> bool { self.mOtherAttributeChanged() } /// selectors::Element::attr_matches pub fn attr_matches( &self, ns: &NamespaceConstraint<&Namespace>, local_name: &Atom, operation: &AttrSelectorOperation<&Atom>, ) -> bool
local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Includes => bindings::Gecko_SnapshotAttrIncludes( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::DashMatch => bindings::Gecko_SnapshotAttrDashEquals( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Prefix => bindings::Gecko_SnapshotAttrHasPrefix( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Suffix => bindings::Gecko_SnapshotAttrHasSuffix( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ), AttrSelectorOperator::Substring => { bindings::Gecko_SnapshotAttrHasSubstring( self, ns.atom_or_null(), local_name.as_ptr(), expected_value.as_ptr(), ignore_case, ) }, } }, } } } } impl ElementSnapshot for GeckoElementSnapshot { fn debug_list_attributes(&self) -> String { use nsstring::nsCString; let mut string = nsCString::new(); unsafe { bindings::Gecko_Snapshot_DebugListAttributes(self, &mut string); } String::from_utf8_lossy(&*string).into_owned() } fn state(&self) -> Option<ElementState> { if self.has_any(Flags::State) { Some(ElementState::from_bits_truncate(self.mState)) } else { None } } #[inline] fn has_attrs(&self) -> bool { self.has_any(Flags::Attributes) } #[inline] fn id_attr(&self) -> Option<&WeakAtom> { if!self.has_any(Flags::Id) { return None; } snapshot_helpers::get_id(&*self.mAttrs) } #[inline] fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool { if!self.has_any(Flags::MaybeClass) { return false; } snapshot_helpers::has_class(name, case_sensitivity, &self.mClass) } #[inline] fn each_class<F>(&self, callback: F) where F: FnMut(&Atom), { if!self.has_any(Flags::MaybeClass) { return; } snapshot_helpers::each_class(&self.mClass, callback) } #[inline] fn lang_attr(&self) -> Option<Atom> { let ptr = unsafe { bindings::Gecko_SnapshotLangValue(self) }; if ptr.is_null() { None } else { Some(unsafe { Atom::from_addrefed(ptr) }) } } }
{ unsafe { match *operation { AttrSelectorOperation::Exists => { bindings::Gecko_SnapshotHasAttr(self, ns.atom_or_null(), local_name.as_ptr()) }, AttrSelectorOperation::WithValue { operator, case_sensitivity, expected_value, } => { let ignore_case = match case_sensitivity { CaseSensitivity::CaseSensitive => false, CaseSensitivity::AsciiCaseInsensitive => true, }; // FIXME: case sensitivity for operators other than Equal match operator { AttrSelectorOperator::Equal => bindings::Gecko_SnapshotAttrEquals( self, ns.atom_or_null(),
identifier_body
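Editor's note (not part of the dataset rows): the `attr_matches` middle above dispatches each CSS attribute-selector operator to a Gecko FFI call (`Gecko_SnapshotAttrEquals`, `...Includes`, and so on). As a rough illustration of the string semantics those bindings are expected to implement — this is an assumption-laden sketch, not the real bindings, and `attr_op_matches` is a hypothetical helper — the operators map onto plain comparisons like this:

/// Illustrative only: plain-string semantics of the CSS attribute-selector
/// operators that the Gecko_SnapshotAttr* bindings implement natively.
fn attr_op_matches(op: &str, attr_value: &str, expected: &str, ignore_case: bool) -> bool {
    // Normalise case up front when the selector asked for ASCII case-insensitivity.
    let (a, e) = if ignore_case {
        (attr_value.to_ascii_lowercase(), expected.to_ascii_lowercase())
    } else {
        (attr_value.to_string(), expected.to_string())
    };
    match op {
        "=" => a == e,                                        // Equal
        "~=" => a.split_whitespace().any(|w| w == e),         // Includes: whitespace-separated word
        "|=" => a == e || a.starts_with(&format!("{}-", e)),  // DashMatch: exact or "<e>-" prefix
        "^=" => !e.is_empty() && a.starts_with(&e),           // Prefix
        "$=" => !e.is_empty() && a.ends_with(&e),             // Suffix
        "*=" => !e.is_empty() && a.contains(&e),              // Substring
        _ => false,
    }
}

fn main() {
    assert!(attr_op_matches("~=", "foo bar baz", "bar", false));
    assert!(attr_op_matches("|=", "en-US", "en", false));
    assert!(attr_op_matches("^=", "Hello", "he", true));
}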
initrd.rs
//! Initial ramdisk driver use alloc::string::String; use alloc::vec::Vec; use hashbrown::HashMap; use x86_64::{PhysAddr, VirtAddr}; use d7initrd::{FileEntry, HEADER_MAGIC, HEADER_SIZE_BYTES}; use crate::memory::{self, phys_to_virt, prelude::*}; use crate::util::elf_parser::{self, ELFData, ELFHeader, ELFProgramHeader};
/// Files by name. files: HashMap<String, FileEntry>, /// A slice containing all files, concatenated. /// The lifetime is static, as these are never deallocated. slice: &'static [u8], } static INITRD: spin::Once<InitRD> = spin::Once::new(); pub fn init(elf_data: ELFData) { unsafe { // Get address let start_addr = PhysAddr::from_u64(page_align_up(elf_data.last_addr())); let header = phys_to_virt(start_addr); let hptr: *const u8 = header.as_ptr(); let magic = *(hptr.add(0) as *const u32); let size_flist = *(hptr.add(4) as *const u32); let size_total = *(hptr.add(8) as *const u64); assert_eq!(magic, HEADER_MAGIC, "InitRD magic mismatch"); assert!(size_flist as u64 > 0, "InitRD header empty"); assert!( size_flist as u64 + 8 < PAGE_SIZE_BYTES, "InitRD header too large" ); let header_bytes: &[u8] = core::slice::from_raw_parts(hptr.add(16), size_flist as usize); let file_list: Result<Vec<FileEntry>, _> = pinecone::from_bytes(&header_bytes[..]); let file_list = file_list.expect("Could not deserialize staticfs file list"); log::trace!("Files {:?}", file_list); // Initialize let files_offset = HEADER_SIZE_BYTES + (size_flist as usize); let files_len = size_total as usize - files_offset; let p: *const u8 = header.as_ptr(); INITRD.call_once(move || InitRD { files: file_list.into_iter().map(|f| (f.name.clone(), f)).collect(), slice: core::slice::from_raw_parts(p.add(files_offset), files_len), }); } } pub fn read(name: &str) -> Option<&'static [u8]> { let rd: &InitRD = INITRD.poll().unwrap(); log::trace!("Read {:?} (found={})", name, rd.files.contains_key(name)); let entry = rd.files.get(name)?; let start = entry.offset as usize; let len = entry.size as usize; Some(&rd.slice[start..start + len]) }
#[derive(Debug)] struct InitRD {
random_line_split
initrd.rs
//! Initial ramdisk driver use alloc::string::String; use alloc::vec::Vec; use hashbrown::HashMap; use x86_64::{PhysAddr, VirtAddr}; use d7initrd::{FileEntry, HEADER_MAGIC, HEADER_SIZE_BYTES}; use crate::memory::{self, phys_to_virt, prelude::*}; use crate::util::elf_parser::{self, ELFData, ELFHeader, ELFProgramHeader}; #[derive(Debug)] struct InitRD { /// Files by name. files: HashMap<String, FileEntry>, /// A slice containing all files, concatenated. /// The lifetime is static, as these are never deallocated. slice: &'static [u8], } static INITRD: spin::Once<InitRD> = spin::Once::new(); pub fn
(elf_data: ELFData) { unsafe { // Get address let start_addr = PhysAddr::from_u64(page_align_up(elf_data.last_addr())); let header = phys_to_virt(start_addr); let hptr: *const u8 = header.as_ptr(); let magic = *(hptr.add(0) as *const u32); let size_flist = *(hptr.add(4) as *const u32); let size_total = *(hptr.add(8) as *const u64); assert_eq!(magic, HEADER_MAGIC, "InitRD magic mismatch"); assert!(size_flist as u64 > 0, "InitRD header empty"); assert!( size_flist as u64 + 8 < PAGE_SIZE_BYTES, "InitRD header too large" ); let header_bytes: &[u8] = core::slice::from_raw_parts(hptr.add(16), size_flist as usize); let file_list: Result<Vec<FileEntry>, _> = pinecone::from_bytes(&header_bytes[..]); let file_list = file_list.expect("Could not deserialize staticfs file list"); log::trace!("Files {:?}", file_list); // Initialize let files_offset = HEADER_SIZE_BYTES + (size_flist as usize); let files_len = size_total as usize - files_offset; let p: *const u8 = header.as_ptr(); INITRD.call_once(move || InitRD { files: file_list.into_iter().map(|f| (f.name.clone(), f)).collect(), slice: core::slice::from_raw_parts(p.add(files_offset), files_len), }); } } pub fn read(name: &str) -> Option<&'static [u8]> { let rd: &InitRD = INITRD.poll().unwrap(); log::trace!("Read {:?} (found={})", name, rd.files.contains_key(name)); let entry = rd.files.get(name)?; let start = entry.offset as usize; let len = entry.size as usize; Some(&rd.slice[start..start + len]) }
init
identifier_name
initrd.rs
//! Initial ramdisk driver use alloc::string::String; use alloc::vec::Vec; use hashbrown::HashMap; use x86_64::{PhysAddr, VirtAddr}; use d7initrd::{FileEntry, HEADER_MAGIC, HEADER_SIZE_BYTES}; use crate::memory::{self, phys_to_virt, prelude::*}; use crate::util::elf_parser::{self, ELFData, ELFHeader, ELFProgramHeader}; #[derive(Debug)] struct InitRD { /// Files by name. files: HashMap<String, FileEntry>, /// A slice containing all files, concatenated. /// The lifetime is static, as these are never deallocated. slice: &'static [u8], } static INITRD: spin::Once<InitRD> = spin::Once::new(); pub fn init(elf_data: ELFData) { unsafe { // Get address let start_addr = PhysAddr::from_u64(page_align_up(elf_data.last_addr())); let header = phys_to_virt(start_addr); let hptr: *const u8 = header.as_ptr(); let magic = *(hptr.add(0) as *const u32); let size_flist = *(hptr.add(4) as *const u32); let size_total = *(hptr.add(8) as *const u64); assert_eq!(magic, HEADER_MAGIC, "InitRD magic mismatch"); assert!(size_flist as u64 > 0, "InitRD header empty"); assert!( size_flist as u64 + 8 < PAGE_SIZE_BYTES, "InitRD header too large" ); let header_bytes: &[u8] = core::slice::from_raw_parts(hptr.add(16), size_flist as usize); let file_list: Result<Vec<FileEntry>, _> = pinecone::from_bytes(&header_bytes[..]); let file_list = file_list.expect("Could not deserialize staticfs file list"); log::trace!("Files {:?}", file_list); // Initialize let files_offset = HEADER_SIZE_BYTES + (size_flist as usize); let files_len = size_total as usize - files_offset; let p: *const u8 = header.as_ptr(); INITRD.call_once(move || InitRD { files: file_list.into_iter().map(|f| (f.name.clone(), f)).collect(), slice: core::slice::from_raw_parts(p.add(files_offset), files_len), }); } } pub fn read(name: &str) -> Option<&'static [u8]>
{ let rd: &InitRD = INITRD.poll().unwrap(); log::trace!("Read {:?} (found={})", name, rd.files.contains_key(name)); let entry = rd.files.get(name)?; let start = entry.offset as usize; let len = entry.size as usize; Some(&rd.slice[start..start + len]) }
identifier_body
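Editor's note (not part of the dataset rows): the `init` body above reads a fixed 16-byte header by raw pointer arithmetic — a `u32` magic at offset 0, a `u32` file-list size at offset 4, a `u64` total size at offset 8, with the serialized file list starting at byte 16 (`HEADER_SIZE_BYTES`). A safe, illustrative way to decode the same layout from a byte slice looks like the sketch below; the magic constant used in `main` is a placeholder, and little-endian order is an assumption (the real code reinterprets memory in place, so it inherits the target's native order, LE on x86_64).

/// Illustrative parser for the header layout read by `init` above:
/// [magic: u32][size_flist: u32][size_total: u64][file list bytes...]
struct InitrdHeader {
    magic: u32,
    size_flist: u32,
    size_total: u64,
}

fn read_u32_le(b: &[u8], at: usize) -> u32 {
    u32::from_le_bytes([b[at], b[at + 1], b[at + 2], b[at + 3]])
}

fn read_u64_le(b: &[u8], at: usize) -> u64 {
    let mut buf = [0u8; 8];
    buf.copy_from_slice(&b[at..at + 8]);
    u64::from_le_bytes(buf)
}

fn parse_header(bytes: &[u8]) -> Option<InitrdHeader> {
    if bytes.len() < 16 {
        return None; // shorter than HEADER_SIZE_BYTES
    }
    Some(InitrdHeader {
        magic: read_u32_le(bytes, 0),
        size_flist: read_u32_le(bytes, 4),
        size_total: read_u64_le(bytes, 8),
    })
}

fn main() {
    let mut raw = vec![0u8; 32];
    raw[0..4].copy_from_slice(&0xD7C0_FFEEu32.to_le_bytes()); // placeholder magic value
    raw[4..8].copy_from_slice(&8u32.to_le_bytes());
    raw[8..16].copy_from_slice(&32u64.to_le_bytes());

    let hdr = parse_header(&raw).expect("header too short");
    assert_eq!(hdr.magic, 0xD7C0_FFEE);
    assert_eq!(hdr.size_flist, 8);
    assert_eq!(hdr.size_total, 32);
}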
issue-49579.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-pass
fn fibs(n: u32) -> impl Iterator<Item=u128> { (0.. n) .scan((0, 1), |st, _| { *st = (st.1, st.0 + st.1); Some(*st) }) .map(&|(f, _)| f) } fn main() { println!("{:?}", fibs(10).collect::<Vec<_>>()); }
// ignore-emscripten no i128 support #![feature(nll)]
random_line_split
issue-49579.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-pass // ignore-emscripten no i128 support #![feature(nll)] fn fibs(n: u32) -> impl Iterator<Item=u128>
fn main() { println!("{:?}", fibs(10).collect::<Vec<_>>()); }
{ (0 .. n) .scan((0, 1), |st, _| { *st = (st.1, st.0 + st.1); Some(*st) }) .map(&|(f, _)| f) }
identifier_body
issue-49579.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-pass // ignore-emscripten no i128 support #![feature(nll)] fn fibs(n: u32) -> impl Iterator<Item=u128> { (0.. n) .scan((0, 1), |st, _| { *st = (st.1, st.0 + st.1); Some(*st) }) .map(&|(f, _)| f) } fn
() { println!("{:?}", fibs(10).collect::<Vec<_>>()); }
main
identifier_name
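Editor's note (not part of the dataset rows): the three rows above all split the same regression test, whose `fibs` function threads a `(previous, current)` pair through `Iterator::scan`; the `&` before the closure in `.map(&|(f, _)| f)` — passing a reference to a closure — is what the NLL test exercises. A modern-syntax restatement of the same idea, using `u64` instead of the test's `u128` purely for illustration:

// Illustrative restatement of the test's `fibs`.
fn fibs(n: u32) -> impl Iterator<Item = u64> {
    (0..n)
        .scan((0u64, 1u64), |st, _| {
            // Shift the window: (a, b) becomes (b, a + b).
            *st = (st.1, st.0 + st.1);
            Some(*st)
        })
        .map(|(f, _)| f)
}

fn main() {
    // The first ten values produced by the scan above.
    assert_eq!(
        fibs(10).collect::<Vec<_>>(),
        vec![1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
    );
}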
unboxed-closures-extern-fn.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Checks that extern fn pointers implement the full range of Fn traits. #![feature(unboxed_closures)] #![feature(unboxed_closures)] use std::ops::{Fn,FnMut,FnOnce}; fn square(x: int) -> int { x * x } fn call_it<F:Fn(int)->int>(f: &F, x: int) -> int { f(x) } fn call_it_mut<F:FnMut(int)->int>(f: &mut F, x: int) -> int { f(x) } fn call_it_once<F:FnOnce(int)->int>(f: F, x: int) -> int { f(x) } fn main()
{ let x = call_it(&square, 22); let y = call_it_mut(&mut square, 22); let z = call_it_once(square, 22); assert_eq!(x, square(22)); assert_eq!(y, square(22)); assert_eq!(z, square(22)); }
identifier_body
unboxed-closures-extern-fn.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Checks that extern fn pointers implement the full range of Fn traits. #![feature(unboxed_closures)]
fn square(x: int) -> int { x * x } fn call_it<F:Fn(int)->int>(f: &F, x: int) -> int { f(x) } fn call_it_mut<F:FnMut(int)->int>(f: &mut F, x: int) -> int { f(x) } fn call_it_once<F:FnOnce(int)->int>(f: F, x: int) -> int { f(x) } fn main() { let x = call_it(&square, 22); let y = call_it_mut(&mut square, 22); let z = call_it_once(square, 22); assert_eq!(x, square(22)); assert_eq!(y, square(22)); assert_eq!(z, square(22)); }
#![feature(unboxed_closures)] use std::ops::{Fn,FnMut,FnOnce};
random_line_split
unboxed-closures-extern-fn.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Checks that extern fn pointers implement the full range of Fn traits. #![feature(unboxed_closures)] #![feature(unboxed_closures)] use std::ops::{Fn,FnMut,FnOnce}; fn
(x: int) -> int { x * x } fn call_it<F:Fn(int)->int>(f: &F, x: int) -> int { f(x) } fn call_it_mut<F:FnMut(int)->int>(f: &mut F, x: int) -> int { f(x) } fn call_it_once<F:FnOnce(int)->int>(f: F, x: int) -> int { f(x) } fn main() { let x = call_it(&square, 22); let y = call_it_mut(&mut square, 22); let z = call_it_once(square, 22); assert_eq!(x, square(22)); assert_eq!(y, square(22)); assert_eq!(z, square(22)); }
square
identifier_name
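Editor's note (not part of the dataset rows): the three rows above split a pre-1.0 test (`int`, the `unboxed_closures` feature gate, which the original source lists twice). The property it checks — that a plain `fn` item satisfies `Fn`, `FnMut`, and `FnOnce` alike — holds on stable Rust today; an illustrative modern equivalent:

fn square(x: i32) -> i32 {
    x * x
}

fn call_it<F: Fn(i32) -> i32>(f: &F, x: i32) -> i32 { f(x) }
fn call_it_mut<F: FnMut(i32) -> i32>(f: &mut F, x: i32) -> i32 { f(x) }
fn call_it_once<F: FnOnce(i32) -> i32>(f: F, x: i32) -> i32 { f(x) }

fn main() {
    // A plain fn item is accepted wherever Fn, FnMut, or FnOnce is expected.
    assert_eq!(call_it(&square, 22), 484);

    let mut f = square; // fn items are zero-sized values; this just names one
    assert_eq!(call_it_mut(&mut f, 22), 484);

    assert_eq!(call_it_once(square, 22), 484);
}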
typeinfo.rs
use super::types; // this implements some kind of manual reflection pub trait TypeInfo { fn visit_fields<F>(cb: F) where F: Fn(&str /* name */, usize /* offset */, usize /* count */, types::GlTypeEnum);
macro_rules! impl_typeinfo { ($t:ty, $($field:ident),+) => ( impl $crate::typeinfo::TypeInfo for $t { fn visit_fields<F>(cb: F) where F: Fn(&str, usize, usize, ::types::GlTypeEnum) { use $crate::types::{GlType, ElemCount, GlTypeEnum}; // we need this to extract the type from a struct field fn gltype<T: GlType>(_v: &T) -> GlTypeEnum { T::get_gl_type() } fn elem_count<T: ElemCount>(_v: &T) -> usize { T::get_elem_count() } let tmp: $t = unsafe{ ::std::mem::uninitialized() }; let start = &tmp as *const _ as usize; $( let offset = &tmp.$field as *const _ as usize - start; let count = elem_count(&tmp.$field); let ty = gltype(&tmp.$field); cb(stringify!($field), offset, count, ty); )+ ::std::mem::forget(tmp); } } ) }
}
random_line_split
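Editor's note (not part of the dataset rows): the `impl_typeinfo!` macro above computes field offsets by subtracting the struct's base address from each field's address on a `mem::uninitialized()` value. On current Rust the same offsets can be obtained without materialising an uninitialised value via `std::mem::offset_of!` (stable since Rust 1.77); the sketch below uses a hypothetical `Vertex` struct purely for illustration.

use std::mem::offset_of;

// Illustrative modern take on the offset computation the macro performs.
#[repr(C)]
struct Vertex {
    position: [f32; 3],
    uv: [f32; 2],
}

fn main() {
    // With #[repr(C)] the layout is defined, so these offsets are 0 and 12.
    assert_eq!(offset_of!(Vertex, position), 0);
    assert_eq!(offset_of!(Vertex, uv), 12);
    println!(
        "position at {}, uv at {}",
        offset_of!(Vertex, position),
        offset_of!(Vertex, uv)
    );
}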
cb-cooccur.rs
//! Example 0: Word cooccurrence counter //! //! This simple script takes an input WET corpus piped to STDIN and counts cooccurrences of input //! tokens, where tokens are defined by unicode, and cooccurrence is a window 21 words in //! diameter, and only words from a newline-separated list are considered. //! //! The output is a numpy file with a cooccurrence matrix. The rows are the center word, starting //! with the unknown word as word 0, and proceeding in the order they were specified in the input //! word list. The columns are the context words, and cooccurrence is always counted as 1 or 0. //! // argument parsing #[macro_use] extern crate clap; // logging #[macro_use] extern crate log; extern crate env_logger; // numpy-like arrays extern crate ndarray; // better segmentation extern crate unicode_segmentation; // lastly, this library extern crate cabarrus; use std::fs::File; use std::io::{BufRead, BufReader}; use std::cmp::min; use std::collections::HashMap; use ndarray::prelude::*; use unicode_segmentation::UnicodeSegmentation; use cabarrus::warc::WarcStreamer; use cabarrus::errors::*; use cabarrus::numpy; const WINDOW_RADIUS: usize = 10; const WINDOW_WIDTH: usize = 2 * WINDOW_RADIUS + 1; pub fn main() { // Main can't return a Result, and the? operator needs the enclosing function to return Result inner_main().expect("Could not recover. Exiting."); } pub fn inner_main() -> Result<()>
// It could be usize instead of f64 but this is easier for interop // and its range is still enough not to be a problem. let mut cooccurrences: Array2<f64> = Array2::zeros((words.len() + 1, words.len() + 1)); if words.len() < 25 { info!("Collecting cooccurrences (with one another) of: {:?}", words); } else { info!("Collecting cooccurrences (with one another) of {} words.", words.len()); } for rec in WarcStreamer::new()? { let mention_ids = tokenize(&rec, &word_ids); for mention_i in 0..mention_ids.len() { for context_i in mention_i..min(mention_ids.len(), WINDOW_WIDTH) { cooccurrences[[ mention_ids[mention_i], // row: center word mention_ids[context_i] // column: context word ]] += 1.0; // uniform window weight } } } if words.len() <= 10 { println!("Cooccurrences look like {}", cooccurrences); } numpy::write_matrix(args.value_of("output").unwrap(), &cooccurrences)?; Ok(()) } /// Tokenize a string according to a dictionary. Unknowns will be 0. pub fn tokenize(content: &str, ids: &HashMap<&str, usize>) -> Vec<usize> { /// Notice that we return indices (avoiding allocation) content // You can split on nonalphanumerics for a big speedup but the tokens are dubious. //.split(|c: char|! c.is_alphanumeric()) .split_word_bounds() .map(|mention| *ids.get(mention).unwrap_or(&0)) // This will remove unknown words // but this is troublesome because it makes the context windows too wide if you have few words // because most words will be unknown //.filter(|i| *i > 0) .collect() }
{ env_logger::init().unwrap(); let args = app_from_crate!() .arg_from_usage("<wordlist> 'file containing words to look for, one per line'") .arg_from_usage("<output> 'file in which to store the resulting cooccurrence matrix'") .get_matches(); // Read the word list from a file. let mut words = vec![]; for line in BufReader::new(File::open(args.value_of("wordlist").unwrap())?).lines() { words.push(line?); } // Note that word 0 is the unknown word. let word_ids: HashMap<&str, usize> = words.iter() .enumerate() .map(|(id, word)| (word.as_ref(), id + 1)) .collect(); // This will be a table with rows of center words and columns of context words
identifier_body
cb-cooccur.rs
//! Example 0: Word cooccurrence counter //! //! This simple script takes an input WET corpus piped to STDIN and counts cooccurrences of input //! tokens, where tokens are defined by unicode, and cooccurrence is a window 21 words in //! diameter, and only words from a newline-separated list are considered. //! //! The output is a numpy file with a cooccurrence matrix. The rows are the center word, starting //! with the unknown word as word 0, and proceeding in the order they were specified in the input //! word list. The columns are the context words, and cooccurrence is always counted as 1 or 0. //! // argument parsing #[macro_use] extern crate clap; // logging #[macro_use] extern crate log; extern crate env_logger; // numpy-like arrays extern crate ndarray; // better segmentation extern crate unicode_segmentation; // lastly, this library extern crate cabarrus; use std::fs::File; use std::io::{BufRead, BufReader}; use std::cmp::min; use std::collections::HashMap; use ndarray::prelude::*; use unicode_segmentation::UnicodeSegmentation; use cabarrus::warc::WarcStreamer; use cabarrus::errors::*; use cabarrus::numpy; const WINDOW_RADIUS: usize = 10; const WINDOW_WIDTH: usize = 2 * WINDOW_RADIUS + 1; pub fn main() { // Main can't return a Result, and the? operator needs the enclosing function to return Result inner_main().expect("Could not recover. Exiting."); } pub fn inner_main() -> Result<()> { env_logger::init().unwrap(); let args = app_from_crate!() .arg_from_usage("<wordlist> 'file containing words to look for, one per line'") .arg_from_usage("<output> 'file in which to store the resulting cooccurrence matrix'") .get_matches(); // Read the word list from a file. let mut words = vec![]; for line in BufReader::new(File::open(args.value_of("wordlist").unwrap())?).lines() { words.push(line?); } // Note that word 0 is the unknown word. let word_ids: HashMap<&str, usize> = words.iter() .enumerate() .map(|(id, word)| (word.as_ref(), id + 1)) .collect(); // This will be a table with rows of center words and columns of context words // It could be usize instead of f64 but this is easier for interop // and its range is still enough not to be a problem. let mut cooccurrences: Array2<f64> = Array2::zeros((words.len() + 1, words.len() + 1)); if words.len() < 25 { info!("Collecting cooccurrences (with one another) of: {:?}", words); } else { info!("Collecting cooccurrences (with one another) of {} words.", words.len()); } for rec in WarcStreamer::new()? { let mention_ids = tokenize(&rec, &word_ids); for mention_i in 0..mention_ids.len() { for context_i in mention_i..min(mention_ids.len(), WINDOW_WIDTH) { cooccurrences[[ mention_ids[mention_i], // row: center word mention_ids[context_i] // column: context word ]] += 1.0; // uniform window weight } } } if words.len() <= 10 { println!("Cooccurrences look like {}", cooccurrences); } numpy::write_matrix(args.value_of("output").unwrap(), &cooccurrences)?; Ok(()) } /// Tokenize a string according to a dictionary. Unknowns will be 0. pub fn
(content: &str, ids: &HashMap<&str, usize>) -> Vec<usize> { /// Notice that we return indices (avoiding allocation) content // You can split on nonalphanumerics for a big speedup but the tokens are dubious. //.split(|c: char|! c.is_alphanumeric()) .split_word_bounds() .map(|mention| *ids.get(mention).unwrap_or(&0)) // This will remove unknown words // but this is troublesome because it makes the context windows too wide if you have few words // because most words will be unknown //.filter(|i| *i > 0) .collect() }
tokenize
identifier_name
cb-cooccur.rs
//! Example 0: Word cooccurrence counter //! //! This simple script takes an input WET corpus piped to STDIN and counts cooccurrences of input //! tokens, where tokens are defined by unicode, and cooccurrence is a window 21 words in //! diameter, and only words from a newline-separated list are considered. //! //! The output is a numpy file with a cooccurrence matrix. The rows are the center word, starting //! with the unknown word as word 0, and proceeding in the order they were specified in the input //! word list. The columns are the context words, and cooccurrence is always counted as 1 or 0. //! // argument parsing #[macro_use] extern crate clap; // logging #[macro_use] extern crate log; extern crate env_logger; // numpy-like arrays extern crate ndarray; // better segmentation extern crate unicode_segmentation; // lastly, this library extern crate cabarrus; use std::fs::File; use std::io::{BufRead, BufReader}; use std::cmp::min; use std::collections::HashMap; use ndarray::prelude::*; use unicode_segmentation::UnicodeSegmentation; use cabarrus::warc::WarcStreamer; use cabarrus::errors::*; use cabarrus::numpy; const WINDOW_RADIUS: usize = 10; const WINDOW_WIDTH: usize = 2 * WINDOW_RADIUS + 1; pub fn main() { // Main can't return a Result, and the? operator needs the enclosing function to return Result inner_main().expect("Could not recover. Exiting."); } pub fn inner_main() -> Result<()> { env_logger::init().unwrap(); let args = app_from_crate!() .arg_from_usage("<wordlist> 'file containing words to look for, one per line'") .arg_from_usage("<output> 'file in which to store the resulting cooccurrence matrix'") .get_matches(); // Read the word list from a file. let mut words = vec![]; for line in BufReader::new(File::open(args.value_of("wordlist").unwrap())?).lines() { words.push(line?); } // Note that word 0 is the unknown word. let word_ids: HashMap<&str, usize> = words.iter() .enumerate() .map(|(id, word)| (word.as_ref(), id + 1)) .collect(); // This will be a table with rows of center words and columns of context words // It could be usize instead of f64 but this is easier for interop // and its range is still enough not to be a problem. let mut cooccurrences: Array2<f64> = Array2::zeros((words.len() + 1, words.len() + 1)); if words.len() < 25
else { info!("Collecting cooccurrences (with one another) of {} words.", words.len()); } for rec in WarcStreamer::new()? { let mention_ids = tokenize(&rec, &word_ids); for mention_i in 0..mention_ids.len() { for context_i in mention_i..min(mention_ids.len(), WINDOW_WIDTH) { cooccurrences[[ mention_ids[mention_i], // row: center word mention_ids[context_i] // column: context word ]] += 1.0; // uniform window weight } } } if words.len() <= 10 { println!("Cooccurrences look like {}", cooccurrences); } numpy::write_matrix(args.value_of("output").unwrap(), &cooccurrences)?; Ok(()) } /// Tokenize a string according to a dictionary. Unknowns will be 0. pub fn tokenize(content: &str, ids: &HashMap<&str, usize>) -> Vec<usize> { /// Notice that we return indices (avoiding allocation) content // You can split on nonalphanumerics for a big speedup but the tokens are dubious. //.split(|c: char|! c.is_alphanumeric()) .split_word_bounds() .map(|mention| *ids.get(mention).unwrap_or(&0)) // This will remove unknown words // but this is troublesome because it makes the context windows too wide if you have few words // because most words will be unknown //.filter(|i| *i > 0) .collect() }
{ info!("Collecting cooccurrences (with one another) of: {:?}", words); }
conditional_block
cb-cooccur.rs
//! Example 0: Word cooccurrence counter //! //! This simple script takes an input WET corpus piped to STDIN and counts cooccurrences of input //! tokens, where tokens are defined by unicode, and cooccurrence is a window 21 words in //! diameter, and only words from a newline-separated list are considered.
//! The output is a numpy file with a cooccurrence matrix. The rows are the center word, starting //! with the unknown word as word 0, and proceeding in the order they were specified in the input //! word list. The columns are the context words, and cooccurrence is always counted as 1 or 0. //! // argument parsing #[macro_use] extern crate clap; // logging #[macro_use] extern crate log; extern crate env_logger; // numpy-like arrays extern crate ndarray; // better segmentation extern crate unicode_segmentation; // lastly, this library extern crate cabarrus; use std::fs::File; use std::io::{BufRead, BufReader}; use std::cmp::min; use std::collections::HashMap; use ndarray::prelude::*; use unicode_segmentation::UnicodeSegmentation; use cabarrus::warc::WarcStreamer; use cabarrus::errors::*; use cabarrus::numpy; const WINDOW_RADIUS: usize = 10; const WINDOW_WIDTH: usize = 2 * WINDOW_RADIUS + 1; pub fn main() { // Main can't return a Result, and the? operator needs the enclosing function to return Result inner_main().expect("Could not recover. Exiting."); } pub fn inner_main() -> Result<()> { env_logger::init().unwrap(); let args = app_from_crate!() .arg_from_usage("<wordlist> 'file containing words to look for, one per line'") .arg_from_usage("<output> 'file in which to store the resulting cooccurrence matrix'") .get_matches(); // Read the word list from a file. let mut words = vec![]; for line in BufReader::new(File::open(args.value_of("wordlist").unwrap())?).lines() { words.push(line?); } // Note that word 0 is the unknown word. let word_ids: HashMap<&str, usize> = words.iter() .enumerate() .map(|(id, word)| (word.as_ref(), id + 1)) .collect(); // This will be a table with rows of center words and columns of context words // It could be usize instead of f64 but this is easier for interop // and its range is still enough not to be a problem. let mut cooccurrences: Array2<f64> = Array2::zeros((words.len() + 1, words.len() + 1)); if words.len() < 25 { info!("Collecting cooccurrences (with one another) of: {:?}", words); } else { info!("Collecting cooccurrences (with one another) of {} words.", words.len()); } for rec in WarcStreamer::new()? { let mention_ids = tokenize(&rec, &word_ids); for mention_i in 0..mention_ids.len() { for context_i in mention_i..min(mention_ids.len(), WINDOW_WIDTH) { cooccurrences[[ mention_ids[mention_i], // row: center word mention_ids[context_i] // column: context word ]] += 1.0; // uniform window weight } } } if words.len() <= 10 { println!("Cooccurrences look like {}", cooccurrences); } numpy::write_matrix(args.value_of("output").unwrap(), &cooccurrences)?; Ok(()) } /// Tokenize a string according to a dictionary. Unknowns will be 0. pub fn tokenize(content: &str, ids: &HashMap<&str, usize>) -> Vec<usize> { /// Notice that we return indices (avoiding allocation) content // You can split on nonalphanumerics for a big speedup but the tokens are dubious. //.split(|c: char|! c.is_alphanumeric()) .split_word_bounds() .map(|mention| *ids.get(mention).unwrap_or(&0)) // This will remove unknown words // but this is troublesome because it makes the context windows too wide if you have few words // because most words will be unknown //.filter(|i| *i > 0) .collect() }
//!
random_line_split
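Editor's note (not part of the dataset rows): the rows above split the cooccurrence-counter example, whose module docs describe a window of `WINDOW_RADIUS = 10` tokens on each side of the centre word, with word 0 reserved for unknowns and known words numbered from 1. (Note that the driver's inner loop caps the context index at `WINDOW_WIDTH` from the start of the record rather than from the centre token; the sketch below follows the ±radius window the docs describe.) An illustrative, dependency-free version of the counting step, using plain `Vec`s instead of `ndarray` and a small radius for the demo:

use std::collections::HashMap;

/// Illustrative windowed cooccurrence counter: word 0 is "unknown", known
/// words get ids 1.., and each centre token is paired with every token within
/// `radius` positions of it (uniform weight 1.0, as in the driver).
fn cooccurrences(tokens: &[&str], words: &[&str], radius: usize) -> Vec<Vec<f64>> {
    let ids: HashMap<&str, usize> =
        words.iter().enumerate().map(|(i, w)| (*w, i + 1)).collect();
    let token_ids: Vec<usize> = tokens.iter().map(|t| *ids.get(t).unwrap_or(&0)).collect();

    let n = words.len() + 1;
    let mut counts = vec![vec![0.0f64; n]; n];
    for (centre, &row) in token_ids.iter().enumerate() {
        let lo = centre.saturating_sub(radius);
        let hi = (centre + radius + 1).min(token_ids.len());
        for &col in &token_ids[lo..hi] {
            counts[row][col] += 1.0; // row: centre word, column: context word
        }
    }
    counts
}

fn main() {
    let words = ["hello", "world"];
    let tokens = ["hello", "big", "wide", "world", "hello"];
    let m = cooccurrences(&tokens, &words, 2);
    // Row 1 = "hello" as the centre word; column 2 = "world" in its window.
    println!("hello/world cooccurrences: {}", m[1][2]);
}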
spsc_queue.rs
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT * SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are * those of the authors and should not be interpreted as representing official * policies, either expressed or implied, of Dmitry Vyukov. */ // http://www.1024cores.net/home/lock-free-algorithms/queues/unbounded-spsc-queue //! A single-producer single-consumer concurrent queue //! //! This module contains the implementation of an SPSC queue which can be used //! concurrently between two tasks. This data structure is safe to use and //! enforces the semantics that there is one pusher and one popper. #![unstable(feature = "std_misc")] use core::prelude::*; use alloc::boxed::Box; use core::mem; use core::ptr; use core::cell::UnsafeCell; use sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; // Node within the linked list queue of messages to send struct Node<T> { // FIXME: this could be an uninitialized T if we're careful enough, and // that would reduce memory usage (and be a bit faster). // is it worth it? value: Option<T>, // nullable for re-use of nodes next: AtomicPtr<Node<T>>, // next node in the queue } /// The single-producer single-consumer queue. This structure is not cloneable, /// but it can be safely shared in an Arc if it is guaranteed that there /// is only one popper and one pusher touching the queue at any one point in /// time. pub struct Queue<T> { // consumer fields tail: UnsafeCell<*mut Node<T>>, // where to pop from tail_prev: AtomicPtr<Node<T>>, // where to pop from // producer fields head: UnsafeCell<*mut Node<T>>, // where to push to first: UnsafeCell<*mut Node<T>>, // where to get new nodes from tail_copy: UnsafeCell<*mut Node<T>>, // between first/tail // Cache maintenance fields. Additions and subtractions are stored // separately in order to allow them to use nonatomic addition/subtraction. cache_bound: uint, cache_additions: AtomicUsize, cache_subtractions: AtomicUsize, } unsafe impl<T: Send> Send for Queue<T> { } unsafe impl<T: Send> Sync for Queue<T> { } impl<T: Send> Node<T> { fn new() -> *mut Node<T> { unsafe { mem::transmute(box Node { value: None, next: AtomicPtr::new(ptr::null_mut::<Node<T>>()), }) } } } impl<T: Send> Queue<T> { /// Creates a new queue. 
/// /// This is unsafe as the type system doesn't enforce a single /// consumer-producer relationship. It also allows the consumer to `pop` /// items while there is a `peek` active due to all methods having a /// non-mutable receiver. /// /// # Arguments /// /// * `bound` - This queue implementation is implemented with a linked /// list, and this means that a push is always a malloc. In /// order to amortize this cost, an internal cache of nodes is /// maintained to prevent a malloc from always being /// necessary. This bound is the limit on the size of the /// cache (if desired). If the value is 0, then the cache has /// no bound. Otherwise, the cache will never grow larger than /// `bound` (although the queue itself could be much larger. pub unsafe fn new(bound: uint) -> Queue<T> { let n1 = Node::new(); let n2 = Node::new(); (*n1).next.store(n2, Ordering::Relaxed); Queue { tail: UnsafeCell::new(n2), tail_prev: AtomicPtr::new(n1), head: UnsafeCell::new(n2), first: UnsafeCell::new(n1), tail_copy: UnsafeCell::new(n1), cache_bound: bound, cache_additions: AtomicUsize::new(0), cache_subtractions: AtomicUsize::new(0), } } /// Pushes a new value onto this queue. Note that to use this function /// safely, it must be externally guaranteed that there is only one pusher. pub fn push(&self, t: T) { unsafe { // Acquire a node (which either uses a cached one or allocates a new // one), and then append this to the 'head' node. let n = self.alloc(); assert!((*n).value.is_none()); (*n).value = Some(t); (*n).next.store(ptr::null_mut(), Ordering::Relaxed); (**self.head.get()).next.store(n, Ordering::Release); *self.head.get() = n; } } unsafe fn alloc(&self) -> *mut Node<T> { // First try to see if we can consume the 'first' node for our uses. // We try to avoid as many atomic instructions as possible here, so // the addition to cache_subtractions is not atomic (plus we're the // only one subtracting from the cache). if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0 { let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); } let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If the above fails, then update our copy of the tail and try // again. *self.tail_copy.get() = self.tail_prev.load(Ordering::Acquire); if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0 { let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); } let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If all of that fails, then we have to allocate a new node // (there's nothing in the node cache). Node::new() } /// Attempts to pop a value from this queue. Remember that to use this type /// safely you must ensure that there is only one popper at a time. pub fn pop(&self) -> Option<T> { unsafe { // The `tail` node is not actually a used node, but rather a // sentinel from where we should start popping from. Hence, look at // tail's next field and see if we can use it. If we do a pop, then // the current tail node is a candidate for going into the cache. let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } assert!((*next).value.is_some()); let ret = (*next).value.take(); *self.tail.get() = next; if self.cache_bound == 0 { self.tail_prev.store(tail, Ordering::Release); } else { // FIXME: this is dubious with overflow. 
let additions = self.cache_additions.load(Ordering::Relaxed); let subtractions = self.cache_subtractions.load(Ordering::Relaxed); let size = additions - subtractions; if size < self.cache_bound { self.tail_prev.store(tail, Ordering::Release); self.cache_additions.store(additions + 1, Ordering::Relaxed); } else { (*self.tail_prev.load(Ordering::Relaxed)) .next.store(next, Ordering::Relaxed); // We have successfully erased all references to 'tail', so // now we can safely drop it. let _: Box<Node<T>> = mem::transmute(tail); } } return ret; } } /// Attempts to peek at the head of the queue, returning `None` if the queue /// has no data currently /// /// # Warning /// The reference returned is invalid if it is not used before the consumer /// pops the value off the queue. If the producer then pushes another value /// onto the queue, it will overwrite the value pointed to by the reference. pub fn peek<'a>(&'a self) -> Option<&'a mut T> { // This is essentially the same as above with all the popping bits // stripped out. unsafe { let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } return (*next).value.as_mut(); } } } #[unsafe_destructor] impl<T: Send> Drop for Queue<T> { fn drop(&mut self) { unsafe { let mut cur = *self.first.get(); while!cur.is_null() { let next = (*cur).next.load(Ordering::Relaxed); let _n: Box<Node<T>> = mem::transmute(cur); cur = next; } } } } #[cfg(test)] mod test { use prelude::v1::*; use sync::Arc; use super::Queue; use thread::Thread; use sync::mpsc::channel; #[test] fn smoke() { unsafe { let queue = Queue::new(0); queue.push(1); queue.push(2); assert_eq!(queue.pop(), Some(1)); assert_eq!(queue.pop(), Some(2)); assert_eq!(queue.pop(), None); queue.push(3); queue.push(4); assert_eq!(queue.pop(), Some(3)); assert_eq!(queue.pop(), Some(4)); assert_eq!(queue.pop(), None); } } #[test] fn peek() { unsafe { let queue = Queue::new(0); queue.push(vec![1]); // Ensure the borrowchecker works match queue.peek() { Some(vec) => match &**vec { // Note that `pop` is not allowed here due to borrow [1] => {} _ => return }, None => unreachable!() } queue.pop(); } } #[test] fn drop_full() { unsafe { let q = Queue::new(0); q.push(box 1); q.push(box 2); } } #[test] fn smoke_bound() { unsafe { let q = Queue::new(0); q.push(1); q.push(2); assert_eq!(q.pop(), Some(1)); assert_eq!(q.pop(), Some(2)); assert_eq!(q.pop(), None); q.push(3); q.push(4); assert_eq!(q.pop(), Some(3)); assert_eq!(q.pop(), Some(4)); assert_eq!(q.pop(), None); } } #[test] fn stress()
} tx.send(()).unwrap(); }); for _ in 0..100000 { q.push(1); } rx.recv().unwrap(); } } }
{ unsafe { stress_bound(0); stress_bound(1); } unsafe fn stress_bound(bound: uint) { let q = Arc::new(Queue::new(bound)); let (tx, rx) = channel(); let q2 = q.clone(); let _t = Thread::spawn(move|| { for _ in 0u..100000 { loop { match q2.pop() { Some(1) => break, Some(_) => panic!(), None => {} } }
identifier_body
spsc_queue.rs
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT * SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are * those of the authors and should not be interpreted as representing official * policies, either expressed or implied, of Dmitry Vyukov. */ // http://www.1024cores.net/home/lock-free-algorithms/queues/unbounded-spsc-queue //! A single-producer single-consumer concurrent queue //! //! This module contains the implementation of an SPSC queue which can be used //! concurrently between two tasks. This data structure is safe to use and //! enforces the semantics that there is one pusher and one popper. #![unstable(feature = "std_misc")] use core::prelude::*; use alloc::boxed::Box; use core::mem; use core::ptr; use core::cell::UnsafeCell; use sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; // Node within the linked list queue of messages to send struct Node<T> { // FIXME: this could be an uninitialized T if we're careful enough, and // that would reduce memory usage (and be a bit faster). // is it worth it? value: Option<T>, // nullable for re-use of nodes next: AtomicPtr<Node<T>>, // next node in the queue } /// The single-producer single-consumer queue. This structure is not cloneable, /// but it can be safely shared in an Arc if it is guaranteed that there /// is only one popper and one pusher touching the queue at any one point in /// time. pub struct Queue<T> { // consumer fields tail: UnsafeCell<*mut Node<T>>, // where to pop from tail_prev: AtomicPtr<Node<T>>, // where to pop from // producer fields head: UnsafeCell<*mut Node<T>>, // where to push to first: UnsafeCell<*mut Node<T>>, // where to get new nodes from tail_copy: UnsafeCell<*mut Node<T>>, // between first/tail // Cache maintenance fields. Additions and subtractions are stored // separately in order to allow them to use nonatomic addition/subtraction. cache_bound: uint, cache_additions: AtomicUsize, cache_subtractions: AtomicUsize, } unsafe impl<T: Send> Send for Queue<T> { } unsafe impl<T: Send> Sync for Queue<T> { } impl<T: Send> Node<T> { fn new() -> *mut Node<T> { unsafe { mem::transmute(box Node { value: None, next: AtomicPtr::new(ptr::null_mut::<Node<T>>()), }) } } } impl<T: Send> Queue<T> { /// Creates a new queue. 
/// /// This is unsafe as the type system doesn't enforce a single /// consumer-producer relationship. It also allows the consumer to `pop` /// items while there is a `peek` active due to all methods having a /// non-mutable receiver. /// /// # Arguments /// /// * `bound` - This queue implementation is implemented with a linked /// list, and this means that a push is always a malloc. In /// order to amortize this cost, an internal cache of nodes is /// maintained to prevent a malloc from always being /// necessary. This bound is the limit on the size of the /// cache (if desired). If the value is 0, then the cache has /// no bound. Otherwise, the cache will never grow larger than /// `bound` (although the queue itself could be much larger. pub unsafe fn new(bound: uint) -> Queue<T> { let n1 = Node::new(); let n2 = Node::new(); (*n1).next.store(n2, Ordering::Relaxed); Queue { tail: UnsafeCell::new(n2), tail_prev: AtomicPtr::new(n1), head: UnsafeCell::new(n2), first: UnsafeCell::new(n1), tail_copy: UnsafeCell::new(n1), cache_bound: bound, cache_additions: AtomicUsize::new(0), cache_subtractions: AtomicUsize::new(0), } } /// Pushes a new value onto this queue. Note that to use this function /// safely, it must be externally guaranteed that there is only one pusher. pub fn push(&self, t: T) { unsafe { // Acquire a node (which either uses a cached one or allocates a new // one), and then append this to the 'head' node. let n = self.alloc(); assert!((*n).value.is_none()); (*n).value = Some(t); (*n).next.store(ptr::null_mut(), Ordering::Relaxed); (**self.head.get()).next.store(n, Ordering::Release); *self.head.get() = n; } } unsafe fn alloc(&self) -> *mut Node<T> { // First try to see if we can consume the 'first' node for our uses. // We try to avoid as many atomic instructions as possible here, so // the addition to cache_subtractions is not atomic (plus we're the // only one subtracting from the cache). if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0 { let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); } let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If the above fails, then update our copy of the tail and try // again. *self.tail_copy.get() = self.tail_prev.load(Ordering::Acquire); if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0 { let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); } let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If all of that fails, then we have to allocate a new node // (there's nothing in the node cache). Node::new() } /// Attempts to pop a value from this queue. Remember that to use this type /// safely you must ensure that there is only one popper at a time. pub fn pop(&self) -> Option<T> { unsafe { // The `tail` node is not actually a used node, but rather a // sentinel from where we should start popping from. Hence, look at // tail's next field and see if we can use it. If we do a pop, then // the current tail node is a candidate for going into the cache. let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } assert!((*next).value.is_some()); let ret = (*next).value.take(); *self.tail.get() = next; if self.cache_bound == 0 { self.tail_prev.store(tail, Ordering::Release); } else { // FIXME: this is dubious with overflow. 
let additions = self.cache_additions.load(Ordering::Relaxed); let subtractions = self.cache_subtractions.load(Ordering::Relaxed); let size = additions - subtractions; if size < self.cache_bound { self.tail_prev.store(tail, Ordering::Release); self.cache_additions.store(additions + 1, Ordering::Relaxed); } else { (*self.tail_prev.load(Ordering::Relaxed)) .next.store(next, Ordering::Relaxed); // We have successfully erased all references to 'tail', so // now we can safely drop it. let _: Box<Node<T>> = mem::transmute(tail); } } return ret; } } /// Attempts to peek at the head of the queue, returning `None` if the queue /// has no data currently /// /// # Warning /// The reference returned is invalid if it is not used before the consumer /// pops the value off the queue. If the producer then pushes another value /// onto the queue, it will overwrite the value pointed to by the reference. pub fn peek<'a>(&'a self) -> Option<&'a mut T> { // This is essentially the same as above with all the popping bits // stripped out. unsafe { let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } return (*next).value.as_mut(); } } } #[unsafe_destructor] impl<T: Send> Drop for Queue<T> { fn drop(&mut self) { unsafe { let mut cur = *self.first.get(); while!cur.is_null() { let next = (*cur).next.load(Ordering::Relaxed); let _n: Box<Node<T>> = mem::transmute(cur); cur = next; } } } } #[cfg(test)] mod test { use prelude::v1::*; use sync::Arc; use super::Queue; use thread::Thread; use sync::mpsc::channel; #[test] fn smoke() { unsafe { let queue = Queue::new(0); queue.push(1); queue.push(2); assert_eq!(queue.pop(), Some(1)); assert_eq!(queue.pop(), Some(2)); assert_eq!(queue.pop(), None); queue.push(3); queue.push(4); assert_eq!(queue.pop(), Some(3)); assert_eq!(queue.pop(), Some(4)); assert_eq!(queue.pop(), None); } } #[test] fn peek() { unsafe { let queue = Queue::new(0); queue.push(vec![1]); // Ensure the borrowchecker works match queue.peek() { Some(vec) => match &**vec { // Note that `pop` is not allowed here due to borrow [1] => {} _ => return }, None => unreachable!() } queue.pop(); } } #[test] fn
() { unsafe { let q = Queue::new(0); q.push(box 1); q.push(box 2); } } #[test] fn smoke_bound() { unsafe { let q = Queue::new(0); q.push(1); q.push(2); assert_eq!(q.pop(), Some(1)); assert_eq!(q.pop(), Some(2)); assert_eq!(q.pop(), None); q.push(3); q.push(4); assert_eq!(q.pop(), Some(3)); assert_eq!(q.pop(), Some(4)); assert_eq!(q.pop(), None); } } #[test] fn stress() { unsafe { stress_bound(0); stress_bound(1); } unsafe fn stress_bound(bound: uint) { let q = Arc::new(Queue::new(bound)); let (tx, rx) = channel(); let q2 = q.clone(); let _t = Thread::spawn(move|| { for _ in 0u..100000 { loop { match q2.pop() { Some(1) => break, Some(_) => panic!(), None => {} } } } tx.send(()).unwrap(); }); for _ in 0..100000 { q.push(1); } rx.recv().unwrap(); } } }
drop_full
identifier_name
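Editor's note (not part of the dataset rows): the rows above split the Vyukov unbounded SPSC queue, which appears to come from the standard library's old `sync::mpsc` internals; its tests encode the contract of exactly one pushing thread and one popping thread, signalling completion over a channel. For a runnable analogue on current stable Rust without the `unsafe` constructor, the sketch below mirrors the `stress` test's shape using `std::sync::mpsc` channels (which cover the same single-consumer use case) rather than this queue's API.

use std::sync::mpsc::channel;
use std::thread;

// Illustrative modern equivalent of the stress test's shape: one producer
// thread, one consumer thread, completion checked when the count matches.
fn main() {
    let (tx, rx) = channel::<u32>();

    let producer = thread::spawn(move || {
        for _ in 0..100_000 {
            tx.send(1).unwrap();
        }
        // Dropping `tx` closes the channel, so the consumer's loop terminates.
    });

    let mut received = 0u32;
    while let Ok(v) = rx.recv() {
        assert_eq!(v, 1);
        received += 1;
    }
    producer.join().unwrap();
    assert_eq!(received, 100_000);
}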
spsc_queue.rs
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT * SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are * those of the authors and should not be interpreted as representing official * policies, either expressed or implied, of Dmitry Vyukov. */ // http://www.1024cores.net/home/lock-free-algorithms/queues/unbounded-spsc-queue //! A single-producer single-consumer concurrent queue //! //! This module contains the implementation of an SPSC queue which can be used //! concurrently between two tasks. This data structure is safe to use and //! enforces the semantics that there is one pusher and one popper. #![unstable(feature = "std_misc")] use core::prelude::*; use alloc::boxed::Box; use core::mem; use core::ptr; use core::cell::UnsafeCell; use sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; // Node within the linked list queue of messages to send struct Node<T> { // FIXME: this could be an uninitialized T if we're careful enough, and // that would reduce memory usage (and be a bit faster). // is it worth it? value: Option<T>, // nullable for re-use of nodes next: AtomicPtr<Node<T>>, // next node in the queue } /// The single-producer single-consumer queue. This structure is not cloneable, /// but it can be safely shared in an Arc if it is guaranteed that there /// is only one popper and one pusher touching the queue at any one point in /// time. pub struct Queue<T> { // consumer fields tail: UnsafeCell<*mut Node<T>>, // where to pop from tail_prev: AtomicPtr<Node<T>>, // where to pop from // producer fields head: UnsafeCell<*mut Node<T>>, // where to push to first: UnsafeCell<*mut Node<T>>, // where to get new nodes from tail_copy: UnsafeCell<*mut Node<T>>, // between first/tail // Cache maintenance fields. Additions and subtractions are stored // separately in order to allow them to use nonatomic addition/subtraction. cache_bound: uint, cache_additions: AtomicUsize, cache_subtractions: AtomicUsize, } unsafe impl<T: Send> Send for Queue<T> { } unsafe impl<T: Send> Sync for Queue<T> { } impl<T: Send> Node<T> { fn new() -> *mut Node<T> { unsafe { mem::transmute(box Node { value: None, next: AtomicPtr::new(ptr::null_mut::<Node<T>>()), }) } } } impl<T: Send> Queue<T> { /// Creates a new queue. 
/// /// This is unsafe as the type system doesn't enforce a single /// consumer-producer relationship. It also allows the consumer to `pop` /// items while there is a `peek` active due to all methods having a /// non-mutable receiver. /// /// # Arguments /// /// * `bound` - This queue implementation is implemented with a linked /// list, and this means that a push is always a malloc. In /// order to amortize this cost, an internal cache of nodes is /// maintained to prevent a malloc from always being /// necessary. This bound is the limit on the size of the /// cache (if desired). If the value is 0, then the cache has /// no bound. Otherwise, the cache will never grow larger than /// `bound` (although the queue itself could be much larger. pub unsafe fn new(bound: uint) -> Queue<T> { let n1 = Node::new(); let n2 = Node::new(); (*n1).next.store(n2, Ordering::Relaxed); Queue { tail: UnsafeCell::new(n2), tail_prev: AtomicPtr::new(n1), head: UnsafeCell::new(n2), first: UnsafeCell::new(n1), tail_copy: UnsafeCell::new(n1), cache_bound: bound, cache_additions: AtomicUsize::new(0), cache_subtractions: AtomicUsize::new(0), } } /// Pushes a new value onto this queue. Note that to use this function /// safely, it must be externally guaranteed that there is only one pusher. pub fn push(&self, t: T) { unsafe { // Acquire a node (which either uses a cached one or allocates a new // one), and then append this to the 'head' node. let n = self.alloc(); assert!((*n).value.is_none()); (*n).value = Some(t); (*n).next.store(ptr::null_mut(), Ordering::Relaxed); (**self.head.get()).next.store(n, Ordering::Release); *self.head.get() = n; } } unsafe fn alloc(&self) -> *mut Node<T> { // First try to see if we can consume the 'first' node for our uses. // We try to avoid as many atomic instructions as possible here, so // the addition to cache_subtractions is not atomic (plus we're the // only one subtracting from the cache). if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0
let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If the above fails, then update our copy of the tail and try // again. *self.tail_copy.get() = self.tail_prev.load(Ordering::Acquire); if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0 { let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); } let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If all of that fails, then we have to allocate a new node // (there's nothing in the node cache). Node::new() } /// Attempts to pop a value from this queue. Remember that to use this type /// safely you must ensure that there is only one popper at a time. pub fn pop(&self) -> Option<T> { unsafe { // The `tail` node is not actually a used node, but rather a // sentinel from where we should start popping from. Hence, look at // tail's next field and see if we can use it. If we do a pop, then // the current tail node is a candidate for going into the cache. let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } assert!((*next).value.is_some()); let ret = (*next).value.take(); *self.tail.get() = next; if self.cache_bound == 0 { self.tail_prev.store(tail, Ordering::Release); } else { // FIXME: this is dubious with overflow. let additions = self.cache_additions.load(Ordering::Relaxed); let subtractions = self.cache_subtractions.load(Ordering::Relaxed); let size = additions - subtractions; if size < self.cache_bound { self.tail_prev.store(tail, Ordering::Release); self.cache_additions.store(additions + 1, Ordering::Relaxed); } else { (*self.tail_prev.load(Ordering::Relaxed)) .next.store(next, Ordering::Relaxed); // We have successfully erased all references to 'tail', so // now we can safely drop it. let _: Box<Node<T>> = mem::transmute(tail); } } return ret; } } /// Attempts to peek at the head of the queue, returning `None` if the queue /// has no data currently /// /// # Warning /// The reference returned is invalid if it is not used before the consumer /// pops the value off the queue. If the producer then pushes another value /// onto the queue, it will overwrite the value pointed to by the reference. pub fn peek<'a>(&'a self) -> Option<&'a mut T> { // This is essentially the same as above with all the popping bits // stripped out. 
unsafe { let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } return (*next).value.as_mut(); } } } #[unsafe_destructor] impl<T: Send> Drop for Queue<T> { fn drop(&mut self) { unsafe { let mut cur = *self.first.get(); while!cur.is_null() { let next = (*cur).next.load(Ordering::Relaxed); let _n: Box<Node<T>> = mem::transmute(cur); cur = next; } } } } #[cfg(test)] mod test { use prelude::v1::*; use sync::Arc; use super::Queue; use thread::Thread; use sync::mpsc::channel; #[test] fn smoke() { unsafe { let queue = Queue::new(0); queue.push(1); queue.push(2); assert_eq!(queue.pop(), Some(1)); assert_eq!(queue.pop(), Some(2)); assert_eq!(queue.pop(), None); queue.push(3); queue.push(4); assert_eq!(queue.pop(), Some(3)); assert_eq!(queue.pop(), Some(4)); assert_eq!(queue.pop(), None); } } #[test] fn peek() { unsafe { let queue = Queue::new(0); queue.push(vec![1]); // Ensure the borrowchecker works match queue.peek() { Some(vec) => match &**vec { // Note that `pop` is not allowed here due to borrow [1] => {} _ => return }, None => unreachable!() } queue.pop(); } } #[test] fn drop_full() { unsafe { let q = Queue::new(0); q.push(box 1); q.push(box 2); } } #[test] fn smoke_bound() { unsafe { let q = Queue::new(0); q.push(1); q.push(2); assert_eq!(q.pop(), Some(1)); assert_eq!(q.pop(), Some(2)); assert_eq!(q.pop(), None); q.push(3); q.push(4); assert_eq!(q.pop(), Some(3)); assert_eq!(q.pop(), Some(4)); assert_eq!(q.pop(), None); } } #[test] fn stress() { unsafe { stress_bound(0); stress_bound(1); } unsafe fn stress_bound(bound: uint) { let q = Arc::new(Queue::new(bound)); let (tx, rx) = channel(); let q2 = q.clone(); let _t = Thread::spawn(move|| { for _ in 0u..100000 { loop { match q2.pop() { Some(1) => break, Some(_) => panic!(), None => {} } } } tx.send(()).unwrap(); }); for _ in 0..100000 { q.push(1); } rx.recv().unwrap(); } } }
{ let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); }
conditional_block
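Editor's note: the doc comments and the `stress` test in the spsc_queue.rs rows already describe the intended usage pattern; the sketch below only condenses it into one place. It is written in the same pre-1.0 Rust dialect as the file (uint, Thread, unstable std internals), so treat it as an illustration of the contract rather than code for a current toolchain.

// One producer thread and one consumer thread share the queue through an Arc.
// The unsafe constructor is the caller's promise that exactly one task pushes
// and exactly one task pops; nothing else is checked at compile time.
let q = Arc::new(unsafe { Queue::new(0) });      // 0 = unbounded node cache
let q2 = q.clone();
let (tx, rx) = channel();
let _t = Thread::spawn(move|| {
    for i in 0u..1000 { q2.push(i); }            // producer side
    tx.send(()).unwrap();                        // tell the main thread we are done
});
let mut seen = 0u;
while seen < 1000 {
    if q.pop().is_some() { seen += 1; }          // consumer side; spins while empty
}
rx.recv().unwrap();                              // join the producer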
spsc_queue.rs
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT * SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are * those of the authors and should not be interpreted as representing official * policies, either expressed or implied, of Dmitry Vyukov. */ // http://www.1024cores.net/home/lock-free-algorithms/queues/unbounded-spsc-queue //! A single-producer single-consumer concurrent queue //! //! This module contains the implementation of an SPSC queue which can be used //! concurrently between two tasks. This data structure is safe to use and //! enforces the semantics that there is one pusher and one popper. #![unstable(feature = "std_misc")] use core::prelude::*; use alloc::boxed::Box; use core::mem; use core::ptr; use core::cell::UnsafeCell; use sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; // Node within the linked list queue of messages to send struct Node<T> { // FIXME: this could be an uninitialized T if we're careful enough, and // that would reduce memory usage (and be a bit faster). // is it worth it? value: Option<T>, // nullable for re-use of nodes next: AtomicPtr<Node<T>>, // next node in the queue } /// The single-producer single-consumer queue. This structure is not cloneable, /// but it can be safely shared in an Arc if it is guaranteed that there /// is only one popper and one pusher touching the queue at any one point in /// time. pub struct Queue<T> { // consumer fields tail: UnsafeCell<*mut Node<T>>, // where to pop from tail_prev: AtomicPtr<Node<T>>, // where to pop from // producer fields head: UnsafeCell<*mut Node<T>>, // where to push to first: UnsafeCell<*mut Node<T>>, // where to get new nodes from tail_copy: UnsafeCell<*mut Node<T>>, // between first/tail // Cache maintenance fields. Additions and subtractions are stored // separately in order to allow them to use nonatomic addition/subtraction. cache_bound: uint, cache_additions: AtomicUsize, cache_subtractions: AtomicUsize, } unsafe impl<T: Send> Send for Queue<T> { } unsafe impl<T: Send> Sync for Queue<T> { } impl<T: Send> Node<T> { fn new() -> *mut Node<T> { unsafe { mem::transmute(box Node { value: None, next: AtomicPtr::new(ptr::null_mut::<Node<T>>()), }) } } } impl<T: Send> Queue<T> { /// Creates a new queue. 
/// /// This is unsafe as the type system doesn't enforce a single /// consumer-producer relationship. It also allows the consumer to `pop` /// items while there is a `peek` active due to all methods having a /// non-mutable receiver. /// /// # Arguments /// /// * `bound` - This queue implementation is implemented with a linked /// list, and this means that a push is always a malloc. In /// order to amortize this cost, an internal cache of nodes is /// maintained to prevent a malloc from always being /// necessary. This bound is the limit on the size of the /// cache (if desired). If the value is 0, then the cache has /// no bound. Otherwise, the cache will never grow larger than /// `bound` (although the queue itself could be much larger. pub unsafe fn new(bound: uint) -> Queue<T> { let n1 = Node::new(); let n2 = Node::new(); (*n1).next.store(n2, Ordering::Relaxed); Queue { tail: UnsafeCell::new(n2), tail_prev: AtomicPtr::new(n1), head: UnsafeCell::new(n2), first: UnsafeCell::new(n1), tail_copy: UnsafeCell::new(n1), cache_bound: bound, cache_additions: AtomicUsize::new(0), cache_subtractions: AtomicUsize::new(0), } } /// Pushes a new value onto this queue. Note that to use this function /// safely, it must be externally guaranteed that there is only one pusher. pub fn push(&self, t: T) { unsafe { // Acquire a node (which either uses a cached one or allocates a new // one), and then append this to the 'head' node. let n = self.alloc(); assert!((*n).value.is_none()); (*n).value = Some(t); (*n).next.store(ptr::null_mut(), Ordering::Relaxed); (**self.head.get()).next.store(n, Ordering::Release); *self.head.get() = n; } } unsafe fn alloc(&self) -> *mut Node<T> { // First try to see if we can consume the 'first' node for our uses. // We try to avoid as many atomic instructions as possible here, so // the addition to cache_subtractions is not atomic (plus we're the // only one subtracting from the cache). if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0 { let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); } let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If the above fails, then update our copy of the tail and try // again. *self.tail_copy.get() = self.tail_prev.load(Ordering::Acquire); if *self.first.get()!= *self.tail_copy.get() { if self.cache_bound > 0 { let b = self.cache_subtractions.load(Ordering::Relaxed); self.cache_subtractions.store(b + 1, Ordering::Relaxed); } let ret = *self.first.get(); *self.first.get() = (*ret).next.load(Ordering::Relaxed); return ret; } // If all of that fails, then we have to allocate a new node // (there's nothing in the node cache). Node::new() } /// Attempts to pop a value from this queue. Remember that to use this type /// safely you must ensure that there is only one popper at a time. pub fn pop(&self) -> Option<T> { unsafe { // The `tail` node is not actually a used node, but rather a // sentinel from where we should start popping from. Hence, look at // tail's next field and see if we can use it. If we do a pop, then // the current tail node is a candidate for going into the cache. let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } assert!((*next).value.is_some()); let ret = (*next).value.take(); *self.tail.get() = next; if self.cache_bound == 0 { self.tail_prev.store(tail, Ordering::Release); } else { // FIXME: this is dubious with overflow. 
let additions = self.cache_additions.load(Ordering::Relaxed); let subtractions = self.cache_subtractions.load(Ordering::Relaxed); let size = additions - subtractions; if size < self.cache_bound { self.tail_prev.store(tail, Ordering::Release); self.cache_additions.store(additions + 1, Ordering::Relaxed); } else { (*self.tail_prev.load(Ordering::Relaxed)) .next.store(next, Ordering::Relaxed); // We have successfully erased all references to 'tail', so // now we can safely drop it. let _: Box<Node<T>> = mem::transmute(tail); } } return ret; } } /// Attempts to peek at the head of the queue, returning `None` if the queue /// has no data currently /// /// # Warning /// The reference returned is invalid if it is not used before the consumer /// pops the value off the queue. If the producer then pushes another value /// onto the queue, it will overwrite the value pointed to by the reference. pub fn peek<'a>(&'a self) -> Option<&'a mut T> { // This is essentially the same as above with all the popping bits // stripped out. unsafe { let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if next.is_null() { return None } return (*next).value.as_mut(); } } } #[unsafe_destructor] impl<T: Send> Drop for Queue<T> { fn drop(&mut self) { unsafe { let mut cur = *self.first.get(); while!cur.is_null() { let next = (*cur).next.load(Ordering::Relaxed); let _n: Box<Node<T>> = mem::transmute(cur); cur = next; } } } } #[cfg(test)] mod test { use prelude::v1::*; use sync::Arc; use super::Queue; use thread::Thread; use sync::mpsc::channel; #[test]
queue.push(1); queue.push(2); assert_eq!(queue.pop(), Some(1)); assert_eq!(queue.pop(), Some(2)); assert_eq!(queue.pop(), None); queue.push(3); queue.push(4); assert_eq!(queue.pop(), Some(3)); assert_eq!(queue.pop(), Some(4)); assert_eq!(queue.pop(), None); } } #[test] fn peek() { unsafe { let queue = Queue::new(0); queue.push(vec![1]); // Ensure the borrowchecker works match queue.peek() { Some(vec) => match &**vec { // Note that `pop` is not allowed here due to borrow [1] => {} _ => return }, None => unreachable!() } queue.pop(); } } #[test] fn drop_full() { unsafe { let q = Queue::new(0); q.push(box 1); q.push(box 2); } } #[test] fn smoke_bound() { unsafe { let q = Queue::new(0); q.push(1); q.push(2); assert_eq!(q.pop(), Some(1)); assert_eq!(q.pop(), Some(2)); assert_eq!(q.pop(), None); q.push(3); q.push(4); assert_eq!(q.pop(), Some(3)); assert_eq!(q.pop(), Some(4)); assert_eq!(q.pop(), None); } } #[test] fn stress() { unsafe { stress_bound(0); stress_bound(1); } unsafe fn stress_bound(bound: uint) { let q = Arc::new(Queue::new(bound)); let (tx, rx) = channel(); let q2 = q.clone(); let _t = Thread::spawn(move|| { for _ in 0u..100000 { loop { match q2.pop() { Some(1) => break, Some(_) => panic!(), None => {} } } } tx.send(()).unwrap(); }); for _ in 0..100000 { q.push(1); } rx.recv().unwrap(); } } }
fn smoke() { unsafe { let queue = Queue::new(0);
random_line_split
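Editor's note: because `Queue::new` is unsafe and the type system does not enforce the one-pusher/one-popper rule spelled out in the doc comment, a common way to recover a safe API is to hand out two distinct halves. The sketch below is not part of the module shown here, just an illustration of that design choice; std's own mpsc channels wrap this queue in a similar spirit.

// Hypothetical safe facade over the Queue above; all names are invented.
pub struct Producer<T> { inner: Arc<Queue<T>> }
pub struct Consumer<T> { inner: Arc<Queue<T>> }

// The only way to obtain the halves: each side gets exactly one, and neither
// half is Clone, so the single-producer/single-consumer discipline is upheld
// by ownership instead of by convention.
pub fn split<T: Send>(bound: uint) -> (Producer<T>, Consumer<T>) {
    let q = Arc::new(unsafe { Queue::new(bound) });
    (Producer { inner: q.clone() }, Consumer { inner: q })
}

impl<T: Send> Producer<T> {
    pub fn push(&mut self, value: T) { self.inner.push(value) }
}
impl<T: Send> Consumer<T> {
    pub fn pop(&mut self) -> Option<T> { self.inner.pop() }
}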
mozmap.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `MozMap` (open-ended dictionary) type. use crate::dom::bindings::conversions::jsid_to_string; use crate::dom::bindings::error::report_pending_exception; use crate::dom::bindings::str::DOMString; use js::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible}; use js::jsapi::JSContext; use js::jsapi::JS_NewPlainObject; use js::jsapi::JSITER_HIDDEN; use js::jsapi::JSITER_OWNONLY; use js::jsapi::JSITER_SYMBOLS; use js::jsapi::JSPROP_ENUMERATE; use js::jsval::ObjectValue; use js::jsval::UndefinedValue; use js::rust::wrappers::GetPropertyKeys; use js::rust::wrappers::JS_DefineUCProperty2; use js::rust::wrappers::JS_GetPropertyById; use js::rust::HandleValue; use js::rust::IdVector; use js::rust::MutableHandleValue; use std::collections::HashMap; use std::ops::Deref; /// The `MozMap` (open-ended dictionary) type. #[derive(Clone, JSTraceable)] pub struct MozMap<T> { map: HashMap<DOMString, T>, } impl<T> MozMap<T> { /// Create an empty `MozMap`. pub fn new() -> Self { MozMap { map: HashMap::new(), } } } impl<T> Deref for MozMap<T> { type Target = HashMap<DOMString, T>; fn
(&self) -> &HashMap<DOMString, T> { &self.map } } impl<T, C> FromJSValConvertible for MozMap<T> where T: FromJSValConvertible<Config = C>, C: Clone, { type Config = C; unsafe fn from_jsval( cx: *mut JSContext, value: HandleValue, config: C, ) -> Result<ConversionResult<Self>, ()> { if!value.is_object() { return Ok(ConversionResult::Failure( "MozMap value was not an object".into(), )); } rooted!(in(cx) let object = value.to_object()); let ids = IdVector::new(cx); if!GetPropertyKeys( cx, object.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, ids.get(), ) { // TODO: can GetPropertyKeys fail? // (it does so if the object has duplicate keys) // https://github.com/servo/servo/issues/21462 report_pending_exception(cx, false); return Ok(ConversionResult::Failure( "Getting MozMap value property keys failed".into(), )); } let mut map = HashMap::new(); for id in &*ids { rooted!(in(cx) let id = *id); rooted!(in(cx) let mut property = UndefinedValue()); if!JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) { return Err(()); } let property = match T::from_jsval(cx, property.handle(), config.clone())? { ConversionResult::Success(property) => property, ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)), }; // TODO: Is this guaranteed to succeed? // https://github.com/servo/servo/issues/21463 if let Some(key) = jsid_to_string(cx, id.handle()) { map.insert(key, property); } } Ok(ConversionResult::Success(MozMap { map: map })) } } impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> { #[inline] unsafe fn to_jsval(&self, cx: *mut JSContext, mut rval: MutableHandleValue) { rooted!(in(cx) let js_object = JS_NewPlainObject(cx)); assert!(!js_object.handle().is_null()); rooted!(in(cx) let mut js_value = UndefinedValue()); for (key, value) in &self.map { let key = key.encode_utf16().collect::<Vec<_>>(); value.to_jsval(cx, js_value.handle_mut()); assert!(JS_DefineUCProperty2( cx, js_object.handle(), key.as_ptr(), key.len(), js_value.handle(), JSPROP_ENUMERATE as u32 )); } rval.set(ObjectValue(js_object.handle().get())); } }
deref
identifier_name
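Editor's note: the `Deref` impl that this row splits on is what lets callers treat a MozMap as a read-only HashMap. A small sketch, assuming nothing beyond the impls shown in this file; since no DerefMut appears here, the map is effectively read-only from the outside and is populated only through from_jsval.

// Read-side usage: iter(), len(), get() etc. all come from HashMap via Deref.
fn count_large(values: &MozMap<u32>) -> usize {
    values.iter().filter(|&(_key, v)| *v > 10).count()
}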
mozmap.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `MozMap` (open-ended dictionary) type. use crate::dom::bindings::conversions::jsid_to_string; use crate::dom::bindings::error::report_pending_exception; use crate::dom::bindings::str::DOMString; use js::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible}; use js::jsapi::JSContext; use js::jsapi::JS_NewPlainObject; use js::jsapi::JSITER_HIDDEN; use js::jsapi::JSITER_OWNONLY; use js::jsapi::JSITER_SYMBOLS; use js::jsapi::JSPROP_ENUMERATE; use js::jsval::ObjectValue; use js::jsval::UndefinedValue; use js::rust::wrappers::GetPropertyKeys; use js::rust::wrappers::JS_DefineUCProperty2; use js::rust::wrappers::JS_GetPropertyById; use js::rust::HandleValue; use js::rust::IdVector; use js::rust::MutableHandleValue; use std::collections::HashMap; use std::ops::Deref;
impl<T> MozMap<T> { /// Create an empty `MozMap`. pub fn new() -> Self { MozMap { map: HashMap::new(), } } } impl<T> Deref for MozMap<T> { type Target = HashMap<DOMString, T>; fn deref(&self) -> &HashMap<DOMString, T> { &self.map } } impl<T, C> FromJSValConvertible for MozMap<T> where T: FromJSValConvertible<Config = C>, C: Clone, { type Config = C; unsafe fn from_jsval( cx: *mut JSContext, value: HandleValue, config: C, ) -> Result<ConversionResult<Self>, ()> { if!value.is_object() { return Ok(ConversionResult::Failure( "MozMap value was not an object".into(), )); } rooted!(in(cx) let object = value.to_object()); let ids = IdVector::new(cx); if!GetPropertyKeys( cx, object.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, ids.get(), ) { // TODO: can GetPropertyKeys fail? // (it does so if the object has duplicate keys) // https://github.com/servo/servo/issues/21462 report_pending_exception(cx, false); return Ok(ConversionResult::Failure( "Getting MozMap value property keys failed".into(), )); } let mut map = HashMap::new(); for id in &*ids { rooted!(in(cx) let id = *id); rooted!(in(cx) let mut property = UndefinedValue()); if!JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) { return Err(()); } let property = match T::from_jsval(cx, property.handle(), config.clone())? { ConversionResult::Success(property) => property, ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)), }; // TODO: Is this guaranteed to succeed? // https://github.com/servo/servo/issues/21463 if let Some(key) = jsid_to_string(cx, id.handle()) { map.insert(key, property); } } Ok(ConversionResult::Success(MozMap { map: map })) } } impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> { #[inline] unsafe fn to_jsval(&self, cx: *mut JSContext, mut rval: MutableHandleValue) { rooted!(in(cx) let js_object = JS_NewPlainObject(cx)); assert!(!js_object.handle().is_null()); rooted!(in(cx) let mut js_value = UndefinedValue()); for (key, value) in &self.map { let key = key.encode_utf16().collect::<Vec<_>>(); value.to_jsval(cx, js_value.handle_mut()); assert!(JS_DefineUCProperty2( cx, js_object.handle(), key.as_ptr(), key.len(), js_value.handle(), JSPROP_ENUMERATE as u32 )); } rval.set(ObjectValue(js_object.handle().get())); } }
/// The `MozMap` (open-ended dictionary) type. #[derive(Clone, JSTraceable)] pub struct MozMap<T> { map: HashMap<DOMString, T>, }
random_line_split
mozmap.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `MozMap` (open-ended dictionary) type. use crate::dom::bindings::conversions::jsid_to_string; use crate::dom::bindings::error::report_pending_exception; use crate::dom::bindings::str::DOMString; use js::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible}; use js::jsapi::JSContext; use js::jsapi::JS_NewPlainObject; use js::jsapi::JSITER_HIDDEN; use js::jsapi::JSITER_OWNONLY; use js::jsapi::JSITER_SYMBOLS; use js::jsapi::JSPROP_ENUMERATE; use js::jsval::ObjectValue; use js::jsval::UndefinedValue; use js::rust::wrappers::GetPropertyKeys; use js::rust::wrappers::JS_DefineUCProperty2; use js::rust::wrappers::JS_GetPropertyById; use js::rust::HandleValue; use js::rust::IdVector; use js::rust::MutableHandleValue; use std::collections::HashMap; use std::ops::Deref; /// The `MozMap` (open-ended dictionary) type. #[derive(Clone, JSTraceable)] pub struct MozMap<T> { map: HashMap<DOMString, T>, } impl<T> MozMap<T> { /// Create an empty `MozMap`. pub fn new() -> Self { MozMap { map: HashMap::new(), } } } impl<T> Deref for MozMap<T> { type Target = HashMap<DOMString, T>; fn deref(&self) -> &HashMap<DOMString, T> { &self.map } } impl<T, C> FromJSValConvertible for MozMap<T> where T: FromJSValConvertible<Config = C>, C: Clone, { type Config = C; unsafe fn from_jsval( cx: *mut JSContext, value: HandleValue, config: C, ) -> Result<ConversionResult<Self>, ()> { if!value.is_object() { return Ok(ConversionResult::Failure( "MozMap value was not an object".into(), )); } rooted!(in(cx) let object = value.to_object()); let ids = IdVector::new(cx); if!GetPropertyKeys( cx, object.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, ids.get(), ) { // TODO: can GetPropertyKeys fail? // (it does so if the object has duplicate keys) // https://github.com/servo/servo/issues/21462 report_pending_exception(cx, false); return Ok(ConversionResult::Failure( "Getting MozMap value property keys failed".into(), )); } let mut map = HashMap::new(); for id in &*ids { rooted!(in(cx) let id = *id); rooted!(in(cx) let mut property = UndefinedValue()); if!JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) { return Err(()); } let property = match T::from_jsval(cx, property.handle(), config.clone())? { ConversionResult::Success(property) => property, ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)), }; // TODO: Is this guaranteed to succeed? // https://github.com/servo/servo/issues/21463 if let Some(key) = jsid_to_string(cx, id.handle()) { map.insert(key, property); } } Ok(ConversionResult::Success(MozMap { map: map })) } } impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> { #[inline] unsafe fn to_jsval(&self, cx: *mut JSContext, mut rval: MutableHandleValue)
} }
{ rooted!(in(cx) let js_object = JS_NewPlainObject(cx)); assert!(!js_object.handle().is_null()); rooted!(in(cx) let mut js_value = UndefinedValue()); for (key, value) in &self.map { let key = key.encode_utf16().collect::<Vec<_>>(); value.to_jsval(cx, js_value.handle_mut()); assert!(JS_DefineUCProperty2( cx, js_object.handle(), key.as_ptr(), key.len(), js_value.handle(), JSPROP_ENUMERATE as u32 )); } rval.set(ObjectValue(js_object.handle().get()));
identifier_body
mozmap.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `MozMap` (open-ended dictionary) type. use crate::dom::bindings::conversions::jsid_to_string; use crate::dom::bindings::error::report_pending_exception; use crate::dom::bindings::str::DOMString; use js::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible}; use js::jsapi::JSContext; use js::jsapi::JS_NewPlainObject; use js::jsapi::JSITER_HIDDEN; use js::jsapi::JSITER_OWNONLY; use js::jsapi::JSITER_SYMBOLS; use js::jsapi::JSPROP_ENUMERATE; use js::jsval::ObjectValue; use js::jsval::UndefinedValue; use js::rust::wrappers::GetPropertyKeys; use js::rust::wrappers::JS_DefineUCProperty2; use js::rust::wrappers::JS_GetPropertyById; use js::rust::HandleValue; use js::rust::IdVector; use js::rust::MutableHandleValue; use std::collections::HashMap; use std::ops::Deref; /// The `MozMap` (open-ended dictionary) type. #[derive(Clone, JSTraceable)] pub struct MozMap<T> { map: HashMap<DOMString, T>, } impl<T> MozMap<T> { /// Create an empty `MozMap`. pub fn new() -> Self { MozMap { map: HashMap::new(), } } } impl<T> Deref for MozMap<T> { type Target = HashMap<DOMString, T>; fn deref(&self) -> &HashMap<DOMString, T> { &self.map } } impl<T, C> FromJSValConvertible for MozMap<T> where T: FromJSValConvertible<Config = C>, C: Clone, { type Config = C; unsafe fn from_jsval( cx: *mut JSContext, value: HandleValue, config: C, ) -> Result<ConversionResult<Self>, ()> { if!value.is_object()
rooted!(in(cx) let object = value.to_object()); let ids = IdVector::new(cx); if!GetPropertyKeys( cx, object.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, ids.get(), ) { // TODO: can GetPropertyKeys fail? // (it does so if the object has duplicate keys) // https://github.com/servo/servo/issues/21462 report_pending_exception(cx, false); return Ok(ConversionResult::Failure( "Getting MozMap value property keys failed".into(), )); } let mut map = HashMap::new(); for id in &*ids { rooted!(in(cx) let id = *id); rooted!(in(cx) let mut property = UndefinedValue()); if!JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) { return Err(()); } let property = match T::from_jsval(cx, property.handle(), config.clone())? { ConversionResult::Success(property) => property, ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)), }; // TODO: Is this guaranteed to succeed? // https://github.com/servo/servo/issues/21463 if let Some(key) = jsid_to_string(cx, id.handle()) { map.insert(key, property); } } Ok(ConversionResult::Success(MozMap { map: map })) } } impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> { #[inline] unsafe fn to_jsval(&self, cx: *mut JSContext, mut rval: MutableHandleValue) { rooted!(in(cx) let js_object = JS_NewPlainObject(cx)); assert!(!js_object.handle().is_null()); rooted!(in(cx) let mut js_value = UndefinedValue()); for (key, value) in &self.map { let key = key.encode_utf16().collect::<Vec<_>>(); value.to_jsval(cx, js_value.handle_mut()); assert!(JS_DefineUCProperty2( cx, js_object.handle(), key.as_ptr(), key.len(), js_value.handle(), JSPROP_ENUMERATE as u32 )); } rval.set(ObjectValue(js_object.handle().get())); } }
{ return Ok(ConversionResult::Failure( "MozMap value was not an object".into(), )); }
conditional_block
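Editor's note: from_jsval above distinguishes three outcomes — a clean conversion, a shape failure reported as ConversionResult::Failure, and Err(()) when a JS exception is pending. The helper below is a hypothetical caller-side sketch (not part of this file) that collapses them.

fn into_option<T>(result: Result<ConversionResult<MozMap<T>>, ()>) -> Option<MozMap<T>> {
    match result {
        Ok(ConversionResult::Success(map)) => Some(map), // the JS object converted cleanly
        Ok(ConversionResult::Failure(_why)) => None,     // wrong shape, e.g. not an object
        Err(()) => None,                                 // a JS exception is pending on the context
    }
}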
ssh_key.rs
// id number This is a unique identification number for the // key. This can be used to reference a specific SSH key when you wish to embed // a key into a Droplet. // fingerprint string This attribute contains the fingerprint value // that is generated from the public key. This is a unique identifier that will // differentiate it from other keys using a format that SSH recognizes. // public_key string This attribute contains the entire public key // string that was uploaded. This is what is embedded into the root user's // authorized_keys file if you choose to include this SSH key during Droplet // creation. // name string This is the human-readable display name for the // given SSH key. This is used to easily identify the SSH keys when they are // displayed. use std::fmt; use std::borrow::Cow; use response::NamedResponse; use response; #[derive(Deserialize, Debug)] pub struct SshKey { pub id: f64, pub fingerprint: String, pub public_key: String, pub name: String, } impl response::NotArray for SshKey {} impl NamedResponse for SshKey { fn name<'a>() -> Cow<'a, str>
} impl fmt::Display for SshKey { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ID: {:.0}\n\ Fingerprint: {}\n\ Public Key: {}\n\ Name: {}", self.id, self.fingerprint, self.public_key, self.name) } } pub type SshKeys = Vec<SshKey>;
{ "ssh_key".into() }
identifier_body
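Editor's note: every field of SshKey is public, so the Display impl above is easy to exercise directly; the values below are made up purely for illustration.

let key = SshKey {
    id: 512190.0,
    fingerprint: "3b:16:bf:e4:8b:00:8b:b8:59:8c:a9:d3:f0:19:45:fa".to_string(),
    public_key: "ssh-rsa AAAAB3NzaC1yc2E... example".to_string(),
    name: "My SSH Public Key".to_string(),
};
// Uses the fmt::Display impl above: one labelled line per field,
// with the f64 id printed without decimals ({:.0}).
println!("{}", key);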
ssh_key.rs
// id number This is a unique identification number for the // key. This can be used to reference a specific SSH key when you wish to embed // a key into a Droplet. // fingerprint string This attribute contains the fingerprint value // that is generated from the public key. This is a unique identifier that will // differentiate it from other keys using a format that SSH recognizes. // public_key string This attribute contains the entire public key // string that was uploaded. This is what is embedded into the root user's // authorized_keys file if you choose to include this SSH key during Droplet // creation. // name string This is the human-readable display name for the // given SSH key. This is used to easily identify the SSH keys when they are // displayed. use std::fmt; use std::borrow::Cow; use response::NamedResponse; use response; #[derive(Deserialize, Debug)] pub struct SshKey { pub id: f64, pub fingerprint: String, pub public_key: String, pub name: String, } impl response::NotArray for SshKey {} impl NamedResponse for SshKey { fn
<'a>() -> Cow<'a, str> { "ssh_key".into() } } impl fmt::Display for SshKey { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ID: {:.0}\n\ Fingerprint: {}\n\ Public Key: {}\n\ Name: {}", self.id, self.fingerprint, self.public_key, self.name) } } pub type SshKeys = Vec<SshKey>;
name
identifier_name
ssh_key.rs
// id number This is a unique identification number for the // key. This can be used to reference a specific SSH key when you wish to embed // a key into a Droplet. // fingerprint string This attribute contains the fingerprint value // that is generated from the public key. This is a unique identifier that will // differentiate it from other keys using a format that SSH recognizes. // public_key string This attribute contains the entire public key // string that was uploaded. This is what is embedded into the root user's // authorized_keys file if you choose to include this SSH key during Droplet // creation. // name string This is the human-readable display name for the // given SSH key. This is used to easily identify the SSH keys when they are // displayed. use std::fmt; use std::borrow::Cow; use response::NamedResponse;
use response; #[derive(Deserialize, Debug)] pub struct SshKey { pub id: f64, pub fingerprint: String, pub public_key: String, pub name: String, } impl response::NotArray for SshKey {} impl NamedResponse for SshKey { fn name<'a>() -> Cow<'a, str> { "ssh_key".into() } } impl fmt::Display for SshKey { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ID: {:.0}\n\ Fingerprint: {}\n\ Public Key: {}\n\ Name: {}", self.id, self.fingerprint, self.public_key, self.name) } } pub type SshKeys = Vec<SshKey>;
random_line_split
x86stdcall.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // GetLastError doesn't seem to work with stack switching #[cfg(windows)] mod kernel32 { extern "system" { pub fn SetLastError(err: uint); pub fn GetLastError() -> uint; } } #[cfg(windows)] pub fn main()
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "freebsd", target_os = "dragonfly", target_os = "bitrig", target_os = "openbsd", target_os = "android"))] pub fn main() { }
{ unsafe { let expected = 1234_usize; kernel32::SetLastError(expected); let actual = kernel32::GetLastError(); println!("actual = {}", actual); assert_eq!(expected, actual); } }
identifier_body
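Editor's note: the declarations in x86stdcall.rs use the pre-1.0 `uint` type. For reference, the same Win32 imports in present-day Rust look like the sketch below; SetLastError/GetLastError take and return a 32-bit DWORD, so u32 is the correct width, and the calls still have to be wrapped in unsafe.

#[cfg(windows)]
mod kernel32 {
    #[link(name = "kernel32")]
    extern "system" {
        pub fn SetLastError(err: u32);
        pub fn GetLastError() -> u32;
    }
}

#[cfg(windows)]
fn roundtrip() -> bool {
    unsafe {
        kernel32::SetLastError(1234);
        kernel32::GetLastError() == 1234
    }
}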
x86stdcall.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // GetLastError doesn't seem to work with stack switching #[cfg(windows)] mod kernel32 { extern "system" { pub fn SetLastError(err: uint); pub fn GetLastError() -> uint; } } #[cfg(windows)] pub fn
() { unsafe { let expected = 1234_usize; kernel32::SetLastError(expected); let actual = kernel32::GetLastError(); println!("actual = {}", actual); assert_eq!(expected, actual); } } #[cfg(any(target_os = "macos", target_os = "linux", target_os = "freebsd", target_os = "dragonfly", target_os = "bitrig", target_os = "openbsd", target_os = "android"))] pub fn main() { }
main
identifier_name
x86stdcall.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // GetLastError doesn't seem to work with stack switching #[cfg(windows)] mod kernel32 { extern "system" { pub fn SetLastError(err: uint); pub fn GetLastError() -> uint; } } #[cfg(windows)] pub fn main() { unsafe { let expected = 1234_usize; kernel32::SetLastError(expected); let actual = kernel32::GetLastError(); println!("actual = {}", actual);
target_os = "linux", target_os = "freebsd", target_os = "dragonfly", target_os = "bitrig", target_os = "openbsd", target_os = "android"))] pub fn main() { }
assert_eq!(expected, actual); } } #[cfg(any(target_os = "macos",
random_line_split
TestMad.rs
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Don't edit this file! It is auto-generated by frameworks/rs/api/generate.sh. #pragma version(1) #pragma rs java_package_name(android.renderscript.cts) rs_allocation gAllocInMultiplicand2; rs_allocation gAllocInOffset; float __attribute__((kernel)) testMadFloatFloatFloatFloat(float inMultiplicand1, unsigned int x) { float inMultiplicand2 = rsGetElementAt_float(gAllocInMultiplicand2, x); float inOffset = rsGetElementAt_float(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset); } float2 __attribute__((kernel)) testMadFloat2Float2Float2Float2(float2 inMultiplicand1, unsigned int x) { float2 inMultiplicand2 = rsGetElementAt_float2(gAllocInMultiplicand2, x); float2 inOffset = rsGetElementAt_float2(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset);
float3 __attribute__((kernel)) testMadFloat3Float3Float3Float3(float3 inMultiplicand1, unsigned int x) { float3 inMultiplicand2 = rsGetElementAt_float3(gAllocInMultiplicand2, x); float3 inOffset = rsGetElementAt_float3(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset); } float4 __attribute__((kernel)) testMadFloat4Float4Float4Float4(float4 inMultiplicand1, unsigned int x) { float4 inMultiplicand2 = rsGetElementAt_float4(gAllocInMultiplicand2, x); float4 inOffset = rsGetElementAt_float4(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset); } half __attribute__((kernel)) testMadHalfHalfHalfHalf(half inMultiplicand1, unsigned int x) { half inMultiplicand2 = rsGetElementAt_half(gAllocInMultiplicand2, x); half inOffset = rsGetElementAt_half(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset); } half2 __attribute__((kernel)) testMadHalf2Half2Half2Half2(half2 inMultiplicand1, unsigned int x) { half2 inMultiplicand2 = rsGetElementAt_half2(gAllocInMultiplicand2, x); half2 inOffset = rsGetElementAt_half2(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset); } half3 __attribute__((kernel)) testMadHalf3Half3Half3Half3(half3 inMultiplicand1, unsigned int x) { half3 inMultiplicand2 = rsGetElementAt_half3(gAllocInMultiplicand2, x); half3 inOffset = rsGetElementAt_half3(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset); } half4 __attribute__((kernel)) testMadHalf4Half4Half4Half4(half4 inMultiplicand1, unsigned int x) { half4 inMultiplicand2 = rsGetElementAt_half4(gAllocInMultiplicand2, x); half4 inOffset = rsGetElementAt_half4(gAllocInOffset, x); return mad(inMultiplicand1, inMultiplicand2, inOffset); }
}
random_line_split
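Editor's note: each TestMad kernel above computes the same thing at a different width: mad(m1, m2, offset) = m1 * m2 + offset. Expressed in Rust for a single f32 below; mul_add is the fused analogue, and RenderScript's mad is allowed, but not required, to fuse.

fn mad(m1: f32, m2: f32, offset: f32) -> f32 {
    // multiplicand1 * multiplicand2 + offset, rounded once by the fused form
    m1.mul_add(m2, offset)
}

fn main() {
    assert_eq!(mad(2.0, 3.0, 0.5), 6.5); // 2 * 3 + 0.5
}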
main.rs
#![allow(unused_variables)] extern crate rustc_serialize; #[macro_use] extern crate serde_derive; extern crate docopt; extern crate sdl2; extern crate num; extern crate rand; #[macro_use] extern crate enum_primitive as ep; mod chip8; mod screen; mod instruction; use docopt::Docopt; use sdl2::keyboard::Keycode; use sdl2::event::Event; use chip8::Chip8; const USAGE: &'static str = " Chip8 Emulator Usage: chip8 <file> chip8 (-h | --help) chip8 (-v | --version) Options: -h --help Show this screen -v --version Show version "; #[derive(Debug, Deserialize)] struct Args { arg_file: String } fn main() { let args: Args = Docopt::new(USAGE) .and_then(|d| d.deserialize()) .unwrap_or_else(|e| e.exit()); let ctx = sdl2::init().unwrap(); let mut chip8 = Chip8::new(&ctx); let mut events = ctx.event_pump().unwrap(); chip8.load_rom(&args.arg_file); 'main: loop { for event in events.poll_iter() { match event { Event::Quit{..} => break'main, Event::KeyDown { keycode: Some(key),.. } => { chip8.reset_keys(); match key { Keycode::Num1 => chip8.press(0x1), Keycode::Num2 => chip8.press(0x2), Keycode::Num3 => chip8.press(0x3), Keycode::Num4 => chip8.press(0xc),
Keycode::Q => chip8.press(0x4), Keycode::W => chip8.press(0x5), Keycode::E => chip8.press(0x6), Keycode::R => chip8.press(0xd), Keycode::A => chip8.press(0x7), Keycode::S => chip8.press(0x8), Keycode::D => chip8.press(0x9), Keycode::Z => chip8.press(0x0), Keycode::X => chip8.press(0xb), Keycode::F => chip8.press(0xf), _ => {}, } }, _ => {}, } } chip8.run(); } println!("Exiting.."); }
random_line_split
main.rs
#![allow(unused_variables)] extern crate rustc_serialize; #[macro_use] extern crate serde_derive; extern crate docopt; extern crate sdl2; extern crate num; extern crate rand; #[macro_use] extern crate enum_primitive as ep; mod chip8; mod screen; mod instruction; use docopt::Docopt; use sdl2::keyboard::Keycode; use sdl2::event::Event; use chip8::Chip8; const USAGE: &'static str = " Chip8 Emulator Usage: chip8 <file> chip8 (-h | --help) chip8 (-v | --version) Options: -h --help Show this screen -v --version Show version "; #[derive(Debug, Deserialize)] struct Args { arg_file: String } fn
() { let args: Args = Docopt::new(USAGE) .and_then(|d| d.deserialize()) .unwrap_or_else(|e| e.exit()); let ctx = sdl2::init().unwrap(); let mut chip8 = Chip8::new(&ctx); let mut events = ctx.event_pump().unwrap(); chip8.load_rom(&args.arg_file); 'main: loop { for event in events.poll_iter() { match event { Event::Quit{..} => break'main, Event::KeyDown { keycode: Some(key),.. } => { chip8.reset_keys(); match key { Keycode::Num1 => chip8.press(0x1), Keycode::Num2 => chip8.press(0x2), Keycode::Num3 => chip8.press(0x3), Keycode::Num4 => chip8.press(0xc), Keycode::Q => chip8.press(0x4), Keycode::W => chip8.press(0x5), Keycode::E => chip8.press(0x6), Keycode::R => chip8.press(0xd), Keycode::A => chip8.press(0x7), Keycode::S => chip8.press(0x8), Keycode::D => chip8.press(0x9), Keycode::Z => chip8.press(0x0), Keycode::X => chip8.press(0xb), Keycode::F => chip8.press(0xf), _ => {}, } }, _ => {}, } } chip8.run(); } println!("Exiting.."); }
main
identifier_name
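Editor's note: the two main.rs rows above spell out the QWERTY-to-CHIP-8 keypad mapping inline in the event loop. The helper below restates that table using exactly the pairs that appear in the source; note the source binds only 14 keys, leaving 0xA and 0xE unmapped.

// Keycode is sdl2's enum, as imported at the top of main.rs.
fn chip8_key(key: Keycode) -> Option<u8> {
    match key {
        Keycode::Num1 => Some(0x1), Keycode::Num2 => Some(0x2),
        Keycode::Num3 => Some(0x3), Keycode::Num4 => Some(0xc),
        Keycode::Q => Some(0x4),    Keycode::W => Some(0x5),
        Keycode::E => Some(0x6),    Keycode::R => Some(0xd),
        Keycode::A => Some(0x7),    Keycode::S => Some(0x8),
        Keycode::D => Some(0x9),    Keycode::Z => Some(0x0),
        Keycode::X => Some(0xb),    Keycode::F => Some(0xf),
        _ => None,
    }
}
// In the event loop: if let Some(k) = chip8_key(key) { chip8.press(k); }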
trace.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Utilities for tracing JS-managed values. //! //! The lifetime of DOM objects is managed by the SpiderMonkey Garbage //! Collector. A rooted DOM object implementing the interface `Foo` is traced //! as follows: //! //! 1. The GC calls `_trace` defined in `FooBinding` during the marking //! phase. (This happens through `JSClass.trace` for non-proxy bindings, and //! through `ProxyTraps.trace` otherwise.) //! 2. `_trace` calls `Foo::trace()` (an implementation of `JSTraceable`). //! This is typically derived via a `#[dom_struct]` //! (implies `#[derive(JSTraceable)]`) annotation. //! Non-JS-managed types have an empty inline `trace()` method, //! achieved via `no_jsmanaged_fields!` or similar. //! 3. For all fields, `Foo::trace()` //! calls `trace()` on the field. //! For example, for fields of type `JS<T>`, `JS<T>::trace()` calls //! `trace_reflector()`. //! 4. `trace_reflector()` calls `JS_CallUnbarrieredObjectTracer()` with a //! pointer to the `JSObject` for the reflector. This notifies the GC, which //! will add the object to the graph, and will trace that object as well. //! 5. When the GC finishes tracing, it [`finalizes`](../index.html#destruction) //! any reflectors that were not reachable. //! //! The `no_jsmanaged_fields!()` macro adds an empty implementation of `JSTraceable` to //! a datatype. use canvas_traits::WebGLError; use canvas_traits::{CanvasGradientStop, LinearGradientStyle, RadialGradientStyle}; use canvas_traits::{CompositionOrBlending, LineCapStyle, LineJoinStyle, RepetitionStyle}; use cssparser::RGBA; use devtools_traits::WorkerId; use dom::bindings::js::{JS, Root}; use dom::bindings::refcounted::Trusted; use dom::bindings::reflector::{Reflectable, Reflector}; use dom::bindings::utils::WindowProxyHandler; use encoding::types::EncodingRef; use euclid::length::Length as EuclidLength; use euclid::matrix2d::Matrix2D; use euclid::rect::Rect; use euclid::size::Size2D; use html5ever::tree_builder::QuirksMode; use hyper::header::Headers; use hyper::method::Method; use hyper::mime::Mime; use ipc_channel::ipc::{IpcReceiver, IpcSender}; use js::jsapi::JS_CallUnbarrieredObjectTracer; use js::jsapi::{GCTraceKindToAscii, Heap, JSGCTraceKind, JSObject, JSTracer, JS_CallObjectTracer, JS_CallValueTracer}; use js::jsval::JSVal; use js::rust::Runtime; use layout_interface::{LayoutChan, LayoutRPC}; use libc; use msg::constellation_msg::ConstellationChan; use msg::constellation_msg::{PipelineId, SubpageId, WindowSizeData}; use net_traits::Metadata; use net_traits::image::base::Image; use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask}; use net_traits::storage_task::StorageType; use profile_traits::mem::ProfilerChan as MemProfilerChan; use profile_traits::time::ProfilerChan as TimeProfilerChan; use script_task::ScriptChan; use script_traits::{LayoutMsg, ScriptMsg, TimerEventId, TimerSource, UntrustedNodeAddress}; use selectors::parser::PseudoElement; use selectors::states::*; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::boxed::FnBox; use std::cell::{Cell, UnsafeCell}; use std::collections::hash_state::HashState; use std::collections::{HashMap, HashSet}; use std::ffi::CString; use std::hash::{Hash, Hasher}; use std::intrinsics::return_address; use std::iter::{FromIterator, IntoIterator}; use std::mem; use std::ops::{Deref, DerefMut}; use 
std::rc::Rc; use std::sync::Arc; use std::sync::atomic::AtomicBool; use std::sync::mpsc::{Receiver, Sender}; use string_cache::{Atom, Namespace, QualName}; use style::attr::{AttrIdentifier, AttrValue}; use style::properties::PropertyDeclarationBlock; use style::restyle_hints::ElementSnapshot; use style::values::specified::Length; use url::Url; use util::str::{DOMString, LengthOrPercentageOrAuto}; use uuid::Uuid; /// A trait to allow tracing (only) DOM objects. pub trait JSTraceable { /// Trace `self`. fn trace(&self, trc: *mut JSTracer); } no_jsmanaged_fields!(EncodingRef); no_jsmanaged_fields!(Reflector); /// Trace a `JSVal`. pub fn trace_jsval(tracer: *mut JSTracer, description: &str, val: &Heap<JSVal>) { unsafe { if!val.get().is_markable() { return; } let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing value {}", description); JS_CallValueTracer(tracer, val.ptr.get() as *mut _, GCTraceKindToAscii(val.get().trace_kind())); } } /// Trace the `JSObject` held by `reflector`. #[allow(unrooted_must_root)] pub fn trace_reflector(tracer: *mut JSTracer, description: &str, reflector: &Reflector) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing reflector {}", description); JS_CallUnbarrieredObjectTracer(tracer, reflector.rootable(), GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } /// Trace a `JSObject`. pub fn trace_object(tracer: *mut JSTracer, description: &str, obj: &Heap<*mut JSObject>) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing {}", description); JS_CallObjectTracer(tracer, obj.ptr.get() as *mut _, GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } impl<T: JSTraceable> JSTraceable for Rc<T> { fn trace(&self, trc: *mut JSTracer) { (**self).trace(trc) } } impl<T: JSTraceable> JSTraceable for Box<T> { fn trace(&self, trc: *mut JSTracer)
} impl<T: JSTraceable + Copy> JSTraceable for Cell<T> { fn trace(&self, trc: *mut JSTracer) { self.get().trace(trc) } } impl<T: JSTraceable> JSTraceable for UnsafeCell<T> { fn trace(&self, trc: *mut JSTracer) { unsafe { (*self.get()).trace(trc) } } } impl JSTraceable for Heap<*mut JSObject> { fn trace(&self, trc: *mut JSTracer) { if self.get().is_null() { return; } trace_object(trc, "object", self); } } impl JSTraceable for Heap<JSVal> { fn trace(&self, trc: *mut JSTracer) { trace_jsval(trc, "val", self); } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable> JSTraceable for Vec<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in &*self { e.trace(trc); } } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable +'static> JSTraceable for SmallVec<[T; 1]> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in self.iter() { e.trace(trc); } } } impl<T: JSTraceable> JSTraceable for Option<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { self.as_ref().map(|e| e.trace(trc)); } } impl<T: JSTraceable, U: JSTraceable> JSTraceable for Result<T, U> { #[inline] fn trace(&self, trc: *mut JSTracer) { match *self { Ok(ref inner) => inner.trace(trc), Err(ref inner) => inner.trace(trc), } } } impl<K, V, S> JSTraceable for HashMap<K, V, S> where K: Hash + Eq + JSTraceable, V: JSTraceable, S: HashState, <S as HashState>::Hasher: Hasher, { #[inline] fn trace(&self, trc: *mut JSTracer) { for (k, v) in &*self { k.trace(trc); v.trace(trc); } } } impl<A: JSTraceable, B: JSTraceable> JSTraceable for (A, B) { #[inline] fn trace(&self, trc: *mut JSTracer) { let (ref a, ref b) = *self; a.trace(trc); b.trace(trc); } } no_jsmanaged_fields!(bool, f32, f64, String, Url, AtomicBool, Uuid); no_jsmanaged_fields!(usize, u8, u16, u32, u64); no_jsmanaged_fields!(isize, i8, i16, i32, i64); no_jsmanaged_fields!(Sender<T>); no_jsmanaged_fields!(Receiver<T>); no_jsmanaged_fields!(Rect<T>); no_jsmanaged_fields!(Size2D<T>); no_jsmanaged_fields!(Arc<T>); no_jsmanaged_fields!(Image, ImageCacheChan, ImageCacheTask); no_jsmanaged_fields!(Metadata); no_jsmanaged_fields!(Atom, Namespace, QualName); no_jsmanaged_fields!(Trusted<T: Reflectable>); no_jsmanaged_fields!(PropertyDeclarationBlock); no_jsmanaged_fields!(HashSet<T>); // These three are interdependent, if you plan to put jsmanaged data // in one of these make sure it is propagated properly to containing structs no_jsmanaged_fields!(SubpageId, WindowSizeData, PipelineId); no_jsmanaged_fields!(TimerEventId, TimerSource); no_jsmanaged_fields!(WorkerId); no_jsmanaged_fields!(QuirksMode); no_jsmanaged_fields!(Runtime); no_jsmanaged_fields!(Headers, Method); no_jsmanaged_fields!(LayoutChan); no_jsmanaged_fields!(WindowProxyHandler); no_jsmanaged_fields!(UntrustedNodeAddress); no_jsmanaged_fields!(LengthOrPercentageOrAuto); no_jsmanaged_fields!(RGBA); no_jsmanaged_fields!(EuclidLength<Unit, T>); no_jsmanaged_fields!(Matrix2D<T>); no_jsmanaged_fields!(StorageType); no_jsmanaged_fields!(CanvasGradientStop, LinearGradientStyle, RadialGradientStyle); no_jsmanaged_fields!(LineCapStyle, LineJoinStyle, CompositionOrBlending); no_jsmanaged_fields!(RepetitionStyle); no_jsmanaged_fields!(WebGLError); no_jsmanaged_fields!(TimeProfilerChan); no_jsmanaged_fields!(MemProfilerChan); no_jsmanaged_fields!(PseudoElement); no_jsmanaged_fields!(Length); 
no_jsmanaged_fields!(ElementState); no_jsmanaged_fields!(DOMString); no_jsmanaged_fields!(Mime); no_jsmanaged_fields!(AttrIdentifier); no_jsmanaged_fields!(AttrValue); no_jsmanaged_fields!(ElementSnapshot); impl JSTraceable for ConstellationChan<ScriptMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for ConstellationChan<LayoutMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<ScriptChan + Send> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<FnBox(f64, )> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl<'a> JSTraceable for &'a str { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<A, B> JSTraceable for fn(A) -> B { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcSender<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<LayoutRPC +'static> { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for () { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcReceiver<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } /// Homemade trait object for JSTraceable things struct TraceableInfo { pub ptr: *const libc::c_void, pub trace: fn(obj: *const libc::c_void, tracer: *mut JSTracer), } /// Holds a set of JSTraceables that need to be rooted pub struct RootedTraceableSet { set: Vec<TraceableInfo>, } #[allow(missing_docs)] // FIXME mod dummy { // Attributes don’t apply through the macro. use std::cell::RefCell; use std::rc::Rc; use super::RootedTraceableSet; /// TLV Holds a set of JSTraceables that need to be rooted thread_local!(pub static ROOTED_TRACEABLES: Rc<RefCell<RootedTraceableSet>> = Rc::new(RefCell::new(RootedTraceableSet::new()))); } pub use self::dummy::ROOTED_TRACEABLES; impl RootedTraceableSet { fn new() -> RootedTraceableSet { RootedTraceableSet { set: vec![], } } unsafe fn remove<T: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { let mut traceables = traceables.borrow_mut(); let idx = match traceables.set.iter() .rposition(|x| x.ptr == traceable as *const T as *const _) { Some(idx) => idx, None => unreachable!(), }; traceables.set.remove(idx); }); } unsafe fn add<T: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { fn trace<T: JSTraceable>(obj: *const libc::c_void, tracer: *mut JSTracer) { let obj: &T = unsafe { &*(obj as *const T) }; obj.trace(tracer); } let mut traceables = traceables.borrow_mut(); let info = TraceableInfo { ptr: traceable as *const T as *const libc::c_void, trace: trace::<T>, }; traceables.set.push(info); }) } unsafe fn trace(&self, tracer: *mut JSTracer) { for info in &self.set { (info.trace)(info.ptr, tracer); } } } /// Roots any JSTraceable thing /// /// If you have a valid Reflectable, use Root. /// If you have GC things like *mut JSObject or JSVal, use jsapi::Rooted. /// If you have an arbitrary number of Reflectables to root, use RootedVec<JS<T>> /// If you know what you're doing, use this. 
#[derive(JSTraceable)] pub struct RootedTraceable<'a, T: 'a + JSTraceable> { ptr: &'a T, } impl<'a, T: JSTraceable> RootedTraceable<'a, T> { /// Root a JSTraceable thing for the life of this RootedTraceable pub fn new(traceable: &'a T) -> RootedTraceable<'a, T> { unsafe { RootedTraceableSet::add(traceable); } RootedTraceable { ptr: traceable, } } } impl<'a, T: JSTraceable> Drop for RootedTraceable<'a, T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self.ptr); } } } /// A vector of items that are rooted for the lifetime of this struct. #[allow(unrooted_must_root)] #[no_move] #[derive(JSTraceable)] #[allow_unrooted_interior] pub struct RootedVec<T: JSTraceable> { v: Vec<T>, } impl<T: JSTraceable> RootedVec<T> { /// Create a vector of items of type T that is rooted for /// the lifetime of this struct pub fn new() -> RootedVec<T> { let addr = unsafe { return_address() as *const libc::c_void }; unsafe { RootedVec::new_with_destination_address(addr) } } /// Create a vector of items of type T. This constructor is specific /// for RootTraceableSet. pub unsafe fn new_with_destination_address(addr: *const libc::c_void) -> RootedVec<T> { RootedTraceableSet::add::<RootedVec<T>>(&*(addr as *const _)); RootedVec::<T> { v: vec![], } } } impl<T: JSTraceable + Reflectable> RootedVec<JS<T>> { /// Obtain a safe slice of references that can't outlive that RootedVec. pub fn r(&self) -> &[&T] { unsafe { mem::transmute(&self.v[..]) } } } impl<T: JSTraceable> Drop for RootedVec<T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self); } } } impl<T: JSTraceable> Deref for RootedVec<T> { type Target = Vec<T>; fn deref(&self) -> &Vec<T> { &self.v } } impl<T: JSTraceable> DerefMut for RootedVec<T> { fn deref_mut(&mut self) -> &mut Vec<T> { &mut self.v } } impl<A: JSTraceable + Reflectable> FromIterator<Root<A>> for RootedVec<JS<A>> { #[allow(moved_no_move)] fn from_iter<T>(iterable: T) -> RootedVec<JS<A>> where T: IntoIterator<Item = Root<A>> { let mut vec = unsafe { RootedVec::new_with_destination_address(return_address() as *const libc::c_void) }; vec.extend(iterable.into_iter().map(|item| JS::from_rooted(&item))); vec } } /// SM Callback that traces the rooted traceables pub unsafe fn trace_traceables(tracer: *mut JSTracer) { ROOTED_TRACEABLES.with(|ref traceables| { let traceables = traceables.borrow(); traceables.trace(tracer); }); }
{ (**self).trace(trc) }
identifier_body
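The rows above from trace.rs hinge on a homemade trait object: RootedTraceableSet stores each rooted value as a type-erased pointer paired with a monomorphized function pointer that knows how to trace the original type. The standalone sketch below reproduces only that erasure trick; the Trace trait, EraseSet type and the Vec<String> "tracer" are illustrative stand-ins, not the SpiderMonkey types used in the rows.

// Stand-in trait; the real code traces into a SpiderMonkey JSTracer.
trait Trace {
    fn trace(&self, tracer: &mut Vec<String>);
}

struct Erased {
    // Type-erased pointer to the rooted value.
    ptr: *const (),
    // Monomorphized shim that casts back to the concrete type and traces it.
    trace: fn(*const (), &mut Vec<String>),
}

struct EraseSet {
    set: Vec<Erased>,
}

impl EraseSet {
    fn new() -> EraseSet {
        EraseSet { set: Vec::new() }
    }

    fn add<T: Trace>(&mut self, value: &T) {
        fn shim<T: Trace>(ptr: *const (), tracer: &mut Vec<String>) {
            // The real set relies on entries being removed before the value dies.
            let value: &T = unsafe { &*(ptr as *const T) };
            value.trace(tracer);
        }
        self.set.push(Erased { ptr: value as *const T as *const (), trace: shim::<T> });
    }

    fn remove<T: Trace>(&mut self, value: &T) {
        let target = value as *const T as *const ();
        if let Some(idx) = self.set.iter().rposition(|e| e.ptr == target) {
            self.set.remove(idx);
        }
    }

    fn trace_all(&self, tracer: &mut Vec<String>) {
        for entry in &self.set {
            (entry.trace)(entry.ptr, tracer);
        }
    }
}

struct Node(&'static str);

impl Trace for Node {
    fn trace(&self, tracer: &mut Vec<String>) {
        tracer.push(format!("traced {}", self.0));
    }
}

fn main() {
    let node = Node("a");
    let mut roots = EraseSet::new();
    roots.add(&node);

    let mut log = Vec::new();
    roots.trace_all(&mut log);
    assert_eq!(log, vec!["traced a".to_string()]);

    roots.remove(&node);
}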
trace.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Utilities for tracing JS-managed values. //! //! The lifetime of DOM objects is managed by the SpiderMonkey Garbage //! Collector. A rooted DOM object implementing the interface `Foo` is traced //! as follows: //! //! 1. The GC calls `_trace` defined in `FooBinding` during the marking //! phase. (This happens through `JSClass.trace` for non-proxy bindings, and //! through `ProxyTraps.trace` otherwise.) //! 2. `_trace` calls `Foo::trace()` (an implementation of `JSTraceable`). //! This is typically derived via a `#[dom_struct]` //! (implies `#[derive(JSTraceable)]`) annotation. //! Non-JS-managed types have an empty inline `trace()` method, //! achieved via `no_jsmanaged_fields!` or similar. //! 3. For all fields, `Foo::trace()` //! calls `trace()` on the field. //! For example, for fields of type `JS<T>`, `JS<T>::trace()` calls //! `trace_reflector()`. //! 4. `trace_reflector()` calls `JS_CallUnbarrieredObjectTracer()` with a //! pointer to the `JSObject` for the reflector. This notifies the GC, which //! will add the object to the graph, and will trace that object as well. //! 5. When the GC finishes tracing, it [`finalizes`](../index.html#destruction) //! any reflectors that were not reachable. //! //! The `no_jsmanaged_fields!()` macro adds an empty implementation of `JSTraceable` to //! a datatype. use canvas_traits::WebGLError; use canvas_traits::{CanvasGradientStop, LinearGradientStyle, RadialGradientStyle}; use canvas_traits::{CompositionOrBlending, LineCapStyle, LineJoinStyle, RepetitionStyle}; use cssparser::RGBA; use devtools_traits::WorkerId; use dom::bindings::js::{JS, Root}; use dom::bindings::refcounted::Trusted; use dom::bindings::reflector::{Reflectable, Reflector}; use dom::bindings::utils::WindowProxyHandler; use encoding::types::EncodingRef; use euclid::length::Length as EuclidLength; use euclid::matrix2d::Matrix2D; use euclid::rect::Rect; use euclid::size::Size2D; use html5ever::tree_builder::QuirksMode; use hyper::header::Headers; use hyper::method::Method; use hyper::mime::Mime; use ipc_channel::ipc::{IpcReceiver, IpcSender}; use js::jsapi::JS_CallUnbarrieredObjectTracer; use js::jsapi::{GCTraceKindToAscii, Heap, JSGCTraceKind, JSObject, JSTracer, JS_CallObjectTracer, JS_CallValueTracer}; use js::jsval::JSVal; use js::rust::Runtime; use layout_interface::{LayoutChan, LayoutRPC}; use libc; use msg::constellation_msg::ConstellationChan; use msg::constellation_msg::{PipelineId, SubpageId, WindowSizeData}; use net_traits::Metadata; use net_traits::image::base::Image; use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask}; use net_traits::storage_task::StorageType; use profile_traits::mem::ProfilerChan as MemProfilerChan; use profile_traits::time::ProfilerChan as TimeProfilerChan; use script_task::ScriptChan; use script_traits::{LayoutMsg, ScriptMsg, TimerEventId, TimerSource, UntrustedNodeAddress}; use selectors::parser::PseudoElement; use selectors::states::*; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::boxed::FnBox; use std::cell::{Cell, UnsafeCell}; use std::collections::hash_state::HashState; use std::collections::{HashMap, HashSet}; use std::ffi::CString; use std::hash::{Hash, Hasher}; use std::intrinsics::return_address; use std::iter::{FromIterator, IntoIterator}; use std::mem; use std::ops::{Deref, DerefMut}; use 
std::rc::Rc; use std::sync::Arc; use std::sync::atomic::AtomicBool; use std::sync::mpsc::{Receiver, Sender}; use string_cache::{Atom, Namespace, QualName}; use style::attr::{AttrIdentifier, AttrValue}; use style::properties::PropertyDeclarationBlock; use style::restyle_hints::ElementSnapshot; use style::values::specified::Length; use url::Url; use util::str::{DOMString, LengthOrPercentageOrAuto}; use uuid::Uuid; /// A trait to allow tracing (only) DOM objects. pub trait JSTraceable { /// Trace `self`. fn trace(&self, trc: *mut JSTracer); } no_jsmanaged_fields!(EncodingRef); no_jsmanaged_fields!(Reflector); /// Trace a `JSVal`. pub fn trace_jsval(tracer: *mut JSTracer, description: &str, val: &Heap<JSVal>) { unsafe { if!val.get().is_markable() { return; } let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing value {}", description); JS_CallValueTracer(tracer, val.ptr.get() as *mut _, GCTraceKindToAscii(val.get().trace_kind())); } } /// Trace the `JSObject` held by `reflector`. #[allow(unrooted_must_root)] pub fn trace_reflector(tracer: *mut JSTracer, description: &str, reflector: &Reflector) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing reflector {}", description); JS_CallUnbarrieredObjectTracer(tracer, reflector.rootable(), GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } /// Trace a `JSObject`. pub fn trace_object(tracer: *mut JSTracer, description: &str, obj: &Heap<*mut JSObject>) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing {}", description); JS_CallObjectTracer(tracer, obj.ptr.get() as *mut _, GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } impl<T: JSTraceable> JSTraceable for Rc<T> { fn trace(&self, trc: *mut JSTracer) { (**self).trace(trc) } } impl<T: JSTraceable> JSTraceable for Box<T> { fn trace(&self, trc: *mut JSTracer) { (**self).trace(trc) } } impl<T: JSTraceable + Copy> JSTraceable for Cell<T> { fn trace(&self, trc: *mut JSTracer) { self.get().trace(trc) } } impl<T: JSTraceable> JSTraceable for UnsafeCell<T> { fn trace(&self, trc: *mut JSTracer) { unsafe { (*self.get()).trace(trc) } } } impl JSTraceable for Heap<*mut JSObject> { fn trace(&self, trc: *mut JSTracer) { if self.get().is_null() { return; } trace_object(trc, "object", self); } } impl JSTraceable for Heap<JSVal> { fn trace(&self, trc: *mut JSTracer) { trace_jsval(trc, "val", self); } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable> JSTraceable for Vec<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in &*self { e.trace(trc); } } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable +'static> JSTraceable for SmallVec<[T; 1]> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in self.iter() { e.trace(trc); } } } impl<T: JSTraceable> JSTraceable for Option<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { self.as_ref().map(|e| e.trace(trc)); } } impl<T: JSTraceable, U: JSTraceable> JSTraceable for Result<T, U> { #[inline] fn 
trace(&self, trc: *mut JSTracer) { match *self { Ok(ref inner) => inner.trace(trc), Err(ref inner) => inner.trace(trc), } } } impl<K, V, S> JSTraceable for HashMap<K, V, S> where K: Hash + Eq + JSTraceable, V: JSTraceable, S: HashState, <S as HashState>::Hasher: Hasher, { #[inline] fn trace(&self, trc: *mut JSTracer) { for (k, v) in &*self { k.trace(trc); v.trace(trc); } } } impl<A: JSTraceable, B: JSTraceable> JSTraceable for (A, B) { #[inline] fn trace(&self, trc: *mut JSTracer) { let (ref a, ref b) = *self; a.trace(trc); b.trace(trc); } } no_jsmanaged_fields!(bool, f32, f64, String, Url, AtomicBool, Uuid); no_jsmanaged_fields!(usize, u8, u16, u32, u64); no_jsmanaged_fields!(isize, i8, i16, i32, i64); no_jsmanaged_fields!(Sender<T>); no_jsmanaged_fields!(Receiver<T>); no_jsmanaged_fields!(Rect<T>); no_jsmanaged_fields!(Size2D<T>); no_jsmanaged_fields!(Arc<T>); no_jsmanaged_fields!(Image, ImageCacheChan, ImageCacheTask); no_jsmanaged_fields!(Metadata); no_jsmanaged_fields!(Atom, Namespace, QualName); no_jsmanaged_fields!(Trusted<T: Reflectable>); no_jsmanaged_fields!(PropertyDeclarationBlock); no_jsmanaged_fields!(HashSet<T>); // These three are interdependent, if you plan to put jsmanaged data // in one of these make sure it is propagated properly to containing structs no_jsmanaged_fields!(SubpageId, WindowSizeData, PipelineId); no_jsmanaged_fields!(TimerEventId, TimerSource); no_jsmanaged_fields!(WorkerId); no_jsmanaged_fields!(QuirksMode); no_jsmanaged_fields!(Runtime); no_jsmanaged_fields!(Headers, Method); no_jsmanaged_fields!(LayoutChan); no_jsmanaged_fields!(WindowProxyHandler); no_jsmanaged_fields!(UntrustedNodeAddress); no_jsmanaged_fields!(LengthOrPercentageOrAuto); no_jsmanaged_fields!(RGBA); no_jsmanaged_fields!(EuclidLength<Unit, T>); no_jsmanaged_fields!(Matrix2D<T>); no_jsmanaged_fields!(StorageType); no_jsmanaged_fields!(CanvasGradientStop, LinearGradientStyle, RadialGradientStyle); no_jsmanaged_fields!(LineCapStyle, LineJoinStyle, CompositionOrBlending); no_jsmanaged_fields!(RepetitionStyle); no_jsmanaged_fields!(WebGLError); no_jsmanaged_fields!(TimeProfilerChan); no_jsmanaged_fields!(MemProfilerChan); no_jsmanaged_fields!(PseudoElement); no_jsmanaged_fields!(Length); no_jsmanaged_fields!(ElementState); no_jsmanaged_fields!(DOMString); no_jsmanaged_fields!(Mime); no_jsmanaged_fields!(AttrIdentifier); no_jsmanaged_fields!(AttrValue); no_jsmanaged_fields!(ElementSnapshot); impl JSTraceable for ConstellationChan<ScriptMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for ConstellationChan<LayoutMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<ScriptChan + Send> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<FnBox(f64, )> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl<'a> JSTraceable for &'a str { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<A, B> JSTraceable for fn(A) -> B { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcSender<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<LayoutRPC +'static> { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for () { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcReceiver<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // 
Do nothing } } /// Homemade trait object for JSTraceable things struct TraceableInfo { pub ptr: *const libc::c_void, pub trace: fn(obj: *const libc::c_void, tracer: *mut JSTracer), } /// Holds a set of JSTraceables that need to be rooted pub struct RootedTraceableSet { set: Vec<TraceableInfo>, } #[allow(missing_docs)] // FIXME mod dummy { // Attributes don’t apply through the macro. use std::cell::RefCell; use std::rc::Rc; use super::RootedTraceableSet; /// TLV Holds a set of JSTraceables that need to be rooted thread_local!(pub static ROOTED_TRACEABLES: Rc<RefCell<RootedTraceableSet>> = Rc::new(RefCell::new(RootedTraceableSet::new()))); } pub use self::dummy::ROOTED_TRACEABLES; impl RootedTraceableSet { fn new() -> RootedTraceableSet { RootedTraceableSet { set: vec![], } } unsafe fn re
: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { let mut traceables = traceables.borrow_mut(); let idx = match traceables.set.iter() .rposition(|x| x.ptr == traceable as *const T as *const _) { Some(idx) => idx, None => unreachable!(), }; traceables.set.remove(idx); }); } unsafe fn add<T: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { fn trace<T: JSTraceable>(obj: *const libc::c_void, tracer: *mut JSTracer) { let obj: &T = unsafe { &*(obj as *const T) }; obj.trace(tracer); } let mut traceables = traceables.borrow_mut(); let info = TraceableInfo { ptr: traceable as *const T as *const libc::c_void, trace: trace::<T>, }; traceables.set.push(info); }) } unsafe fn trace(&self, tracer: *mut JSTracer) { for info in &self.set { (info.trace)(info.ptr, tracer); } } } /// Roots any JSTraceable thing /// /// If you have a valid Reflectable, use Root. /// If you have GC things like *mut JSObject or JSVal, use jsapi::Rooted. /// If you have an arbitrary number of Reflectables to root, use RootedVec<JS<T>> /// If you know what you're doing, use this. #[derive(JSTraceable)] pub struct RootedTraceable<'a, T: 'a + JSTraceable> { ptr: &'a T, } impl<'a, T: JSTraceable> RootedTraceable<'a, T> { /// Root a JSTraceable thing for the life of this RootedTraceable pub fn new(traceable: &'a T) -> RootedTraceable<'a, T> { unsafe { RootedTraceableSet::add(traceable); } RootedTraceable { ptr: traceable, } } } impl<'a, T: JSTraceable> Drop for RootedTraceable<'a, T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self.ptr); } } } /// A vector of items that are rooted for the lifetime of this struct. #[allow(unrooted_must_root)] #[no_move] #[derive(JSTraceable)] #[allow_unrooted_interior] pub struct RootedVec<T: JSTraceable> { v: Vec<T>, } impl<T: JSTraceable> RootedVec<T> { /// Create a vector of items of type T that is rooted for /// the lifetime of this struct pub fn new() -> RootedVec<T> { let addr = unsafe { return_address() as *const libc::c_void }; unsafe { RootedVec::new_with_destination_address(addr) } } /// Create a vector of items of type T. This constructor is specific /// for RootTraceableSet. pub unsafe fn new_with_destination_address(addr: *const libc::c_void) -> RootedVec<T> { RootedTraceableSet::add::<RootedVec<T>>(&*(addr as *const _)); RootedVec::<T> { v: vec![], } } } impl<T: JSTraceable + Reflectable> RootedVec<JS<T>> { /// Obtain a safe slice of references that can't outlive that RootedVec. pub fn r(&self) -> &[&T] { unsafe { mem::transmute(&self.v[..]) } } } impl<T: JSTraceable> Drop for RootedVec<T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self); } } } impl<T: JSTraceable> Deref for RootedVec<T> { type Target = Vec<T>; fn deref(&self) -> &Vec<T> { &self.v } } impl<T: JSTraceable> DerefMut for RootedVec<T> { fn deref_mut(&mut self) -> &mut Vec<T> { &mut self.v } } impl<A: JSTraceable + Reflectable> FromIterator<Root<A>> for RootedVec<JS<A>> { #[allow(moved_no_move)] fn from_iter<T>(iterable: T) -> RootedVec<JS<A>> where T: IntoIterator<Item = Root<A>> { let mut vec = unsafe { RootedVec::new_with_destination_address(return_address() as *const libc::c_void) }; vec.extend(iterable.into_iter().map(|item| JS::from_rooted(&item))); vec } } /// SM Callback that traces the rooted traceables pub unsafe fn trace_traceables(tracer: *mut JSTracer) { ROOTED_TRACEABLES.with(|ref traceables| { let traceables = traceables.borrow(); traceables.trace(tracer); }); }
move<T
identifier_name
trace.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Utilities for tracing JS-managed values. //! //! The lifetime of DOM objects is managed by the SpiderMonkey Garbage //! Collector. A rooted DOM object implementing the interface `Foo` is traced //! as follows: //! //! 1. The GC calls `_trace` defined in `FooBinding` during the marking //! phase. (This happens through `JSClass.trace` for non-proxy bindings, and //! through `ProxyTraps.trace` otherwise.) //! 2. `_trace` calls `Foo::trace()` (an implementation of `JSTraceable`). //! This is typically derived via a `#[dom_struct]` //! (implies `#[derive(JSTraceable)]`) annotation. //! Non-JS-managed types have an empty inline `trace()` method, //! achieved via `no_jsmanaged_fields!` or similar. //! 3. For all fields, `Foo::trace()` //! calls `trace()` on the field. //! For example, for fields of type `JS<T>`, `JS<T>::trace()` calls //! `trace_reflector()`. //! 4. `trace_reflector()` calls `JS_CallUnbarrieredObjectTracer()` with a //! pointer to the `JSObject` for the reflector. This notifies the GC, which //! will add the object to the graph, and will trace that object as well. //! 5. When the GC finishes tracing, it [`finalizes`](../index.html#destruction) //! any reflectors that were not reachable. //! //! The `no_jsmanaged_fields!()` macro adds an empty implementation of `JSTraceable` to //! a datatype. use canvas_traits::WebGLError; use canvas_traits::{CanvasGradientStop, LinearGradientStyle, RadialGradientStyle}; use canvas_traits::{CompositionOrBlending, LineCapStyle, LineJoinStyle, RepetitionStyle}; use cssparser::RGBA; use devtools_traits::WorkerId; use dom::bindings::js::{JS, Root}; use dom::bindings::refcounted::Trusted; use dom::bindings::reflector::{Reflectable, Reflector}; use dom::bindings::utils::WindowProxyHandler; use encoding::types::EncodingRef; use euclid::length::Length as EuclidLength; use euclid::matrix2d::Matrix2D; use euclid::rect::Rect; use euclid::size::Size2D; use html5ever::tree_builder::QuirksMode; use hyper::header::Headers; use hyper::method::Method; use hyper::mime::Mime; use ipc_channel::ipc::{IpcReceiver, IpcSender}; use js::jsapi::JS_CallUnbarrieredObjectTracer; use js::jsapi::{GCTraceKindToAscii, Heap, JSGCTraceKind, JSObject, JSTracer, JS_CallObjectTracer, JS_CallValueTracer}; use js::jsval::JSVal; use js::rust::Runtime; use layout_interface::{LayoutChan, LayoutRPC}; use libc; use msg::constellation_msg::ConstellationChan; use msg::constellation_msg::{PipelineId, SubpageId, WindowSizeData}; use net_traits::Metadata; use net_traits::image::base::Image; use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask}; use net_traits::storage_task::StorageType; use profile_traits::mem::ProfilerChan as MemProfilerChan; use profile_traits::time::ProfilerChan as TimeProfilerChan; use script_task::ScriptChan; use script_traits::{LayoutMsg, ScriptMsg, TimerEventId, TimerSource, UntrustedNodeAddress}; use selectors::parser::PseudoElement; use selectors::states::*; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::boxed::FnBox; use std::cell::{Cell, UnsafeCell}; use std::collections::hash_state::HashState; use std::collections::{HashMap, HashSet}; use std::ffi::CString; use std::hash::{Hash, Hasher}; use std::intrinsics::return_address; use std::iter::{FromIterator, IntoIterator}; use std::mem; use std::ops::{Deref, DerefMut}; use 
std::rc::Rc; use std::sync::Arc; use std::sync::atomic::AtomicBool; use std::sync::mpsc::{Receiver, Sender}; use string_cache::{Atom, Namespace, QualName}; use style::attr::{AttrIdentifier, AttrValue}; use style::properties::PropertyDeclarationBlock; use style::restyle_hints::ElementSnapshot; use style::values::specified::Length; use url::Url; use util::str::{DOMString, LengthOrPercentageOrAuto}; use uuid::Uuid; /// A trait to allow tracing (only) DOM objects. pub trait JSTraceable { /// Trace `self`. fn trace(&self, trc: *mut JSTracer); } no_jsmanaged_fields!(EncodingRef); no_jsmanaged_fields!(Reflector); /// Trace a `JSVal`. pub fn trace_jsval(tracer: *mut JSTracer, description: &str, val: &Heap<JSVal>) { unsafe { if!val.get().is_markable() { return; } let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing value {}", description); JS_CallValueTracer(tracer, val.ptr.get() as *mut _, GCTraceKindToAscii(val.get().trace_kind())); } } /// Trace the `JSObject` held by `reflector`. #[allow(unrooted_must_root)] pub fn trace_reflector(tracer: *mut JSTracer, description: &str, reflector: &Reflector) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing reflector {}", description); JS_CallUnbarrieredObjectTracer(tracer, reflector.rootable(), GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } /// Trace a `JSObject`. pub fn trace_object(tracer: *mut JSTracer, description: &str, obj: &Heap<*mut JSObject>) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ =!0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing {}", description); JS_CallObjectTracer(tracer, obj.ptr.get() as *mut _, GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } impl<T: JSTraceable> JSTraceable for Rc<T> { fn trace(&self, trc: *mut JSTracer) { (**self).trace(trc) } } impl<T: JSTraceable> JSTraceable for Box<T> { fn trace(&self, trc: *mut JSTracer) { (**self).trace(trc) } } impl<T: JSTraceable + Copy> JSTraceable for Cell<T> { fn trace(&self, trc: *mut JSTracer) { self.get().trace(trc) } } impl<T: JSTraceable> JSTraceable for UnsafeCell<T> { fn trace(&self, trc: *mut JSTracer) { unsafe { (*self.get()).trace(trc) } } } impl JSTraceable for Heap<*mut JSObject> { fn trace(&self, trc: *mut JSTracer) { if self.get().is_null() { return; } trace_object(trc, "object", self); } } impl JSTraceable for Heap<JSVal> { fn trace(&self, trc: *mut JSTracer) { trace_jsval(trc, "val", self); } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable> JSTraceable for Vec<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in &*self { e.trace(trc); } } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable +'static> JSTraceable for SmallVec<[T; 1]> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in self.iter() { e.trace(trc); } } } impl<T: JSTraceable> JSTraceable for Option<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { self.as_ref().map(|e| e.trace(trc)); } } impl<T: JSTraceable, U: JSTraceable> JSTraceable for Result<T, U> { #[inline] fn 
trace(&self, trc: *mut JSTracer) { match *self { Ok(ref inner) => inner.trace(trc), Err(ref inner) => inner.trace(trc), } } } impl<K, V, S> JSTraceable for HashMap<K, V, S> where K: Hash + Eq + JSTraceable, V: JSTraceable, S: HashState, <S as HashState>::Hasher: Hasher, { #[inline] fn trace(&self, trc: *mut JSTracer) { for (k, v) in &*self { k.trace(trc); v.trace(trc); } } } impl<A: JSTraceable, B: JSTraceable> JSTraceable for (A, B) { #[inline] fn trace(&self, trc: *mut JSTracer) { let (ref a, ref b) = *self; a.trace(trc); b.trace(trc); } } no_jsmanaged_fields!(bool, f32, f64, String, Url, AtomicBool, Uuid); no_jsmanaged_fields!(usize, u8, u16, u32, u64); no_jsmanaged_fields!(isize, i8, i16, i32, i64); no_jsmanaged_fields!(Sender<T>); no_jsmanaged_fields!(Receiver<T>); no_jsmanaged_fields!(Rect<T>); no_jsmanaged_fields!(Size2D<T>); no_jsmanaged_fields!(Arc<T>); no_jsmanaged_fields!(Image, ImageCacheChan, ImageCacheTask); no_jsmanaged_fields!(Metadata); no_jsmanaged_fields!(Atom, Namespace, QualName); no_jsmanaged_fields!(Trusted<T: Reflectable>); no_jsmanaged_fields!(PropertyDeclarationBlock); no_jsmanaged_fields!(HashSet<T>); // These three are interdependent, if you plan to put jsmanaged data // in one of these make sure it is propagated properly to containing structs no_jsmanaged_fields!(SubpageId, WindowSizeData, PipelineId); no_jsmanaged_fields!(TimerEventId, TimerSource); no_jsmanaged_fields!(WorkerId); no_jsmanaged_fields!(QuirksMode); no_jsmanaged_fields!(Runtime); no_jsmanaged_fields!(Headers, Method); no_jsmanaged_fields!(LayoutChan); no_jsmanaged_fields!(WindowProxyHandler); no_jsmanaged_fields!(UntrustedNodeAddress); no_jsmanaged_fields!(LengthOrPercentageOrAuto); no_jsmanaged_fields!(RGBA); no_jsmanaged_fields!(EuclidLength<Unit, T>); no_jsmanaged_fields!(Matrix2D<T>); no_jsmanaged_fields!(StorageType); no_jsmanaged_fields!(CanvasGradientStop, LinearGradientStyle, RadialGradientStyle); no_jsmanaged_fields!(LineCapStyle, LineJoinStyle, CompositionOrBlending); no_jsmanaged_fields!(RepetitionStyle); no_jsmanaged_fields!(WebGLError); no_jsmanaged_fields!(TimeProfilerChan); no_jsmanaged_fields!(MemProfilerChan); no_jsmanaged_fields!(PseudoElement); no_jsmanaged_fields!(Length); no_jsmanaged_fields!(ElementState); no_jsmanaged_fields!(DOMString); no_jsmanaged_fields!(Mime); no_jsmanaged_fields!(AttrIdentifier); no_jsmanaged_fields!(AttrValue); no_jsmanaged_fields!(ElementSnapshot); impl JSTraceable for ConstellationChan<ScriptMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for ConstellationChan<LayoutMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<ScriptChan + Send> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<FnBox(f64, )> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl<'a> JSTraceable for &'a str { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<A, B> JSTraceable for fn(A) -> B { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcSender<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<LayoutRPC +'static> { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for () { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcReceiver<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // 
Do nothing } } /// Homemade trait object for JSTraceable things struct TraceableInfo { pub ptr: *const libc::c_void, pub trace: fn(obj: *const libc::c_void, tracer: *mut JSTracer), } /// Holds a set of JSTraceables that need to be rooted pub struct RootedTraceableSet { set: Vec<TraceableInfo>, } #[allow(missing_docs)] // FIXME mod dummy { // Attributes don’t apply through the macro. use std::cell::RefCell; use std::rc::Rc; use super::RootedTraceableSet; /// TLV Holds a set of JSTraceables that need to be rooted thread_local!(pub static ROOTED_TRACEABLES: Rc<RefCell<RootedTraceableSet>> = Rc::new(RefCell::new(RootedTraceableSet::new()))); } pub use self::dummy::ROOTED_TRACEABLES; impl RootedTraceableSet { fn new() -> RootedTraceableSet { RootedTraceableSet { set: vec![], } } unsafe fn remove<T: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { let mut traceables = traceables.borrow_mut(); let idx = match traceables.set.iter() .rposition(|x| x.ptr == traceable as *const T as *const _) { Some(idx) => idx, None => unreachable!(), }; traceables.set.remove(idx); }); } unsafe fn add<T: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { fn trace<T: JSTraceable>(obj: *const libc::c_void, tracer: *mut JSTracer) { let obj: &T = unsafe { &*(obj as *const T) }; obj.trace(tracer); } let mut traceables = traceables.borrow_mut(); let info = TraceableInfo { ptr: traceable as *const T as *const libc::c_void, trace: trace::<T>, }; traceables.set.push(info); }) } unsafe fn trace(&self, tracer: *mut JSTracer) { for info in &self.set { (info.trace)(info.ptr, tracer); } } } /// Roots any JSTraceable thing /// /// If you have a valid Reflectable, use Root. /// If you have GC things like *mut JSObject or JSVal, use jsapi::Rooted. /// If you have an arbitrary number of Reflectables to root, use RootedVec<JS<T>> /// If you know what you're doing, use this. #[derive(JSTraceable)] pub struct RootedTraceable<'a, T: 'a + JSTraceable> { ptr: &'a T, } impl<'a, T: JSTraceable> RootedTraceable<'a, T> { /// Root a JSTraceable thing for the life of this RootedTraceable pub fn new(traceable: &'a T) -> RootedTraceable<'a, T> { unsafe { RootedTraceableSet::add(traceable); } RootedTraceable { ptr: traceable, } } }
impl<'a, T: JSTraceable> Drop for RootedTraceable<'a, T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self.ptr); } } } /// A vector of items that are rooted for the lifetime of this struct. #[allow(unrooted_must_root)] #[no_move] #[derive(JSTraceable)] #[allow_unrooted_interior] pub struct RootedVec<T: JSTraceable> { v: Vec<T>, } impl<T: JSTraceable> RootedVec<T> { /// Create a vector of items of type T that is rooted for /// the lifetime of this struct pub fn new() -> RootedVec<T> { let addr = unsafe { return_address() as *const libc::c_void }; unsafe { RootedVec::new_with_destination_address(addr) } } /// Create a vector of items of type T. This constructor is specific /// for RootTraceableSet. pub unsafe fn new_with_destination_address(addr: *const libc::c_void) -> RootedVec<T> { RootedTraceableSet::add::<RootedVec<T>>(&*(addr as *const _)); RootedVec::<T> { v: vec![], } } } impl<T: JSTraceable + Reflectable> RootedVec<JS<T>> { /// Obtain a safe slice of references that can't outlive that RootedVec. pub fn r(&self) -> &[&T] { unsafe { mem::transmute(&self.v[..]) } } } impl<T: JSTraceable> Drop for RootedVec<T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self); } } } impl<T: JSTraceable> Deref for RootedVec<T> { type Target = Vec<T>; fn deref(&self) -> &Vec<T> { &self.v } } impl<T: JSTraceable> DerefMut for RootedVec<T> { fn deref_mut(&mut self) -> &mut Vec<T> { &mut self.v } } impl<A: JSTraceable + Reflectable> FromIterator<Root<A>> for RootedVec<JS<A>> { #[allow(moved_no_move)] fn from_iter<T>(iterable: T) -> RootedVec<JS<A>> where T: IntoIterator<Item = Root<A>> { let mut vec = unsafe { RootedVec::new_with_destination_address(return_address() as *const libc::c_void) }; vec.extend(iterable.into_iter().map(|item| JS::from_rooted(&item))); vec } } /// SM Callback that traces the rooted traceables pub unsafe fn trace_traceables(tracer: *mut JSTracer) { ROOTED_TRACEABLES.with(|ref traceables| { let traceables = traceables.borrow(); traceables.trace(tracer); }); }
random_line_split
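Several of the trace.rs rows revolve around no_jsmanaged_fields!, which stamps out empty JSTraceable impls for types that cannot hold GC-managed data. A minimal self-contained sketch of that pattern, using a stand-in trait and macro name rather than the real bindings:

// Stand-in for the real trait; the tracer pointer is replaced by a unit value.
trait JsTraceable {
    fn trace(&self, _trc: &mut ());
}

// Emits a no-op impl for each listed type, mirroring what no_jsmanaged_fields! does.
macro_rules! my_no_js_fields {
    ($($ty:ty),*) => {
        $(
            impl JsTraceable for $ty {
                #[inline]
                fn trace(&self, _trc: &mut ()) {
                    // Nothing to trace: the type holds no GC-managed data.
                }
            }
        )*
    };
}

my_no_js_fields!(bool, u32, String);

fn main() {
    let mut trc = ();
    true.trace(&mut trc);
    42u32.trace(&mut trc);
    String::from("hi").trace(&mut trc);
}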
0009_simple_multiplication_with_stdin.rs
// Compute product of two numbers while accepting values from standard input. use std::io; // imports io library from standard library fn main() { let mut a = String::new(); //creates new, empty String let mut b = String::new(); let c: u32; println!("Enter value a:"); io::stdin().read_line(&mut a) .ok() .expect("Failed to read value"); println!("Enter value b:"); io::stdin().read_line(&mut b) .ok()
//Shadowing lets us re-use the old name. // parse() method on String converts the String into a number let a: u32 = a.trim().parse() .ok() .expect("Please type a number"); let b: u32 = b.trim().parse() .ok() .expect("Please type a number"); c = a * b; println!("Product of {} * {} is {} ", a, b, c); }
.expect("Failed to read value");
random_line_split
0009_simple_multiplication_with_stdin.rs
// Compute product of two numbers while accepting values from standard input. use std::io; // imports io library from standard library fn
() { let mut a = String::new(); //creates new, empty String let mut b = String::new(); let c: u32; println!("Enter value a:"); io::stdin().read_line(&mut a) .ok() .expect("Failed to read value"); println!("Enter value b:"); io::stdin().read_line(&mut b) .ok() .expect("Failed to read value"); //Shadowing lets us re-use the old name. // parse() method on String converts the String into a number let a: u32 = a.trim().parse() .ok() .expect("Please type a number"); let b: u32 = b.trim().parse() .ok() .expect("Please type a number"); c = a * b; println!("Product of {} * {} is {} ", a, b, c); }
main
identifier_name
0009_simple_multiplication_with_stdin.rs
// Compute product of two numbers while accepting values from standard input. use std::io; // imports io library from standard library fn main()
let b: u32 = b.trim().parse() .ok() .expect("Please type a number"); c = a * b; println!("Product of {} * {} is {} ", a, b, c); }
{ let mut a = String::new(); //creates new, empty String let mut b = String::new(); let c: u32; println!("Enter value a:"); io::stdin().read_line(&mut a) .ok() .expect("Failed to read value"); println!("Enter value b:"); io::stdin().read_line(&mut b) .ok() .expect("Failed to read value"); //Shadowing lets us re-use the old name. // parse() method on String converts the String into a number let a: u32 = a.trim().parse() .ok() .expect("Please type a number");
identifier_body
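The three rows above all cover the same small program: read two lines, then shadow the String bindings with parsed u32 values. The sketch below isolates the shadow-and-parse step on fixed input and swaps the row's .ok().expect() chain for an explicit match, as an alternative rather than a transcription:

fn main() {
    // Stand-ins for values that read_line() would have filled in.
    let a = String::from("6\n");
    let b = String::from("7\n");

    // Shadowing lets the same names hold the parsed numbers.
    let a: u32 = match a.trim().parse() {
        Ok(n) => n,
        Err(_) => panic!("Please type a number"),
    };
    let b: u32 = match b.trim().parse() {
        Ok(n) => n,
        Err(_) => panic!("Please type a number"),
    };

    println!("Product of {} * {} is {}", a, b, a * b);
}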
init-res-into-things.rs
// run-pass #![allow(non_camel_case_types)] #![allow(dead_code)] #![feature(box_syntax)] use std::cell::Cell; // Resources can't be copied, but storing into data structures counts // as a move unless the stored thing is used afterwards. struct r<'a> { i: &'a Cell<isize>, } struct BoxR<'a> { x: r<'a> } impl<'a> Drop for r<'a> { fn drop(&mut self) { self.i.set(self.i.get() + 1) } } fn r(i: &Cell<isize>) -> r { r { i: i } } fn test_rec() { let i = &Cell::new(0); { let _a = BoxR {x: r(i)}; } assert_eq!(i.get(), 1); } fn test_tag() { enum
<'a> { t0(r<'a>), } let i = &Cell::new(0); { let _a = t::t0(r(i)); } assert_eq!(i.get(), 1); } fn test_tup() { let i = &Cell::new(0); { let _a = (r(i), 0); } assert_eq!(i.get(), 1); } fn test_unique() { let i = &Cell::new(0); { let _a: Box<_> = box r(i); } assert_eq!(i.get(), 1); } fn test_unique_rec() { let i = &Cell::new(0); { let _a: Box<_> = box BoxR { x: r(i) }; } assert_eq!(i.get(), 1); } pub fn main() { test_rec(); test_tag(); test_tup(); test_unique(); test_unique_rec(); }
t
identifier_name
init-res-into-things.rs
// run-pass #![allow(non_camel_case_types)] #![allow(dead_code)] #![feature(box_syntax)] use std::cell::Cell; // Resources can't be copied, but storing into data structures counts // as a move unless the stored thing is used afterwards. struct r<'a> { i: &'a Cell<isize>, } struct BoxR<'a> { x: r<'a> } impl<'a> Drop for r<'a> { fn drop(&mut self) { self.i.set(self.i.get() + 1) } } fn r(i: &Cell<isize>) -> r { r { i: i } } fn test_rec() { let i = &Cell::new(0); { let _a = BoxR {x: r(i)}; } assert_eq!(i.get(), 1); }
} let i = &Cell::new(0); { let _a = t::t0(r(i)); } assert_eq!(i.get(), 1); } fn test_tup() { let i = &Cell::new(0); { let _a = (r(i), 0); } assert_eq!(i.get(), 1); } fn test_unique() { let i = &Cell::new(0); { let _a: Box<_> = box r(i); } assert_eq!(i.get(), 1); } fn test_unique_rec() { let i = &Cell::new(0); { let _a: Box<_> = box BoxR { x: r(i) }; } assert_eq!(i.get(), 1); } pub fn main() { test_rec(); test_tag(); test_tup(); test_unique(); test_unique_rec(); }
fn test_tag() { enum t<'a> { t0(r<'a>),
random_line_split
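Both init-res-into-things.rs rows verify that moving a value with a Drop impl into another structure still runs the destructor exactly once when the enclosing scope ends. A cut-down sketch of the same check that avoids the unstable box_syntax feature:

use std::cell::Cell;

// A resource whose destructor bumps a shared counter.
struct Counted<'a> {
    hits: &'a Cell<i32>,
}

impl<'a> Drop for Counted<'a> {
    fn drop(&mut self) {
        self.hits.set(self.hits.get() + 1);
    }
}

struct Holder<'a> {
    inner: Counted<'a>,
}

fn main() {
    let hits = Cell::new(0);

    {
        // Moving the resource into Holder is a move, not a copy, so the
        // destructor runs exactly once when _h leaves scope.
        let _h = Holder { inner: Counted { hits: &hits } };
    }
    assert_eq!(hits.get(), 1);

    {
        let _boxed = Box::new(Counted { hits: &hits });
    }
    assert_eq!(hits.get(), 2);
}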
dump.rs
extern crate wasmparser; use std::env; use std::fs::File; use std::io; use std::io::prelude::*; use std::str; use wasmparser::Parser; use wasmparser::ParserState; use wasmparser::WasmDecoder; fn main() { let args = env::args().collect::<Vec<_>>(); if args.len()!= 2 { println!("Usage: {} in.wasm", args[0]);
loop { print!("0x{:08x}\t", parser.current_position()); let state = parser.read(); match *state { ParserState::ExportSectionEntry { field, ref kind, index, } => { println!( "ExportSectionEntry {{ field: \"{}\", kind: {:?}, index: {} }}", field, kind, index ); } ParserState::ImportSectionEntry { module, field, ref ty, } => { println!( "ImportSectionEntry {{ module: \"{}\", field: \"{}\", ty: {:?} }}", module, field, ty ); } ParserState::EndWasm => break, ParserState::Error(err) => panic!("Error: {:?}", err), _ => println!("{:?}", state), } } } fn read_wasm(file: &str) -> io::Result<Vec<u8>> { let mut data = Vec::new(); let mut f = File::open(file)?; f.read_to_end(&mut data)?; Ok(data) }
return; } let buf: Vec<u8> = read_wasm(&args[1]).unwrap(); let mut parser = Parser::new(&buf);
random_line_split
dump.rs
extern crate wasmparser; use std::env; use std::fs::File; use std::io; use std::io::prelude::*; use std::str; use wasmparser::Parser; use wasmparser::ParserState; use wasmparser::WasmDecoder; fn
() { let args = env::args().collect::<Vec<_>>(); if args.len()!= 2 { println!("Usage: {} in.wasm", args[0]); return; } let buf: Vec<u8> = read_wasm(&args[1]).unwrap(); let mut parser = Parser::new(&buf); loop { print!("0x{:08x}\t", parser.current_position()); let state = parser.read(); match *state { ParserState::ExportSectionEntry { field, ref kind, index, } => { println!( "ExportSectionEntry {{ field: \"{}\", kind: {:?}, index: {} }}", field, kind, index ); } ParserState::ImportSectionEntry { module, field, ref ty, } => { println!( "ImportSectionEntry {{ module: \"{}\", field: \"{}\", ty: {:?} }}", module, field, ty ); } ParserState::EndWasm => break, ParserState::Error(err) => panic!("Error: {:?}", err), _ => println!("{:?}", state), } } } fn read_wasm(file: &str) -> io::Result<Vec<u8>> { let mut data = Vec::new(); let mut f = File::open(file)?; f.read_to_end(&mut data)?; Ok(data) }
main
identifier_name
dump.rs
extern crate wasmparser; use std::env; use std::fs::File; use std::io; use std::io::prelude::*; use std::str; use wasmparser::Parser; use wasmparser::ParserState; use wasmparser::WasmDecoder; fn main() { let args = env::args().collect::<Vec<_>>(); if args.len()!= 2 { println!("Usage: {} in.wasm", args[0]); return; } let buf: Vec<u8> = read_wasm(&args[1]).unwrap(); let mut parser = Parser::new(&buf); loop { print!("0x{:08x}\t", parser.current_position()); let state = parser.read(); match *state { ParserState::ExportSectionEntry { field, ref kind, index, } => { println!( "ExportSectionEntry {{ field: \"{}\", kind: {:?}, index: {} }}", field, kind, index ); } ParserState::ImportSectionEntry { module, field, ref ty, } => { println!( "ImportSectionEntry {{ module: \"{}\", field: \"{}\", ty: {:?} }}", module, field, ty ); } ParserState::EndWasm => break, ParserState::Error(err) => panic!("Error: {:?}", err), _ => println!("{:?}", state), } } } fn read_wasm(file: &str) -> io::Result<Vec<u8>>
{ let mut data = Vec::new(); let mut f = File::open(file)?; f.read_to_end(&mut data)?; Ok(data) }
identifier_body
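The dump.rs rows drive wasmparser's pull parser: call read() in a loop and match on ParserState until EndWasm. The sketch below reuses only the calls visible in the rows (Parser::new, read, and the EndWasm/Error arms) and assumes the same, older wasmparser API the example was written against; the 8-byte buffer is assumed to be the smallest valid (empty) module.

extern crate wasmparser;

use wasmparser::Parser;
use wasmparser::ParserState;
use wasmparser::WasmDecoder;

fn main() {
    // Assumed: magic number plus version 1 forms the smallest empty module.
    let buf: Vec<u8> = b"\0asm\x01\0\0\0".to_vec();
    let mut parser = Parser::new(&buf);
    let mut states = 0usize;
    loop {
        let state = parser.read();
        match *state {
            ParserState::EndWasm => break,
            ParserState::Error(err) => panic!("Error: {:?}", err),
            _ => states += 1,
        }
    }
    println!("saw {} parser states before EndWasm", states);
}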
condvar.rs
use crate::ffi::c_void; use crate::ptr; use crate::sync::atomic::{AtomicUsize, Ordering::SeqCst}; use crate::sys::hermit::abi; use crate::sys::mutex::Mutex; use crate::time::Duration; // The implementation is inspired by Andrew D. Birrell's paper // "Implementing Condition Variables with Semaphores" pub struct Condvar { counter: AtomicUsize, sem1: *const c_void, sem2: *const c_void, } pub type MovableCondvar = Condvar; unsafe impl Send for Condvar {} unsafe impl Sync for Condvar {} impl Condvar { pub const fn new() -> Condvar { Condvar { counter: AtomicUsize::new(0), sem1: ptr::null(), sem2: ptr::null() } } pub unsafe fn init(&mut self) { let _ = abi::sem_init(&mut self.sem1 as *mut *const c_void, 0); let _ = abi::sem_init(&mut self.sem2 as *mut *const c_void, 0); } pub unsafe fn notify_one(&self) { if self.counter.load(SeqCst) > 0 { self.counter.fetch_sub(1, SeqCst); abi::sem_post(self.sem1); abi::sem_timedwait(self.sem2, 0); } } pub unsafe fn notify_all(&self) { let counter = self.counter.swap(0, SeqCst); for _ in 0..counter { abi::sem_post(self.sem1); } for _ in 0..counter { abi::sem_timedwait(self.sem2, 0); } } pub unsafe fn wait(&self, mutex: &Mutex) { self.counter.fetch_add(1, SeqCst); mutex.unlock(); abi::sem_timedwait(self.sem1, 0); abi::sem_post(self.sem2); mutex.lock(); } pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool { self.counter.fetch_add(1, SeqCst); mutex.unlock(); let millis = dur.as_millis().min(u32::MAX as u128) as u32; let res = if millis > 0 { abi::sem_timedwait(self.sem1, millis) } else { abi::sem_trywait(self.sem1) }; abi::sem_post(self.sem2); mutex.lock(); res == 0 } pub unsafe fn
(&self) { let _ = abi::sem_destroy(self.sem1); let _ = abi::sem_destroy(self.sem2); } }
destroy
identifier_name
condvar.rs
use crate::ffi::c_void; use crate::ptr; use crate::sync::atomic::{AtomicUsize, Ordering::SeqCst}; use crate::sys::hermit::abi; use crate::sys::mutex::Mutex; use crate::time::Duration; // The implementation is inspired by Andrew D. Birrell's paper // "Implementing Condition Variables with Semaphores" pub struct Condvar { counter: AtomicUsize, sem1: *const c_void, sem2: *const c_void, } pub type MovableCondvar = Condvar; unsafe impl Send for Condvar {} unsafe impl Sync for Condvar {} impl Condvar { pub const fn new() -> Condvar { Condvar { counter: AtomicUsize::new(0), sem1: ptr::null(), sem2: ptr::null() } } pub unsafe fn init(&mut self) { let _ = abi::sem_init(&mut self.sem1 as *mut *const c_void, 0); let _ = abi::sem_init(&mut self.sem2 as *mut *const c_void, 0); } pub unsafe fn notify_one(&self) { if self.counter.load(SeqCst) > 0
} pub unsafe fn notify_all(&self) { let counter = self.counter.swap(0, SeqCst); for _ in 0..counter { abi::sem_post(self.sem1); } for _ in 0..counter { abi::sem_timedwait(self.sem2, 0); } } pub unsafe fn wait(&self, mutex: &Mutex) { self.counter.fetch_add(1, SeqCst); mutex.unlock(); abi::sem_timedwait(self.sem1, 0); abi::sem_post(self.sem2); mutex.lock(); } pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool { self.counter.fetch_add(1, SeqCst); mutex.unlock(); let millis = dur.as_millis().min(u32::MAX as u128) as u32; let res = if millis > 0 { abi::sem_timedwait(self.sem1, millis) } else { abi::sem_trywait(self.sem1) }; abi::sem_post(self.sem2); mutex.lock(); res == 0 } pub unsafe fn destroy(&self) { let _ = abi::sem_destroy(self.sem1); let _ = abi::sem_destroy(self.sem2); } }
{ self.counter.fetch_sub(1, SeqCst); abi::sem_post(self.sem1); abi::sem_timedwait(self.sem2, 0); }
conditional_block
condvar.rs
use crate::ffi::c_void; use crate::ptr; use crate::sync::atomic::{AtomicUsize, Ordering::SeqCst}; use crate::sys::hermit::abi; use crate::sys::mutex::Mutex; use crate::time::Duration; // The implementation is inspired by Andrew D. Birrell's paper // "Implementing Condition Variables with Semaphores" pub struct Condvar { counter: AtomicUsize, sem1: *const c_void, sem2: *const c_void, } pub type MovableCondvar = Condvar; unsafe impl Send for Condvar {} unsafe impl Sync for Condvar {} impl Condvar { pub const fn new() -> Condvar { Condvar { counter: AtomicUsize::new(0), sem1: ptr::null(), sem2: ptr::null() } } pub unsafe fn init(&mut self) { let _ = abi::sem_init(&mut self.sem1 as *mut *const c_void, 0); let _ = abi::sem_init(&mut self.sem2 as *mut *const c_void, 0); } pub unsafe fn notify_one(&self) { if self.counter.load(SeqCst) > 0 { self.counter.fetch_sub(1, SeqCst); abi::sem_post(self.sem1); abi::sem_timedwait(self.sem2, 0); } } pub unsafe fn notify_all(&self) { let counter = self.counter.swap(0, SeqCst); for _ in 0..counter { abi::sem_post(self.sem1); } for _ in 0..counter { abi::sem_timedwait(self.sem2, 0); } } pub unsafe fn wait(&self, mutex: &Mutex) { self.counter.fetch_add(1, SeqCst); mutex.unlock(); abi::sem_timedwait(self.sem1, 0); abi::sem_post(self.sem2); mutex.lock(); } pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool {
mutex.unlock(); let millis = dur.as_millis().min(u32::MAX as u128) as u32; let res = if millis > 0 { abi::sem_timedwait(self.sem1, millis) } else { abi::sem_trywait(self.sem1) }; abi::sem_post(self.sem2); mutex.lock(); res == 0 } pub unsafe fn destroy(&self) { let _ = abi::sem_destroy(self.sem1); let _ = abi::sem_destroy(self.sem2); } }
self.counter.fetch_add(1, SeqCst);
random_line_split
condvar.rs
use crate::ffi::c_void; use crate::ptr; use crate::sync::atomic::{AtomicUsize, Ordering::SeqCst}; use crate::sys::hermit::abi; use crate::sys::mutex::Mutex; use crate::time::Duration; // The implementation is inspired by Andrew D. Birrell's paper // "Implementing Condition Variables with Semaphores" pub struct Condvar { counter: AtomicUsize, sem1: *const c_void, sem2: *const c_void, } pub type MovableCondvar = Condvar; unsafe impl Send for Condvar {} unsafe impl Sync for Condvar {} impl Condvar { pub const fn new() -> Condvar
pub unsafe fn init(&mut self) { let _ = abi::sem_init(&mut self.sem1 as *mut *const c_void, 0); let _ = abi::sem_init(&mut self.sem2 as *mut *const c_void, 0); } pub unsafe fn notify_one(&self) { if self.counter.load(SeqCst) > 0 { self.counter.fetch_sub(1, SeqCst); abi::sem_post(self.sem1); abi::sem_timedwait(self.sem2, 0); } } pub unsafe fn notify_all(&self) { let counter = self.counter.swap(0, SeqCst); for _ in 0..counter { abi::sem_post(self.sem1); } for _ in 0..counter { abi::sem_timedwait(self.sem2, 0); } } pub unsafe fn wait(&self, mutex: &Mutex) { self.counter.fetch_add(1, SeqCst); mutex.unlock(); abi::sem_timedwait(self.sem1, 0); abi::sem_post(self.sem2); mutex.lock(); } pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool { self.counter.fetch_add(1, SeqCst); mutex.unlock(); let millis = dur.as_millis().min(u32::MAX as u128) as u32; let res = if millis > 0 { abi::sem_timedwait(self.sem1, millis) } else { abi::sem_trywait(self.sem1) }; abi::sem_post(self.sem2); mutex.lock(); res == 0 } pub unsafe fn destroy(&self) { let _ = abi::sem_destroy(self.sem1); let _ = abi::sem_destroy(self.sem2); } }
{ Condvar { counter: AtomicUsize::new(0), sem1: ptr::null(), sem2: ptr::null() } }
identifier_body
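The condvar.rs rows build a condition variable from a waiter counter and two semaphores, following Birrell's scheme: wait registers itself, sleeps on sem1 and acknowledges on sem2, while notify_one posts sem1 only when the counter says someone is waiting. The sketch below re-creates that handshake on top of a toy semaphore made from std's Mutex and Condvar, purely to show the counting protocol; it is not the HermitCore ABI code from the rows.

use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
use std::sync::{Arc, Condvar, Mutex, MutexGuard};
use std::thread;

// Toy counting semaphore, only for illustration.
struct Sem {
    count: Mutex<usize>,
    cv: Condvar,
}

impl Sem {
    fn new() -> Sem {
        Sem { count: Mutex::new(0), cv: Condvar::new() }
    }
    fn post(&self) {
        *self.count.lock().unwrap() += 1;
        self.cv.notify_one();
    }
    fn wait(&self) {
        let mut count = self.count.lock().unwrap();
        while *count == 0 {
            count = self.cv.wait(count).unwrap();
        }
        *count -= 1;
    }
}

// Birrell-style condition variable: a waiter counter plus two semaphores.
struct BirrellCondvar {
    counter: AtomicUsize,
    sem1: Sem,
    sem2: Sem,
}

impl BirrellCondvar {
    fn new() -> Self {
        BirrellCondvar { counter: AtomicUsize::new(0), sem1: Sem::new(), sem2: Sem::new() }
    }

    // Release the caller's lock, sleep until notified, acknowledge, relock.
    fn wait<'a, T>(&self, guard: MutexGuard<'a, T>, lock: &'a Mutex<T>) -> MutexGuard<'a, T> {
        self.counter.fetch_add(1, SeqCst);
        drop(guard);        // unlock, mirroring mutex.unlock() in the row
        self.sem1.wait();   // sleep until a notifier posts
        self.sem2.post();   // acknowledge the hand-off
        lock.lock().unwrap()
    }

    fn notify_one(&self) {
        if self.counter.load(SeqCst) > 0 {
            self.counter.fetch_sub(1, SeqCst);
            self.sem1.post();
            self.sem2.wait(); // wait for the woken thread's acknowledgement
        }
    }
}

fn main() {
    let lock = Arc::new(Mutex::new(false));
    let cv = Arc::new(BirrellCondvar::new());

    let (lock2, cv2) = (Arc::clone(&lock), Arc::clone(&cv));
    let t = thread::spawn(move || {
        *lock2.lock().unwrap() = true;
        cv2.notify_one();
    });

    let mut guard = lock.lock().unwrap();
    while !*guard {
        guard = cv.wait(guard, &lock);
    }
    t.join().unwrap();
    println!("signalled: {}", *guard);
}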
ast_clone.rs
use std::str::FromStr; use { proc_macro2::{Span, TokenStream}, syn::{ self, Data, DataEnum, DataStruct, DeriveInput, Field, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident, Variant, }, }; use crate::{ attr, shared::{map_lifetimes, map_type_params, split_for_impl}, }; pub fn derive(input: TokenStream) -> TokenStream { let derive_input = syn::parse2(input).expect("Input is checked by rustc"); let container = attr::Container::from_ast(&derive_input); let DeriveInput { ident, data, generics, .. } = derive_input; let tokens = match data { Data::Struct(ast) => derive_struct(&container, ast, ident, generics), Data::Enum(ast) => derive_enum(&container, ast, ident, generics), Data::Union(_) => panic!("Unions are not supported"), }; tokens.into() } fn derive_struct( container: &attr::Container, ast: DataStruct, ident: Ident, generics: Generics, ) -> TokenStream { let cons = match ast.fields { Fields::Named(FieldsNamed { named,.. }) => gen_struct_cons(&ident, named), Fields::Unnamed(FieldsUnnamed { unnamed,.. }) => gen_tuple_struct_cons(&ident, unnamed), Fields::Unit => quote! { #ident }, }; gen_impl(container, ident, generics, cons) } fn gen_struct_cons<I>(ident: &Ident, fields: I) -> TokenStream where I: IntoIterator<Item = Field>, { // lookup each field by its name and then convert to its type using the Getable // impl of the fields type let field_initializers = fields.into_iter().map(|field| { let field_ty = &field.ty; let ident = field .ident .as_ref() .expect("Struct fields always have names"); quote! { #ident: <#field_ty as gluon_base::ast::AstClone<'ast, Id>>::ast_clone(&self.#ident, arena) } }); quote! { #ident { #(#field_initializers,)* } } } fn gen_tuple_struct_cons<I>(ident: &Ident, fields: I) -> TokenStream where I: IntoIterator<Item = Field>, { let mut fields = fields.into_iter().fuse(); // Treat newtype structs as just their inner type let (first, second) = (fields.next(), fields.next()); match (&first, &second) { (Some(field), None) => { let field_ty = &field.ty; return quote! { #ident ( <#field_ty as gluon_base::ast::AstClone<'__vm, _>>::ast_clone(&self.0, arena) ) }; } _ => (), } // do the lookup using the tag, because tuple structs don't have field names let field_initializers = first .into_iter() .chain(second) .chain(fields) .enumerate() .map(|(idx, field)| { let field_ty = &field.ty; let idx = syn::Index::from(idx); quote! { <#field_ty as gluon_base::ast::AstClone<'__vm, _>>::ast_clone(&self. #idx, arena) } }); quote! { #ident ( #(#field_initializers,)* ) } } fn derive_enum( container: &attr::Container, ast: DataEnum, ident: Ident, generics: Generics, ) -> TokenStream { let cons = { let variants = ast .variants .iter() .enumerate() .map(|(tag, variant)| gen_variant_match(&ident, tag, variant)); // data contains the the data for each field of a variant; the variant of the passed value // is defined by the tag(), which is defined by order of the variants (the first variant is 0) quote! 
{ match self { #(#variants,)* } } }; gen_impl(container, ident, generics, cons) } fn gen_impl( container: &attr::Container, ident: Ident, generics: Generics, clone_impl: TokenStream, ) -> TokenStream { // lifetime bounds like '__vm: 'a, 'a: '__vm (which implies => 'a == '__vm) // writing bounds like this is a lot easier than actually replacing all lifetimes // with '__vm let lifetime_bounds = create_lifetime_bounds(&generics); // generate bounds like T: Getable for every type parameter let ast_clone_bounds = create_ast_clone_bounds(&generics); let (impl_generics, ty_generics, where_clause) = split_for_impl(&generics, &["Id"], &["'ast"]); let dummy_const = Ident::new(&format!("_IMPL_AST_CLONE_FOR_{}", ident), Span::call_site()); let extra_bounds = container.ast_clone_bounds.as_ref().map(|b| { let b = TokenStream::from_str(b).unwrap(); quote! { #b, } }); quote! { #[allow(non_upper_case_globals)] const #dummy_const: () = { use crate as gluon_base; #[automatically_derived] #[allow(unused_attributes, unused_variables)] impl #impl_generics gluon_base::ast::AstClone<'ast, Id> for #ident #ty_generics #where_clause #(#ast_clone_bounds,)* #(#lifetime_bounds),* #extra_bounds Id: Clone { fn ast_clone(&self, arena: gluon_base::ast::ArenaRef<'_, 'ast, Id>) -> Self { #clone_impl } } }; } } fn gen_variant_match(ident: &Ident, _tag: usize, variant: &Variant) -> TokenStream { let variant_ident = &variant.ident; // depending on the type of the variant we need to generate different constructors // for the enum match &variant.fields { Fields::Unit => quote! { #ident::#variant_ident => #ident::#variant_ident }, // both constructors that need to marshall values extract them by using the index // of the field to get the content from Data::get_variant; // the data variable was assigned in the function body above Fields::Unnamed(FieldsUnnamed { unnamed,.. }) => { let fields: Vec<_> = unnamed .iter() .enumerate() .map(|(idx, _field)| syn::Ident::new(&format!("_{}", idx), Span::call_site())) .collect(); let cons = gen_tuple_variant_cons(unnamed.iter().zip(fields.iter())); quote! { #ident::#variant_ident ( #(#fields),* ) => #ident::#variant_ident#cons } } Fields::Named(FieldsNamed { named,.. }) => { let cons = gen_struct_variant_cons(ident, variant_ident, named); let named = named.iter().map(|field| field.ident.as_ref().unwrap()); quote! { #ident::#variant_ident { #(#named),* } => #cons } } } } fn gen_tuple_variant_cons<'a, I>(fields: I) -> TokenStream where I: IntoIterator<Item = (&'a syn::Field, &'a syn::Ident)>, { let fields = fields.into_iter().map(|(field, ident)| { let field_ty = &field.ty; quote! { <#field_ty as gluon_base::ast::AstClone<_>>::ast_clone(#ident, arena) } }); quote! { (#(#fields),*) } } fn gen_struct_variant_cons<'a, I>(ident: &Ident, variant_ident: &Ident, fields: I) -> TokenStream where I: IntoIterator<Item = &'a Field>,
fn create_ast_clone_bounds(generics: &Generics) -> Vec<TokenStream> { map_type_params(generics, |ty| { quote! { #ty: gluon_base::ast::AstClone<'ast, Id> } }) } fn create_lifetime_bounds(generics: &Generics) -> Vec<TokenStream> { map_lifetimes(generics, |_lifetime| { quote! {} }) }
{ let fields = fields.into_iter().map(|field| { let field_ty = &field.ty; let field_ident = field .ident .as_ref() .expect("Struct fields always have names"); quote! { #field_ident: <#field_ty as gluon_base::ast::AstClone<_>>::ast_clone(#field_ident, arena) } }); quote! { #ident::#variant_ident { #(#fields),* } } }
identifier_body
ast_clone.rs
use std::str::FromStr; use { proc_macro2::{Span, TokenStream}, syn::{ self, Data, DataEnum, DataStruct, DeriveInput, Field, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident, Variant, }, }; use crate::{ attr, shared::{map_lifetimes, map_type_params, split_for_impl}, }; pub fn derive(input: TokenStream) -> TokenStream { let derive_input = syn::parse2(input).expect("Input is checked by rustc"); let container = attr::Container::from_ast(&derive_input); let DeriveInput { ident, data, generics, .. } = derive_input; let tokens = match data { Data::Struct(ast) => derive_struct(&container, ast, ident, generics), Data::Enum(ast) => derive_enum(&container, ast, ident, generics), Data::Union(_) => panic!("Unions are not supported"), }; tokens.into() } fn derive_struct( container: &attr::Container, ast: DataStruct, ident: Ident, generics: Generics, ) -> TokenStream { let cons = match ast.fields { Fields::Named(FieldsNamed { named,.. }) => gen_struct_cons(&ident, named), Fields::Unnamed(FieldsUnnamed { unnamed,.. }) => gen_tuple_struct_cons(&ident, unnamed), Fields::Unit => quote! { #ident }, }; gen_impl(container, ident, generics, cons) } fn gen_struct_cons<I>(ident: &Ident, fields: I) -> TokenStream where I: IntoIterator<Item = Field>, { // lookup each field by its name and then convert to its type using the Getable // impl of the fields type let field_initializers = fields.into_iter().map(|field| { let field_ty = &field.ty; let ident = field .ident .as_ref() .expect("Struct fields always have names"); quote! { #ident: <#field_ty as gluon_base::ast::AstClone<'ast, Id>>::ast_clone(&self.#ident, arena) } }); quote! { #ident { #(#field_initializers,)* } } } fn gen_tuple_struct_cons<I>(ident: &Ident, fields: I) -> TokenStream where I: IntoIterator<Item = Field>, { let mut fields = fields.into_iter().fuse(); // Treat newtype structs as just their inner type let (first, second) = (fields.next(), fields.next()); match (&first, &second) { (Some(field), None) => { let field_ty = &field.ty; return quote! { #ident ( <#field_ty as gluon_base::ast::AstClone<'__vm, _>>::ast_clone(&self.0, arena) ) }; } _ => (), } // do the lookup using the tag, because tuple structs don't have field names let field_initializers = first .into_iter() .chain(second) .chain(fields) .enumerate() .map(|(idx, field)| { let field_ty = &field.ty; let idx = syn::Index::from(idx); quote! { <#field_ty as gluon_base::ast::AstClone<'__vm, _>>::ast_clone(&self. #idx, arena) } }); quote! { #ident ( #(#field_initializers,)* ) } } fn derive_enum( container: &attr::Container, ast: DataEnum, ident: Ident, generics: Generics, ) -> TokenStream { let cons = { let variants = ast .variants .iter() .enumerate() .map(|(tag, variant)| gen_variant_match(&ident, tag, variant)); // data contains the the data for each field of a variant; the variant of the passed value // is defined by the tag(), which is defined by order of the variants (the first variant is 0) quote! 
{ match self { #(#variants,)* } } }; gen_impl(container, ident, generics, cons) } fn gen_impl( container: &attr::Container, ident: Ident, generics: Generics, clone_impl: TokenStream, ) -> TokenStream { // lifetime bounds like '__vm: 'a, 'a: '__vm (which implies => 'a == '__vm) // writing bounds like this is a lot easier than actually replacing all lifetimes // with '__vm let lifetime_bounds = create_lifetime_bounds(&generics); // generate bounds like T: Getable for every type parameter let ast_clone_bounds = create_ast_clone_bounds(&generics); let (impl_generics, ty_generics, where_clause) = split_for_impl(&generics, &["Id"], &["'ast"]); let dummy_const = Ident::new(&format!("_IMPL_AST_CLONE_FOR_{}", ident), Span::call_site()); let extra_bounds = container.ast_clone_bounds.as_ref().map(|b| { let b = TokenStream::from_str(b).unwrap();
quote! { #[allow(non_upper_case_globals)] const #dummy_const: () = { use crate as gluon_base; #[automatically_derived] #[allow(unused_attributes, unused_variables)] impl #impl_generics gluon_base::ast::AstClone<'ast, Id> for #ident #ty_generics #where_clause #(#ast_clone_bounds,)* #(#lifetime_bounds),* #extra_bounds Id: Clone { fn ast_clone(&self, arena: gluon_base::ast::ArenaRef<'_, 'ast, Id>) -> Self { #clone_impl } } }; } } fn gen_variant_match(ident: &Ident, _tag: usize, variant: &Variant) -> TokenStream { let variant_ident = &variant.ident; // depending on the type of the variant we need to generate different constructors // for the enum match &variant.fields { Fields::Unit => quote! { #ident::#variant_ident => #ident::#variant_ident }, // both constructors that need to marshall values extract them by using the index // of the field to get the content from Data::get_variant; // the data variable was assigned in the function body above Fields::Unnamed(FieldsUnnamed { unnamed,.. }) => { let fields: Vec<_> = unnamed .iter() .enumerate() .map(|(idx, _field)| syn::Ident::new(&format!("_{}", idx), Span::call_site())) .collect(); let cons = gen_tuple_variant_cons(unnamed.iter().zip(fields.iter())); quote! { #ident::#variant_ident ( #(#fields),* ) => #ident::#variant_ident#cons } } Fields::Named(FieldsNamed { named,.. }) => { let cons = gen_struct_variant_cons(ident, variant_ident, named); let named = named.iter().map(|field| field.ident.as_ref().unwrap()); quote! { #ident::#variant_ident { #(#named),* } => #cons } } } } fn gen_tuple_variant_cons<'a, I>(fields: I) -> TokenStream where I: IntoIterator<Item = (&'a syn::Field, &'a syn::Ident)>, { let fields = fields.into_iter().map(|(field, ident)| { let field_ty = &field.ty; quote! { <#field_ty as gluon_base::ast::AstClone<_>>::ast_clone(#ident, arena) } }); quote! { (#(#fields),*) } } fn gen_struct_variant_cons<'a, I>(ident: &Ident, variant_ident: &Ident, fields: I) -> TokenStream where I: IntoIterator<Item = &'a Field>, { let fields = fields.into_iter().map(|field| { let field_ty = &field.ty; let field_ident = field .ident .as_ref() .expect("Struct fields always have names"); quote! { #field_ident: <#field_ty as gluon_base::ast::AstClone<_>>::ast_clone(#field_ident, arena) } }); quote! { #ident::#variant_ident { #(#fields),* } } } fn create_ast_clone_bounds(generics: &Generics) -> Vec<TokenStream> { map_type_params(generics, |ty| { quote! { #ty: gluon_base::ast::AstClone<'ast, Id> } }) } fn create_lifetime_bounds(generics: &Generics) -> Vec<TokenStream> { map_lifetimes(generics, |_lifetime| { quote! {} }) }
quote! { #b, } });
random_line_split
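The ast_clone.rs rows emit, field by field and variant by variant, an impl of gluon_base::ast::AstClone. The hand-written equivalent below shows the shape of the generated code using a stand-in AstClone trait and a plain &Arena parameter in place of gluon's ArenaRef and Id type; Span and Expr are made-up example types.

// Stand-in arena; gluon's real ArenaRef carries lifetimes and an Id type parameter.
struct Arena;

trait AstClone {
    fn ast_clone(&self, arena: &Arena) -> Self;
}

impl AstClone for u32 {
    fn ast_clone(&self, _arena: &Arena) -> Self {
        *self
    }
}

impl<T: AstClone> AstClone for Vec<T> {
    fn ast_clone(&self, arena: &Arena) -> Self {
        self.iter().map(|item| item.ast_clone(arena)).collect()
    }
}

struct Span {
    start: u32,
    end: u32,
}

enum Expr {
    Literal(u32),
    Block { exprs: Vec<Expr>, span: Span },
}

// What the derive writes for a struct: clone every named field through the arena.
impl AstClone for Span {
    fn ast_clone(&self, arena: &Arena) -> Self {
        Span {
            start: self.start.ast_clone(arena),
            end: self.end.ast_clone(arena),
        }
    }
}

// What the derive writes for an enum: match on the variant, then clone each bound field.
impl AstClone for Expr {
    fn ast_clone(&self, arena: &Arena) -> Self {
        match self {
            Expr::Literal(_0) => Expr::Literal(_0.ast_clone(arena)),
            Expr::Block { exprs, span } => Expr::Block {
                exprs: exprs.ast_clone(arena),
                span: span.ast_clone(arena),
            },
        }
    }
}

fn main() {
    let arena = Arena;
    let expr = Expr::Block { exprs: vec![Expr::Literal(1)], span: Span { start: 0, end: 1 } };
    let _copy = expr.ast_clone(&arena);
}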