file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
media_queries.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{Delimiter, Parser, Token};
use euclid::size::{Size2D, TypedSize2D};
use properties::longhands;
use std::ascii::AsciiExt;
use util::geometry::ViewportPx;
use util::mem::HeapSizeOf;
use values::specified;
#[derive(Debug, HeapSizeOf, PartialEq)]
pub struct MediaQueryList {
pub media_queries: Vec<MediaQuery>
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Range<T> {
Min(T),
Max(T),
//Eq(T), // FIXME: Implement parsing support for equality then re-enable this.
}
impl Range<specified::Length> {
fn to_computed_range(&self, viewport_size: Size2D<Au>) -> Range<Au> {
let compute_width = |&width| {
match width {
specified::Length::Absolute(value) => value,
specified::Length::FontRelative(value) => {
// http://dev.w3.org/csswg/mediaqueries3/#units
// em units are relative to the initial font-size.
let initial_font_size = longhands::font_size::get_initial_value();
value.to_computed_value(initial_font_size, initial_font_size)
}
specified::Length::ViewportPercentage(value) =>
value.to_computed_value(viewport_size),
_ => unreachable!()
}
};
match *self {
Range::Min(ref width) => Range::Min(compute_width(width)),
Range::Max(ref width) => Range::Max(compute_width(width)),
//Range::Eq(ref width) => Range::Eq(compute_width(width))
}
}
}
impl<T: Ord> Range<T> {
fn evaluate(&self, value: T) -> bool {
match *self {
Range::Min(ref width) => { value >= *width },
Range::Max(ref width) => { value <= *width },
//Range::Eq(ref width) => { value == *width },
}
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media1
#[derive(PartialEq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Expression {
/// http://dev.w3.org/csswg/mediaqueries-3/#width
Width(Range<specified::Length>),
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Qualifier {
Only,
Not,
}
#[derive(Debug, HeapSizeOf, PartialEq)]
pub struct MediaQuery {
pub qualifier: Option<Qualifier>,
pub media_type: MediaQueryType,
pub expressions: Vec<Expression>,
}
impl MediaQuery {
pub fn new(qualifier: Option<Qualifier>, media_type: MediaQueryType,
expressions: Vec<Expression>) -> MediaQuery {
MediaQuery {
qualifier: qualifier,
media_type: media_type,
expressions: expressions,
}
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum MediaQueryType {
All, // Always true
MediaType(MediaType),
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum MediaType {
Screen,
Print,
Unknown,
}
#[derive(Debug, HeapSizeOf)]
pub struct Device {
pub media_type: MediaType,
pub viewport_size: TypedSize2D<ViewportPx, f32>,
}
impl Device {
pub fn new(media_type: MediaType, viewport_size: TypedSize2D<ViewportPx, f32>) -> Device {
Device {
media_type: media_type,
viewport_size: viewport_size,
}
}
}
impl Expression {
fn parse(input: &mut Parser) -> Result<Expression, ()> {
try!(input.expect_parenthesis_block());
input.parse_nested_block(|input| {
let name = try!(input.expect_ident());
try!(input.expect_colon());
// TODO: Handle other media features
match_ignore_ascii_case! { name,
"min-width" => {
Ok(Expression::Width(Range::Min(try!(specified::Length::parse_non_negative(input)))))
},
"max-width" => {
Ok(Expression::Width(Range::Max(try!(specified::Length::parse_non_negative(input)))))
},
_ => Err(())
}
})
}
}
impl MediaQuery {
fn parse(input: &mut Parser) -> Result<MediaQuery, ()>
|
// Media type is only optional if qualifier is not specified.
if qualifier.is_some() {
return Err(())
}
media_type = MediaQueryType::All;
// Without a media type, require at least one expression
expressions.push(try!(Expression::parse(input)));
}
// Parse any subsequent expressions
loop {
if input.try(|input| input.expect_ident_matching("and")).is_err() {
return Ok(MediaQuery::new(qualifier, media_type, expressions))
}
expressions.push(try!(Expression::parse(input)))
}
}
}
pub fn parse_media_query_list(input: &mut Parser) -> MediaQueryList {
let queries = if input.is_exhausted() {
vec![MediaQuery::new(None, MediaQueryType::All, vec!())]
} else {
let mut media_queries = vec![];
loop {
media_queries.push(
input.parse_until_before(Delimiter::Comma, MediaQuery::parse)
.unwrap_or(MediaQuery::new(Some(Qualifier::Not),
MediaQueryType::All,
vec!())));
match input.next() {
Ok(Token::Comma) => continue,
Ok(_) => unreachable!(),
Err(()) => break,
}
}
media_queries
};
MediaQueryList { media_queries: queries }
}
impl MediaQueryList {
pub fn evaluate(&self, device: &Device) -> bool {
let viewport_size = Size2D::new(Au::from_f32_px(device.viewport_size.width.get()),
Au::from_f32_px(device.viewport_size.height.get()));
// Check if any queries match (OR condition)
self.media_queries.iter().any(|mq| {
// Check if media matches. Unknown media never matches.
let media_match = match mq.media_type {
MediaQueryType::MediaType(MediaType::Unknown) => false,
MediaQueryType::MediaType(media_type) => media_type == device.media_type,
MediaQueryType::All => true,
};
// Check if all conditions match (AND condition)
let query_match = media_match && mq.expressions.iter().all(|expression| {
match *expression {
Expression::Width(ref value) =>
value.to_computed_range(viewport_size).evaluate(viewport_size.width),
}
});
// Apply the logical NOT qualifier to the result
match mq.qualifier {
Some(Qualifier::Not) =>!query_match,
_ => query_match,
}
})
}
}
|
{
let mut expressions = vec![];
let qualifier = if input.try(|input| input.expect_ident_matching("only")).is_ok() {
Some(Qualifier::Only)
} else if input.try(|input| input.expect_ident_matching("not")).is_ok() {
Some(Qualifier::Not)
} else {
None
};
let media_type;
if let Ok(ident) = input.try(|input| input.expect_ident()) {
media_type = match_ignore_ascii_case! { ident,
"screen" => MediaQueryType::MediaType(MediaType::Screen),
"print" => MediaQueryType::MediaType(MediaType::Print),
"all" => MediaQueryType::All,
_ => MediaQueryType::MediaType(MediaType::Unknown)
}
} else {
|
identifier_body
|
media_queries.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{Delimiter, Parser, Token};
use euclid::size::{Size2D, TypedSize2D};
use properties::longhands;
use std::ascii::AsciiExt;
use util::geometry::ViewportPx;
use util::mem::HeapSizeOf;
use values::specified;
#[derive(Debug, HeapSizeOf, PartialEq)]
pub struct
|
{
pub media_queries: Vec<MediaQuery>
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Range<T> {
Min(T),
Max(T),
//Eq(T), // FIXME: Implement parsing support for equality then re-enable this.
}
impl Range<specified::Length> {
fn to_computed_range(&self, viewport_size: Size2D<Au>) -> Range<Au> {
let compute_width = |&width| {
match width {
specified::Length::Absolute(value) => value,
specified::Length::FontRelative(value) => {
// http://dev.w3.org/csswg/mediaqueries3/#units
// em units are relative to the initial font-size.
let initial_font_size = longhands::font_size::get_initial_value();
value.to_computed_value(initial_font_size, initial_font_size)
}
specified::Length::ViewportPercentage(value) =>
value.to_computed_value(viewport_size),
_ => unreachable!()
}
};
match *self {
Range::Min(ref width) => Range::Min(compute_width(width)),
Range::Max(ref width) => Range::Max(compute_width(width)),
//Range::Eq(ref width) => Range::Eq(compute_width(width))
}
}
}
impl<T: Ord> Range<T> {
fn evaluate(&self, value: T) -> bool {
match *self {
Range::Min(ref width) => { value >= *width },
Range::Max(ref width) => { value <= *width },
//Range::Eq(ref width) => { value == *width },
}
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media1
#[derive(PartialEq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Expression {
/// http://dev.w3.org/csswg/mediaqueries-3/#width
Width(Range<specified::Length>),
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Qualifier {
Only,
Not,
}
#[derive(Debug, HeapSizeOf, PartialEq)]
pub struct MediaQuery {
pub qualifier: Option<Qualifier>,
pub media_type: MediaQueryType,
pub expressions: Vec<Expression>,
}
impl MediaQuery {
pub fn new(qualifier: Option<Qualifier>, media_type: MediaQueryType,
expressions: Vec<Expression>) -> MediaQuery {
MediaQuery {
qualifier: qualifier,
media_type: media_type,
expressions: expressions,
}
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum MediaQueryType {
All, // Always true
MediaType(MediaType),
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum MediaType {
Screen,
Print,
Unknown,
}
#[derive(Debug, HeapSizeOf)]
pub struct Device {
pub media_type: MediaType,
pub viewport_size: TypedSize2D<ViewportPx, f32>,
}
impl Device {
pub fn new(media_type: MediaType, viewport_size: TypedSize2D<ViewportPx, f32>) -> Device {
Device {
media_type: media_type,
viewport_size: viewport_size,
}
}
}
impl Expression {
fn parse(input: &mut Parser) -> Result<Expression, ()> {
try!(input.expect_parenthesis_block());
input.parse_nested_block(|input| {
let name = try!(input.expect_ident());
try!(input.expect_colon());
// TODO: Handle other media features
match_ignore_ascii_case! { name,
"min-width" => {
Ok(Expression::Width(Range::Min(try!(specified::Length::parse_non_negative(input)))))
},
"max-width" => {
Ok(Expression::Width(Range::Max(try!(specified::Length::parse_non_negative(input)))))
},
_ => Err(())
}
})
}
}
impl MediaQuery {
fn parse(input: &mut Parser) -> Result<MediaQuery, ()> {
let mut expressions = vec![];
let qualifier = if input.try(|input| input.expect_ident_matching("only")).is_ok() {
Some(Qualifier::Only)
} else if input.try(|input| input.expect_ident_matching("not")).is_ok() {
Some(Qualifier::Not)
} else {
None
};
let media_type;
if let Ok(ident) = input.try(|input| input.expect_ident()) {
media_type = match_ignore_ascii_case! { ident,
"screen" => MediaQueryType::MediaType(MediaType::Screen),
"print" => MediaQueryType::MediaType(MediaType::Print),
"all" => MediaQueryType::All,
_ => MediaQueryType::MediaType(MediaType::Unknown)
}
} else {
// Media type is only optional if qualifier is not specified.
if qualifier.is_some() {
return Err(())
}
media_type = MediaQueryType::All;
// Without a media type, require at least one expression
expressions.push(try!(Expression::parse(input)));
}
// Parse any subsequent expressions
loop {
if input.try(|input| input.expect_ident_matching("and")).is_err() {
return Ok(MediaQuery::new(qualifier, media_type, expressions))
}
expressions.push(try!(Expression::parse(input)))
}
}
}
pub fn parse_media_query_list(input: &mut Parser) -> MediaQueryList {
let queries = if input.is_exhausted() {
vec![MediaQuery::new(None, MediaQueryType::All, vec!())]
} else {
let mut media_queries = vec![];
loop {
media_queries.push(
input.parse_until_before(Delimiter::Comma, MediaQuery::parse)
.unwrap_or(MediaQuery::new(Some(Qualifier::Not),
MediaQueryType::All,
vec!())));
match input.next() {
Ok(Token::Comma) => continue,
Ok(_) => unreachable!(),
Err(()) => break,
}
}
media_queries
};
MediaQueryList { media_queries: queries }
}
impl MediaQueryList {
pub fn evaluate(&self, device: &Device) -> bool {
let viewport_size = Size2D::new(Au::from_f32_px(device.viewport_size.width.get()),
Au::from_f32_px(device.viewport_size.height.get()));
// Check if any queries match (OR condition)
self.media_queries.iter().any(|mq| {
// Check if media matches. Unknown media never matches.
let media_match = match mq.media_type {
MediaQueryType::MediaType(MediaType::Unknown) => false,
MediaQueryType::MediaType(media_type) => media_type == device.media_type,
MediaQueryType::All => true,
};
// Check if all conditions match (AND condition)
let query_match = media_match && mq.expressions.iter().all(|expression| {
match *expression {
Expression::Width(ref value) =>
value.to_computed_range(viewport_size).evaluate(viewport_size.width),
}
});
// Apply the logical NOT qualifier to the result
match mq.qualifier {
Some(Qualifier::Not) =>!query_match,
_ => query_match,
}
})
}
}
|
MediaQueryList
|
identifier_name
|
media_queries.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{Delimiter, Parser, Token};
use euclid::size::{Size2D, TypedSize2D};
use properties::longhands;
use std::ascii::AsciiExt;
use util::geometry::ViewportPx;
use util::mem::HeapSizeOf;
use values::specified;
#[derive(Debug, HeapSizeOf, PartialEq)]
pub struct MediaQueryList {
pub media_queries: Vec<MediaQuery>
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
|
impl Range<specified::Length> {
fn to_computed_range(&self, viewport_size: Size2D<Au>) -> Range<Au> {
let compute_width = |&width| {
match width {
specified::Length::Absolute(value) => value,
specified::Length::FontRelative(value) => {
// http://dev.w3.org/csswg/mediaqueries3/#units
// em units are relative to the initial font-size.
let initial_font_size = longhands::font_size::get_initial_value();
value.to_computed_value(initial_font_size, initial_font_size)
}
specified::Length::ViewportPercentage(value) =>
value.to_computed_value(viewport_size),
_ => unreachable!()
}
};
match *self {
Range::Min(ref width) => Range::Min(compute_width(width)),
Range::Max(ref width) => Range::Max(compute_width(width)),
//Range::Eq(ref width) => Range::Eq(compute_width(width))
}
}
}
impl<T: Ord> Range<T> {
fn evaluate(&self, value: T) -> bool {
match *self {
Range::Min(ref width) => { value >= *width },
Range::Max(ref width) => { value <= *width },
//Range::Eq(ref width) => { value == *width },
}
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media1
#[derive(PartialEq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Expression {
/// http://dev.w3.org/csswg/mediaqueries-3/#width
Width(Range<specified::Length>),
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum Qualifier {
Only,
Not,
}
#[derive(Debug, HeapSizeOf, PartialEq)]
pub struct MediaQuery {
pub qualifier: Option<Qualifier>,
pub media_type: MediaQueryType,
pub expressions: Vec<Expression>,
}
impl MediaQuery {
pub fn new(qualifier: Option<Qualifier>, media_type: MediaQueryType,
expressions: Vec<Expression>) -> MediaQuery {
MediaQuery {
qualifier: qualifier,
media_type: media_type,
expressions: expressions,
}
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum MediaQueryType {
All, // Always true
MediaType(MediaType),
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, HeapSizeOf)]
pub enum MediaType {
Screen,
Print,
Unknown,
}
#[derive(Debug, HeapSizeOf)]
pub struct Device {
pub media_type: MediaType,
pub viewport_size: TypedSize2D<ViewportPx, f32>,
}
impl Device {
pub fn new(media_type: MediaType, viewport_size: TypedSize2D<ViewportPx, f32>) -> Device {
Device {
media_type: media_type,
viewport_size: viewport_size,
}
}
}
impl Expression {
fn parse(input: &mut Parser) -> Result<Expression, ()> {
try!(input.expect_parenthesis_block());
input.parse_nested_block(|input| {
let name = try!(input.expect_ident());
try!(input.expect_colon());
// TODO: Handle other media features
match_ignore_ascii_case! { name,
"min-width" => {
Ok(Expression::Width(Range::Min(try!(specified::Length::parse_non_negative(input)))))
},
"max-width" => {
Ok(Expression::Width(Range::Max(try!(specified::Length::parse_non_negative(input)))))
},
_ => Err(())
}
})
}
}
impl MediaQuery {
fn parse(input: &mut Parser) -> Result<MediaQuery, ()> {
let mut expressions = vec![];
let qualifier = if input.try(|input| input.expect_ident_matching("only")).is_ok() {
Some(Qualifier::Only)
} else if input.try(|input| input.expect_ident_matching("not")).is_ok() {
Some(Qualifier::Not)
} else {
None
};
let media_type;
if let Ok(ident) = input.try(|input| input.expect_ident()) {
media_type = match_ignore_ascii_case! { ident,
"screen" => MediaQueryType::MediaType(MediaType::Screen),
"print" => MediaQueryType::MediaType(MediaType::Print),
"all" => MediaQueryType::All,
_ => MediaQueryType::MediaType(MediaType::Unknown)
}
} else {
// Media type is only optional if qualifier is not specified.
if qualifier.is_some() {
return Err(())
}
media_type = MediaQueryType::All;
// Without a media type, require at least one expression
expressions.push(try!(Expression::parse(input)));
}
// Parse any subsequent expressions
loop {
if input.try(|input| input.expect_ident_matching("and")).is_err() {
return Ok(MediaQuery::new(qualifier, media_type, expressions))
}
expressions.push(try!(Expression::parse(input)))
}
}
}
pub fn parse_media_query_list(input: &mut Parser) -> MediaQueryList {
let queries = if input.is_exhausted() {
vec![MediaQuery::new(None, MediaQueryType::All, vec!())]
} else {
let mut media_queries = vec![];
loop {
media_queries.push(
input.parse_until_before(Delimiter::Comma, MediaQuery::parse)
.unwrap_or(MediaQuery::new(Some(Qualifier::Not),
MediaQueryType::All,
vec!())));
match input.next() {
Ok(Token::Comma) => continue,
Ok(_) => unreachable!(),
Err(()) => break,
}
}
media_queries
};
MediaQueryList { media_queries: queries }
}
impl MediaQueryList {
pub fn evaluate(&self, device: &Device) -> bool {
let viewport_size = Size2D::new(Au::from_f32_px(device.viewport_size.width.get()),
Au::from_f32_px(device.viewport_size.height.get()));
// Check if any queries match (OR condition)
self.media_queries.iter().any(|mq| {
// Check if media matches. Unknown media never matches.
let media_match = match mq.media_type {
MediaQueryType::MediaType(MediaType::Unknown) => false,
MediaQueryType::MediaType(media_type) => media_type == device.media_type,
MediaQueryType::All => true,
};
// Check if all conditions match (AND condition)
let query_match = media_match && mq.expressions.iter().all(|expression| {
match *expression {
Expression::Width(ref value) =>
value.to_computed_range(viewport_size).evaluate(viewport_size.width),
}
});
// Apply the logical NOT qualifier to the result
match mq.qualifier {
Some(Qualifier::Not) =>!query_match,
_ => query_match,
}
})
}
}
|
pub enum Range<T> {
Min(T),
Max(T),
//Eq(T), // FIXME: Implement parsing support for equality then re-enable this.
}
|
random_line_split
|
heaphistogram.rs
|
/*
* Copyright 2015-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use env::JvmTI;
use std::io::Write;
use std::io::stdout;
use heap::tagger::Tagger;
use heap::tagger::Tag;
use heap::stats::Stats;
use heap::stats::Record;
use heap::stats::Print;
pub struct HeapHistogram<T: JvmTI + Clone> {
jvmti: T,
max_entries: usize
}
impl<T: JvmTI + Clone> ::std::fmt::Display for HeapHistogram<T> {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "HeapHistogram")
}
}
impl<T: JvmTI + Clone> HeapHistogram<T> {
pub fn new(mut jvmti: T, max_entries: usize) -> Result<Self, ::err::Error> {
jvmti.enable_object_tagging()?;
Ok(Self {
jvmti: jvmti.clone(),
max_entries: max_entries
})
}
fn print(&self, writer: &mut dyn Write) -> Result<(), ::err::Error> {
let mut tagger = Tagger::new();
// Tag all loaded classes so we can determine each object's class signature during heap traversal.
self.jvmti.tag_loaded_classes(&mut tagger)?;
let mut heap_stats = Stats::new(self.max_entries);
// Traverse the live heap and add objects to the heap stats.
self.jvmti.traverse_live_heap(|class_tag: ::jvmti::jlong, size: ::jvmti::jlong| {
if let Some(sig) = tagger.class_signature(class_tag) {
heap_stats.recordObject(sig, size);
}
})?;
heap_stats.print(writer);
Ok(())
}
}
impl<T: JvmTI + Clone> super::Action for HeapHistogram<T> {
fn on_oom(&self, _: ::env::JniEnv, _: ::jvmti::jint) -> Result<(), ::err::Error> {
self.print(&mut stdout())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::HeapHistogram;
use ::env::JvmTI;
use ::env::FnResourceExhausted;
use std::cell::RefCell;
use ::heap::tagger::Tag;
const test_error_code: ::jvmti::jint = 54;
#[test]
fn new_calls_enable_object_tagging() {
let mockJvmti = MockJvmti::new();
let hh = HeapHistogram::new(mockJvmti, 100);
assert!(hh.is_ok());
assert!((hh.expect("unexpected error").jvmti as MockJvmti).object_tagging_enabled);
}
#[test]
fn new_percolates_enable_object_tagging_failure() {
let mut mockJvmti = MockJvmti::new();
mockJvmti.object_tagging_enabled_result = test_error_code;
let hh = HeapHistogram::new(mockJvmti, 100);
assert!(hh.is_err());
match hh.err().expect("unexpected error") {
::err::Error::JvmTi(msg, rc) => {
assert_eq!(msg, "test error".to_string());
assert_eq!(rc, test_error_code);
}
_ => assert!(false, "wrong error value"),
}
}
#[test]
fn print_works() {
let mockJvmti = MockJvmti::new();
let hh = HeapHistogram::new(mockJvmti, 100);
let mut buff: Vec<u8> = Vec::new();
hh.expect("invalid HeapHistogram").print(&mut buff).expect("print failed");
let string_buff = String::from_utf8(buff).expect("invalid UTF-8");
assert_eq!(string_buff, "| Instance Count | Total Bytes | Class Name |\n| 2 | 200 | sig2 |\n| 1 | 10 | sig1 |\n".to_string());
}
#[derive(Clone, Copy, Default)]
struct Classes {
t1: ::jvmti::jlong,
t2: ::jvmti::jlong
}
|
classes: RefCell<Classes>
}
impl MockJvmti {
fn new() -> MockJvmti {
MockJvmti {
object_tagging_enabled_result: 0,
object_tagging_enabled: false,
classes: RefCell::new(Default::default())
}
}
}
impl JvmTI for MockJvmti {
fn on_resource_exhausted(&mut self, _: FnResourceExhausted) -> Result<(), ::err::Error> {
unimplemented!()
}
fn enable_object_tagging(&mut self) -> Result<(), ::err::Error> {
self.object_tagging_enabled = true;
if self.object_tagging_enabled_result == 0 {
Ok(())
} else {
Err(::err::Error::JvmTi("test error".to_string(), self.object_tagging_enabled_result))
}
}
fn tag_loaded_classes(&self, tagger: &mut Tag) -> Result<(), ::err::Error> {
let mut c = self.classes.borrow_mut();
c.t1 = tagger.class_tag(&"sig1".to_string());
c.t2 = tagger.class_tag(&"sig2".to_string());
Ok(())
}
fn traverse_live_heap<F>(&self, mut closure: F) -> Result<(), ::err::Error>
where F: FnMut(::jvmti::jlong, ::jvmti::jlong) {
let c = self.classes.borrow();
closure(c.t1, 10);
closure(c.t2, 100);
closure(c.t2, 100);
Ok(())
}
}
}
|
#[derive(Clone)]
struct MockJvmti {
pub object_tagging_enabled_result: ::jvmti::jint,
pub object_tagging_enabled: bool,
|
random_line_split
|
heaphistogram.rs
|
/*
* Copyright 2015-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use env::JvmTI;
use std::io::Write;
use std::io::stdout;
use heap::tagger::Tagger;
use heap::tagger::Tag;
use heap::stats::Stats;
use heap::stats::Record;
use heap::stats::Print;
pub struct HeapHistogram<T: JvmTI + Clone> {
jvmti: T,
max_entries: usize
}
impl<T: JvmTI + Clone> ::std::fmt::Display for HeapHistogram<T> {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "HeapHistogram")
}
}
impl<T: JvmTI + Clone> HeapHistogram<T> {
pub fn new(mut jvmti: T, max_entries: usize) -> Result<Self, ::err::Error> {
jvmti.enable_object_tagging()?;
Ok(Self {
jvmti: jvmti.clone(),
max_entries: max_entries
})
}
fn print(&self, writer: &mut dyn Write) -> Result<(), ::err::Error> {
let mut tagger = Tagger::new();
// Tag all loaded classes so we can determine each object's class signature during heap traversal.
self.jvmti.tag_loaded_classes(&mut tagger)?;
let mut heap_stats = Stats::new(self.max_entries);
// Traverse the live heap and add objects to the heap stats.
self.jvmti.traverse_live_heap(|class_tag: ::jvmti::jlong, size: ::jvmti::jlong| {
if let Some(sig) = tagger.class_signature(class_tag) {
heap_stats.recordObject(sig, size);
}
})?;
heap_stats.print(writer);
Ok(())
}
}
impl<T: JvmTI + Clone> super::Action for HeapHistogram<T> {
fn on_oom(&self, _: ::env::JniEnv, _: ::jvmti::jint) -> Result<(), ::err::Error> {
self.print(&mut stdout())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::HeapHistogram;
use ::env::JvmTI;
use ::env::FnResourceExhausted;
use std::cell::RefCell;
use ::heap::tagger::Tag;
const test_error_code: ::jvmti::jint = 54;
#[test]
fn new_calls_enable_object_tagging() {
let mockJvmti = MockJvmti::new();
let hh = HeapHistogram::new(mockJvmti, 100);
assert!(hh.is_ok());
assert!((hh.expect("unexpected error").jvmti as MockJvmti).object_tagging_enabled);
}
#[test]
fn new_percolates_enable_object_tagging_failure() {
let mut mockJvmti = MockJvmti::new();
mockJvmti.object_tagging_enabled_result = test_error_code;
let hh = HeapHistogram::new(mockJvmti, 100);
assert!(hh.is_err());
match hh.err().expect("unexpected error") {
::err::Error::JvmTi(msg, rc) => {
assert_eq!(msg, "test error".to_string());
assert_eq!(rc, test_error_code);
}
_ => assert!(false, "wrong error value"),
}
}
#[test]
fn print_works() {
let mockJvmti = MockJvmti::new();
let hh = HeapHistogram::new(mockJvmti, 100);
let mut buff: Vec<u8> = Vec::new();
hh.expect("invalid HeapHistogram").print(&mut buff).expect("print failed");
let string_buff = String::from_utf8(buff).expect("invalid UTF-8");
assert_eq!(string_buff, "| Instance Count | Total Bytes | Class Name |\n| 2 | 200 | sig2 |\n| 1 | 10 | sig1 |\n".to_string());
}
#[derive(Clone, Copy, Default)]
struct Classes {
t1: ::jvmti::jlong,
t2: ::jvmti::jlong
}
#[derive(Clone)]
struct MockJvmti {
pub object_tagging_enabled_result: ::jvmti::jint,
pub object_tagging_enabled: bool,
classes: RefCell<Classes>
}
impl MockJvmti {
fn
|
() -> MockJvmti {
MockJvmti {
object_tagging_enabled_result: 0,
object_tagging_enabled: false,
classes: RefCell::new(Default::default())
}
}
}
impl JvmTI for MockJvmti {
fn on_resource_exhausted(&mut self, _: FnResourceExhausted) -> Result<(), ::err::Error> {
unimplemented!()
}
fn enable_object_tagging(&mut self) -> Result<(), ::err::Error> {
self.object_tagging_enabled = true;
if self.object_tagging_enabled_result == 0 {
Ok(())
} else {
Err(::err::Error::JvmTi("test error".to_string(), self.object_tagging_enabled_result))
}
}
fn tag_loaded_classes(&self, tagger: &mut Tag) -> Result<(), ::err::Error> {
let mut c = self.classes.borrow_mut();
c.t1 = tagger.class_tag(&"sig1".to_string());
c.t2 = tagger.class_tag(&"sig2".to_string());
Ok(())
}
fn traverse_live_heap<F>(&self, mut closure: F) -> Result<(), ::err::Error>
where F: FnMut(::jvmti::jlong, ::jvmti::jlong) {
let c = self.classes.borrow();
closure(c.t1, 10);
closure(c.t2, 100);
closure(c.t2, 100);
Ok(())
}
}
}
|
new
|
identifier_name
|
heaphistogram.rs
|
/*
* Copyright 2015-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use env::JvmTI;
use std::io::Write;
use std::io::stdout;
use heap::tagger::Tagger;
use heap::tagger::Tag;
use heap::stats::Stats;
use heap::stats::Record;
use heap::stats::Print;
pub struct HeapHistogram<T: JvmTI + Clone> {
jvmti: T,
max_entries: usize
}
impl<T: JvmTI + Clone> ::std::fmt::Display for HeapHistogram<T> {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "HeapHistogram")
}
}
impl<T: JvmTI + Clone> HeapHistogram<T> {
pub fn new(mut jvmti: T, max_entries: usize) -> Result<Self, ::err::Error> {
jvmti.enable_object_tagging()?;
Ok(Self {
jvmti: jvmti.clone(),
max_entries: max_entries
})
}
fn print(&self, writer: &mut dyn Write) -> Result<(), ::err::Error> {
let mut tagger = Tagger::new();
// Tag all loaded classes so we can determine each object's class signature during heap traversal.
self.jvmti.tag_loaded_classes(&mut tagger)?;
let mut heap_stats = Stats::new(self.max_entries);
// Traverse the live heap and add objects to the heap stats.
self.jvmti.traverse_live_heap(|class_tag: ::jvmti::jlong, size: ::jvmti::jlong| {
if let Some(sig) = tagger.class_signature(class_tag) {
heap_stats.recordObject(sig, size);
}
})?;
heap_stats.print(writer);
Ok(())
}
}
impl<T: JvmTI + Clone> super::Action for HeapHistogram<T> {
fn on_oom(&self, _: ::env::JniEnv, _: ::jvmti::jint) -> Result<(), ::err::Error>
|
}
#[cfg(test)]
mod tests {
use super::HeapHistogram;
use ::env::JvmTI;
use ::env::FnResourceExhausted;
use std::cell::RefCell;
use ::heap::tagger::Tag;
const test_error_code: ::jvmti::jint = 54;
#[test]
fn new_calls_enable_object_tagging() {
let mockJvmti = MockJvmti::new();
let hh = HeapHistogram::new(mockJvmti, 100);
assert!(hh.is_ok());
assert!((hh.expect("unexpected error").jvmti as MockJvmti).object_tagging_enabled);
}
#[test]
fn new_percolates_enable_object_tagging_failure() {
let mut mockJvmti = MockJvmti::new();
mockJvmti.object_tagging_enabled_result = test_error_code;
let hh = HeapHistogram::new(mockJvmti, 100);
assert!(hh.is_err());
match hh.err().expect("unexpected error") {
::err::Error::JvmTi(msg, rc) => {
assert_eq!(msg, "test error".to_string());
assert_eq!(rc, test_error_code);
}
_ => assert!(false, "wrong error value"),
}
}
#[test]
fn print_works() {
let mockJvmti = MockJvmti::new();
let hh = HeapHistogram::new(mockJvmti, 100);
let mut buff: Vec<u8> = Vec::new();
hh.expect("invalid HeapHistogram").print(&mut buff).expect("print failed");
let string_buff = String::from_utf8(buff).expect("invalid UTF-8");
assert_eq!(string_buff, "| Instance Count | Total Bytes | Class Name |\n| 2 | 200 | sig2 |\n| 1 | 10 | sig1 |\n".to_string());
}
#[derive(Clone, Copy, Default)]
struct Classes {
t1: ::jvmti::jlong,
t2: ::jvmti::jlong
}
#[derive(Clone)]
struct MockJvmti {
pub object_tagging_enabled_result: ::jvmti::jint,
pub object_tagging_enabled: bool,
classes: RefCell<Classes>
}
impl MockJvmti {
fn new() -> MockJvmti {
MockJvmti {
object_tagging_enabled_result: 0,
object_tagging_enabled: false,
classes: RefCell::new(Default::default())
}
}
}
impl JvmTI for MockJvmti {
fn on_resource_exhausted(&mut self, _: FnResourceExhausted) -> Result<(), ::err::Error> {
unimplemented!()
}
fn enable_object_tagging(&mut self) -> Result<(), ::err::Error> {
self.object_tagging_enabled = true;
if self.object_tagging_enabled_result == 0 {
Ok(())
} else {
Err(::err::Error::JvmTi("test error".to_string(), self.object_tagging_enabled_result))
}
}
fn tag_loaded_classes(&self, tagger: &mut Tag) -> Result<(), ::err::Error> {
let mut c = self.classes.borrow_mut();
c.t1 = tagger.class_tag(&"sig1".to_string());
c.t2 = tagger.class_tag(&"sig2".to_string());
Ok(())
}
fn traverse_live_heap<F>(&self, mut closure: F) -> Result<(), ::err::Error>
where F: FnMut(::jvmti::jlong, ::jvmti::jlong) {
let c = self.classes.borrow();
closure(c.t1, 10);
closure(c.t2, 100);
closure(c.t2, 100);
Ok(())
}
}
}
|
{
self.print(&mut stdout())?;
Ok(())
}
|
identifier_body
|
display.rs
|
use alloc::boxed::Box;
use collections::String;
use common::event::Event;
use core::{cmp, ptr};
use core::mem::size_of;
use fs::{KScheme, Resource, ResourceSeek, Url};
use system::error::{Error, Result, EACCES, EBADF, ENOENT, EINVAL};
use system::graphics::fast_copy;
/// A display resource
pub struct DisplayResource {
/// Path
path: String,
/// Seek
seek: usize,
}
impl Resource for DisplayResource {
fn dup(&self) -> Result<Box<Resource>> {
Ok(Box::new(DisplayResource {
path: self.path.clone(),
seek: self.seek
}))
}
/// Return the URL for display resource
fn path(&self, buf: &mut [u8]) -> Result<usize> {
let path = self.path.as_bytes();
for (b, p) in buf.iter_mut().zip(path.iter()) {
*b = *p;
}
Ok(cmp::min(buf.len(), path.len()))
}
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
if buf.len() >= size_of::<Event>() {
let event = ::env().events.receive("DisplayResource::read");
unsafe { ptr::write(buf.as_mut_ptr().offset(0isize) as *mut Event, event) };
let mut i = size_of::<Event>();
while i + size_of::<Event>() <= buf.len() {
if let Some(event) = unsafe { ::env().events.inner() }.pop_front() {
unsafe { ptr::write(buf.as_mut_ptr().offset(i as isize) as *mut Event, event) };
i += size_of::<Event>();
} else {
break;
}
}
Ok(i)
} else {
Err(Error::new(EINVAL))
}
}
fn write(&mut self, buf: &[u8]) -> Result<usize> {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
let size = cmp::max(0, cmp::min(display.size as isize - self.seek as isize, (buf.len()/4) as isize)) as usize;
if size > 0 {
unsafe {
fast_copy(display.onscreen.offset(self.seek as isize), buf.as_ptr() as *const u32, size);
}
}
Ok(size)
} else {
Err(Error::new(EBADF))
}
}
fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
self.seek = match pos {
ResourceSeek::Start(offset) => cmp::min(display.size, cmp::max(0, offset)),
ResourceSeek::Current(offset) => cmp::min(display.size, cmp::max(0, self.seek as isize + offset) as usize),
ResourceSeek::End(offset) => cmp::min(display.size, cmp::max(0, display.size as isize + offset) as usize),
};
Ok(self.seek)
} else {
Err(Error::new(EBADF))
}
}
fn sync(&mut self) -> Result<()>
|
}
pub struct DisplayScheme;
impl KScheme for DisplayScheme {
fn scheme(&self) -> &str {
"display"
}
fn open(&mut self, url: Url, _: usize) -> Result<Box<Resource>> {
if url.reference() == "manager" {
let console = unsafe { &mut *::env().console.get() };
if console.draw {
console.draw = false;
if let Some(ref display) = console.display {
Ok(box DisplayResource {
path: format!("display:{}/{}", display.width, display.height),
seek: 0,
})
} else {
Err(Error::new(ENOENT))
}
} else {
Err(Error::new(EACCES))
}
} else {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
Ok(box DisplayResource {
path: format!("display:{}/{}", display.width, display.height),
seek: 0,
})
} else {
Err(Error::new(ENOENT))
}
}
}
}
|
{
Ok(())
}
|
identifier_body
|
display.rs
|
use alloc::boxed::Box;
use collections::String;
use common::event::Event;
use core::{cmp, ptr};
use core::mem::size_of;
use fs::{KScheme, Resource, ResourceSeek, Url};
use system::error::{Error, Result, EACCES, EBADF, ENOENT, EINVAL};
use system::graphics::fast_copy;
/// A display resource
pub struct DisplayResource {
/// Path
path: String,
/// Seek
seek: usize,
}
impl Resource for DisplayResource {
fn dup(&self) -> Result<Box<Resource>> {
Ok(Box::new(DisplayResource {
path: self.path.clone(),
seek: self.seek
}))
}
/// Return the URL for display resource
fn path(&self, buf: &mut [u8]) -> Result<usize> {
let path = self.path.as_bytes();
for (b, p) in buf.iter_mut().zip(path.iter()) {
*b = *p;
}
Ok(cmp::min(buf.len(), path.len()))
}
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
if buf.len() >= size_of::<Event>() {
let event = ::env().events.receive("DisplayResource::read");
unsafe { ptr::write(buf.as_mut_ptr().offset(0isize) as *mut Event, event) };
let mut i = size_of::<Event>();
while i + size_of::<Event>() <= buf.len() {
if let Some(event) = unsafe { ::env().events.inner() }.pop_front() {
unsafe { ptr::write(buf.as_mut_ptr().offset(i as isize) as *mut Event, event) };
i += size_of::<Event>();
} else {
break;
|
Ok(i)
} else {
Err(Error::new(EINVAL))
}
}
fn write(&mut self, buf: &[u8]) -> Result<usize> {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
let size = cmp::max(0, cmp::min(display.size as isize - self.seek as isize, (buf.len()/4) as isize)) as usize;
if size > 0 {
unsafe {
fast_copy(display.onscreen.offset(self.seek as isize), buf.as_ptr() as *const u32, size);
}
}
Ok(size)
} else {
Err(Error::new(EBADF))
}
}
fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
self.seek = match pos {
ResourceSeek::Start(offset) => cmp::min(display.size, cmp::max(0, offset)),
ResourceSeek::Current(offset) => cmp::min(display.size, cmp::max(0, self.seek as isize + offset) as usize),
ResourceSeek::End(offset) => cmp::min(display.size, cmp::max(0, display.size as isize + offset) as usize),
};
Ok(self.seek)
} else {
Err(Error::new(EBADF))
}
}
fn sync(&mut self) -> Result<()> {
Ok(())
}
}
pub struct DisplayScheme;
impl KScheme for DisplayScheme {
fn scheme(&self) -> &str {
"display"
}
fn open(&mut self, url: Url, _: usize) -> Result<Box<Resource>> {
if url.reference() == "manager" {
let console = unsafe { &mut *::env().console.get() };
if console.draw {
console.draw = false;
if let Some(ref display) = console.display {
Ok(box DisplayResource {
path: format!("display:{}/{}", display.width, display.height),
seek: 0,
})
} else {
Err(Error::new(ENOENT))
}
} else {
Err(Error::new(EACCES))
}
} else {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
Ok(box DisplayResource {
path: format!("display:{}/{}", display.width, display.height),
seek: 0,
})
} else {
Err(Error::new(ENOENT))
}
}
}
}
|
}
}
|
random_line_split
|
display.rs
|
use alloc::boxed::Box;
use collections::String;
use common::event::Event;
use core::{cmp, ptr};
use core::mem::size_of;
use fs::{KScheme, Resource, ResourceSeek, Url};
use system::error::{Error, Result, EACCES, EBADF, ENOENT, EINVAL};
use system::graphics::fast_copy;
/// A display resource
pub struct DisplayResource {
/// Path
path: String,
/// Seek
seek: usize,
}
impl Resource for DisplayResource {
fn dup(&self) -> Result<Box<Resource>> {
Ok(Box::new(DisplayResource {
path: self.path.clone(),
seek: self.seek
}))
}
/// Return the URL for display resource
fn path(&self, buf: &mut [u8]) -> Result<usize> {
let path = self.path.as_bytes();
for (b, p) in buf.iter_mut().zip(path.iter()) {
*b = *p;
}
Ok(cmp::min(buf.len(), path.len()))
}
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
if buf.len() >= size_of::<Event>() {
let event = ::env().events.receive("DisplayResource::read");
unsafe { ptr::write(buf.as_mut_ptr().offset(0isize) as *mut Event, event) };
let mut i = size_of::<Event>();
while i + size_of::<Event>() <= buf.len() {
if let Some(event) = unsafe { ::env().events.inner() }.pop_front() {
unsafe { ptr::write(buf.as_mut_ptr().offset(i as isize) as *mut Event, event) };
i += size_of::<Event>();
} else {
break;
}
}
Ok(i)
} else {
Err(Error::new(EINVAL))
}
}
fn
|
(&mut self, buf: &[u8]) -> Result<usize> {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
let size = cmp::max(0, cmp::min(display.size as isize - self.seek as isize, (buf.len()/4) as isize)) as usize;
if size > 0 {
unsafe {
fast_copy(display.onscreen.offset(self.seek as isize), buf.as_ptr() as *const u32, size);
}
}
Ok(size)
} else {
Err(Error::new(EBADF))
}
}
fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
self.seek = match pos {
ResourceSeek::Start(offset) => cmp::min(display.size, cmp::max(0, offset)),
ResourceSeek::Current(offset) => cmp::min(display.size, cmp::max(0, self.seek as isize + offset) as usize),
ResourceSeek::End(offset) => cmp::min(display.size, cmp::max(0, display.size as isize + offset) as usize),
};
Ok(self.seek)
} else {
Err(Error::new(EBADF))
}
}
fn sync(&mut self) -> Result<()> {
Ok(())
}
}
pub struct DisplayScheme;
impl KScheme for DisplayScheme {
fn scheme(&self) -> &str {
"display"
}
fn open(&mut self, url: Url, _: usize) -> Result<Box<Resource>> {
if url.reference() == "manager" {
let console = unsafe { &mut *::env().console.get() };
if console.draw {
console.draw = false;
if let Some(ref display) = console.display {
Ok(box DisplayResource {
path: format!("display:{}/{}", display.width, display.height),
seek: 0,
})
} else {
Err(Error::new(ENOENT))
}
} else {
Err(Error::new(EACCES))
}
} else {
let console = unsafe { & *::env().console.get() };
if let Some(ref display) = console.display {
Ok(box DisplayResource {
path: format!("display:{}/{}", display.width, display.height),
seek: 0,
})
} else {
Err(Error::new(ENOENT))
}
}
}
}
|
write
|
identifier_name
|
base.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::iter::range_step;
use stb_image = stb_image::image;
use png;
// FIXME: Images must not be copied every frame. Instead we should atomically
// reference count them.
pub type Image = png::Image;
pub fn
|
(width: u32, height: u32, color_type: png::ColorType, data: ~[u8]) -> Image {
png::Image {
width: width,
height: height,
color_type: color_type,
pixels: data,
}
}
static TEST_IMAGE: &'static [u8] = include_bin!("test.jpeg");
pub fn test_image_bin() -> ~[u8] {
TEST_IMAGE.into_owned()
}
// TODO(pcwalton): Speed up with SIMD, or better yet, find some way to not do this.
fn byte_swap(color_type: png::ColorType, data: &mut [u8]) {
match color_type {
png::RGBA8 => {
let length = data.len();
for i in range_step(0, length, 4) {
let r = data[i + 2];
data[i + 2] = data[i + 0];
data[i + 0] = r;
}
}
_ => {}
}
}
pub fn load_from_memory(buffer: &[u8]) -> Option<Image> {
if png::is_png(buffer) {
match png::load_png_from_memory(buffer) {
Ok(mut png_image) => {
byte_swap(png_image.color_type, png_image.pixels);
Some(png_image)
}
Err(_err) => None,
}
} else {
// For non-png images, we use stb_image
// Can't remember why we do this. Maybe it's what cairo wants
static FORCE_DEPTH: uint = 4;
match stb_image::load_from_memory_with_depth(buffer, FORCE_DEPTH, true) {
stb_image::ImageU8(mut image) => {
assert!(image.depth == 4);
byte_swap(png::RGBA8, image.data);
Some(Image(image.width as u32, image.height as u32, png::RGBA8, image.data))
}
stb_image::ImageF32(_image) => fail!(~"HDR images not implemented"),
stb_image::Error => None
}
}
}
|
Image
|
identifier_name
|
base.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::iter::range_step;
use stb_image = stb_image::image;
use png;
// FIXME: Images must not be copied every frame. Instead we should atomically
// reference count them.
pub type Image = png::Image;
pub fn Image(width: u32, height: u32, color_type: png::ColorType, data: ~[u8]) -> Image {
png::Image {
width: width,
height: height,
color_type: color_type,
pixels: data,
}
}
static TEST_IMAGE: &'static [u8] = include_bin!("test.jpeg");
pub fn test_image_bin() -> ~[u8]
|
// TODO(pcwalton): Speed up with SIMD, or better yet, find some way to not do this.
fn byte_swap(color_type: png::ColorType, data: &mut [u8]) {
match color_type {
png::RGBA8 => {
let length = data.len();
for i in range_step(0, length, 4) {
let r = data[i + 2];
data[i + 2] = data[i + 0];
data[i + 0] = r;
}
}
_ => {}
}
}
pub fn load_from_memory(buffer: &[u8]) -> Option<Image> {
if png::is_png(buffer) {
match png::load_png_from_memory(buffer) {
Ok(mut png_image) => {
byte_swap(png_image.color_type, png_image.pixels);
Some(png_image)
}
Err(_err) => None,
}
} else {
// For non-png images, we use stb_image
// Can't remember why we do this. Maybe it's what cairo wants
static FORCE_DEPTH: uint = 4;
match stb_image::load_from_memory_with_depth(buffer, FORCE_DEPTH, true) {
stb_image::ImageU8(mut image) => {
assert!(image.depth == 4);
byte_swap(png::RGBA8, image.data);
Some(Image(image.width as u32, image.height as u32, png::RGBA8, image.data))
}
stb_image::ImageF32(_image) => fail!(~"HDR images not implemented"),
stb_image::Error => None
}
}
}
|
{
TEST_IMAGE.into_owned()
}
|
identifier_body
|
base.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::iter::range_step;
use stb_image = stb_image::image;
use png;
// FIXME: Images must not be copied every frame. Instead we should atomically
// reference count them.
pub type Image = png::Image;
|
color_type: color_type,
pixels: data,
}
}
static TEST_IMAGE: &'static [u8] = include_bin!("test.jpeg");
pub fn test_image_bin() -> ~[u8] {
TEST_IMAGE.into_owned()
}
// TODO(pcwalton): Speed up with SIMD, or better yet, find some way to not do this.
fn byte_swap(color_type: png::ColorType, data: &mut [u8]) {
match color_type {
png::RGBA8 => {
let length = data.len();
for i in range_step(0, length, 4) {
let r = data[i + 2];
data[i + 2] = data[i + 0];
data[i + 0] = r;
}
}
_ => {}
}
}
pub fn load_from_memory(buffer: &[u8]) -> Option<Image> {
if png::is_png(buffer) {
match png::load_png_from_memory(buffer) {
Ok(mut png_image) => {
byte_swap(png_image.color_type, png_image.pixels);
Some(png_image)
}
Err(_err) => None,
}
} else {
// For non-png images, we use stb_image
// Can't remember why we do this. Maybe it's what cairo wants
static FORCE_DEPTH: uint = 4;
match stb_image::load_from_memory_with_depth(buffer, FORCE_DEPTH, true) {
stb_image::ImageU8(mut image) => {
assert!(image.depth == 4);
byte_swap(png::RGBA8, image.data);
Some(Image(image.width as u32, image.height as u32, png::RGBA8, image.data))
}
stb_image::ImageF32(_image) => fail!(~"HDR images not implemented"),
stb_image::Error => None
}
}
}
|
pub fn Image(width: u32, height: u32, color_type: png::ColorType, data: ~[u8]) -> Image {
png::Image {
width: width,
height: height,
|
random_line_split
|
ws2spi.rs
|
// Copyright © 2015-2017 winapi-rs developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
//! Definitions to be used with the WinSock service provider
use shared::basetsd::DWORD_PTR;
use shared::guiddef::LPGUID;
use shared::minwindef::{DWORD, INT, LPDWORD, LPHANDLE, LPVOID, WORD};
use um::winnt::{HANDLE, WCHAR};
use um::winsock2::{LPWSAQUERYSET2W, WSAESETSERVICEOP};
pub const WSPDESCRIPTION_LEN: usize = 255;
STRUCT!{struct WSPDATA {
wVersion: WORD,
wHighVersion: WORD,
szDescription: [WCHAR; WSPDESCRIPTION_LEN + 1],
}}
|
ThreadHandle: HANDLE,
Reserved: DWORD_PTR,
}}
pub type LPWSATHREADID = *mut WSATHREADID;
ENUM!{enum WSC_PROVIDER_INFO_TYPE {
ProviderInfoLspCategories,
ProviderInfoAudit,
}}
FN!{stdcall LPNSPV2STARTUP(
lpProviderId: LPGUID,
ppvClientSessionArg: *mut LPVOID,
) -> INT}
FN!{stdcall LPNSPV2CLEANUP(
lpProviderId: LPGUID,
pvClientSessionArg: LPVOID,
) -> INT}
FN!{stdcall LPNSPV2LOOKUPSERVICEBEGIN(
lpProviderId: LPGUID,
lpqsRestrictions: LPWSAQUERYSET2W,
dwControlFlags: DWORD,
lpvClientSessionArg: LPVOID,
lphLookup: LPHANDLE,
) -> INT}
FN!{stdcall LPNSPV2LOOKUPSERVICENEXTEX(
hAsyncCall: HANDLE,
hLookup: HANDLE,
dwControlFlags: DWORD,
lpdwBufferLength: LPDWORD,
lpqsResults: LPWSAQUERYSET2W,
) -> ()}
FN!{stdcall LPNSPV2LOOKUPSERVICEEND(
hLookup: HANDLE,
) -> INT}
FN!{stdcall LPNSPV2SETSERVICEEX(
hAsyncCall: HANDLE,
lpProviderId: LPGUID,
lpqsRegInfo: LPWSAQUERYSET2W,
essOperation: WSAESETSERVICEOP,
dwControlFlags: DWORD,
lpvClientSessionArg: LPVOID,
) -> ()}
FN!{stdcall LPNSPV2CLIENTSESSIONRUNDOWN(
lpProviderId: LPGUID,
pvClientSessionArg: LPVOID,
) -> ()}
STRUCT!{struct NSPV2_ROUTINE {
cbSize: DWORD,
dwMajorVersion: DWORD,
dwMinorVersion: DWORD,
NSPv2Startup: LPNSPV2STARTUP,
NSPv2Cleanup: LPNSPV2CLEANUP,
NSPv2LookupServiceBegin: LPNSPV2LOOKUPSERVICEBEGIN,
NSPv2LookupServiceNextEx: LPNSPV2LOOKUPSERVICENEXTEX,
NSPv2LookupServiceEnd: LPNSPV2LOOKUPSERVICEEND,
NSPv2SetServiceEx: LPNSPV2SETSERVICEEX,
NSPv2ClientSessionRundown: LPNSPV2CLIENTSESSIONRUNDOWN,
}}
pub type PNSPV2_ROUTINE = *mut NSPV2_ROUTINE;
pub type LPNSPV2_ROUTINE = *mut NSPV2_ROUTINE;
pub type PCNSPV2_ROUTINE = *const NSPV2_ROUTINE;
pub type LPCNSPV2_ROUTINE = *const NSPV2_ROUTINE;
|
pub type LPWSPDATA = *mut WSPDATA;
STRUCT!{struct WSATHREADID {
|
random_line_split
|
user32.rs
|
extern crate libc;
use ::types::*;
use std::string::String;
#[link(name = "user32")]
extern "stdcall" {
pub fn RegisterClassExA(
lpWndClass : *mut WNDCLASSEXA
) -> ATOM;
pub fn CreateWindowExA(
dwExStyle: DWORD,
lpClassName: &str,
lpWindowName: &str,
dwStyle: DWORD,
X: i32,
Y: i32,
nWidth: i32,
nHeight: i32,
hWndParent: HWND,
hMenu: HMENU,
hInstance: HINSTANCE,
lpParam: LPVOID
) -> HWND;
pub fn ShowWindow(
hWnd: HWND,
nCmdShow: i32
) -> BOOL;
pub fn DefWindowProcA(
hWnd: HWND,
uMsg: UINT,
wParam: WPARAM,
lParam: LPARAM
) -> LRESULT;
pub fn GetMessageA(
lpMsg: LPMSG,
hWnd: HWND,
wMsgFilterMin: UINT,
wMsgFilterMax: UINT
) -> BOOL;
pub fn TranslateMessage(
lpMsg: LPMSG
) -> BOOL;
|
}
|
pub fn DispatchMessageA(
lpMsg: LPMSG
) -> LRESULT;
|
random_line_split
|
glue.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Glue between WebSocket server and `actix-web` HTTP server.
use actix_web::{
http,
web::{Payload, Query},
FromRequest,
};
use actix_web_actors::ws;
use exonum::blockchain::Blockchain;
use exonum_api::{
self as api,
backends::actix::{self as actix_backend, HttpRequest, RawHandler, RequestHandler},
ApiBackend,
};
use exonum_rust_runtime::api::ServiceApiScope;
use futures::{future, FutureExt};
use std::sync::Arc;
use super::{Session, SharedStateRef, SubscriptionType, TransactionFilter};
use crate::api::ExplorerApi;
impl ExplorerApi {
/// Subscribes to events.
fn handle_ws<Q>(
name: &str,
backend: &mut actix_backend::ApiBuilder,
blockchain: Blockchain,
shared_state: SharedStateRef,
extract_query: Q,
) where
Q: Fn(&HttpRequest) -> SubscriptionType +'static + Clone + Send + Sync,
{
let handler = move |request: HttpRequest, stream: Payload| {
let maybe_address = shared_state.ensure_server(&blockchain);
let address =
maybe_address.ok_or_else(|| api::Error::not_found().title("Server shut down"))?;
let query = extract_query(&request);
ws::start(Session::new(address, vec![query]), &request, stream)
};
let raw_handler =
move |request, stream| future::ready(handler(request, stream)).boxed_local();
backend.raw_handler(RequestHandler {
name: name.to_owned(),
method: http::Method::GET,
inner: Arc::from(raw_handler) as Arc<RawHandler>,
});
}
pub fn wire_ws(&self, shared_state: SharedStateRef, api_scope: &mut ServiceApiScope) -> &Self
|
// `future::Ready<_>` type annotation is redundant; it's here to check that
// `now_or_never` will not fail due to changes in `actix`.
let extract: future::Ready<_> = Query::<TransactionFilter>::extract(request);
extract
.now_or_never()
.expect("`Ready` futures always have their output immediately available")
.map(|query| SubscriptionType::Transactions {
filter: Some(query.into_inner()),
})
.unwrap_or(SubscriptionType::None)
},
);
// Default websocket connection.
Self::handle_ws(
"v1/ws",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state,
|_| SubscriptionType::None,
);
self
}
}
|
{
// Default subscription for blocks.
Self::handle_ws(
"v1/blocks/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|_| SubscriptionType::Blocks,
);
// Default subscription for transactions.
Self::handle_ws(
"v1/transactions/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|request| {
if request.query_string().is_empty() {
return SubscriptionType::Transactions { filter: None };
}
|
identifier_body
|
glue.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Glue between WebSocket server and `actix-web` HTTP server.
use actix_web::{
http,
web::{Payload, Query},
FromRequest,
};
use actix_web_actors::ws;
use exonum::blockchain::Blockchain;
use exonum_api::{
self as api,
backends::actix::{self as actix_backend, HttpRequest, RawHandler, RequestHandler},
ApiBackend,
};
use exonum_rust_runtime::api::ServiceApiScope;
use futures::{future, FutureExt};
use std::sync::Arc;
use super::{Session, SharedStateRef, SubscriptionType, TransactionFilter};
use crate::api::ExplorerApi;
impl ExplorerApi {
/// Subscribes to events.
fn handle_ws<Q>(
name: &str,
backend: &mut actix_backend::ApiBuilder,
blockchain: Blockchain,
shared_state: SharedStateRef,
extract_query: Q,
) where
Q: Fn(&HttpRequest) -> SubscriptionType +'static + Clone + Send + Sync,
{
let handler = move |request: HttpRequest, stream: Payload| {
let maybe_address = shared_state.ensure_server(&blockchain);
let address =
maybe_address.ok_or_else(|| api::Error::not_found().title("Server shut down"))?;
let query = extract_query(&request);
ws::start(Session::new(address, vec![query]), &request, stream)
};
let raw_handler =
move |request, stream| future::ready(handler(request, stream)).boxed_local();
backend.raw_handler(RequestHandler {
name: name.to_owned(),
method: http::Method::GET,
inner: Arc::from(raw_handler) as Arc<RawHandler>,
});
}
pub fn wire_ws(&self, shared_state: SharedStateRef, api_scope: &mut ServiceApiScope) -> &Self {
// Default subscription for blocks.
Self::handle_ws(
"v1/blocks/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|_| SubscriptionType::Blocks,
);
// Default subscription for transactions.
Self::handle_ws(
"v1/transactions/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|request| {
if request.query_string().is_empty()
|
// `future::Ready<_>` type annotation is redundant; it's here to check that
// `now_or_never` will not fail due to changes in `actix`.
let extract: future::Ready<_> = Query::<TransactionFilter>::extract(request);
extract
.now_or_never()
.expect("`Ready` futures always have their output immediately available")
.map(|query| SubscriptionType::Transactions {
filter: Some(query.into_inner()),
})
.unwrap_or(SubscriptionType::None)
},
);
// Default websocket connection.
Self::handle_ws(
"v1/ws",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state,
|_| SubscriptionType::None,
);
self
}
}
|
{
return SubscriptionType::Transactions { filter: None };
}
|
conditional_block
|
glue.rs
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Glue between WebSocket server and `actix-web` HTTP server.
use actix_web::{
http,
web::{Payload, Query},
FromRequest,
};
use actix_web_actors::ws;
use exonum::blockchain::Blockchain;
use exonum_api::{
self as api,
backends::actix::{self as actix_backend, HttpRequest, RawHandler, RequestHandler},
ApiBackend,
};
use exonum_rust_runtime::api::ServiceApiScope;
use futures::{future, FutureExt};
use std::sync::Arc;
use super::{Session, SharedStateRef, SubscriptionType, TransactionFilter};
use crate::api::ExplorerApi;
impl ExplorerApi {
/// Subscribes to events.
fn handle_ws<Q>(
name: &str,
backend: &mut actix_backend::ApiBuilder,
blockchain: Blockchain,
shared_state: SharedStateRef,
extract_query: Q,
) where
Q: Fn(&HttpRequest) -> SubscriptionType +'static + Clone + Send + Sync,
{
let handler = move |request: HttpRequest, stream: Payload| {
let maybe_address = shared_state.ensure_server(&blockchain);
let address =
maybe_address.ok_or_else(|| api::Error::not_found().title("Server shut down"))?;
let query = extract_query(&request);
ws::start(Session::new(address, vec![query]), &request, stream)
};
let raw_handler =
move |request, stream| future::ready(handler(request, stream)).boxed_local();
backend.raw_handler(RequestHandler {
name: name.to_owned(),
method: http::Method::GET,
inner: Arc::from(raw_handler) as Arc<RawHandler>,
});
}
pub fn
|
(&self, shared_state: SharedStateRef, api_scope: &mut ServiceApiScope) -> &Self {
// Default subscription for blocks.
Self::handle_ws(
"v1/blocks/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|_| SubscriptionType::Blocks,
);
// Default subscription for transactions.
Self::handle_ws(
"v1/transactions/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|request| {
if request.query_string().is_empty() {
return SubscriptionType::Transactions { filter: None };
}
// `future::Ready<_>` type annotation is redundant; it's here to check that
// `now_or_never` will not fail due to changes in `actix`.
let extract: future::Ready<_> = Query::<TransactionFilter>::extract(request);
extract
.now_or_never()
.expect("`Ready` futures always have their output immediately available")
.map(|query| SubscriptionType::Transactions {
filter: Some(query.into_inner()),
})
.unwrap_or(SubscriptionType::None)
},
);
// Default websocket connection.
Self::handle_ws(
"v1/ws",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state,
|_| SubscriptionType::None,
);
self
}
}
|
wire_ws
|
identifier_name
|
glue.rs
|
// Copyright 2020 The Exonum Team
//
|
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Glue between WebSocket server and `actix-web` HTTP server.
use actix_web::{
http,
web::{Payload, Query},
FromRequest,
};
use actix_web_actors::ws;
use exonum::blockchain::Blockchain;
use exonum_api::{
self as api,
backends::actix::{self as actix_backend, HttpRequest, RawHandler, RequestHandler},
ApiBackend,
};
use exonum_rust_runtime::api::ServiceApiScope;
use futures::{future, FutureExt};
use std::sync::Arc;
use super::{Session, SharedStateRef, SubscriptionType, TransactionFilter};
use crate::api::ExplorerApi;
impl ExplorerApi {
/// Subscribes to events.
fn handle_ws<Q>(
name: &str,
backend: &mut actix_backend::ApiBuilder,
blockchain: Blockchain,
shared_state: SharedStateRef,
extract_query: Q,
) where
Q: Fn(&HttpRequest) -> SubscriptionType +'static + Clone + Send + Sync,
{
let handler = move |request: HttpRequest, stream: Payload| {
let maybe_address = shared_state.ensure_server(&blockchain);
let address =
maybe_address.ok_or_else(|| api::Error::not_found().title("Server shut down"))?;
let query = extract_query(&request);
ws::start(Session::new(address, vec![query]), &request, stream)
};
let raw_handler =
move |request, stream| future::ready(handler(request, stream)).boxed_local();
backend.raw_handler(RequestHandler {
name: name.to_owned(),
method: http::Method::GET,
inner: Arc::from(raw_handler) as Arc<RawHandler>,
});
}
pub fn wire_ws(&self, shared_state: SharedStateRef, api_scope: &mut ServiceApiScope) -> &Self {
// Default subscription for blocks.
Self::handle_ws(
"v1/blocks/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|_| SubscriptionType::Blocks,
);
// Default subscription for transactions.
Self::handle_ws(
"v1/transactions/subscribe",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state.clone(),
|request| {
if request.query_string().is_empty() {
return SubscriptionType::Transactions { filter: None };
}
// `future::Ready<_>` type annotation is redundant; it's here to check that
// `now_or_never` will not fail due to changes in `actix`.
let extract: future::Ready<_> = Query::<TransactionFilter>::extract(request);
extract
.now_or_never()
.expect("`Ready` futures always have their output immediately available")
.map(|query| SubscriptionType::Transactions {
filter: Some(query.into_inner()),
})
.unwrap_or(SubscriptionType::None)
},
);
// Default websocket connection.
Self::handle_ws(
"v1/ws",
api_scope.web_backend(),
self.blockchain.clone(),
shared_state,
|_| SubscriptionType::None,
);
self
}
}
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
|
random_line_split
|
test_helper.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{mock_tree_store::MockTreeStore, node_type::LeafNode, JellyfishMerkleTree};
use diem_crypto::{
hash::{CryptoHash, SPARSE_MERKLE_PLACEHOLDER_HASH},
HashValue,
};
use diem_crypto_derive::{BCSCryptoHash, CryptoHasher};
use diem_types::{
proof::{SparseMerkleInternalNode, SparseMerkleRangeProof},
transaction::Version,
};
use proptest::{
collection::{btree_map, hash_map, vec},
prelude::*,
};
use proptest_derive::Arbitrary;
use serde::{Deserialize, Serialize};
use std::{
collections::{BTreeMap, HashMap},
ops::Bound,
};
#[derive(
Arbitrary,
BCSCryptoHash,
Clone,
Debug,
Default,
Eq,
PartialEq,
Serialize,
Deserialize,
CryptoHasher,
)]
pub(crate) struct ValueBlob(Vec<u8>);
impl From<Vec<u8>> for ValueBlob {
fn from(blob: Vec<u8>) -> Self {
Self(blob)
}
}
impl crate::Value for ValueBlob {}
impl crate::TestValue for ValueBlob {}
/// Computes the key immediately after `key`.
pub fn plus_one(key: HashValue) -> HashValue {
assert_ne!(key, HashValue::new([0xff; HashValue::LENGTH]));
let mut buf = key.to_vec();
for i in (0..HashValue::LENGTH).rev() {
if buf[i] == 255 {
|
}
}
HashValue::from_slice(&buf).unwrap()
}
/// Initializes a DB with a set of key-value pairs by inserting one key at each version.
pub fn init_mock_db<V>(kvs: &HashMap<HashValue, V>) -> (MockTreeStore<V>, Version)
where
V: crate::TestValue,
{
assert!(!kvs.is_empty());
let db = MockTreeStore::default();
let tree = JellyfishMerkleTree::new(&db);
for (i, (key, value)) in kvs.iter().enumerate() {
let (_root_hash, write_batch) = tree
.put_value_set(vec![(*key, value.clone())], i as Version)
.unwrap();
db.write_tree_update_batch(write_batch).unwrap();
}
(db, (kvs.len() - 1) as Version)
}
pub fn arb_existent_kvs_and_nonexistent_keys<V: crate::TestValue>(
num_kvs: usize,
num_non_existing_keys: usize,
) -> impl Strategy<Value = (HashMap<HashValue, V>, Vec<HashValue>)> {
hash_map(any::<HashValue>(), any::<V>(), 1..num_kvs).prop_flat_map(move |kvs| {
let kvs_clone = kvs.clone();
(
Just(kvs),
vec(
any::<HashValue>().prop_filter(
"Make sure these keys do not exist in the tree.",
move |key|!kvs_clone.contains_key(key),
),
num_non_existing_keys,
),
)
})
}
pub fn test_get_with_proof<V: crate::TestValue>(
(existent_kvs, nonexistent_keys): (HashMap<HashValue, V>, Vec<HashValue>),
) {
let (db, version) = init_mock_db(&existent_kvs);
let tree = JellyfishMerkleTree::new(&db);
test_existent_keys_impl(&tree, version, &existent_kvs);
test_nonexistent_keys_impl(&tree, version, &nonexistent_keys);
}
pub fn arb_kv_pair_with_distinct_last_nibble<V: crate::TestValue>(
) -> impl Strategy<Value = ((HashValue, V), (HashValue, V))> {
(
any::<HashValue>().prop_filter("Can't be 0xffffff...", |key| {
*key!= HashValue::new([0xff; HashValue::LENGTH])
}),
vec(any::<V>(), 2),
)
.prop_map(|(key1, accounts)| {
let key2 = plus_one(key1);
((key1, accounts[0].clone()), (key2, accounts[1].clone()))
})
}
pub fn test_get_with_proof_with_distinct_last_nibble<V: crate::TestValue>(
(kv1, kv2): ((HashValue, V), (HashValue, V)),
) {
let mut kvs = HashMap::new();
kvs.insert(kv1.0, kv1.1);
kvs.insert(kv2.0, kv2.1);
let (db, version) = init_mock_db(&kvs);
let tree = JellyfishMerkleTree::new(&db);
test_existent_keys_impl(&tree, version, &kvs);
}
pub fn arb_tree_with_index<V: crate::TestValue>(
tree_size: usize,
) -> impl Strategy<Value = (BTreeMap<HashValue, V>, usize)> {
btree_map(any::<HashValue>(), any::<V>(), 1..tree_size).prop_flat_map(|btree| {
let len = btree.len();
(Just(btree), 0..len)
})
}
pub fn test_get_range_proof<V: crate::TestValue>((btree, n): (BTreeMap<HashValue, V>, usize)) {
let (db, version) = init_mock_db(&btree.clone().into_iter().collect());
let tree = JellyfishMerkleTree::new(&db);
let nth_key = *btree.keys().nth(n).unwrap();
let proof = tree.get_range_proof(nth_key, version).unwrap();
verify_range_proof(
tree.get_root_hash(version).unwrap(),
btree.into_iter().take(n + 1).collect(),
proof,
);
}
fn test_existent_keys_impl<'a, V: crate::TestValue>(
tree: &JellyfishMerkleTree<'a, MockTreeStore<V>, V>,
version: Version,
existent_kvs: &HashMap<HashValue, V>,
) {
let root_hash = tree.get_root_hash(version).unwrap();
for (key, value) in existent_kvs {
let (account, proof) = tree.get_with_proof(*key, version).unwrap();
assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok());
assert_eq!(account.unwrap(), *value);
}
}
fn test_nonexistent_keys_impl<'a, V: crate::TestValue>(
tree: &JellyfishMerkleTree<'a, MockTreeStore<V>, V>,
version: Version,
nonexistent_keys: &[HashValue],
) {
let root_hash = tree.get_root_hash(version).unwrap();
for key in nonexistent_keys {
let (account, proof) = tree.get_with_proof(*key, version).unwrap();
assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok());
assert!(account.is_none());
}
}
/// Checks if we can construct the expected root hash using the entries in the btree and the proof.
fn verify_range_proof<V: crate::TestValue>(
expected_root_hash: HashValue,
btree: BTreeMap<HashValue, V>,
proof: SparseMerkleRangeProof,
) {
// For example, given the following sparse Merkle tree:
//
// root
// / \
// / \
// / \
// o o
// / \ / \
// a o o h
// / \ / \
// o d e X
// / \ / \
// b c f g
//
// we transform the keys as follows:
// a => 00,
// b => 0100,
// c => 0101,
// d => 011,
// e => 100,
// X => 101
// h => 11
//
// Basically, the suffixes that doesn't affect the common prefix of adjacent leaves are
// discarded. In this example, we assume `btree` has the keys `a` to `e` and the proof has `X`
// and `h` in the siblings.
// Now we want to construct a set of key-value pairs that covers the entire set of leaves. For
// `a` to `e` this is simple -- we just insert them directly into this set. For the rest of the
// leaves, they are represented by the siblings, so we just make up some keys that make sense.
// For example, for `X` we just use 101000... (more zeros omitted), because that is one key
// that would cause `X` to end up in the above position.
let mut btree1 = BTreeMap::new();
for (key, value) in &btree {
let leaf = LeafNode::new(*key, value.clone());
btree1.insert(*key, leaf.hash());
}
// Using the above example, `last_proven_key` is `e`. We look at the path from root to `e`.
// For each 0-bit, there should be a sibling in the proof. And we use the path from root to
// this position, plus a `1` as the key.
let last_proven_key = *btree
.keys()
.last()
.expect("We are proving at least one key.");
for (i, sibling) in last_proven_key
.iter_bits()
.enumerate()
.filter_map(|(i, bit)| if!bit { Some(i) } else { None })
.zip(proof.right_siblings().iter().rev())
{
// This means the `i`-th bit is zero. We take `i` bits from `last_proven_key` and append a
// one to make up the key for this sibling.
let mut buf: Vec<_> = last_proven_key.iter_bits().take(i).collect();
buf.push(true);
// The rest doesn't matter, because they don't affect the position of the node. We just
// add zeros.
buf.resize(HashValue::LENGTH_IN_BITS, false);
let key = HashValue::from_bit_iter(buf.into_iter()).unwrap();
btree1.insert(key, *sibling);
}
// Now we do the transformation (removing the suffixes) described above.
let mut kvs = vec![];
for (key, value) in &btree1 {
// The length of the common prefix of the previous key and the current key.
let prev_common_prefix_len =
prev_key(&btree1, key).map(|pkey| pkey.common_prefix_bits_len(*key));
// The length of the common prefix of the next key and the current key.
let next_common_prefix_len =
next_key(&btree1, key).map(|nkey| nkey.common_prefix_bits_len(*key));
// We take the longest common prefix of the current key and its neighbors. That's how much
// we need to keep.
let len = match (prev_common_prefix_len, next_common_prefix_len) {
(Some(plen), Some(nlen)) => std::cmp::max(plen, nlen),
(Some(plen), None) => plen,
(None, Some(nlen)) => nlen,
(None, None) => 0,
};
let transformed_key: Vec<_> = key.iter_bits().take(len + 1).collect();
kvs.push((transformed_key, *value));
}
assert_eq!(compute_root_hash(kvs), expected_root_hash);
}
/// Reduces the problem by removing the first bit of every key.
fn reduce<'a>(kvs: &'a [(&[bool], HashValue)]) -> Vec<(&'a [bool], HashValue)> {
kvs.iter().map(|(key, value)| (&key[1..], *value)).collect()
}
/// Returns the key immediately before `key` in `btree`.
fn prev_key<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>
where
K: Clone + Ord,
{
btree
.range((Bound::Unbounded, Bound::Excluded(key)))
.next_back()
.map(|(k, _v)| k.clone())
}
fn next_key<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>
where
K: Clone + Ord,
{
btree
.range((Bound::Excluded(key), Bound::Unbounded))
.next()
.map(|(k, _v)| k.clone())
}
/// Computes the root hash of a sparse Merkle tree. `kvs` consists of the entire set of key-value
/// pairs stored in the tree.
fn compute_root_hash(kvs: Vec<(Vec<bool>, HashValue)>) -> HashValue {
let mut kv_ref = vec![];
for (key, value) in &kvs {
kv_ref.push((&key[..], *value));
}
compute_root_hash_impl(kv_ref)
}
fn compute_root_hash_impl(kvs: Vec<(&[bool], HashValue)>) -> HashValue {
assert!(!kvs.is_empty());
// If there is only one entry, it is the root.
if kvs.len() == 1 {
return kvs[0].1;
}
// Otherwise the tree has more than one leaves, which means we can find which ones are in the
// left subtree and which ones are in the right subtree. So we find the first key that starts
// with a 1-bit.
let left_hash;
let right_hash;
match kvs.iter().position(|(key, _value)| key[0]) {
Some(0) => {
// Every key starts with a 1-bit, i.e., they are all in the right subtree.
left_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH;
right_hash = compute_root_hash_impl(reduce(&kvs));
}
Some(index) => {
// Both left subtree and right subtree have some keys.
left_hash = compute_root_hash_impl(reduce(&kvs[..index]));
right_hash = compute_root_hash_impl(reduce(&kvs[index..]));
}
None => {
// Every key starts with a 0-bit, i.e., they are all in the left subtree.
left_hash = compute_root_hash_impl(reduce(&kvs));
right_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH;
}
}
SparseMerkleInternalNode::new(left_hash, right_hash).hash()
}
|
buf[i] = 0;
} else {
buf[i] += 1;
break;
|
random_line_split
|
test_helper.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{mock_tree_store::MockTreeStore, node_type::LeafNode, JellyfishMerkleTree};
use diem_crypto::{
hash::{CryptoHash, SPARSE_MERKLE_PLACEHOLDER_HASH},
HashValue,
};
use diem_crypto_derive::{BCSCryptoHash, CryptoHasher};
use diem_types::{
proof::{SparseMerkleInternalNode, SparseMerkleRangeProof},
transaction::Version,
};
use proptest::{
collection::{btree_map, hash_map, vec},
prelude::*,
};
use proptest_derive::Arbitrary;
use serde::{Deserialize, Serialize};
use std::{
collections::{BTreeMap, HashMap},
ops::Bound,
};
#[derive(
Arbitrary,
BCSCryptoHash,
Clone,
Debug,
Default,
Eq,
PartialEq,
Serialize,
Deserialize,
CryptoHasher,
)]
pub(crate) struct ValueBlob(Vec<u8>);
impl From<Vec<u8>> for ValueBlob {
fn from(blob: Vec<u8>) -> Self {
Self(blob)
}
}
impl crate::Value for ValueBlob {}
impl crate::TestValue for ValueBlob {}
/// Computes the key immediately after `key`.
pub fn plus_one(key: HashValue) -> HashValue {
assert_ne!(key, HashValue::new([0xff; HashValue::LENGTH]));
let mut buf = key.to_vec();
for i in (0..HashValue::LENGTH).rev() {
if buf[i] == 255 {
buf[i] = 0;
} else {
buf[i] += 1;
break;
}
}
HashValue::from_slice(&buf).unwrap()
}
/// Initializes a DB with a set of key-value pairs by inserting one key at each version.
pub fn init_mock_db<V>(kvs: &HashMap<HashValue, V>) -> (MockTreeStore<V>, Version)
where
V: crate::TestValue,
{
assert!(!kvs.is_empty());
let db = MockTreeStore::default();
let tree = JellyfishMerkleTree::new(&db);
for (i, (key, value)) in kvs.iter().enumerate() {
let (_root_hash, write_batch) = tree
.put_value_set(vec![(*key, value.clone())], i as Version)
.unwrap();
db.write_tree_update_batch(write_batch).unwrap();
}
(db, (kvs.len() - 1) as Version)
}
pub fn arb_existent_kvs_and_nonexistent_keys<V: crate::TestValue>(
num_kvs: usize,
num_non_existing_keys: usize,
) -> impl Strategy<Value = (HashMap<HashValue, V>, Vec<HashValue>)> {
hash_map(any::<HashValue>(), any::<V>(), 1..num_kvs).prop_flat_map(move |kvs| {
let kvs_clone = kvs.clone();
(
Just(kvs),
vec(
any::<HashValue>().prop_filter(
"Make sure these keys do not exist in the tree.",
move |key|!kvs_clone.contains_key(key),
),
num_non_existing_keys,
),
)
})
}
pub fn test_get_with_proof<V: crate::TestValue>(
(existent_kvs, nonexistent_keys): (HashMap<HashValue, V>, Vec<HashValue>),
) {
let (db, version) = init_mock_db(&existent_kvs);
let tree = JellyfishMerkleTree::new(&db);
test_existent_keys_impl(&tree, version, &existent_kvs);
test_nonexistent_keys_impl(&tree, version, &nonexistent_keys);
}
pub fn arb_kv_pair_with_distinct_last_nibble<V: crate::TestValue>(
) -> impl Strategy<Value = ((HashValue, V), (HashValue, V))> {
(
any::<HashValue>().prop_filter("Can't be 0xffffff...", |key| {
*key!= HashValue::new([0xff; HashValue::LENGTH])
}),
vec(any::<V>(), 2),
)
.prop_map(|(key1, accounts)| {
let key2 = plus_one(key1);
((key1, accounts[0].clone()), (key2, accounts[1].clone()))
})
}
pub fn test_get_with_proof_with_distinct_last_nibble<V: crate::TestValue>(
(kv1, kv2): ((HashValue, V), (HashValue, V)),
) {
let mut kvs = HashMap::new();
kvs.insert(kv1.0, kv1.1);
kvs.insert(kv2.0, kv2.1);
let (db, version) = init_mock_db(&kvs);
let tree = JellyfishMerkleTree::new(&db);
test_existent_keys_impl(&tree, version, &kvs);
}
pub fn arb_tree_with_index<V: crate::TestValue>(
tree_size: usize,
) -> impl Strategy<Value = (BTreeMap<HashValue, V>, usize)> {
btree_map(any::<HashValue>(), any::<V>(), 1..tree_size).prop_flat_map(|btree| {
let len = btree.len();
(Just(btree), 0..len)
})
}
pub fn test_get_range_proof<V: crate::TestValue>((btree, n): (BTreeMap<HashValue, V>, usize)) {
let (db, version) = init_mock_db(&btree.clone().into_iter().collect());
let tree = JellyfishMerkleTree::new(&db);
let nth_key = *btree.keys().nth(n).unwrap();
let proof = tree.get_range_proof(nth_key, version).unwrap();
verify_range_proof(
tree.get_root_hash(version).unwrap(),
btree.into_iter().take(n + 1).collect(),
proof,
);
}
fn test_existent_keys_impl<'a, V: crate::TestValue>(
tree: &JellyfishMerkleTree<'a, MockTreeStore<V>, V>,
version: Version,
existent_kvs: &HashMap<HashValue, V>,
) {
let root_hash = tree.get_root_hash(version).unwrap();
for (key, value) in existent_kvs {
let (account, proof) = tree.get_with_proof(*key, version).unwrap();
assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok());
assert_eq!(account.unwrap(), *value);
}
}
fn test_nonexistent_keys_impl<'a, V: crate::TestValue>(
tree: &JellyfishMerkleTree<'a, MockTreeStore<V>, V>,
version: Version,
nonexistent_keys: &[HashValue],
) {
let root_hash = tree.get_root_hash(version).unwrap();
for key in nonexistent_keys {
let (account, proof) = tree.get_with_proof(*key, version).unwrap();
assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok());
assert!(account.is_none());
}
}
/// Checks if we can construct the expected root hash using the entries in the btree and the proof.
fn verify_range_proof<V: crate::TestValue>(
expected_root_hash: HashValue,
btree: BTreeMap<HashValue, V>,
proof: SparseMerkleRangeProof,
) {
// For example, given the following sparse Merkle tree:
//
// root
// / \
// / \
// / \
// o o
// / \ / \
// a o o h
// / \ / \
// o d e X
// / \ / \
// b c f g
//
// we transform the keys as follows:
// a => 00,
// b => 0100,
// c => 0101,
// d => 011,
// e => 100,
// X => 101
// h => 11
//
// Basically, the suffixes that doesn't affect the common prefix of adjacent leaves are
// discarded. In this example, we assume `btree` has the keys `a` to `e` and the proof has `X`
// and `h` in the siblings.
// Now we want to construct a set of key-value pairs that covers the entire set of leaves. For
// `a` to `e` this is simple -- we just insert them directly into this set. For the rest of the
// leaves, they are represented by the siblings, so we just make up some keys that make sense.
// For example, for `X` we just use 101000... (more zeros omitted), because that is one key
// that would cause `X` to end up in the above position.
let mut btree1 = BTreeMap::new();
for (key, value) in &btree {
let leaf = LeafNode::new(*key, value.clone());
btree1.insert(*key, leaf.hash());
}
// Using the above example, `last_proven_key` is `e`. We look at the path from root to `e`.
// For each 0-bit, there should be a sibling in the proof. And we use the path from root to
// this position, plus a `1` as the key.
let last_proven_key = *btree
.keys()
.last()
.expect("We are proving at least one key.");
for (i, sibling) in last_proven_key
.iter_bits()
.enumerate()
.filter_map(|(i, bit)| if!bit { Some(i) } else { None })
.zip(proof.right_siblings().iter().rev())
{
// This means the `i`-th bit is zero. We take `i` bits from `last_proven_key` and append a
// one to make up the key for this sibling.
let mut buf: Vec<_> = last_proven_key.iter_bits().take(i).collect();
buf.push(true);
// The rest doesn't matter, because they don't affect the position of the node. We just
// add zeros.
buf.resize(HashValue::LENGTH_IN_BITS, false);
let key = HashValue::from_bit_iter(buf.into_iter()).unwrap();
btree1.insert(key, *sibling);
}
// Now we do the transformation (removing the suffixes) described above.
let mut kvs = vec![];
for (key, value) in &btree1 {
// The length of the common prefix of the previous key and the current key.
let prev_common_prefix_len =
prev_key(&btree1, key).map(|pkey| pkey.common_prefix_bits_len(*key));
// The length of the common prefix of the next key and the current key.
let next_common_prefix_len =
next_key(&btree1, key).map(|nkey| nkey.common_prefix_bits_len(*key));
// We take the longest common prefix of the current key and its neighbors. That's how much
// we need to keep.
let len = match (prev_common_prefix_len, next_common_prefix_len) {
(Some(plen), Some(nlen)) => std::cmp::max(plen, nlen),
(Some(plen), None) => plen,
(None, Some(nlen)) => nlen,
(None, None) => 0,
};
let transformed_key: Vec<_> = key.iter_bits().take(len + 1).collect();
kvs.push((transformed_key, *value));
}
assert_eq!(compute_root_hash(kvs), expected_root_hash);
}
/// Reduces the problem by removing the first bit of every key.
fn reduce<'a>(kvs: &'a [(&[bool], HashValue)]) -> Vec<(&'a [bool], HashValue)> {
kvs.iter().map(|(key, value)| (&key[1..], *value)).collect()
}
/// Returns the key immediately before `key` in `btree`.
fn prev_key<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>
where
K: Clone + Ord,
{
btree
.range((Bound::Unbounded, Bound::Excluded(key)))
.next_back()
.map(|(k, _v)| k.clone())
}
fn
|
<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>
where
K: Clone + Ord,
{
btree
.range((Bound::Excluded(key), Bound::Unbounded))
.next()
.map(|(k, _v)| k.clone())
}
/// Computes the root hash of a sparse Merkle tree. `kvs` consists of the entire set of key-value
/// pairs stored in the tree.
fn compute_root_hash(kvs: Vec<(Vec<bool>, HashValue)>) -> HashValue {
let mut kv_ref = vec![];
for (key, value) in &kvs {
kv_ref.push((&key[..], *value));
}
compute_root_hash_impl(kv_ref)
}
fn compute_root_hash_impl(kvs: Vec<(&[bool], HashValue)>) -> HashValue {
assert!(!kvs.is_empty());
// If there is only one entry, it is the root.
if kvs.len() == 1 {
return kvs[0].1;
}
// Otherwise the tree has more than one leaves, which means we can find which ones are in the
// left subtree and which ones are in the right subtree. So we find the first key that starts
// with a 1-bit.
let left_hash;
let right_hash;
match kvs.iter().position(|(key, _value)| key[0]) {
Some(0) => {
// Every key starts with a 1-bit, i.e., they are all in the right subtree.
left_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH;
right_hash = compute_root_hash_impl(reduce(&kvs));
}
Some(index) => {
// Both left subtree and right subtree have some keys.
left_hash = compute_root_hash_impl(reduce(&kvs[..index]));
right_hash = compute_root_hash_impl(reduce(&kvs[index..]));
}
None => {
// Every key starts with a 0-bit, i.e., they are all in the left subtree.
left_hash = compute_root_hash_impl(reduce(&kvs));
right_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH;
}
}
SparseMerkleInternalNode::new(left_hash, right_hash).hash()
}
|
next_key
|
identifier_name
|
test_helper.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{mock_tree_store::MockTreeStore, node_type::LeafNode, JellyfishMerkleTree};
use diem_crypto::{
hash::{CryptoHash, SPARSE_MERKLE_PLACEHOLDER_HASH},
HashValue,
};
use diem_crypto_derive::{BCSCryptoHash, CryptoHasher};
use diem_types::{
proof::{SparseMerkleInternalNode, SparseMerkleRangeProof},
transaction::Version,
};
use proptest::{
collection::{btree_map, hash_map, vec},
prelude::*,
};
use proptest_derive::Arbitrary;
use serde::{Deserialize, Serialize};
use std::{
collections::{BTreeMap, HashMap},
ops::Bound,
};
#[derive(
Arbitrary,
BCSCryptoHash,
Clone,
Debug,
Default,
Eq,
PartialEq,
Serialize,
Deserialize,
CryptoHasher,
)]
pub(crate) struct ValueBlob(Vec<u8>);
impl From<Vec<u8>> for ValueBlob {
fn from(blob: Vec<u8>) -> Self {
Self(blob)
}
}
impl crate::Value for ValueBlob {}
impl crate::TestValue for ValueBlob {}
/// Computes the key immediately after `key`.
pub fn plus_one(key: HashValue) -> HashValue {
assert_ne!(key, HashValue::new([0xff; HashValue::LENGTH]));
let mut buf = key.to_vec();
for i in (0..HashValue::LENGTH).rev() {
if buf[i] == 255
|
else {
buf[i] += 1;
break;
}
}
HashValue::from_slice(&buf).unwrap()
}
/// Initializes a DB with a set of key-value pairs by inserting one key at each version.
pub fn init_mock_db<V>(kvs: &HashMap<HashValue, V>) -> (MockTreeStore<V>, Version)
where
V: crate::TestValue,
{
assert!(!kvs.is_empty());
let db = MockTreeStore::default();
let tree = JellyfishMerkleTree::new(&db);
for (i, (key, value)) in kvs.iter().enumerate() {
let (_root_hash, write_batch) = tree
.put_value_set(vec![(*key, value.clone())], i as Version)
.unwrap();
db.write_tree_update_batch(write_batch).unwrap();
}
(db, (kvs.len() - 1) as Version)
}
pub fn arb_existent_kvs_and_nonexistent_keys<V: crate::TestValue>(
num_kvs: usize,
num_non_existing_keys: usize,
) -> impl Strategy<Value = (HashMap<HashValue, V>, Vec<HashValue>)> {
hash_map(any::<HashValue>(), any::<V>(), 1..num_kvs).prop_flat_map(move |kvs| {
let kvs_clone = kvs.clone();
(
Just(kvs),
vec(
any::<HashValue>().prop_filter(
"Make sure these keys do not exist in the tree.",
move |key|!kvs_clone.contains_key(key),
),
num_non_existing_keys,
),
)
})
}
pub fn test_get_with_proof<V: crate::TestValue>(
(existent_kvs, nonexistent_keys): (HashMap<HashValue, V>, Vec<HashValue>),
) {
let (db, version) = init_mock_db(&existent_kvs);
let tree = JellyfishMerkleTree::new(&db);
test_existent_keys_impl(&tree, version, &existent_kvs);
test_nonexistent_keys_impl(&tree, version, &nonexistent_keys);
}
pub fn arb_kv_pair_with_distinct_last_nibble<V: crate::TestValue>(
) -> impl Strategy<Value = ((HashValue, V), (HashValue, V))> {
(
any::<HashValue>().prop_filter("Can't be 0xffffff...", |key| {
*key!= HashValue::new([0xff; HashValue::LENGTH])
}),
vec(any::<V>(), 2),
)
.prop_map(|(key1, accounts)| {
let key2 = plus_one(key1);
((key1, accounts[0].clone()), (key2, accounts[1].clone()))
})
}
pub fn test_get_with_proof_with_distinct_last_nibble<V: crate::TestValue>(
(kv1, kv2): ((HashValue, V), (HashValue, V)),
) {
let mut kvs = HashMap::new();
kvs.insert(kv1.0, kv1.1);
kvs.insert(kv2.0, kv2.1);
let (db, version) = init_mock_db(&kvs);
let tree = JellyfishMerkleTree::new(&db);
test_existent_keys_impl(&tree, version, &kvs);
}
pub fn arb_tree_with_index<V: crate::TestValue>(
tree_size: usize,
) -> impl Strategy<Value = (BTreeMap<HashValue, V>, usize)> {
btree_map(any::<HashValue>(), any::<V>(), 1..tree_size).prop_flat_map(|btree| {
let len = btree.len();
(Just(btree), 0..len)
})
}
pub fn test_get_range_proof<V: crate::TestValue>((btree, n): (BTreeMap<HashValue, V>, usize)) {
let (db, version) = init_mock_db(&btree.clone().into_iter().collect());
let tree = JellyfishMerkleTree::new(&db);
let nth_key = *btree.keys().nth(n).unwrap();
let proof = tree.get_range_proof(nth_key, version).unwrap();
verify_range_proof(
tree.get_root_hash(version).unwrap(),
btree.into_iter().take(n + 1).collect(),
proof,
);
}
fn test_existent_keys_impl<'a, V: crate::TestValue>(
tree: &JellyfishMerkleTree<'a, MockTreeStore<V>, V>,
version: Version,
existent_kvs: &HashMap<HashValue, V>,
) {
let root_hash = tree.get_root_hash(version).unwrap();
for (key, value) in existent_kvs {
let (account, proof) = tree.get_with_proof(*key, version).unwrap();
assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok());
assert_eq!(account.unwrap(), *value);
}
}
fn test_nonexistent_keys_impl<'a, V: crate::TestValue>(
tree: &JellyfishMerkleTree<'a, MockTreeStore<V>, V>,
version: Version,
nonexistent_keys: &[HashValue],
) {
let root_hash = tree.get_root_hash(version).unwrap();
for key in nonexistent_keys {
let (account, proof) = tree.get_with_proof(*key, version).unwrap();
assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok());
assert!(account.is_none());
}
}
/// Checks if we can construct the expected root hash using the entries in the btree and the proof.
fn verify_range_proof<V: crate::TestValue>(
expected_root_hash: HashValue,
btree: BTreeMap<HashValue, V>,
proof: SparseMerkleRangeProof,
) {
// For example, given the following sparse Merkle tree:
//
// root
// / \
// / \
// / \
// o o
// / \ / \
// a o o h
// / \ / \
// o d e X
// / \ / \
// b c f g
//
// we transform the keys as follows:
// a => 00,
// b => 0100,
// c => 0101,
// d => 011,
// e => 100,
// X => 101
// h => 11
//
// Basically, the suffixes that doesn't affect the common prefix of adjacent leaves are
// discarded. In this example, we assume `btree` has the keys `a` to `e` and the proof has `X`
// and `h` in the siblings.
// Now we want to construct a set of key-value pairs that covers the entire set of leaves. For
// `a` to `e` this is simple -- we just insert them directly into this set. For the rest of the
// leaves, they are represented by the siblings, so we just make up some keys that make sense.
// For example, for `X` we just use 101000... (more zeros omitted), because that is one key
// that would cause `X` to end up in the above position.
let mut btree1 = BTreeMap::new();
for (key, value) in &btree {
let leaf = LeafNode::new(*key, value.clone());
btree1.insert(*key, leaf.hash());
}
// Using the above example, `last_proven_key` is `e`. We look at the path from root to `e`.
// For each 0-bit, there should be a sibling in the proof. And we use the path from root to
// this position, plus a `1` as the key.
let last_proven_key = *btree
.keys()
.last()
.expect("We are proving at least one key.");
for (i, sibling) in last_proven_key
.iter_bits()
.enumerate()
.filter_map(|(i, bit)| if!bit { Some(i) } else { None })
.zip(proof.right_siblings().iter().rev())
{
// This means the `i`-th bit is zero. We take `i` bits from `last_proven_key` and append a
// one to make up the key for this sibling.
let mut buf: Vec<_> = last_proven_key.iter_bits().take(i).collect();
buf.push(true);
// The rest doesn't matter, because they don't affect the position of the node. We just
// add zeros.
buf.resize(HashValue::LENGTH_IN_BITS, false);
let key = HashValue::from_bit_iter(buf.into_iter()).unwrap();
btree1.insert(key, *sibling);
}
// Now we do the transformation (removing the suffixes) described above.
let mut kvs = vec![];
for (key, value) in &btree1 {
// The length of the common prefix of the previous key and the current key.
let prev_common_prefix_len =
prev_key(&btree1, key).map(|pkey| pkey.common_prefix_bits_len(*key));
// The length of the common prefix of the next key and the current key.
let next_common_prefix_len =
next_key(&btree1, key).map(|nkey| nkey.common_prefix_bits_len(*key));
// We take the longest common prefix of the current key and its neighbors. That's how much
// we need to keep.
let len = match (prev_common_prefix_len, next_common_prefix_len) {
(Some(plen), Some(nlen)) => std::cmp::max(plen, nlen),
(Some(plen), None) => plen,
(None, Some(nlen)) => nlen,
(None, None) => 0,
};
let transformed_key: Vec<_> = key.iter_bits().take(len + 1).collect();
kvs.push((transformed_key, *value));
}
assert_eq!(compute_root_hash(kvs), expected_root_hash);
}
/// Reduces the problem by removing the first bit of every key.
fn reduce<'a>(kvs: &'a [(&[bool], HashValue)]) -> Vec<(&'a [bool], HashValue)> {
kvs.iter().map(|(key, value)| (&key[1..], *value)).collect()
}
/// Returns the key immediately before `key` in `btree`.
fn prev_key<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>
where
K: Clone + Ord,
{
btree
.range((Bound::Unbounded, Bound::Excluded(key)))
.next_back()
.map(|(k, _v)| k.clone())
}
fn next_key<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>
where
K: Clone + Ord,
{
btree
.range((Bound::Excluded(key), Bound::Unbounded))
.next()
.map(|(k, _v)| k.clone())
}
/// Computes the root hash of a sparse Merkle tree. `kvs` consists of the entire set of key-value
/// pairs stored in the tree.
fn compute_root_hash(kvs: Vec<(Vec<bool>, HashValue)>) -> HashValue {
let mut kv_ref = vec![];
for (key, value) in &kvs {
kv_ref.push((&key[..], *value));
}
compute_root_hash_impl(kv_ref)
}
fn compute_root_hash_impl(kvs: Vec<(&[bool], HashValue)>) -> HashValue {
assert!(!kvs.is_empty());
// If there is only one entry, it is the root.
if kvs.len() == 1 {
return kvs[0].1;
}
// Otherwise the tree has more than one leaves, which means we can find which ones are in the
// left subtree and which ones are in the right subtree. So we find the first key that starts
// with a 1-bit.
let left_hash;
let right_hash;
match kvs.iter().position(|(key, _value)| key[0]) {
Some(0) => {
// Every key starts with a 1-bit, i.e., they are all in the right subtree.
left_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH;
right_hash = compute_root_hash_impl(reduce(&kvs));
}
Some(index) => {
// Both left subtree and right subtree have some keys.
left_hash = compute_root_hash_impl(reduce(&kvs[..index]));
right_hash = compute_root_hash_impl(reduce(&kvs[index..]));
}
None => {
// Every key starts with a 0-bit, i.e., they are all in the left subtree.
left_hash = compute_root_hash_impl(reduce(&kvs));
right_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH;
}
}
SparseMerkleInternalNode::new(left_hash, right_hash).hash()
}
|
{
buf[i] = 0;
}
|
conditional_block
|
take.rs
|
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncRead, AsyncBufRead};
use pin_utils::{unsafe_pinned, unsafe_unpinned};
use std::{cmp, io};
use std::pin::Pin;
/// Reader for the [`take`](super::AsyncReadExt::take) method.
#[derive(Debug)]
#[must_use = "readers do nothing unless you `.await` or poll them"]
pub struct Take<R> {
inner: R,
// Add '_' to avoid conflicts with `limit` method.
limit_: u64,
}
impl<R: Unpin> Unpin for Take<R> { }
impl<R: AsyncRead> Take<R> {
unsafe_pinned!(inner: R);
unsafe_unpinned!(limit_: u64);
pub(super) fn new(inner: R, limit: u64) -> Self {
Self { inner, limit_: limit }
}
/// Returns the remaining number of bytes that can be
/// read before this instance will return EOF.
///
/// # Note
///
/// This instance may reach `EOF` after reading fewer bytes than indicated by
/// this method if the underlying [`AsyncRead`] instance reaches EOF.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 2];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(take.limit(), 2);
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn limit(&self) -> u64 {
self.limit_
}
/// Sets the number of bytes that can be read before this instance will
/// return EOF. This is the same as constructing a new `Take` instance, so
/// the amount of bytes read and the previous limit value don't matter when
/// calling this method.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(n, 4);
/// assert_eq!(take.limit(), 0);
///
/// take.set_limit(10);
/// let n = take.read(&mut buffer).await?;
/// assert_eq!(n, 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn set_limit(&mut self, limit: u64) {
self.limit_ = limit
}
/// Gets a reference to the underlying reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_ref = take.get_ref();
/// assert_eq!(cursor_ref.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_ref(&self) -> &R {
&self.inner
}
/// Gets a mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_mut = take.get_mut();
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_mut(&mut self) -> &mut R {
&mut self.inner
}
/// Gets a pinned mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut R> {
self.inner()
}
/// Consumes the `Take`, returning the wrapped reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor = take.into_inner();
/// assert_eq!(cursor.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn into_inner(self) -> R {
self.inner
}
}
impl<R: AsyncRead> AsyncRead for Take<R> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<Result<usize, io::Error>> {
if self.limit_ == 0 {
return Poll::Ready(Ok(0));
}
let max = std::cmp::min(buf.len() as u64, self.limit_) as usize;
let n = ready!(self.as_mut().inner().poll_read(cx, &mut buf[..max]))?;
*self.as_mut().limit_() -= n as u64;
Poll::Ready(Ok(n))
}
#[cfg(feature = "read-initializer")]
unsafe fn
|
(&self) -> Initializer {
self.inner.initializer()
}
}
impl<R: AsyncBufRead> AsyncBufRead for Take<R> {
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
let Self { inner, limit_ } = unsafe { self.get_unchecked_mut() };
let inner = unsafe { Pin::new_unchecked(inner) };
// Don't call into inner reader at all at EOF because it may still block
if *limit_ == 0 {
return Poll::Ready(Ok(&[]));
}
let buf = ready!(inner.poll_fill_buf(cx)?);
let cap = cmp::min(buf.len() as u64, *limit_) as usize;
Poll::Ready(Ok(&buf[..cap]))
}
fn consume(mut self: Pin<&mut Self>, amt: usize) {
// Don't let callers reset the limit by passing an overlarge value
let amt = cmp::min(amt as u64, self.limit_) as usize;
*self.as_mut().limit_() -= amt as u64;
self.inner().consume(amt);
}
}
|
initializer
|
identifier_name
|
take.rs
|
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncRead, AsyncBufRead};
use pin_utils::{unsafe_pinned, unsafe_unpinned};
use std::{cmp, io};
use std::pin::Pin;
/// Reader for the [`take`](super::AsyncReadExt::take) method.
#[derive(Debug)]
#[must_use = "readers do nothing unless you `.await` or poll them"]
pub struct Take<R> {
inner: R,
// Add '_' to avoid conflicts with `limit` method.
limit_: u64,
}
impl<R: Unpin> Unpin for Take<R> { }
impl<R: AsyncRead> Take<R> {
unsafe_pinned!(inner: R);
unsafe_unpinned!(limit_: u64);
pub(super) fn new(inner: R, limit: u64) -> Self {
Self { inner, limit_: limit }
}
/// Returns the remaining number of bytes that can be
/// read before this instance will return EOF.
///
/// # Note
///
/// This instance may reach `EOF` after reading fewer bytes than indicated by
/// this method if the underlying [`AsyncRead`] instance reaches EOF.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 2];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(take.limit(), 2);
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn limit(&self) -> u64 {
self.limit_
}
/// Sets the number of bytes that can be read before this instance will
/// return EOF. This is the same as constructing a new `Take` instance, so
/// the amount of bytes read and the previous limit value don't matter when
/// calling this method.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(n, 4);
/// assert_eq!(take.limit(), 0);
///
/// take.set_limit(10);
/// let n = take.read(&mut buffer).await?;
/// assert_eq!(n, 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn set_limit(&mut self, limit: u64) {
self.limit_ = limit
}
/// Gets a reference to the underlying reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_ref = take.get_ref();
/// assert_eq!(cursor_ref.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_ref(&self) -> &R {
&self.inner
}
/// Gets a mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_mut = take.get_mut();
|
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_mut(&mut self) -> &mut R {
&mut self.inner
}
/// Gets a pinned mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut R> {
self.inner()
}
/// Consumes the `Take`, returning the wrapped reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor = take.into_inner();
/// assert_eq!(cursor.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn into_inner(self) -> R {
self.inner
}
}
impl<R: AsyncRead> AsyncRead for Take<R> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<Result<usize, io::Error>> {
if self.limit_ == 0 {
return Poll::Ready(Ok(0));
}
let max = std::cmp::min(buf.len() as u64, self.limit_) as usize;
let n = ready!(self.as_mut().inner().poll_read(cx, &mut buf[..max]))?;
*self.as_mut().limit_() -= n as u64;
Poll::Ready(Ok(n))
}
#[cfg(feature = "read-initializer")]
unsafe fn initializer(&self) -> Initializer {
self.inner.initializer()
}
}
impl<R: AsyncBufRead> AsyncBufRead for Take<R> {
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
let Self { inner, limit_ } = unsafe { self.get_unchecked_mut() };
let inner = unsafe { Pin::new_unchecked(inner) };
// Don't call into inner reader at all at EOF because it may still block
if *limit_ == 0 {
return Poll::Ready(Ok(&[]));
}
let buf = ready!(inner.poll_fill_buf(cx)?);
let cap = cmp::min(buf.len() as u64, *limit_) as usize;
Poll::Ready(Ok(&buf[..cap]))
}
fn consume(mut self: Pin<&mut Self>, amt: usize) {
// Don't let callers reset the limit by passing an overlarge value
let amt = cmp::min(amt as u64, self.limit_) as usize;
*self.as_mut().limit_() -= amt as u64;
self.inner().consume(amt);
}
}
|
///
|
random_line_split
|
take.rs
|
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncRead, AsyncBufRead};
use pin_utils::{unsafe_pinned, unsafe_unpinned};
use std::{cmp, io};
use std::pin::Pin;
/// Reader for the [`take`](super::AsyncReadExt::take) method.
#[derive(Debug)]
#[must_use = "readers do nothing unless you `.await` or poll them"]
pub struct Take<R> {
inner: R,
// Add '_' to avoid conflicts with `limit` method.
limit_: u64,
}
impl<R: Unpin> Unpin for Take<R> { }
impl<R: AsyncRead> Take<R> {
unsafe_pinned!(inner: R);
unsafe_unpinned!(limit_: u64);
pub(super) fn new(inner: R, limit: u64) -> Self {
Self { inner, limit_: limit }
}
/// Returns the remaining number of bytes that can be
/// read before this instance will return EOF.
///
/// # Note
///
/// This instance may reach `EOF` after reading fewer bytes than indicated by
/// this method if the underlying [`AsyncRead`] instance reaches EOF.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 2];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(take.limit(), 2);
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn limit(&self) -> u64 {
self.limit_
}
/// Sets the number of bytes that can be read before this instance will
/// return EOF. This is the same as constructing a new `Take` instance, so
/// the amount of bytes read and the previous limit value don't matter when
/// calling this method.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(n, 4);
/// assert_eq!(take.limit(), 0);
///
/// take.set_limit(10);
/// let n = take.read(&mut buffer).await?;
/// assert_eq!(n, 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn set_limit(&mut self, limit: u64) {
self.limit_ = limit
}
/// Gets a reference to the underlying reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_ref = take.get_ref();
/// assert_eq!(cursor_ref.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_ref(&self) -> &R {
&self.inner
}
/// Gets a mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_mut = take.get_mut();
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_mut(&mut self) -> &mut R {
&mut self.inner
}
/// Gets a pinned mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut R> {
self.inner()
}
/// Consumes the `Take`, returning the wrapped reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor = take.into_inner();
/// assert_eq!(cursor.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn into_inner(self) -> R {
self.inner
}
}
impl<R: AsyncRead> AsyncRead for Take<R> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<Result<usize, io::Error>> {
if self.limit_ == 0 {
return Poll::Ready(Ok(0));
}
let max = std::cmp::min(buf.len() as u64, self.limit_) as usize;
let n = ready!(self.as_mut().inner().poll_read(cx, &mut buf[..max]))?;
*self.as_mut().limit_() -= n as u64;
Poll::Ready(Ok(n))
}
#[cfg(feature = "read-initializer")]
unsafe fn initializer(&self) -> Initializer {
self.inner.initializer()
}
}
impl<R: AsyncBufRead> AsyncBufRead for Take<R> {
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
let Self { inner, limit_ } = unsafe { self.get_unchecked_mut() };
let inner = unsafe { Pin::new_unchecked(inner) };
// Don't call into inner reader at all at EOF because it may still block
if *limit_ == 0 {
return Poll::Ready(Ok(&[]));
}
let buf = ready!(inner.poll_fill_buf(cx)?);
let cap = cmp::min(buf.len() as u64, *limit_) as usize;
Poll::Ready(Ok(&buf[..cap]))
}
fn consume(mut self: Pin<&mut Self>, amt: usize)
|
}
|
{
// Don't let callers reset the limit by passing an overlarge value
let amt = cmp::min(amt as u64, self.limit_) as usize;
*self.as_mut().limit_() -= amt as u64;
self.inner().consume(amt);
}
|
identifier_body
|
take.rs
|
use futures_core::task::{Context, Poll};
#[cfg(feature = "read-initializer")]
use futures_io::Initializer;
use futures_io::{AsyncRead, AsyncBufRead};
use pin_utils::{unsafe_pinned, unsafe_unpinned};
use std::{cmp, io};
use std::pin::Pin;
/// Reader for the [`take`](super::AsyncReadExt::take) method.
#[derive(Debug)]
#[must_use = "readers do nothing unless you `.await` or poll them"]
pub struct Take<R> {
inner: R,
// Add '_' to avoid conflicts with `limit` method.
limit_: u64,
}
impl<R: Unpin> Unpin for Take<R> { }
impl<R: AsyncRead> Take<R> {
unsafe_pinned!(inner: R);
unsafe_unpinned!(limit_: u64);
pub(super) fn new(inner: R, limit: u64) -> Self {
Self { inner, limit_: limit }
}
/// Returns the remaining number of bytes that can be
/// read before this instance will return EOF.
///
/// # Note
///
/// This instance may reach `EOF` after reading fewer bytes than indicated by
/// this method if the underlying [`AsyncRead`] instance reaches EOF.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 2];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(take.limit(), 2);
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn limit(&self) -> u64 {
self.limit_
}
/// Sets the number of bytes that can be read before this instance will
/// return EOF. This is the same as constructing a new `Take` instance, so
/// the amount of bytes read and the previous limit value don't matter when
/// calling this method.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// assert_eq!(n, 4);
/// assert_eq!(take.limit(), 0);
///
/// take.set_limit(10);
/// let n = take.read(&mut buffer).await?;
/// assert_eq!(n, 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn set_limit(&mut self, limit: u64) {
self.limit_ = limit
}
/// Gets a reference to the underlying reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_ref = take.get_ref();
/// assert_eq!(cursor_ref.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_ref(&self) -> &R {
&self.inner
}
/// Gets a mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor_mut = take.get_mut();
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn get_mut(&mut self) -> &mut R {
&mut self.inner
}
/// Gets a pinned mutable reference to the underlying reader.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying reader as doing so may corrupt the internal limit of this
/// `Take`.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut R> {
self.inner()
}
/// Consumes the `Take`, returning the wrapped reader.
///
/// # Examples
///
/// ```
/// # futures::executor::block_on(async {
/// use futures::io::{AsyncReadExt, Cursor};
///
/// let reader = Cursor::new(&b"12345678"[..]);
/// let mut buffer = [0; 4];
///
/// let mut take = reader.take(4);
/// let n = take.read(&mut buffer).await?;
///
/// let cursor = take.into_inner();
/// assert_eq!(cursor.position(), 4);
///
/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();
/// ```
pub fn into_inner(self) -> R {
self.inner
}
}
impl<R: AsyncRead> AsyncRead for Take<R> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<Result<usize, io::Error>> {
if self.limit_ == 0
|
let max = std::cmp::min(buf.len() as u64, self.limit_) as usize;
let n = ready!(self.as_mut().inner().poll_read(cx, &mut buf[..max]))?;
*self.as_mut().limit_() -= n as u64;
Poll::Ready(Ok(n))
}
#[cfg(feature = "read-initializer")]
unsafe fn initializer(&self) -> Initializer {
self.inner.initializer()
}
}
impl<R: AsyncBufRead> AsyncBufRead for Take<R> {
fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
let Self { inner, limit_ } = unsafe { self.get_unchecked_mut() };
let inner = unsafe { Pin::new_unchecked(inner) };
// Don't call into inner reader at all at EOF because it may still block
if *limit_ == 0 {
return Poll::Ready(Ok(&[]));
}
let buf = ready!(inner.poll_fill_buf(cx)?);
let cap = cmp::min(buf.len() as u64, *limit_) as usize;
Poll::Ready(Ok(&buf[..cap]))
}
fn consume(mut self: Pin<&mut Self>, amt: usize) {
// Don't let callers reset the limit by passing an overlarge value
let amt = cmp::min(amt as u64, self.limit_) as usize;
*self.as_mut().limit_() -= amt as u64;
self.inner().consume(amt);
}
}
|
{
return Poll::Ready(Ok(0));
}
|
conditional_block
|
refcounted.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between threads (or intra-thread for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script thread via
//! weak refcounts. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script thread
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of `Trusted<T>` is as follows:
//! The `Trusted<T>` object contains an atomic reference counted pointer to the Rust DOM object.
//! A hashtable resides in the script thread, keyed on the pointer.
//! The values in this hashtable are weak reference counts. When a `Trusted<T>` object is
//! created or cloned, the reference count is increased. When a `Trusted<T>` is dropped, the count
//! decreases. If the count hits zero, the weak reference is emptied, and is removed from
//! its hash table during the next GC. During GC, the entries of the hash table are counted
//! as JS roots.
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::error::Error;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::root::DomRoot;
use dom::bindings::trace::trace_reflector;
use dom::promise::Promise;
use js::jsapi::JSTracer;
use libc;
use std::cell::RefCell;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::hash_map::HashMap;
use std::hash::Hash;
use std::marker::PhantomData;
use std::os;
use std::rc::Rc;
use std::sync::{Arc, Weak};
use task::TaskOnce;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::cell::RefCell;
use std::rc::Rc;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
impl TrustedReference {
fn new<T: DomObject>(ptr: *const T) -> TrustedReference {
TrustedReference(ptr as *const libc::c_void)
}
}
/// A safe wrapper around a DOM Promise object that can be shared among threads for use
/// in asynchronous operations. The underlying DOM object is guaranteed to live at least
/// as long as the last outstanding `TrustedPromise` instance. These values cannot be cloned,
/// only created from existing Rc<Promise> values.
pub struct TrustedPromise {
dom_object: *const Promise,
owner_thread: *const libc::c_void,
}
unsafe impl Send for TrustedPromise {}
impl TrustedPromise {
/// Create a new `TrustedPromise` instance from an existing DOM object. The object will
/// be prevented from being GCed for the duration of the resulting `TrustedPromise` object's
/// lifetime.
#[allow(unrooted_must_root)]
pub fn new(promise: Rc<Promise>) -> TrustedPromise {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let ptr = &*promise as *const Promise;
live_references.addref_promise(promise);
TrustedPromise {
dom_object: ptr,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
}
})
}
/// Obtain a usable DOM Promise from a pinned `TrustedPromise` value. Fails if used on
/// a different thread than the original value from which this `TrustedPromise` was
/// obtained.
#[allow(unrooted_must_root)]
pub fn root(self) -> Rc<Promise> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
assert!(self.owner_thread == (&*live_references) as *const _ as *const libc::c_void);
// Borrow-check error requires the redundant `let promise =...; promise` here.
let promise = match live_references.promise_table.borrow_mut().entry(self.dom_object) {
Occupied(mut entry) => {
let promise = {
let promises = entry.get_mut();
promises.pop().expect("rooted promise list unexpectedly empty")
};
if entry.get().is_empty() {
entry.remove();
}
promise
}
Vacant(_) => unreachable!(),
};
promise
})
}
/// A task which will reject the promise.
#[allow(unrooted_must_root)]
pub fn reject_task(self, error: Error) -> impl TaskOnce {
let this = self;
task!(reject_promise: move || {
debug!("Rejecting promise.");
this.root().reject_error(error);
})
}
/// A task which will resolve the promise.
#[allow(unrooted_must_root)]
pub fn resolve_task<T>(self, value: T) -> impl TaskOnce
where
T: ToJSValConvertible + Send,
{
let this = self;
task!(resolve_promise: move || {
debug!("Resolving promise.");
this.root().resolve_native(&value);
})
}
}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among threads for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: DomObject> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between threads, regardless of T's sendability.
refcount: Arc<TrustedReference>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: DomObject> Send for Trusted<T> {}
impl<T: DomObject> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(ptr: &T) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
refcount: refcount,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> DomRoot<T> {
|
impl<T: DomObject> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
Trusted {
refcount: self.refcount.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
#[allow(unrooted_must_root)]
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
reflectable_table: RefCell<HashMap<*const libc::c_void, Weak<TrustedReference>>>,
promise_table: RefCell<HashMap<*const Promise, Vec<Rc<Promise>>>>,
}
impl LiveDOMReferences {
/// Set up the thread-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
reflectable_table: RefCell::new(HashMap::new()),
promise_table: RefCell::new(HashMap::new()),
})
});
}
#[allow(unrooted_must_root)]
fn addref_promise(&self, promise: Rc<Promise>) {
let mut table = self.promise_table.borrow_mut();
table.entry(&*promise).or_insert(vec![]).push(promise)
}
fn addref<T: DomObject>(&self, ptr: *const T) -> Arc<TrustedReference> {
let mut table = self.reflectable_table.borrow_mut();
let capacity = table.capacity();
let len = table.len();
if (0 < capacity) && (capacity <= len) {
info!("growing refcounted references by {}", len);
remove_nulls(&mut table);
table.reserve(len);
}
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => match entry.get().upgrade() {
Some(refcount) => refcount,
None => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
},
},
Vacant(entry) => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
}
}
}
}
/// Remove null entries from the live references table
fn remove_nulls<K: Eq + Hash + Clone, V> (table: &mut HashMap<K, Weak<V>>) {
let to_remove: Vec<K> =
table.iter()
.filter(|&(_, value)| Weak::upgrade(value).is_none())
.map(|(key, _)| key.clone())
.collect();
info!("removing {} refcounted references", to_remove.len());
for key in to_remove {
table.remove(&key);
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
#[allow(unrooted_must_root)]
pub unsafe extern "C" fn trace_refcounted_objects(tracer: *mut JSTracer,
_data: *mut os::raw::c_void) {
info!("tracing live refcounted references");
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
{
let mut table = live_references.reflectable_table.borrow_mut();
remove_nulls(&mut table);
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "refcounted", reflectable);
}
}
{
let table = live_references.promise_table.borrow_mut();
for promise in table.keys() {
trace_reflector(tracer, "refcounted", (**promise).reflector());
}
}
});
}
|
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
DomRoot::from_ref(&*(self.refcount.0 as *const T))
}
}
}
|
identifier_body
|
refcounted.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between threads (or intra-thread for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script thread via
//! weak refcounts. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script thread
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of `Trusted<T>` is as follows:
//! The `Trusted<T>` object contains an atomic reference counted pointer to the Rust DOM object.
//! A hashtable resides in the script thread, keyed on the pointer.
//! The values in this hashtable are weak reference counts. When a `Trusted<T>` object is
//! created or cloned, the reference count is increased. When a `Trusted<T>` is dropped, the count
//! decreases. If the count hits zero, the weak reference is emptied, and is removed from
//! its hash table during the next GC. During GC, the entries of the hash table are counted
//! as JS roots.
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::error::Error;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::root::DomRoot;
use dom::bindings::trace::trace_reflector;
use dom::promise::Promise;
use js::jsapi::JSTracer;
use libc;
use std::cell::RefCell;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::hash_map::HashMap;
use std::hash::Hash;
use std::marker::PhantomData;
use std::os;
use std::rc::Rc;
use std::sync::{Arc, Weak};
use task::TaskOnce;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::cell::RefCell;
use std::rc::Rc;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
impl TrustedReference {
fn new<T: DomObject>(ptr: *const T) -> TrustedReference {
TrustedReference(ptr as *const libc::c_void)
}
}
/// A safe wrapper around a DOM Promise object that can be shared among threads for use
/// in asynchronous operations. The underlying DOM object is guaranteed to live at least
/// as long as the last outstanding `TrustedPromise` instance. These values cannot be cloned,
/// only created from existing Rc<Promise> values.
pub struct TrustedPromise {
dom_object: *const Promise,
owner_thread: *const libc::c_void,
}
unsafe impl Send for TrustedPromise {}
impl TrustedPromise {
/// Create a new `TrustedPromise` instance from an existing DOM object. The object will
/// be prevented from being GCed for the duration of the resulting `TrustedPromise` object's
/// lifetime.
#[allow(unrooted_must_root)]
pub fn new(promise: Rc<Promise>) -> TrustedPromise {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let ptr = &*promise as *const Promise;
live_references.addref_promise(promise);
TrustedPromise {
dom_object: ptr,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
}
})
}
/// Obtain a usable DOM Promise from a pinned `TrustedPromise` value. Fails if used on
/// a different thread than the original value from which this `TrustedPromise` was
/// obtained.
#[allow(unrooted_must_root)]
pub fn root(self) -> Rc<Promise> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
assert!(self.owner_thread == (&*live_references) as *const _ as *const libc::c_void);
// Borrow-check error requires the redundant `let promise =...; promise` here.
let promise = match live_references.promise_table.borrow_mut().entry(self.dom_object) {
Occupied(mut entry) => {
let promise = {
let promises = entry.get_mut();
promises.pop().expect("rooted promise list unexpectedly empty")
};
if entry.get().is_empty() {
entry.remove();
}
promise
}
Vacant(_) => unreachable!(),
};
promise
})
}
/// A task which will reject the promise.
#[allow(unrooted_must_root)]
pub fn reject_task(self, error: Error) -> impl TaskOnce {
let this = self;
task!(reject_promise: move || {
debug!("Rejecting promise.");
this.root().reject_error(error);
})
}
/// A task which will resolve the promise.
#[allow(unrooted_must_root)]
pub fn resolve_task<T>(self, value: T) -> impl TaskOnce
where
T: ToJSValConvertible + Send,
{
let this = self;
task!(resolve_promise: move || {
debug!("Resolving promise.");
this.root().resolve_native(&value);
})
}
}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among threads for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: DomObject> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between threads, regardless of T's sendability.
refcount: Arc<TrustedReference>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: DomObject> Send for Trusted<T> {}
impl<T: DomObject> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(ptr: &T) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
refcount: refcount,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> DomRoot<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
DomRoot::from_ref(&*(self.refcount.0 as *const T))
}
}
}
impl<T: DomObject> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
Trusted {
refcount: self.refcount.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
#[allow(unrooted_must_root)]
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
reflectable_table: RefCell<HashMap<*const libc::c_void, Weak<TrustedReference>>>,
promise_table: RefCell<HashMap<*const Promise, Vec<Rc<Promise>>>>,
|
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
reflectable_table: RefCell::new(HashMap::new()),
promise_table: RefCell::new(HashMap::new()),
})
});
}
#[allow(unrooted_must_root)]
fn addref_promise(&self, promise: Rc<Promise>) {
let mut table = self.promise_table.borrow_mut();
table.entry(&*promise).or_insert(vec![]).push(promise)
}
fn addref<T: DomObject>(&self, ptr: *const T) -> Arc<TrustedReference> {
let mut table = self.reflectable_table.borrow_mut();
let capacity = table.capacity();
let len = table.len();
if (0 < capacity) && (capacity <= len) {
info!("growing refcounted references by {}", len);
remove_nulls(&mut table);
table.reserve(len);
}
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => match entry.get().upgrade() {
Some(refcount) => refcount,
None => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
},
},
Vacant(entry) => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
}
}
}
}
/// Remove null entries from the live references table
fn remove_nulls<K: Eq + Hash + Clone, V> (table: &mut HashMap<K, Weak<V>>) {
let to_remove: Vec<K> =
table.iter()
.filter(|&(_, value)| Weak::upgrade(value).is_none())
.map(|(key, _)| key.clone())
.collect();
info!("removing {} refcounted references", to_remove.len());
for key in to_remove {
table.remove(&key);
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
#[allow(unrooted_must_root)]
pub unsafe extern "C" fn trace_refcounted_objects(tracer: *mut JSTracer,
_data: *mut os::raw::c_void) {
info!("tracing live refcounted references");
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
{
let mut table = live_references.reflectable_table.borrow_mut();
remove_nulls(&mut table);
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "refcounted", reflectable);
}
}
{
let table = live_references.promise_table.borrow_mut();
for promise in table.keys() {
trace_reflector(tracer, "refcounted", (**promise).reflector());
}
}
});
}
|
}
impl LiveDOMReferences {
/// Set up the thread-local data required for storing the outstanding DOM references.
pub fn initialize() {
|
random_line_split
|
refcounted.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between threads (or intra-thread for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script thread via
//! weak refcounts. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script thread
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of `Trusted<T>` is as follows:
//! The `Trusted<T>` object contains an atomic reference counted pointer to the Rust DOM object.
//! A hashtable resides in the script thread, keyed on the pointer.
//! The values in this hashtable are weak reference counts. When a `Trusted<T>` object is
//! created or cloned, the reference count is increased. When a `Trusted<T>` is dropped, the count
//! decreases. If the count hits zero, the weak reference is emptied, and is removed from
//! its hash table during the next GC. During GC, the entries of the hash table are counted
//! as JS roots.
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::error::Error;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::root::DomRoot;
use dom::bindings::trace::trace_reflector;
use dom::promise::Promise;
use js::jsapi::JSTracer;
use libc;
use std::cell::RefCell;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::hash_map::HashMap;
use std::hash::Hash;
use std::marker::PhantomData;
use std::os;
use std::rc::Rc;
use std::sync::{Arc, Weak};
use task::TaskOnce;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::cell::RefCell;
use std::rc::Rc;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
impl TrustedReference {
fn new<T: DomObject>(ptr: *const T) -> TrustedReference {
TrustedReference(ptr as *const libc::c_void)
}
}
/// A safe wrapper around a DOM Promise object that can be shared among threads for use
/// in asynchronous operations. The underlying DOM object is guaranteed to live at least
/// as long as the last outstanding `TrustedPromise` instance. These values cannot be cloned,
/// only created from existing Rc<Promise> values.
pub struct TrustedPromise {
dom_object: *const Promise,
owner_thread: *const libc::c_void,
}
unsafe impl Send for TrustedPromise {}
impl TrustedPromise {
/// Create a new `TrustedPromise` instance from an existing DOM object. The object will
/// be prevented from being GCed for the duration of the resulting `TrustedPromise` object's
/// lifetime.
#[allow(unrooted_must_root)]
pub fn new(promise: Rc<Promise>) -> TrustedPromise {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let ptr = &*promise as *const Promise;
live_references.addref_promise(promise);
TrustedPromise {
dom_object: ptr,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
}
})
}
/// Obtain a usable DOM Promise from a pinned `TrustedPromise` value. Fails if used on
/// a different thread than the original value from which this `TrustedPromise` was
/// obtained.
#[allow(unrooted_must_root)]
pub fn root(self) -> Rc<Promise> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
assert!(self.owner_thread == (&*live_references) as *const _ as *const libc::c_void);
// Borrow-check error requires the redundant `let promise =...; promise` here.
let promise = match live_references.promise_table.borrow_mut().entry(self.dom_object) {
Occupied(mut entry) => {
let promise = {
let promises = entry.get_mut();
promises.pop().expect("rooted promise list unexpectedly empty")
};
if entry.get().is_empty() {
entry.remove();
}
promise
}
Vacant(_) => unreachable!(),
};
promise
})
}
/// A task which will reject the promise.
#[allow(unrooted_must_root)]
pub fn reject_task(self, error: Error) -> impl TaskOnce {
let this = self;
task!(reject_promise: move || {
debug!("Rejecting promise.");
this.root().reject_error(error);
})
}
/// A task which will resolve the promise.
#[allow(unrooted_must_root)]
pub fn resolve_task<T>(self, value: T) -> impl TaskOnce
where
T: ToJSValConvertible + Send,
{
let this = self;
task!(resolve_promise: move || {
debug!("Resolving promise.");
this.root().resolve_native(&value);
})
}
}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among threads for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: DomObject> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between threads, regardless of T's sendability.
refcount: Arc<TrustedReference>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: DomObject> Send for Trusted<T> {}
impl<T: DomObject> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(ptr: &T) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
refcount: refcount,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> DomRoot<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
DomRoot::from_ref(&*(self.refcount.0 as *const T))
}
}
}
impl<T: DomObject> Clone for Trusted<T> {
fn cl
|
self) -> Trusted<T> {
Trusted {
refcount: self.refcount.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
#[allow(unrooted_must_root)]
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
reflectable_table: RefCell<HashMap<*const libc::c_void, Weak<TrustedReference>>>,
promise_table: RefCell<HashMap<*const Promise, Vec<Rc<Promise>>>>,
}
impl LiveDOMReferences {
/// Set up the thread-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
reflectable_table: RefCell::new(HashMap::new()),
promise_table: RefCell::new(HashMap::new()),
})
});
}
#[allow(unrooted_must_root)]
fn addref_promise(&self, promise: Rc<Promise>) {
let mut table = self.promise_table.borrow_mut();
table.entry(&*promise).or_insert(vec![]).push(promise)
}
fn addref<T: DomObject>(&self, ptr: *const T) -> Arc<TrustedReference> {
let mut table = self.reflectable_table.borrow_mut();
let capacity = table.capacity();
let len = table.len();
if (0 < capacity) && (capacity <= len) {
info!("growing refcounted references by {}", len);
remove_nulls(&mut table);
table.reserve(len);
}
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => match entry.get().upgrade() {
Some(refcount) => refcount,
None => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
},
},
Vacant(entry) => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
}
}
}
}
/// Remove null entries from the live references table
fn remove_nulls<K: Eq + Hash + Clone, V> (table: &mut HashMap<K, Weak<V>>) {
let to_remove: Vec<K> =
table.iter()
.filter(|&(_, value)| Weak::upgrade(value).is_none())
.map(|(key, _)| key.clone())
.collect();
info!("removing {} refcounted references", to_remove.len());
for key in to_remove {
table.remove(&key);
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
#[allow(unrooted_must_root)]
pub unsafe extern "C" fn trace_refcounted_objects(tracer: *mut JSTracer,
_data: *mut os::raw::c_void) {
info!("tracing live refcounted references");
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
{
let mut table = live_references.reflectable_table.borrow_mut();
remove_nulls(&mut table);
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "refcounted", reflectable);
}
}
{
let table = live_references.promise_table.borrow_mut();
for promise in table.keys() {
trace_reflector(tracer, "refcounted", (**promise).reflector());
}
}
});
}
|
one(&
|
identifier_name
|
refcounted.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between threads (or intra-thread for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script thread via
//! weak refcounts. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script thread
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of `Trusted<T>` is as follows:
//! The `Trusted<T>` object contains an atomic reference counted pointer to the Rust DOM object.
//! A hashtable resides in the script thread, keyed on the pointer.
//! The values in this hashtable are weak reference counts. When a `Trusted<T>` object is
//! created or cloned, the reference count is increased. When a `Trusted<T>` is dropped, the count
//! decreases. If the count hits zero, the weak reference is emptied, and is removed from
//! its hash table during the next GC. During GC, the entries of the hash table are counted
//! as JS roots.
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::error::Error;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::root::DomRoot;
use dom::bindings::trace::trace_reflector;
use dom::promise::Promise;
use js::jsapi::JSTracer;
use libc;
use std::cell::RefCell;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::hash_map::HashMap;
use std::hash::Hash;
use std::marker::PhantomData;
use std::os;
use std::rc::Rc;
use std::sync::{Arc, Weak};
use task::TaskOnce;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::cell::RefCell;
use std::rc::Rc;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
impl TrustedReference {
fn new<T: DomObject>(ptr: *const T) -> TrustedReference {
TrustedReference(ptr as *const libc::c_void)
}
}
/// A safe wrapper around a DOM Promise object that can be shared among threads for use
/// in asynchronous operations. The underlying DOM object is guaranteed to live at least
/// as long as the last outstanding `TrustedPromise` instance. These values cannot be cloned,
/// only created from existing Rc<Promise> values.
pub struct TrustedPromise {
dom_object: *const Promise,
owner_thread: *const libc::c_void,
}
unsafe impl Send for TrustedPromise {}
impl TrustedPromise {
/// Create a new `TrustedPromise` instance from an existing DOM object. The object will
/// be prevented from being GCed for the duration of the resulting `TrustedPromise` object's
/// lifetime.
#[allow(unrooted_must_root)]
pub fn new(promise: Rc<Promise>) -> TrustedPromise {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let ptr = &*promise as *const Promise;
live_references.addref_promise(promise);
TrustedPromise {
dom_object: ptr,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
}
})
}
/// Obtain a usable DOM Promise from a pinned `TrustedPromise` value. Fails if used on
/// a different thread than the original value from which this `TrustedPromise` was
/// obtained.
#[allow(unrooted_must_root)]
pub fn root(self) -> Rc<Promise> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
assert!(self.owner_thread == (&*live_references) as *const _ as *const libc::c_void);
// Borrow-check error requires the redundant `let promise =...; promise` here.
let promise = match live_references.promise_table.borrow_mut().entry(self.dom_object) {
Occupied(mut entry) => {
let promise = {
let promises = entry.get_mut();
promises.pop().expect("rooted promise list unexpectedly empty")
};
if entry.get().is_empty() {
|
promise
}
Vacant(_) => unreachable!(),
};
promise
})
}
/// A task which will reject the promise.
#[allow(unrooted_must_root)]
pub fn reject_task(self, error: Error) -> impl TaskOnce {
let this = self;
task!(reject_promise: move || {
debug!("Rejecting promise.");
this.root().reject_error(error);
})
}
/// A task which will resolve the promise.
#[allow(unrooted_must_root)]
pub fn resolve_task<T>(self, value: T) -> impl TaskOnce
where
T: ToJSValConvertible + Send,
{
let this = self;
task!(resolve_promise: move || {
debug!("Resolving promise.");
this.root().resolve_native(&value);
})
}
}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among threads for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: DomObject> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between threads, regardless of T's sendability.
refcount: Arc<TrustedReference>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: DomObject> Send for Trusted<T> {}
impl<T: DomObject> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(ptr: &T) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
refcount: refcount,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> DomRoot<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
DomRoot::from_ref(&*(self.refcount.0 as *const T))
}
}
}
impl<T: DomObject> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
Trusted {
refcount: self.refcount.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
#[allow(unrooted_must_root)]
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
reflectable_table: RefCell<HashMap<*const libc::c_void, Weak<TrustedReference>>>,
promise_table: RefCell<HashMap<*const Promise, Vec<Rc<Promise>>>>,
}
impl LiveDOMReferences {
/// Set up the thread-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
reflectable_table: RefCell::new(HashMap::new()),
promise_table: RefCell::new(HashMap::new()),
})
});
}
#[allow(unrooted_must_root)]
fn addref_promise(&self, promise: Rc<Promise>) {
let mut table = self.promise_table.borrow_mut();
table.entry(&*promise).or_insert(vec![]).push(promise)
}
fn addref<T: DomObject>(&self, ptr: *const T) -> Arc<TrustedReference> {
let mut table = self.reflectable_table.borrow_mut();
let capacity = table.capacity();
let len = table.len();
if (0 < capacity) && (capacity <= len) {
info!("growing refcounted references by {}", len);
remove_nulls(&mut table);
table.reserve(len);
}
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => match entry.get().upgrade() {
Some(refcount) => refcount,
None => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
},
},
Vacant(entry) => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
}
}
}
}
/// Remove null entries from the live references table
fn remove_nulls<K: Eq + Hash + Clone, V> (table: &mut HashMap<K, Weak<V>>) {
let to_remove: Vec<K> =
table.iter()
.filter(|&(_, value)| Weak::upgrade(value).is_none())
.map(|(key, _)| key.clone())
.collect();
info!("removing {} refcounted references", to_remove.len());
for key in to_remove {
table.remove(&key);
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
#[allow(unrooted_must_root)]
pub unsafe extern "C" fn trace_refcounted_objects(tracer: *mut JSTracer,
_data: *mut os::raw::c_void) {
info!("tracing live refcounted references");
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
{
let mut table = live_references.reflectable_table.borrow_mut();
remove_nulls(&mut table);
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "refcounted", reflectable);
}
}
{
let table = live_references.promise_table.borrow_mut();
for promise in table.keys() {
trace_reflector(tracer, "refcounted", (**promise).reflector());
}
}
});
}
|
entry.remove();
}
|
conditional_block
|
no-std.rs
|
/* automatically generated by rust-bindgen */
#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#![no_std]
mod libc {
pub type c_int = i32;
pub enum c_void {}
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct foo {
pub a: libc::c_int,
pub b: libc::c_int,
|
assert_eq!(
::core::mem::size_of::<foo>(),
16usize,
concat!("Size of: ", stringify!(foo))
);
assert_eq!(
::core::mem::align_of::<foo>(),
8usize,
concat!("Alignment of ", stringify!(foo))
);
assert_eq!(
unsafe { &(*(::core::ptr::null::<foo>())).a as *const _ as usize },
0usize,
concat!("Offset of field: ", stringify!(foo), "::", stringify!(a))
);
assert_eq!(
unsafe { &(*(::core::ptr::null::<foo>())).b as *const _ as usize },
4usize,
concat!("Offset of field: ", stringify!(foo), "::", stringify!(b))
);
assert_eq!(
unsafe { &(*(::core::ptr::null::<foo>())).bar as *const _ as usize },
8usize,
concat!("Offset of field: ", stringify!(foo), "::", stringify!(bar))
);
}
impl Default for foo {
fn default() -> Self {
unsafe { ::core::mem::zeroed() }
}
}
|
pub bar: *mut libc::c_void,
}
#[test]
fn bindgen_test_layout_foo() {
|
random_line_split
|
no-std.rs
|
/* automatically generated by rust-bindgen */
#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#![no_std]
mod libc {
pub type c_int = i32;
pub enum c_void {}
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct foo {
pub a: libc::c_int,
pub b: libc::c_int,
pub bar: *mut libc::c_void,
}
#[test]
fn
|
() {
assert_eq!(
::core::mem::size_of::<foo>(),
16usize,
concat!("Size of: ", stringify!(foo))
);
assert_eq!(
::core::mem::align_of::<foo>(),
8usize,
concat!("Alignment of ", stringify!(foo))
);
assert_eq!(
unsafe { &(*(::core::ptr::null::<foo>())).a as *const _ as usize },
0usize,
concat!("Offset of field: ", stringify!(foo), "::", stringify!(a))
);
assert_eq!(
unsafe { &(*(::core::ptr::null::<foo>())).b as *const _ as usize },
4usize,
concat!("Offset of field: ", stringify!(foo), "::", stringify!(b))
);
assert_eq!(
unsafe { &(*(::core::ptr::null::<foo>())).bar as *const _ as usize },
8usize,
concat!("Offset of field: ", stringify!(foo), "::", stringify!(bar))
);
}
impl Default for foo {
fn default() -> Self {
unsafe { ::core::mem::zeroed() }
}
}
|
bindgen_test_layout_foo
|
identifier_name
|
poll_token_derive.rs
|
// Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "128"]
extern crate proc_macro;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput, Field, Fields, Index, Member, Variant};
#[cfg(test)]
mod tests;
// The method for packing an enum into a u64 is as follows:
// 1) Reserve the lowest "ceil(log_2(x))" bits where x is the number of enum variants.
// 2) Store the enum variant's index (0-based index based on order in the enum definition) in
// reserved bits.
// 3) If there is data in the enum variant, store the data in remaining bits.
// The method for unpacking is as follows
// 1) Mask the raw token to just the reserved bits
// 2) Match the reserved bits to the enum variant token.
// 3) If the indicated enum variant had data, extract it from the unreserved bits.
// Calculates the number of bits needed to store the variant index. Essentially the log base 2
// of the number of variants, rounded up.
fn variant_bits(variants: &[Variant]) -> u32 {
if variants.is_empty() {
// The degenerate case of no variants.
0
} else {
variants.len().next_power_of_two().trailing_zeros()
}
}
// Name of the field if it has one, otherwise 0 assuming this is the zeroth
// field of a tuple variant.
fn field_member(field: &Field) -> Member {
match &field.ident {
Some(name) => Member::Named(name.clone()),
None => Member::Unnamed(Index::from(0)),
}
}
// Generates the function body for `as_raw_token`.
fn generate_as_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream {
let variant_bits = variant_bits(variants);
// Each iteration corresponds to one variant's match arm.
let cases = variants.iter().enumerate().map(|(index, variant)| {
let variant_name = &variant.ident;
let index = index as u64;
// The capture string is for everything between the variant identifier and the `=>` in
// the match arm: the variant's data capture.
let capture = variant.fields.iter().next().map(|field| {
let member = field_member(field);
quote!({ #member: data })
});
// The modifier string ORs the variant index with extra bits from the variant data
// field.
let modifier = match variant.fields {
Fields::Named(_) | Fields::Unnamed(_) => Some(quote! {
| ((data as u64) << #variant_bits)
}),
Fields::Unit => None,
};
// Assembly of the match arm.
quote! {
#enum_name::#variant_name #capture => #index #modifier
}
});
quote! {
match *self {
#(
#cases,
)*
}
}
}
// Generates the function body for `from_raw_token`.
fn generate_from_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream {
let variant_bits = variant_bits(variants);
let variant_mask = ((1 << variant_bits) - 1) as u64;
// Each iteration corresponds to one variant's match arm.
let cases = variants.iter().enumerate().map(|(index, variant)| {
let variant_name = &variant.ident;
let index = index as u64;
// The data string is for extracting the enum variant's data bits out of the raw token
// data, which includes both variant index and data bits.
let data = variant.fields.iter().next().map(|field| {
let member = field_member(field);
let ty = &field.ty;
quote!({ #member: (data >> #variant_bits) as #ty })
});
// Assembly of the match arm.
quote! {
#index => #enum_name::#variant_name #data
}
});
quote! {
// The match expression only matches the bits for the variant index.
match data & #variant_mask {
#(
#cases,
)*
_ => unreachable!(),
}
}
}
// The proc_macro::TokenStream type can only be constructed from within a
// procedural macro, meaning that unit tests are not able to invoke `fn
// poll_token` below as an ordinary Rust function. We factor out the logic into
// a signature that deals with Syn and proc-macro2 types only which are not
// restricted to a procedural macro invocation.
fn poll_token_inner(input: DeriveInput) -> TokenStream
|
quote! {
impl PollToken for #enum_name {
fn as_raw_token(&self) -> u64 {
#as_raw_token
}
fn from_raw_token(data: u64) -> Self {
#from_raw_token
}
}
}
}
/// Implements the PollToken trait for a given `enum`.
///
/// There are limitations on what `enum`s this custom derive will work on:
///
/// * Each variant must be a unit variant (no data), or have a single (un)named data field.
/// * If a variant has data, it must be a primitive type castable to and from a `u64`.
/// * If a variant data has size greater than or equal to a `u64`, its most significant bits must be
/// zero. The number of bits truncated is equal to the number of bits used to store the variant
/// index plus the number of bits above 64.
#[proc_macro_derive(PollToken)]
pub fn poll_token(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as DeriveInput);
poll_token_inner(input).into()
}
|
{
let variants: Vec<Variant> = match input.data {
Data::Enum(data) => data.variants.into_iter().collect(),
Data::Struct(_) | Data::Union(_) => panic!("input must be an enum"),
};
for variant in &variants {
assert!(variant.fields.iter().count() <= 1);
}
// Given our basic model of a user given enum that is suitable as a token, we generate the
// implementation. The implementation is NOT always well formed, such as when a variant's data
// type is not bit shiftable or castable to u64, but we let Rust generate such errors as it
// would be difficult to detect every kind of error. Importantly, every implementation that we
// generate here and goes on to compile succesfully is sound.
let enum_name = input.ident;
let as_raw_token = generate_as_raw_token(&enum_name, &variants);
let from_raw_token = generate_from_raw_token(&enum_name, &variants);
|
identifier_body
|
poll_token_derive.rs
|
// Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "128"]
extern crate proc_macro;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput, Field, Fields, Index, Member, Variant};
#[cfg(test)]
mod tests;
// The method for packing an enum into a u64 is as follows:
// 1) Reserve the lowest "ceil(log_2(x))" bits where x is the number of enum variants.
// 2) Store the enum variant's index (0-based index based on order in the enum definition) in
// reserved bits.
// 3) If there is data in the enum variant, store the data in remaining bits.
// The method for unpacking is as follows
// 1) Mask the raw token to just the reserved bits
// 2) Match the reserved bits to the enum variant token.
// 3) If the indicated enum variant had data, extract it from the unreserved bits.
// Calculates the number of bits needed to store the variant index. Essentially the log base 2
// of the number of variants, rounded up.
fn variant_bits(variants: &[Variant]) -> u32 {
if variants.is_empty() {
// The degenerate case of no variants.
0
} else {
variants.len().next_power_of_two().trailing_zeros()
}
}
// Name of the field if it has one, otherwise 0 assuming this is the zeroth
// field of a tuple variant.
fn field_member(field: &Field) -> Member {
match &field.ident {
Some(name) => Member::Named(name.clone()),
None => Member::Unnamed(Index::from(0)),
}
}
// Generates the function body for `as_raw_token`.
fn generate_as_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream {
let variant_bits = variant_bits(variants);
// Each iteration corresponds to one variant's match arm.
let cases = variants.iter().enumerate().map(|(index, variant)| {
let variant_name = &variant.ident;
let index = index as u64;
// The capture string is for everything between the variant identifier and the `=>` in
// the match arm: the variant's data capture.
let capture = variant.fields.iter().next().map(|field| {
let member = field_member(field);
quote!({ #member: data })
});
// The modifier string ORs the variant index with extra bits from the variant data
// field.
let modifier = match variant.fields {
Fields::Named(_) | Fields::Unnamed(_) => Some(quote! {
| ((data as u64) << #variant_bits)
}),
Fields::Unit => None,
};
// Assembly of the match arm.
quote! {
#enum_name::#variant_name #capture => #index #modifier
}
});
quote! {
match *self {
#(
#cases,
)*
}
}
}
// Generates the function body for `from_raw_token`.
fn generate_from_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream {
let variant_bits = variant_bits(variants);
let variant_mask = ((1 << variant_bits) - 1) as u64;
// Each iteration corresponds to one variant's match arm.
let cases = variants.iter().enumerate().map(|(index, variant)| {
let variant_name = &variant.ident;
let index = index as u64;
// The data string is for extracting the enum variant's data bits out of the raw token
// data, which includes both variant index and data bits.
let data = variant.fields.iter().next().map(|field| {
let member = field_member(field);
let ty = &field.ty;
quote!({ #member: (data >> #variant_bits) as #ty })
});
// Assembly of the match arm.
quote! {
#index => #enum_name::#variant_name #data
}
});
quote! {
// The match expression only matches the bits for the variant index.
match data & #variant_mask {
#(
#cases,
)*
_ => unreachable!(),
}
}
}
// The proc_macro::TokenStream type can only be constructed from within a
// procedural macro, meaning that unit tests are not able to invoke `fn
// poll_token` below as an ordinary Rust function. We factor out the logic into
// a signature that deals with Syn and proc-macro2 types only which are not
// restricted to a procedural macro invocation.
fn poll_token_inner(input: DeriveInput) -> TokenStream {
let variants: Vec<Variant> = match input.data {
Data::Enum(data) => data.variants.into_iter().collect(),
Data::Struct(_) | Data::Union(_) => panic!("input must be an enum"),
};
for variant in &variants {
assert!(variant.fields.iter().count() <= 1);
}
// Given our basic model of a user given enum that is suitable as a token, we generate the
// implementation. The implementation is NOT always well formed, such as when a variant's data
// type is not bit shiftable or castable to u64, but we let Rust generate such errors as it
// would be difficult to detect every kind of error. Importantly, every implementation that we
// generate here and goes on to compile succesfully is sound.
let enum_name = input.ident;
let as_raw_token = generate_as_raw_token(&enum_name, &variants);
let from_raw_token = generate_from_raw_token(&enum_name, &variants);
quote! {
impl PollToken for #enum_name {
fn as_raw_token(&self) -> u64 {
#as_raw_token
}
fn from_raw_token(data: u64) -> Self {
#from_raw_token
}
}
}
}
/// Implements the PollToken trait for a given `enum`.
///
/// There are limitations on what `enum`s this custom derive will work on:
///
/// * Each variant must be a unit variant (no data), or have a single (un)named data field.
/// * If a variant has data, it must be a primitive type castable to and from a `u64`.
/// * If a variant data has size greater than or equal to a `u64`, its most significant bits must be
/// zero. The number of bits truncated is equal to the number of bits used to store the variant
/// index plus the number of bits above 64.
#[proc_macro_derive(PollToken)]
pub fn
|
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as DeriveInput);
poll_token_inner(input).into()
}
|
poll_token
|
identifier_name
|
poll_token_derive.rs
|
// Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "128"]
extern crate proc_macro;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput, Field, Fields, Index, Member, Variant};
|
// The method for packing an enum into a u64 is as follows:
// 1) Reserve the lowest "ceil(log_2(x))" bits where x is the number of enum variants.
// 2) Store the enum variant's index (0-based index based on order in the enum definition) in
// reserved bits.
// 3) If there is data in the enum variant, store the data in remaining bits.
// The method for unpacking is as follows
// 1) Mask the raw token to just the reserved bits
// 2) Match the reserved bits to the enum variant token.
// 3) If the indicated enum variant had data, extract it from the unreserved bits.
// Calculates the number of bits needed to store the variant index. Essentially the log base 2
// of the number of variants, rounded up.
fn variant_bits(variants: &[Variant]) -> u32 {
if variants.is_empty() {
// The degenerate case of no variants.
0
} else {
variants.len().next_power_of_two().trailing_zeros()
}
}
// Name of the field if it has one, otherwise 0 assuming this is the zeroth
// field of a tuple variant.
fn field_member(field: &Field) -> Member {
match &field.ident {
Some(name) => Member::Named(name.clone()),
None => Member::Unnamed(Index::from(0)),
}
}
// Generates the function body for `as_raw_token`.
fn generate_as_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream {
let variant_bits = variant_bits(variants);
// Each iteration corresponds to one variant's match arm.
let cases = variants.iter().enumerate().map(|(index, variant)| {
let variant_name = &variant.ident;
let index = index as u64;
// The capture string is for everything between the variant identifier and the `=>` in
// the match arm: the variant's data capture.
let capture = variant.fields.iter().next().map(|field| {
let member = field_member(field);
quote!({ #member: data })
});
// The modifier string ORs the variant index with extra bits from the variant data
// field.
let modifier = match variant.fields {
Fields::Named(_) | Fields::Unnamed(_) => Some(quote! {
| ((data as u64) << #variant_bits)
}),
Fields::Unit => None,
};
// Assembly of the match arm.
quote! {
#enum_name::#variant_name #capture => #index #modifier
}
});
quote! {
match *self {
#(
#cases,
)*
}
}
}
// Generates the function body for `from_raw_token`.
fn generate_from_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream {
let variant_bits = variant_bits(variants);
let variant_mask = ((1 << variant_bits) - 1) as u64;
// Each iteration corresponds to one variant's match arm.
let cases = variants.iter().enumerate().map(|(index, variant)| {
let variant_name = &variant.ident;
let index = index as u64;
// The data string is for extracting the enum variant's data bits out of the raw token
// data, which includes both variant index and data bits.
let data = variant.fields.iter().next().map(|field| {
let member = field_member(field);
let ty = &field.ty;
quote!({ #member: (data >> #variant_bits) as #ty })
});
// Assembly of the match arm.
quote! {
#index => #enum_name::#variant_name #data
}
});
quote! {
// The match expression only matches the bits for the variant index.
match data & #variant_mask {
#(
#cases,
)*
_ => unreachable!(),
}
}
}
// The proc_macro::TokenStream type can only be constructed from within a
// procedural macro, meaning that unit tests are not able to invoke `fn
// poll_token` below as an ordinary Rust function. We factor out the logic into
// a signature that deals with Syn and proc-macro2 types only which are not
// restricted to a procedural macro invocation.
fn poll_token_inner(input: DeriveInput) -> TokenStream {
let variants: Vec<Variant> = match input.data {
Data::Enum(data) => data.variants.into_iter().collect(),
Data::Struct(_) | Data::Union(_) => panic!("input must be an enum"),
};
for variant in &variants {
assert!(variant.fields.iter().count() <= 1);
}
// Given our basic model of a user given enum that is suitable as a token, we generate the
// implementation. The implementation is NOT always well formed, such as when a variant's data
// type is not bit shiftable or castable to u64, but we let Rust generate such errors as it
// would be difficult to detect every kind of error. Importantly, every implementation that we
// generate here and goes on to compile succesfully is sound.
let enum_name = input.ident;
let as_raw_token = generate_as_raw_token(&enum_name, &variants);
let from_raw_token = generate_from_raw_token(&enum_name, &variants);
quote! {
impl PollToken for #enum_name {
fn as_raw_token(&self) -> u64 {
#as_raw_token
}
fn from_raw_token(data: u64) -> Self {
#from_raw_token
}
}
}
}
/// Implements the PollToken trait for a given `enum`.
///
/// There are limitations on what `enum`s this custom derive will work on:
///
/// * Each variant must be a unit variant (no data), or have a single (un)named data field.
/// * If a variant has data, it must be a primitive type castable to and from a `u64`.
/// * If a variant data has size greater than or equal to a `u64`, its most significant bits must be
/// zero. The number of bits truncated is equal to the number of bits used to store the variant
/// index plus the number of bits above 64.
#[proc_macro_derive(PollToken)]
pub fn poll_token(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as DeriveInput);
poll_token_inner(input).into()
}
|
#[cfg(test)]
mod tests;
|
random_line_split
|
sync_provider.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Test implementation of SyncProvider.
use util::U256;
use ethsync::{SyncProvider, SyncState, SyncStatus};
use std::sync::RwLock;
/// TestSyncProvider config.
pub struct Config {
/// Protocol version.
pub network_id: U256,
/// Number of peers.
pub num_peers: usize,
}
/// Test sync provider.
pub struct TestSyncProvider {
/// Sync status.
pub status: RwLock<SyncStatus>,
}
impl TestSyncProvider {
/// Creates new sync provider.
pub fn new(config: Config) -> Self {
TestSyncProvider {
status: RwLock::new(SyncStatus {
state: SyncState::NotSynced,
network_id: config.network_id,
protocol_version: 63,
start_block_number: 0,
last_imported_block_number: None,
highest_block_number: None,
blocks_total: 0,
blocks_received: 0,
num_peers: config.num_peers,
num_active_peers: 0,
mem_used: 0,
}),
}
}
|
impl SyncProvider for TestSyncProvider {
fn status(&self) -> SyncStatus {
self.status.read().unwrap().clone()
}
}
|
}
|
random_line_split
|
sync_provider.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Test implementation of SyncProvider.
use util::U256;
use ethsync::{SyncProvider, SyncState, SyncStatus};
use std::sync::RwLock;
/// TestSyncProvider config.
pub struct Config {
/// Protocol version.
pub network_id: U256,
/// Number of peers.
pub num_peers: usize,
}
/// Test sync provider.
pub struct TestSyncProvider {
/// Sync status.
pub status: RwLock<SyncStatus>,
}
impl TestSyncProvider {
/// Creates new sync provider.
pub fn new(config: Config) -> Self {
TestSyncProvider {
status: RwLock::new(SyncStatus {
state: SyncState::NotSynced,
network_id: config.network_id,
protocol_version: 63,
start_block_number: 0,
last_imported_block_number: None,
highest_block_number: None,
blocks_total: 0,
blocks_received: 0,
num_peers: config.num_peers,
num_active_peers: 0,
mem_used: 0,
}),
}
}
}
impl SyncProvider for TestSyncProvider {
fn status(&self) -> SyncStatus
|
}
|
{
self.status.read().unwrap().clone()
}
|
identifier_body
|
sync_provider.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Test implementation of SyncProvider.
use util::U256;
use ethsync::{SyncProvider, SyncState, SyncStatus};
use std::sync::RwLock;
/// TestSyncProvider config.
pub struct
|
{
/// Protocol version.
pub network_id: U256,
/// Number of peers.
pub num_peers: usize,
}
/// Test sync provider.
pub struct TestSyncProvider {
/// Sync status.
pub status: RwLock<SyncStatus>,
}
impl TestSyncProvider {
/// Creates new sync provider.
pub fn new(config: Config) -> Self {
TestSyncProvider {
status: RwLock::new(SyncStatus {
state: SyncState::NotSynced,
network_id: config.network_id,
protocol_version: 63,
start_block_number: 0,
last_imported_block_number: None,
highest_block_number: None,
blocks_total: 0,
blocks_received: 0,
num_peers: config.num_peers,
num_active_peers: 0,
mem_used: 0,
}),
}
}
}
impl SyncProvider for TestSyncProvider {
fn status(&self) -> SyncStatus {
self.status.read().unwrap().clone()
}
}
|
Config
|
identifier_name
|
estr-slice.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn
|
() {
let x = &"hello";
let v = &"hello";
let mut y : &str = &"there";
info!(x);
info!(y);
assert_eq!(x[0], 'h' as u8);
assert_eq!(x[4], 'o' as u8);
let z : &str = &"thing";
assert_eq!(v, x);
assert!(x!= z);
let a = &"aaaa";
let b = &"bbbb";
let c = &"cccc";
let cc = &"ccccc";
info!(a);
assert!(a < b);
assert!(a <= b);
assert!(a!= b);
assert!(b >= a);
assert!(b > a);
info!(b);
assert!(a < c);
assert!(a <= c);
assert!(a!= c);
assert!(c >= a);
assert!(c > a);
info!(c);
assert!(c < cc);
assert!(c <= cc);
assert!(c!= cc);
assert!(cc >= c);
assert!(cc > c);
info!(cc);
}
|
main
|
identifier_name
|
estr-slice.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main()
|
info!(a);
assert!(a < b);
assert!(a <= b);
assert!(a!= b);
assert!(b >= a);
assert!(b > a);
info!(b);
assert!(a < c);
assert!(a <= c);
assert!(a!= c);
assert!(c >= a);
assert!(c > a);
info!(c);
assert!(c < cc);
assert!(c <= cc);
assert!(c!= cc);
assert!(cc >= c);
assert!(cc > c);
info!(cc);
}
|
{
let x = &"hello";
let v = &"hello";
let mut y : &str = &"there";
info!(x);
info!(y);
assert_eq!(x[0], 'h' as u8);
assert_eq!(x[4], 'o' as u8);
let z : &str = &"thing";
assert_eq!(v, x);
assert!(x != z);
let a = &"aaaa";
let b = &"bbbb";
let c = &"cccc";
let cc = &"ccccc";
|
identifier_body
|
estr-slice.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let x = &"hello";
let v = &"hello";
let mut y : &str = &"there";
info!(x);
info!(y);
assert_eq!(x[0], 'h' as u8);
assert_eq!(x[4], 'o' as u8);
let z : &str = &"thing";
assert_eq!(v, x);
assert!(x!= z);
let a = &"aaaa";
let b = &"bbbb";
let c = &"cccc";
let cc = &"ccccc";
info!(a);
assert!(a < b);
assert!(a <= b);
assert!(a!= b);
assert!(b >= a);
assert!(b > a);
info!(b);
assert!(a < c);
assert!(a <= c);
assert!(a!= c);
assert!(c >= a);
assert!(c > a);
|
assert!(c <= cc);
assert!(c!= cc);
assert!(cc >= c);
assert!(cc > c);
info!(cc);
}
|
info!(c);
assert!(c < cc);
|
random_line_split
|
reachable.rs
|
rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, DefIdTree, TyCtxt};
use rustc_session::config::CrateType;
use rustc_target::spec::abi::Abi;
// Returns true if the given item must be inlined because it may be
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item<'_>, attrs: &CodegenFnAttrs) -> bool {
if attrs.requests_inline() {
return true;
}
match item.kind {
hir::ItemKind::Fn(ref sig,..) if sig.header.is_const() => true,
hir::ItemKind::Impl {.. } | hir::ItemKind::Fn(..) => {
let generics = tcx.generics_of(item.def_id);
generics.requires_monomorphization(tcx)
}
_ => false,
}
}
fn method_might_be_inlined(
tcx: TyCtxt<'_>,
impl_item: &hir::ImplItem<'_>,
impl_src: LocalDefId,
) -> bool {
let codegen_fn_attrs = tcx.codegen_fn_attrs(impl_item.hir_id().owner.to_def_id());
let generics = tcx.generics_of(impl_item.def_id);
if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) {
return true;
}
if let hir::ImplItemKind::Fn(method_sig, _) = &impl_item.kind {
if method_sig.header.is_const() {
return true;
}
}
match tcx.hir().find(tcx.hir().local_def_id_to_hir_id(impl_src)) {
Some(Node::Item(item)) => item_might_be_inlined(tcx, &item, codegen_fn_attrs),
Some(..) | None => span_bug!(impl_item.span, "impl did is not an item"),
}
}
// Information needed while computing reachability.
struct ReachableContext<'tcx> {
// The type context.
tcx: TyCtxt<'tcx>,
maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
// The set of items which must be exported in the linkage sense.
reachable_symbols: FxHashSet<LocalDefId>,
// A worklist of item IDs. Each item ID in this worklist will be inlined
// and will be scanned for further references.
// FIXME(eddyb) benchmark if this would be faster as a `VecDeque`.
worklist: Vec<LocalDefId>,
// Whether any output of this compilation is a library
any_library: bool,
}
impl<'tcx> Visitor<'tcx> for ReachableContext<'tcx> {
type Map = intravisit::ErasedMap<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
let old_maybe_typeck_results =
self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
let res = match expr.kind {
hir::ExprKind::Path(ref qpath) => {
Some(self.typeck_results().qpath_res(qpath, expr.hir_id))
}
hir::ExprKind::MethodCall(..) => self
.typeck_results()
.type_dependent_def(expr.hir_id)
.map(|(kind, def_id)| Res::Def(kind, def_id)),
_ => None,
};
if let Some(res) = res {
if let Some(def_id) = res.opt_def_id().and_then(|def_id| def_id.as_local()) {
if self.def_id_represents_local_inlined_item(def_id.to_def_id())
|
else {
match res {
// If this path leads to a constant, then we need to
// recurse into the constant to continue finding
// items that are reachable.
Res::Def(DefKind::Const | DefKind::AssocConst, _) => {
self.worklist.push(def_id);
}
// If this wasn't a static, then the destination is
// surely reachable.
_ => {
self.reachable_symbols.insert(def_id);
}
}
}
}
}
intravisit::walk_expr(self, expr)
}
}
impl<'tcx> ReachableContext<'tcx> {
/// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
self.maybe_typeck_results
.expect("`ReachableContext::typeck_results` called outside of body")
}
// Returns true if the given def ID represents a local item that is
// eligible for inlining and false otherwise.
fn def_id_represents_local_inlined_item(&self, def_id: DefId) -> bool {
let hir_id = match def_id.as_local() {
Some(def_id) => self.tcx.hir().local_def_id_to_hir_id(def_id),
None => {
return false;
}
};
match self.tcx.hir().find(hir_id) {
Some(Node::Item(item)) => match item.kind {
hir::ItemKind::Fn(..) => {
item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id))
}
_ => false,
},
Some(Node::TraitItem(trait_method)) => match trait_method.kind {
hir::TraitItemKind::Const(_, ref default) => default.is_some(),
hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(_)) => true,
hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_))
| hir::TraitItemKind::Type(..) => false,
},
Some(Node::ImplItem(impl_item)) => {
match impl_item.kind {
hir::ImplItemKind::Const(..) => true,
hir::ImplItemKind::Fn(..) => {
let attrs = self.tcx.codegen_fn_attrs(def_id);
let generics = self.tcx.generics_of(def_id);
if generics.requires_monomorphization(self.tcx) || attrs.requests_inline() {
true
} else {
let impl_did = self.tcx.hir().get_parent_did(hir_id);
// Check the impl. If the generics on the self
// type of the impl require inlining, this method
// does too.
let impl_hir_id = self.tcx.hir().local_def_id_to_hir_id(impl_did);
match self.tcx.hir().expect_item(impl_hir_id).kind {
hir::ItemKind::Impl {.. } => {
let generics = self.tcx.generics_of(impl_did);
generics.requires_monomorphization(self.tcx)
}
_ => false,
}
}
}
hir::ImplItemKind::TyAlias(_) => false,
}
}
Some(_) => false,
None => false, // This will happen for default methods.
}
}
// Step 2: Mark all symbols that the symbols on the worklist touch.
fn propagate(&mut self) {
let mut scanned = FxHashSet::default();
while let Some(search_item) = self.worklist.pop() {
if!scanned.insert(search_item) {
continue;
}
if let Some(ref item) =
self.tcx.hir().find(self.tcx.hir().local_def_id_to_hir_id(search_item))
{
self.propagate_node(item, search_item);
}
}
}
fn propagate_node(&mut self, node: &Node<'tcx>, search_item: LocalDefId) {
if!self.any_library {
// If we are building an executable, only explicitly extern
// types need to be exported.
let reachable =
if let Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig,..),.. })
| Node::ImplItem(hir::ImplItem {
kind: hir::ImplItemKind::Fn(sig,..),..
}) = *node
{
sig.header.abi!= Abi::Rust
} else {
false
};
let codegen_attrs = self.tcx.codegen_fn_attrs(search_item);
let is_extern = codegen_attrs.contains_extern_indicator();
let std_internal =
codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
if reachable || is_extern || std_internal {
self.reachable_symbols.insert(search_item);
}
} else {
// If we are building a library, then reachable symbols will
// continue to participate in linkage after this product is
// produced. In this case, we traverse the ast node, recursing on
// all reachable nodes from this one.
self.reachable_symbols.insert(search_item);
}
match *node {
Node::Item(item) => {
match item.kind {
hir::ItemKind::Fn(.., body) => {
if item_might_be_inlined(
self.tcx,
&item,
self.tcx.codegen_fn_attrs(item.def_id),
) {
self.visit_nested_body(body);
}
}
// Reachable constants will be inlined into other crates
// unconditionally, so we need to make sure that their
// contents are also reachable.
hir::ItemKind::Const(_, init) | hir::ItemKind::Static(_, _, init) => {
self.visit_nested_body(init);
}
// These are normal, nothing reachable about these
// inherently and their children are already in the
// worklist, as determined by the privacy pass
hir::ItemKind::ExternCrate(_)
| hir::ItemKind::Use(..)
| hir::ItemKind::OpaqueTy(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::Macro(..)
| hir::ItemKind::Mod(..)
| hir::ItemKind::ForeignMod {.. }
| hir::ItemKind::Impl {.. }
| hir::ItemKind::Trait(..)
| hir::ItemKind::TraitAlias(..)
| hir::ItemKind::Struct(..)
| hir::ItemKind::Enum(..)
| hir::ItemKind::Union(..)
| hir::ItemKind::GlobalAsm(..) => {}
}
}
Node::TraitItem(trait_method) => {
match trait_method.kind {
hir::TraitItemKind::Const(_, None)
| hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_)) => {
// Keep going, nothing to get exported
}
hir::TraitItemKind::Const(_, Some(body_id))
| hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)) => {
self.visit_nested_body(body_id);
}
hir::TraitItemKind::Type(..) => {}
}
}
Node::ImplItem(impl_item) => match impl_item.kind {
hir::ImplItemKind::Const(_, body) => {
self.visit_nested_body(body);
}
hir::ImplItemKind::Fn(_, body) => {
let impl_def_id =
self.tcx.parent(search_item.to_def_id()).unwrap().expect_local();
if method_might_be_inlined(self.tcx, impl_item, impl_def_id) {
self.visit_nested_body(body)
}
}
hir::ImplItemKind::TyAlias(_) => {}
},
Node::Expr(&hir::Expr { kind: hir::ExprKind::Closure(.., body, _, _),.. }) => {
self.visit_nested_body(body);
}
// Nothing to recurse on for these
Node::ForeignItem(_)
| Node::Variant(_)
| Node::Ctor(..)
| Node::Field(_)
| Node::Ty(_)
| Node::Crate(_) => {}
_ => {
bug!(
"found unexpected node kind in worklist: {} ({:?})",
self.tcx
.hir()
.node_to_string(self.tcx.hir().local_def_id_to_hir_id(search_item)),
node,
);
}
}
}
}
// Some methods from non-exported (completely private) trait impls still have to be
// reachable if they are called from inlinable code. Generally, it's not known until
// monomorphization if a specific trait impl item can be reachable or not. So, we
// conservatively mark all of them as reachable.
// FIXME: One possible strategy for pruning the reachable set is to avoid marking impl
// items of non-exported traits (or maybe all local traits?) unless their respective
// trait items are used from inlinable code through method call syntax or UFCS, or their
// trait is a lang item.
struct CollectPrivateImplItemsVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
access_levels: &'a privacy::AccessLevels,
worklist: &'a mut Vec<LocalDefId>,
}
impl CollectPrivateImplItemsVisitor<'_, '_> {
fn push_to_worklist_if_has_custom_linkage(&mut self, def_id: LocalDefId) {
// Anything which has custom linkage gets thrown on the worklist no
// matter where it is in the crate, along with "special std symbols"
// which are currently akin to allocator symbols.
let codegen_attrs = self.tcx.codegen_fn_attrs(def_id);
if codegen_attrs.contains_extern_indicator()
|| codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
{
self.worklist.push(def_id);
}
}
}
impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item<'_>) {
self.push_to_worklist_if_has_custom_linkage(item.def_id);
// We need only trait impls here, not inherent impls, and only non-exported ones
if let hir::ItemKind::Impl(hir::Impl { of_trait: Some(ref trait_ref), ref items,.. }) =
item.kind
{
if!self.access_levels.is_reachable(item.def_id) {
// FIXME(#53488) remove `let`
let tcx = self.tcx;
self.worklist.extend(items.iter().map(|ii_ref| ii_ref.id.def_id));
let trait_def_id = match trait_ref.path.res {
Res::Def(DefKind::Trait, def_id) => def_id,
_ => unreachable!(),
};
if!trait_def_id.is_local() {
return;
}
self.worklist.extend(
tcx.provided_trait_methods(trait_def_id)
.map(|assoc| assoc.def_id.expect_local()),
);
}
}
}
fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {}
fn visit_impl_item(&mut self, impl_item: &hir::ImplItem<'_>) {
self.push_to_worklist_if_has_custom_linkage(impl_item.def_id);
}
fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {
// We never export foreign functions as they have no body to export.
}
}
fn reachable_set<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> FxHashSet<LocalDefId> {
let access_levels = &tcx.privacy_access_levels(());
let any_library =
tcx.sess.crate_types().iter().any(|ty| {
*ty == CrateType::Rlib || *ty == CrateType::Dylib || *ty == CrateType::ProcMacro
});
let mut reachable_context = ReachableContext {
tcx,
maybe_typeck_results: None,
reachable_symbols: Default::default(),
worklist: Vec::new(),
any_library,
};
// Step 1: Seed the worklist with all nodes which were found to be public as
// a result of
|
{
self.worklist.push(def_id);
}
|
conditional_block
|
reachable.rs
|
rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, DefIdTree, TyCtxt};
use rustc_session::config::CrateType;
use rustc_target::spec::abi::Abi;
// Returns true if the given item must be inlined because it may be
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item<'_>, attrs: &CodegenFnAttrs) -> bool {
if attrs.requests_inline() {
return true;
}
match item.kind {
hir::ItemKind::Fn(ref sig,..) if sig.header.is_const() => true,
hir::ItemKind::Impl {.. } | hir::ItemKind::Fn(..) => {
let generics = tcx.generics_of(item.def_id);
generics.requires_monomorphization(tcx)
}
_ => false,
}
}
fn method_might_be_inlined(
tcx: TyCtxt<'_>,
impl_item: &hir::ImplItem<'_>,
impl_src: LocalDefId,
) -> bool {
let codegen_fn_attrs = tcx.codegen_fn_attrs(impl_item.hir_id().owner.to_def_id());
let generics = tcx.generics_of(impl_item.def_id);
if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) {
return true;
}
if let hir::ImplItemKind::Fn(method_sig, _) = &impl_item.kind {
if method_sig.header.is_const() {
return true;
}
}
match tcx.hir().find(tcx.hir().local_def_id_to_hir_id(impl_src)) {
Some(Node::Item(item)) => item_might_be_inlined(tcx, &item, codegen_fn_attrs),
Some(..) | None => span_bug!(impl_item.span, "impl did is not an item"),
}
}
// Information needed while computing reachability.
struct ReachableContext<'tcx> {
// The type context.
tcx: TyCtxt<'tcx>,
maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
// The set of items which must be exported in the linkage sense.
reachable_symbols: FxHashSet<LocalDefId>,
// A worklist of item IDs. Each item ID in this worklist will be inlined
// and will be scanned for further references.
// FIXME(eddyb) benchmark if this would be faster as a `VecDeque`.
worklist: Vec<LocalDefId>,
// Whether any output of this compilation is a library
any_library: bool,
}
impl<'tcx> Visitor<'tcx> for ReachableContext<'tcx> {
type Map = intravisit::ErasedMap<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
let old_maybe_typeck_results =
self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
let res = match expr.kind {
hir::ExprKind::Path(ref qpath) => {
Some(self.typeck_results().qpath_res(qpath, expr.hir_id))
}
hir::ExprKind::MethodCall(..) => self
.typeck_results()
.type_dependent_def(expr.hir_id)
.map(|(kind, def_id)| Res::Def(kind, def_id)),
_ => None,
};
if let Some(res) = res {
if let Some(def_id) = res.opt_def_id().and_then(|def_id| def_id.as_local()) {
if self.def_id_represents_local_inlined_item(def_id.to_def_id()) {
self.worklist.push(def_id);
} else {
match res {
// If this path leads to a constant, then we need to
// recurse into the constant to continue finding
// items that are reachable.
Res::Def(DefKind::Const | DefKind::AssocConst, _) => {
self.worklist.push(def_id);
}
// If this wasn't a static, then the destination is
// surely reachable.
_ => {
self.reachable_symbols.insert(def_id);
}
}
}
}
}
intravisit::walk_expr(self, expr)
}
}
impl<'tcx> ReachableContext<'tcx> {
/// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
self.maybe_typeck_results
.expect("`ReachableContext::typeck_results` called outside of body")
}
// Returns true if the given def ID represents a local item that is
// eligible for inlining and false otherwise.
fn def_id_represents_local_inlined_item(&self, def_id: DefId) -> bool
|
},
Some(Node::ImplItem(impl_item)) => {
match impl_item.kind {
hir::ImplItemKind::Const(..) => true,
hir::ImplItemKind::Fn(..) => {
let attrs = self.tcx.codegen_fn_attrs(def_id);
let generics = self.tcx.generics_of(def_id);
if generics.requires_monomorphization(self.tcx) || attrs.requests_inline() {
true
} else {
let impl_did = self.tcx.hir().get_parent_did(hir_id);
// Check the impl. If the generics on the self
// type of the impl require inlining, this method
// does too.
let impl_hir_id = self.tcx.hir().local_def_id_to_hir_id(impl_did);
match self.tcx.hir().expect_item(impl_hir_id).kind {
hir::ItemKind::Impl {.. } => {
let generics = self.tcx.generics_of(impl_did);
generics.requires_monomorphization(self.tcx)
}
_ => false,
}
}
}
hir::ImplItemKind::TyAlias(_) => false,
}
}
Some(_) => false,
None => false, // This will happen for default methods.
}
}
// Step 2: Mark all symbols that the symbols on the worklist touch.
fn propagate(&mut self) {
let mut scanned = FxHashSet::default();
while let Some(search_item) = self.worklist.pop() {
if!scanned.insert(search_item) {
continue;
}
if let Some(ref item) =
self.tcx.hir().find(self.tcx.hir().local_def_id_to_hir_id(search_item))
{
self.propagate_node(item, search_item);
}
}
}
fn propagate_node(&mut self, node: &Node<'tcx>, search_item: LocalDefId) {
if!self.any_library {
// If we are building an executable, only explicitly extern
// types need to be exported.
let reachable =
if let Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig,..),.. })
| Node::ImplItem(hir::ImplItem {
kind: hir::ImplItemKind::Fn(sig,..),..
}) = *node
{
sig.header.abi!= Abi::Rust
} else {
false
};
let codegen_attrs = self.tcx.codegen_fn_attrs(search_item);
let is_extern = codegen_attrs.contains_extern_indicator();
let std_internal =
codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
if reachable || is_extern || std_internal {
self.reachable_symbols.insert(search_item);
}
} else {
// If we are building a library, then reachable symbols will
// continue to participate in linkage after this product is
// produced. In this case, we traverse the ast node, recursing on
// all reachable nodes from this one.
self.reachable_symbols.insert(search_item);
}
match *node {
Node::Item(item) => {
match item.kind {
hir::ItemKind::Fn(.., body) => {
if item_might_be_inlined(
self.tcx,
&item,
self.tcx.codegen_fn_attrs(item.def_id),
) {
self.visit_nested_body(body);
}
}
// Reachable constants will be inlined into other crates
// unconditionally, so we need to make sure that their
// contents are also reachable.
hir::ItemKind::Const(_, init) | hir::ItemKind::Static(_, _, init) => {
self.visit_nested_body(init);
}
// These are normal, nothing reachable about these
// inherently and their children are already in the
// worklist, as determined by the privacy pass
hir::ItemKind::ExternCrate(_)
| hir::ItemKind::Use(..)
| hir::ItemKind::OpaqueTy(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::Macro(..)
| hir::ItemKind::Mod(..)
| hir::ItemKind::ForeignMod {.. }
| hir::ItemKind::Impl {.. }
| hir::ItemKind::Trait(..)
| hir::ItemKind::TraitAlias(..)
| hir::ItemKind::Struct(..)
| hir::ItemKind::Enum(..)
| hir::ItemKind::Union(..)
| hir::ItemKind::GlobalAsm(..) => {}
}
}
Node::TraitItem(trait_method) => {
match trait_method.kind {
hir::TraitItemKind::Const(_, None)
| hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_)) => {
// Keep going, nothing to get exported
}
hir::TraitItemKind::Const(_, Some(body_id))
| hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)) => {
self.visit_nested_body(body_id);
}
hir::TraitItemKind::Type(..) => {}
}
}
Node::ImplItem(impl_item) => match impl_item.kind {
hir::ImplItemKind::Const(_, body) => {
self.visit_nested_body(body);
}
hir::ImplItemKind::Fn(_, body) => {
let impl_def_id =
self.tcx.parent(search_item.to_def_id()).unwrap().expect_local();
if method_might_be_inlined(self.tcx, impl_item, impl_def_id) {
self.visit_nested_body(body)
}
}
hir::ImplItemKind::TyAlias(_) => {}
},
Node::Expr(&hir::Expr { kind: hir::ExprKind::Closure(.., body, _, _),.. }) => {
self.visit_nested_body(body);
}
// Nothing to recurse on for these
Node::ForeignItem(_)
| Node::Variant(_)
| Node::Ctor(..)
| Node::Field(_)
| Node::Ty(_)
| Node::Crate(_) => {}
_ => {
bug!(
"found unexpected node kind in worklist: {} ({:?})",
self.tcx
.hir()
.node_to_string(self.tcx.hir().local_def_id_to_hir_id(search_item)),
node,
);
}
}
}
}
// Some methods from non-exported (completely private) trait impls still have to be
// reachable if they are called from inlinable code. Generally, it's not known until
// monomorphization if a specific trait impl item can be reachable or not. So, we
// conservatively mark all of them as reachable.
// FIXME: One possible strategy for pruning the reachable set is to avoid marking impl
// items of non-exported traits (or maybe all local traits?) unless their respective
// trait items are used from inlinable code through method call syntax or UFCS, or their
// trait is a lang item.
struct CollectPrivateImplItemsVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
access_levels: &'a privacy::AccessLevels,
worklist: &'a mut Vec<LocalDefId>,
}
impl CollectPrivateImplItemsVisitor<'_, '_> {
fn push_to_worklist_if_has_custom_linkage(&mut self, def_id: LocalDefId) {
// Anything which has custom linkage gets thrown on the worklist no
// matter where it is in the crate, along with "special std symbols"
// which are currently akin to allocator symbols.
let codegen_attrs = self.tcx.codegen_fn_attrs(def_id);
if codegen_attrs.contains_extern_indicator()
|| codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
{
self.worklist.push(def_id);
}
}
}
impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item<'_>) {
self.push_to_worklist_if_has_custom_linkage(item.def_id);
// We need only trait impls here, not inherent impls, and only non-exported ones
if let hir::ItemKind::Impl(hir::Impl { of_trait: Some(ref trait_ref), ref items,.. }) =
item.kind
{
if!self.access_levels.is_reachable(item.def_id) {
// FIXME(#53488) remove `let`
let tcx = self.tcx;
self.worklist.extend(items.iter().map(|ii_ref| ii_ref.id.def_id));
let trait_def_id = match trait_ref.path.res {
Res::Def(DefKind::Trait, def_id) => def_id,
_ => unreachable!(),
};
if!trait_def_id.is_local() {
return;
}
self.worklist.extend(
tcx.provided_trait_methods(trait_def_id)
.map(|assoc| assoc.def_id.expect_local()),
);
}
}
}
fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {}
fn visit_impl_item(&mut self, impl_item: &hir::ImplItem<'_>) {
self.push_to_worklist_if_has_custom_linkage(impl_item.def_id);
}
fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {
// We never export foreign functions as they have no body to export.
}
}
fn reachable_set<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> FxHashSet<LocalDefId> {
let access_levels = &tcx.privacy_access_levels(());
let any_library =
tcx.sess.crate_types().iter().any(|ty| {
*ty == CrateType::Rlib || *ty == CrateType::Dylib || *ty == CrateType::ProcMacro
});
let mut reachable_context = ReachableContext {
tcx,
maybe_typeck_results: None,
reachable_symbols: Default::default(),
worklist: Vec::new(),
any_library,
};
// Step 1: Seed the worklist with all nodes which were found to be public as
// a result of
|
{
let hir_id = match def_id.as_local() {
Some(def_id) => self.tcx.hir().local_def_id_to_hir_id(def_id),
None => {
return false;
}
};
match self.tcx.hir().find(hir_id) {
Some(Node::Item(item)) => match item.kind {
hir::ItemKind::Fn(..) => {
item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id))
}
_ => false,
},
Some(Node::TraitItem(trait_method)) => match trait_method.kind {
hir::TraitItemKind::Const(_, ref default) => default.is_some(),
hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(_)) => true,
hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_))
| hir::TraitItemKind::Type(..) => false,
|
identifier_body
|
reachable.rs
|
rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, DefIdTree, TyCtxt};
use rustc_session::config::CrateType;
use rustc_target::spec::abi::Abi;
// Returns true if the given item must be inlined because it may be
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item<'_>, attrs: &CodegenFnAttrs) -> bool {
if attrs.requests_inline() {
return true;
}
match item.kind {
hir::ItemKind::Fn(ref sig,..) if sig.header.is_const() => true,
hir::ItemKind::Impl {.. } | hir::ItemKind::Fn(..) => {
let generics = tcx.generics_of(item.def_id);
generics.requires_monomorphization(tcx)
}
_ => false,
}
}
fn method_might_be_inlined(
tcx: TyCtxt<'_>,
impl_item: &hir::ImplItem<'_>,
impl_src: LocalDefId,
) -> bool {
let codegen_fn_attrs = tcx.codegen_fn_attrs(impl_item.hir_id().owner.to_def_id());
let generics = tcx.generics_of(impl_item.def_id);
if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) {
return true;
}
if let hir::ImplItemKind::Fn(method_sig, _) = &impl_item.kind {
if method_sig.header.is_const() {
return true;
}
}
match tcx.hir().find(tcx.hir().local_def_id_to_hir_id(impl_src)) {
Some(Node::Item(item)) => item_might_be_inlined(tcx, &item, codegen_fn_attrs),
Some(..) | None => span_bug!(impl_item.span, "impl did is not an item"),
}
}
// Information needed while computing reachability.
struct ReachableContext<'tcx> {
// The type context.
tcx: TyCtxt<'tcx>,
maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
// The set of items which must be exported in the linkage sense.
reachable_symbols: FxHashSet<LocalDefId>,
// A worklist of item IDs. Each item ID in this worklist will be inlined
// and will be scanned for further references.
// FIXME(eddyb) benchmark if this would be faster as a `VecDeque`.
worklist: Vec<LocalDefId>,
// Whether any output of this compilation is a library
any_library: bool,
}
impl<'tcx> Visitor<'tcx> for ReachableContext<'tcx> {
type Map = intravisit::ErasedMap<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
let old_maybe_typeck_results =
self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
let res = match expr.kind {
hir::ExprKind::Path(ref qpath) => {
Some(self.typeck_results().qpath_res(qpath, expr.hir_id))
}
hir::ExprKind::MethodCall(..) => self
.typeck_results()
.type_dependent_def(expr.hir_id)
.map(|(kind, def_id)| Res::Def(kind, def_id)),
_ => None,
};
if let Some(res) = res {
if let Some(def_id) = res.opt_def_id().and_then(|def_id| def_id.as_local()) {
if self.def_id_represents_local_inlined_item(def_id.to_def_id()) {
self.worklist.push(def_id);
} else {
match res {
// If this path leads to a constant, then we need to
// recurse into the constant to continue finding
// items that are reachable.
Res::Def(DefKind::Const | DefKind::AssocConst, _) => {
self.worklist.push(def_id);
}
// If this wasn't a static, then the destination is
// surely reachable.
_ => {
self.reachable_symbols.insert(def_id);
}
}
}
}
}
intravisit::walk_expr(self, expr)
}
}
impl<'tcx> ReachableContext<'tcx> {
/// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
self.maybe_typeck_results
.expect("`ReachableContext::typeck_results` called outside of body")
}
// Returns true if the given def ID represents a local item that is
// eligible for inlining and false otherwise.
fn def_id_represents_local_inlined_item(&self, def_id: DefId) -> bool {
let hir_id = match def_id.as_local() {
Some(def_id) => self.tcx.hir().local_def_id_to_hir_id(def_id),
None => {
return false;
}
};
match self.tcx.hir().find(hir_id) {
Some(Node::Item(item)) => match item.kind {
hir::ItemKind::Fn(..) => {
item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id))
}
_ => false,
},
Some(Node::TraitItem(trait_method)) => match trait_method.kind {
hir::TraitItemKind::Const(_, ref default) => default.is_some(),
hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(_)) => true,
hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_))
| hir::TraitItemKind::Type(..) => false,
},
Some(Node::ImplItem(impl_item)) => {
match impl_item.kind {
hir::ImplItemKind::Const(..) => true,
hir::ImplItemKind::Fn(..) => {
let attrs = self.tcx.codegen_fn_attrs(def_id);
let generics = self.tcx.generics_of(def_id);
if generics.requires_monomorphization(self.tcx) || attrs.requests_inline() {
true
} else {
let impl_did = self.tcx.hir().get_parent_did(hir_id);
// Check the impl. If the generics on the self
// type of the impl require inlining, this method
// does too.
let impl_hir_id = self.tcx.hir().local_def_id_to_hir_id(impl_did);
match self.tcx.hir().expect_item(impl_hir_id).kind {
hir::ItemKind::Impl {.. } => {
let generics = self.tcx.generics_of(impl_did);
generics.requires_monomorphization(self.tcx)
}
_ => false,
}
}
}
hir::ImplItemKind::TyAlias(_) => false,
}
}
Some(_) => false,
None => false, // This will happen for default methods.
}
}
// Step 2: Mark all symbols that the symbols on the worklist touch.
fn propagate(&mut self) {
let mut scanned = FxHashSet::default();
while let Some(search_item) = self.worklist.pop() {
if!scanned.insert(search_item) {
continue;
}
if let Some(ref item) =
self.tcx.hir().find(self.tcx.hir().local_def_id_to_hir_id(search_item))
{
self.propagate_node(item, search_item);
}
}
}
fn propagate_node(&mut self, node: &Node<'tcx>, search_item: LocalDefId) {
if!self.any_library {
// If we are building an executable, only explicitly extern
// types need to be exported.
let reachable =
if let Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig,..),.. })
| Node::ImplItem(hir::ImplItem {
kind: hir::ImplItemKind::Fn(sig,..),..
}) = *node
{
sig.header.abi!= Abi::Rust
} else {
false
};
let codegen_attrs = self.tcx.codegen_fn_attrs(search_item);
let is_extern = codegen_attrs.contains_extern_indicator();
let std_internal =
codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
if reachable || is_extern || std_internal {
self.reachable_symbols.insert(search_item);
}
} else {
// If we are building a library, then reachable symbols will
// continue to participate in linkage after this product is
// produced. In this case, we traverse the ast node, recursing on
// all reachable nodes from this one.
self.reachable_symbols.insert(search_item);
}
match *node {
Node::Item(item) => {
match item.kind {
hir::ItemKind::Fn(.., body) => {
if item_might_be_inlined(
self.tcx,
&item,
self.tcx.codegen_fn_attrs(item.def_id),
) {
self.visit_nested_body(body);
}
}
// Reachable constants will be inlined into other crates
// unconditionally, so we need to make sure that their
// contents are also reachable.
hir::ItemKind::Const(_, init) | hir::ItemKind::Static(_, _, init) => {
self.visit_nested_body(init);
}
// These are normal, nothing reachable about these
// inherently and their children are already in the
// worklist, as determined by the privacy pass
hir::ItemKind::ExternCrate(_)
| hir::ItemKind::Use(..)
| hir::ItemKind::OpaqueTy(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::Macro(..)
| hir::ItemKind::Mod(..)
| hir::ItemKind::ForeignMod {.. }
| hir::ItemKind::Impl {.. }
| hir::ItemKind::Trait(..)
| hir::ItemKind::TraitAlias(..)
| hir::ItemKind::Struct(..)
| hir::ItemKind::Enum(..)
| hir::ItemKind::Union(..)
| hir::ItemKind::GlobalAsm(..) => {}
}
}
Node::TraitItem(trait_method) => {
match trait_method.kind {
hir::TraitItemKind::Const(_, None)
| hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_)) => {
// Keep going, nothing to get exported
}
hir::TraitItemKind::Const(_, Some(body_id))
| hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)) => {
self.visit_nested_body(body_id);
}
hir::TraitItemKind::Type(..) => {}
}
}
Node::ImplItem(impl_item) => match impl_item.kind {
hir::ImplItemKind::Const(_, body) => {
self.visit_nested_body(body);
}
hir::ImplItemKind::Fn(_, body) => {
let impl_def_id =
self.tcx.parent(search_item.to_def_id()).unwrap().expect_local();
if method_might_be_inlined(self.tcx, impl_item, impl_def_id) {
self.visit_nested_body(body)
}
}
hir::ImplItemKind::TyAlias(_) => {}
},
Node::Expr(&hir::Expr { kind: hir::ExprKind::Closure(.., body, _, _),.. }) => {
self.visit_nested_body(body);
}
// Nothing to recurse on for these
Node::ForeignItem(_)
| Node::Variant(_)
| Node::Ctor(..)
| Node::Field(_)
| Node::Ty(_)
| Node::Crate(_) => {}
_ => {
bug!(
"found unexpected node kind in worklist: {} ({:?})",
self.tcx
.hir()
.node_to_string(self.tcx.hir().local_def_id_to_hir_id(search_item)),
node,
);
}
}
}
}
// Some methods from non-exported (completely private) trait impls still have to be
// reachable if they are called from inlinable code. Generally, it's not known until
// monomorphization if a specific trait impl item can be reachable or not. So, we
// conservatively mark all of them as reachable.
// FIXME: One possible strategy for pruning the reachable set is to avoid marking impl
// items of non-exported traits (or maybe all local traits?) unless their respective
// trait items are used from inlinable code through method call syntax or UFCS, or their
// trait is a lang item.
struct CollectPrivateImplItemsVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
access_levels: &'a privacy::AccessLevels,
worklist: &'a mut Vec<LocalDefId>,
}
impl CollectPrivateImplItemsVisitor<'_, '_> {
fn push_to_worklist_if_has_custom_linkage(&mut self, def_id: LocalDefId) {
// Anything which has custom linkage gets thrown on the worklist no
// matter where it is in the crate, along with "special std symbols"
// which are currently akin to allocator symbols.
let codegen_attrs = self.tcx.codegen_fn_attrs(def_id);
if codegen_attrs.contains_extern_indicator()
|| codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
{
self.worklist.push(def_id);
}
}
}
impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item<'_>) {
self.push_to_worklist_if_has_custom_linkage(item.def_id);
// We need only trait impls here, not inherent impls, and only non-exported ones
if let hir::ItemKind::Impl(hir::Impl { of_trait: Some(ref trait_ref), ref items,.. }) =
item.kind
{
if!self.access_levels.is_reachable(item.def_id) {
// FIXME(#53488) remove `let`
let tcx = self.tcx;
self.worklist.extend(items.iter().map(|ii_ref| ii_ref.id.def_id));
let trait_def_id = match trait_ref.path.res {
Res::Def(DefKind::Trait, def_id) => def_id,
_ => unreachable!(),
};
if!trait_def_id.is_local() {
return;
}
self.worklist.extend(
tcx.provided_trait_methods(trait_def_id)
.map(|assoc| assoc.def_id.expect_local()),
);
}
}
}
fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {}
fn visit_impl_item(&mut self, impl_item: &hir::ImplItem<'_>) {
self.push_to_worklist_if_has_custom_linkage(impl_item.def_id);
}
fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {
// We never export foreign functions as they have no body to export.
}
}
fn
|
<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> FxHashSet<LocalDefId> {
let access_levels = &tcx.privacy_access_levels(());
let any_library =
tcx.sess.crate_types().iter().any(|ty| {
*ty == CrateType::Rlib || *ty == CrateType::Dylib || *ty == CrateType::ProcMacro
});
let mut reachable_context = ReachableContext {
tcx,
maybe_typeck_results: None,
reachable_symbols: Default::default(),
worklist: Vec::new(),
any_library,
};
// Step 1: Seed the worklist with all nodes which were found to be public as
// a result of
|
reachable_set
|
identifier_name
|
reachable.rs
|
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, DefIdTree, TyCtxt};
use rustc_session::config::CrateType;
|
// Returns true if the given item must be inlined because it may be
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item<'_>, attrs: &CodegenFnAttrs) -> bool {
if attrs.requests_inline() {
return true;
}
match item.kind {
hir::ItemKind::Fn(ref sig,..) if sig.header.is_const() => true,
hir::ItemKind::Impl {.. } | hir::ItemKind::Fn(..) => {
let generics = tcx.generics_of(item.def_id);
generics.requires_monomorphization(tcx)
}
_ => false,
}
}
fn method_might_be_inlined(
tcx: TyCtxt<'_>,
impl_item: &hir::ImplItem<'_>,
impl_src: LocalDefId,
) -> bool {
let codegen_fn_attrs = tcx.codegen_fn_attrs(impl_item.hir_id().owner.to_def_id());
let generics = tcx.generics_of(impl_item.def_id);
if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) {
return true;
}
if let hir::ImplItemKind::Fn(method_sig, _) = &impl_item.kind {
if method_sig.header.is_const() {
return true;
}
}
match tcx.hir().find(tcx.hir().local_def_id_to_hir_id(impl_src)) {
Some(Node::Item(item)) => item_might_be_inlined(tcx, &item, codegen_fn_attrs),
Some(..) | None => span_bug!(impl_item.span, "impl did is not an item"),
}
}
// Information needed while computing reachability.
struct ReachableContext<'tcx> {
// The type context.
tcx: TyCtxt<'tcx>,
maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
// The set of items which must be exported in the linkage sense.
reachable_symbols: FxHashSet<LocalDefId>,
// A worklist of item IDs. Each item ID in this worklist will be inlined
// and will be scanned for further references.
// FIXME(eddyb) benchmark if this would be faster as a `VecDeque`.
worklist: Vec<LocalDefId>,
// Whether any output of this compilation is a library
any_library: bool,
}
impl<'tcx> Visitor<'tcx> for ReachableContext<'tcx> {
type Map = intravisit::ErasedMap<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
let old_maybe_typeck_results =
self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
let res = match expr.kind {
hir::ExprKind::Path(ref qpath) => {
Some(self.typeck_results().qpath_res(qpath, expr.hir_id))
}
hir::ExprKind::MethodCall(..) => self
.typeck_results()
.type_dependent_def(expr.hir_id)
.map(|(kind, def_id)| Res::Def(kind, def_id)),
_ => None,
};
if let Some(res) = res {
if let Some(def_id) = res.opt_def_id().and_then(|def_id| def_id.as_local()) {
if self.def_id_represents_local_inlined_item(def_id.to_def_id()) {
self.worklist.push(def_id);
} else {
match res {
// If this path leads to a constant, then we need to
// recurse into the constant to continue finding
// items that are reachable.
Res::Def(DefKind::Const | DefKind::AssocConst, _) => {
self.worklist.push(def_id);
}
// If this wasn't a static, then the destination is
// surely reachable.
_ => {
self.reachable_symbols.insert(def_id);
}
}
}
}
}
intravisit::walk_expr(self, expr)
}
}
impl<'tcx> ReachableContext<'tcx> {
/// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
self.maybe_typeck_results
.expect("`ReachableContext::typeck_results` called outside of body")
}
// Returns true if the given def ID represents a local item that is
// eligible for inlining and false otherwise.
fn def_id_represents_local_inlined_item(&self, def_id: DefId) -> bool {
let hir_id = match def_id.as_local() {
Some(def_id) => self.tcx.hir().local_def_id_to_hir_id(def_id),
None => {
return false;
}
};
match self.tcx.hir().find(hir_id) {
Some(Node::Item(item)) => match item.kind {
hir::ItemKind::Fn(..) => {
item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id))
}
_ => false,
},
Some(Node::TraitItem(trait_method)) => match trait_method.kind {
hir::TraitItemKind::Const(_, ref default) => default.is_some(),
hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(_)) => true,
hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_))
| hir::TraitItemKind::Type(..) => false,
},
Some(Node::ImplItem(impl_item)) => {
match impl_item.kind {
hir::ImplItemKind::Const(..) => true,
hir::ImplItemKind::Fn(..) => {
let attrs = self.tcx.codegen_fn_attrs(def_id);
let generics = self.tcx.generics_of(def_id);
if generics.requires_monomorphization(self.tcx) || attrs.requests_inline() {
true
} else {
let impl_did = self.tcx.hir().get_parent_did(hir_id);
// Check the impl. If the generics on the self
// type of the impl require inlining, this method
// does too.
let impl_hir_id = self.tcx.hir().local_def_id_to_hir_id(impl_did);
match self.tcx.hir().expect_item(impl_hir_id).kind {
hir::ItemKind::Impl {.. } => {
let generics = self.tcx.generics_of(impl_did);
generics.requires_monomorphization(self.tcx)
}
_ => false,
}
}
}
hir::ImplItemKind::TyAlias(_) => false,
}
}
Some(_) => false,
None => false, // This will happen for default methods.
}
}
// Step 2: Mark all symbols that the symbols on the worklist touch.
fn propagate(&mut self) {
let mut scanned = FxHashSet::default();
while let Some(search_item) = self.worklist.pop() {
if!scanned.insert(search_item) {
continue;
}
if let Some(ref item) =
self.tcx.hir().find(self.tcx.hir().local_def_id_to_hir_id(search_item))
{
self.propagate_node(item, search_item);
}
}
}
fn propagate_node(&mut self, node: &Node<'tcx>, search_item: LocalDefId) {
if!self.any_library {
// If we are building an executable, only explicitly extern
// types need to be exported.
let reachable =
if let Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig,..),.. })
| Node::ImplItem(hir::ImplItem {
kind: hir::ImplItemKind::Fn(sig,..),..
}) = *node
{
sig.header.abi!= Abi::Rust
} else {
false
};
let codegen_attrs = self.tcx.codegen_fn_attrs(search_item);
let is_extern = codegen_attrs.contains_extern_indicator();
let std_internal =
codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
if reachable || is_extern || std_internal {
self.reachable_symbols.insert(search_item);
}
} else {
// If we are building a library, then reachable symbols will
// continue to participate in linkage after this product is
// produced. In this case, we traverse the ast node, recursing on
// all reachable nodes from this one.
self.reachable_symbols.insert(search_item);
}
match *node {
Node::Item(item) => {
match item.kind {
hir::ItemKind::Fn(.., body) => {
if item_might_be_inlined(
self.tcx,
&item,
self.tcx.codegen_fn_attrs(item.def_id),
) {
self.visit_nested_body(body);
}
}
// Reachable constants will be inlined into other crates
// unconditionally, so we need to make sure that their
// contents are also reachable.
hir::ItemKind::Const(_, init) | hir::ItemKind::Static(_, _, init) => {
self.visit_nested_body(init);
}
// These are normal, nothing reachable about these
// inherently and their children are already in the
// worklist, as determined by the privacy pass
hir::ItemKind::ExternCrate(_)
| hir::ItemKind::Use(..)
| hir::ItemKind::OpaqueTy(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::Macro(..)
| hir::ItemKind::Mod(..)
| hir::ItemKind::ForeignMod {.. }
| hir::ItemKind::Impl {.. }
| hir::ItemKind::Trait(..)
| hir::ItemKind::TraitAlias(..)
| hir::ItemKind::Struct(..)
| hir::ItemKind::Enum(..)
| hir::ItemKind::Union(..)
| hir::ItemKind::GlobalAsm(..) => {}
}
}
Node::TraitItem(trait_method) => {
match trait_method.kind {
hir::TraitItemKind::Const(_, None)
| hir::TraitItemKind::Fn(_, hir::TraitFn::Required(_)) => {
// Keep going, nothing to get exported
}
hir::TraitItemKind::Const(_, Some(body_id))
| hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)) => {
self.visit_nested_body(body_id);
}
hir::TraitItemKind::Type(..) => {}
}
}
Node::ImplItem(impl_item) => match impl_item.kind {
hir::ImplItemKind::Const(_, body) => {
self.visit_nested_body(body);
}
hir::ImplItemKind::Fn(_, body) => {
let impl_def_id =
self.tcx.parent(search_item.to_def_id()).unwrap().expect_local();
if method_might_be_inlined(self.tcx, impl_item, impl_def_id) {
self.visit_nested_body(body)
}
}
hir::ImplItemKind::TyAlias(_) => {}
},
Node::Expr(&hir::Expr { kind: hir::ExprKind::Closure(.., body, _, _),.. }) => {
self.visit_nested_body(body);
}
// Nothing to recurse on for these
Node::ForeignItem(_)
| Node::Variant(_)
| Node::Ctor(..)
| Node::Field(_)
| Node::Ty(_)
| Node::Crate(_) => {}
_ => {
bug!(
"found unexpected node kind in worklist: {} ({:?})",
self.tcx
.hir()
.node_to_string(self.tcx.hir().local_def_id_to_hir_id(search_item)),
node,
);
}
}
}
}
// Some methods from non-exported (completely private) trait impls still have to be
// reachable if they are called from inlinable code. Generally, it's not known until
// monomorphization if a specific trait impl item can be reachable or not. So, we
// conservatively mark all of them as reachable.
// FIXME: One possible strategy for pruning the reachable set is to avoid marking impl
// items of non-exported traits (or maybe all local traits?) unless their respective
// trait items are used from inlinable code through method call syntax or UFCS, or their
// trait is a lang item.
struct CollectPrivateImplItemsVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
access_levels: &'a privacy::AccessLevels,
worklist: &'a mut Vec<LocalDefId>,
}
impl CollectPrivateImplItemsVisitor<'_, '_> {
fn push_to_worklist_if_has_custom_linkage(&mut self, def_id: LocalDefId) {
// Anything which has custom linkage gets thrown on the worklist no
// matter where it is in the crate, along with "special std symbols"
// which are currently akin to allocator symbols.
let codegen_attrs = self.tcx.codegen_fn_attrs(def_id);
if codegen_attrs.contains_extern_indicator()
|| codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
{
self.worklist.push(def_id);
}
}
}
impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item<'_>) {
self.push_to_worklist_if_has_custom_linkage(item.def_id);
// We need only trait impls here, not inherent impls, and only non-exported ones
if let hir::ItemKind::Impl(hir::Impl { of_trait: Some(ref trait_ref), ref items,.. }) =
item.kind
{
if!self.access_levels.is_reachable(item.def_id) {
// FIXME(#53488) remove `let`
let tcx = self.tcx;
self.worklist.extend(items.iter().map(|ii_ref| ii_ref.id.def_id));
let trait_def_id = match trait_ref.path.res {
Res::Def(DefKind::Trait, def_id) => def_id,
_ => unreachable!(),
};
if!trait_def_id.is_local() {
return;
}
self.worklist.extend(
tcx.provided_trait_methods(trait_def_id)
.map(|assoc| assoc.def_id.expect_local()),
);
}
}
}
fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {}
fn visit_impl_item(&mut self, impl_item: &hir::ImplItem<'_>) {
self.push_to_worklist_if_has_custom_linkage(impl_item.def_id);
}
fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {
// We never export foreign functions as they have no body to export.
}
}
fn reachable_set<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> FxHashSet<LocalDefId> {
let access_levels = &tcx.privacy_access_levels(());
let any_library =
tcx.sess.crate_types().iter().any(|ty| {
*ty == CrateType::Rlib || *ty == CrateType::Dylib || *ty == CrateType::ProcMacro
});
let mut reachable_context = ReachableContext {
tcx,
maybe_typeck_results: None,
reachable_symbols: Default::default(),
worklist: Vec::new(),
any_library,
};
// Step 1: Seed the worklist with all nodes which were found to be public as
// a result of the privacy
|
use rustc_target::spec::abi::Abi;
|
random_line_split
|
lib.rs
|
pub mod errors;
mod keywords;
mod parser;
mod scc;
pub mod types;
use errors::{Error, Result};
use std::path::{Path, PathBuf};
use types::Config;
/// A builder for [Config]
///
/// # Example build.rs
///
/// ```rust,no_run
/// use pb_rs::{types::FileDescriptor, ConfigBuilder};
/// use std::path::{Path, PathBuf};
/// use walkdir::WalkDir;
///
/// fn main() {
/// let out_dir = std::env::var("OUT_DIR").unwrap();
/// let out_dir = Path::new(&out_dir).join("protos");
///
/// let in_dir = PathBuf::from(::std::env::var("CARGO_MANIFEST_DIR").unwrap()).join("protos");
/// // Re-run this build.rs if the protos dir changes (i.e. a new file is added)
/// println!("cargo:rerun-if-changed={}", in_dir.to_str().unwrap());
///
/// // Find all *.proto files in the `in_dir` and add them to the list of files
/// let mut protos = Vec::new();
/// let proto_ext = Some(Path::new("proto").as_os_str());
/// for entry in WalkDir::new(&in_dir) {
/// let path = entry.unwrap().into_path();
/// if path.extension() == proto_ext {
/// // Re-run this build.rs if any of the files in the protos dir change
/// println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
/// protos.push(path);
/// }
/// }
///
/// // Delete all old generated files before re-generating new ones
/// if out_dir.exists() {
/// std::fs::remove_dir_all(&out_dir).unwrap();
/// }
/// std::fs::DirBuilder::new().create(&out_dir).unwrap();
/// let config_builder = ConfigBuilder::new(&protos, None, Some(&out_dir), &[in_dir]).unwrap();
/// FileDescriptor::run(&config_builder.build()).unwrap()
/// }
/// ```
#[derive(Debug, Default)]
pub struct ConfigBuilder {
in_files: Vec<PathBuf>,
out_file: Option<PathBuf>,
include_paths: Vec<PathBuf>,
single_module: bool,
no_output: bool,
error_cycle: bool,
headers: bool,
dont_use_cow: bool,
custom_struct_derive: Vec<String>,
custom_repr: Option<String>,
owned: bool,
nostd: bool,
hashbrown: bool,
gen_info: bool,
add_deprecated_fields: bool,
}
impl ConfigBuilder {
pub fn new<P: AsRef<Path>>(
in_files: &[P],
output: Option<&P>,
output_dir: Option<&P>,
include_paths: &[P],
) -> Result<ConfigBuilder> {
let in_files = in_files
.iter()
.map(|f| f.as_ref().into())
.collect::<Vec<PathBuf>>();
let output = output.map(|f| f.as_ref().into());
let output_dir: Option<PathBuf> = output_dir.map(|f| f.as_ref().into());
let mut include_paths = include_paths
.iter()
.map(|f| f.as_ref().into())
.collect::<Vec<PathBuf>>();
if in_files.is_empty() {
return Err(Error::NoProto);
}
for f in &in_files {
if!f.exists() {
return Err(Error::InputFile(format!("{}", f.display())));
}
}
let out_file = match (output, output_dir) {
(Some(_), None) if in_files.len() > 1 => return Err(Error::OutputMultipleInputs),
(Some(output), None) => Some(output),
(None, Some(output_dir)) => {
if!output_dir.is_dir() {
return Err(Error::OutputDirectory(format!("{}", output_dir.display())));
}
Some(output_dir)
}
(Some(_), Some(_)) => return Err(Error::OutputAndOutputDir),
(None, None) => None,
};
let default = PathBuf::from(".");
if include_paths.is_empty() ||!include_paths.contains(&default) {
include_paths.push(default);
}
Ok(ConfigBuilder {
in_files,
out_file,
include_paths,
headers: true,
..Default::default()
})
}
/// Omit generation of modules for each package when there is only one package
pub fn single_module(mut self, val: bool) -> Self {
self.single_module = val;
self
}
/// Show enums and messages in this.proto file, including those imported. No code generated.
/// `no_output` should probably only be used by the pb-rs cli.
pub fn no_output(mut self, val: bool) -> Self {
self.no_output = val;
self
}
/// Error out if recursive messages do not have optional fields
pub fn error_cycle(mut self, val: bool) -> Self {
self.error_cycle = val;
self
}
/// Enable module comments and module attributes in generated file (default = true)
pub fn headers(mut self, val: bool) -> Self {
self.headers = val;
self
}
/// Add custom values to `#[derive(...)]` at the beginning of every structure
pub fn custom_struct_derive(mut self, val: Vec<String>) -> Self {
self.custom_struct_derive = val;
self
}
/// Add custom values to `#[repr(...)]` at the beginning of every structure
pub fn custom_repr(mut self, val: Option<String>) -> Self {
self.custom_repr = val;
self
}
/// Use `Cow<_,_>` for Strings and Bytes
pub fn
|
(mut self, val: bool) -> Self {
self.dont_use_cow = val;
self
}
/// Generate Owned structs when the proto struct has a lifetime
pub fn owned(mut self, val: bool) -> Self {
self.owned = val;
self
}
/// Generate `#![no_std]` compliant code
pub fn nostd(mut self, val: bool) -> Self {
self.nostd = val;
self
}
/// Use hashbrown as `HashMap` implementation instead of [std::collections::HashMap] or
/// [alloc::collections::BTreeMap](https://doc.rust-lang.org/alloc/collections/btree_map/struct.BTreeMap.html)
/// in a `no_std` environment
pub fn hashbrown(mut self, val: bool) -> Self {
self.hashbrown = val;
self
}
/// Generate `MessageInfo` implementations
pub fn gen_info(mut self, val: bool) -> Self {
self.gen_info = val;
self
}
/// Add deprecated fields and mark them as `#[deprecated]`
pub fn add_deprecated_fields(mut self, val: bool) -> Self {
self.add_deprecated_fields = val;
self
}
/// Build [Config] from this `ConfigBuilder`
pub fn build(self) -> Vec<Config> {
self.in_files
.iter()
.map(|in_file| {
let mut out_file = in_file.with_extension("rs");
if let Some(ref ofile) = self.out_file {
if ofile.is_dir() {
out_file = ofile.join(out_file.file_name().unwrap());
} else {
out_file = ofile.into();
}
}
Config {
in_file: in_file.to_owned(),
out_file,
import_search_path: self.include_paths.clone(),
single_module: self.single_module,
no_output: self.no_output,
error_cycle: self.error_cycle,
headers: self.headers,
dont_use_cow: self.dont_use_cow, //Change this to true to not use cow with./generate.sh for v2 and v3 tests
custom_struct_derive: self.custom_struct_derive.clone(),
custom_repr: self.custom_repr.clone(),
custom_rpc_generator: Box::new(|_, _| Ok(())),
custom_includes: Vec::new(),
owned: self.owned,
nostd: self.nostd,
hashbrown: self.hashbrown,
gen_info: self.gen_info,
add_deprecated_fields: self.add_deprecated_fields,
}
})
.collect()
}
}
|
dont_use_cow
|
identifier_name
|
lib.rs
|
pub mod errors;
mod keywords;
mod parser;
mod scc;
pub mod types;
use errors::{Error, Result};
use std::path::{Path, PathBuf};
use types::Config;
/// A builder for [Config]
///
/// # Example build.rs
///
/// ```rust,no_run
/// use pb_rs::{types::FileDescriptor, ConfigBuilder};
/// use std::path::{Path, PathBuf};
/// use walkdir::WalkDir;
///
/// fn main() {
/// let out_dir = std::env::var("OUT_DIR").unwrap();
/// let out_dir = Path::new(&out_dir).join("protos");
///
/// let in_dir = PathBuf::from(::std::env::var("CARGO_MANIFEST_DIR").unwrap()).join("protos");
/// // Re-run this build.rs if the protos dir changes (i.e. a new file is added)
/// println!("cargo:rerun-if-changed={}", in_dir.to_str().unwrap());
///
|
/// let path = entry.unwrap().into_path();
/// if path.extension() == proto_ext {
/// // Re-run this build.rs if any of the files in the protos dir change
/// println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
/// protos.push(path);
/// }
/// }
///
/// // Delete all old generated files before re-generating new ones
/// if out_dir.exists() {
/// std::fs::remove_dir_all(&out_dir).unwrap();
/// }
/// std::fs::DirBuilder::new().create(&out_dir).unwrap();
/// let config_builder = ConfigBuilder::new(&protos, None, Some(&out_dir), &[in_dir]).unwrap();
/// FileDescriptor::run(&config_builder.build()).unwrap()
/// }
/// ```
#[derive(Debug, Default)]
pub struct ConfigBuilder {
in_files: Vec<PathBuf>,
out_file: Option<PathBuf>,
include_paths: Vec<PathBuf>,
single_module: bool,
no_output: bool,
error_cycle: bool,
headers: bool,
dont_use_cow: bool,
custom_struct_derive: Vec<String>,
custom_repr: Option<String>,
owned: bool,
nostd: bool,
hashbrown: bool,
gen_info: bool,
add_deprecated_fields: bool,
}
impl ConfigBuilder {
pub fn new<P: AsRef<Path>>(
in_files: &[P],
output: Option<&P>,
output_dir: Option<&P>,
include_paths: &[P],
) -> Result<ConfigBuilder> {
let in_files = in_files
.iter()
.map(|f| f.as_ref().into())
.collect::<Vec<PathBuf>>();
let output = output.map(|f| f.as_ref().into());
let output_dir: Option<PathBuf> = output_dir.map(|f| f.as_ref().into());
let mut include_paths = include_paths
.iter()
.map(|f| f.as_ref().into())
.collect::<Vec<PathBuf>>();
if in_files.is_empty() {
return Err(Error::NoProto);
}
for f in &in_files {
if!f.exists() {
return Err(Error::InputFile(format!("{}", f.display())));
}
}
let out_file = match (output, output_dir) {
(Some(_), None) if in_files.len() > 1 => return Err(Error::OutputMultipleInputs),
(Some(output), None) => Some(output),
(None, Some(output_dir)) => {
if!output_dir.is_dir() {
return Err(Error::OutputDirectory(format!("{}", output_dir.display())));
}
Some(output_dir)
}
(Some(_), Some(_)) => return Err(Error::OutputAndOutputDir),
(None, None) => None,
};
let default = PathBuf::from(".");
if include_paths.is_empty() ||!include_paths.contains(&default) {
include_paths.push(default);
}
Ok(ConfigBuilder {
in_files,
out_file,
include_paths,
headers: true,
..Default::default()
})
}
/// Omit generation of modules for each package when there is only one package
pub fn single_module(mut self, val: bool) -> Self {
self.single_module = val;
self
}
/// Show enums and messages in this.proto file, including those imported. No code generated.
/// `no_output` should probably only be used by the pb-rs cli.
pub fn no_output(mut self, val: bool) -> Self {
self.no_output = val;
self
}
/// Error out if recursive messages do not have optional fields
pub fn error_cycle(mut self, val: bool) -> Self {
self.error_cycle = val;
self
}
/// Enable module comments and module attributes in generated file (default = true)
pub fn headers(mut self, val: bool) -> Self {
self.headers = val;
self
}
/// Add custom values to `#[derive(...)]` at the beginning of every structure
pub fn custom_struct_derive(mut self, val: Vec<String>) -> Self {
self.custom_struct_derive = val;
self
}
/// Add custom values to `#[repr(...)]` at the beginning of every structure
pub fn custom_repr(mut self, val: Option<String>) -> Self {
self.custom_repr = val;
self
}
/// Use `Cow<_,_>` for Strings and Bytes
pub fn dont_use_cow(mut self, val: bool) -> Self {
self.dont_use_cow = val;
self
}
/// Generate Owned structs when the proto struct has a lifetime
pub fn owned(mut self, val: bool) -> Self {
self.owned = val;
self
}
/// Generate `#![no_std]` compliant code
pub fn nostd(mut self, val: bool) -> Self {
self.nostd = val;
self
}
/// Use hashbrown as `HashMap` implementation instead of [std::collections::HashMap] or
/// [alloc::collections::BTreeMap](https://doc.rust-lang.org/alloc/collections/btree_map/struct.BTreeMap.html)
/// in a `no_std` environment
pub fn hashbrown(mut self, val: bool) -> Self {
self.hashbrown = val;
self
}
/// Generate `MessageInfo` implementations
pub fn gen_info(mut self, val: bool) -> Self {
self.gen_info = val;
self
}
/// Add deprecated fields and mark them as `#[deprecated]`
pub fn add_deprecated_fields(mut self, val: bool) -> Self {
self.add_deprecated_fields = val;
self
}
/// Build [Config] from this `ConfigBuilder`
pub fn build(self) -> Vec<Config> {
self.in_files
.iter()
.map(|in_file| {
let mut out_file = in_file.with_extension("rs");
if let Some(ref ofile) = self.out_file {
if ofile.is_dir() {
out_file = ofile.join(out_file.file_name().unwrap());
} else {
out_file = ofile.into();
}
}
Config {
in_file: in_file.to_owned(),
out_file,
import_search_path: self.include_paths.clone(),
single_module: self.single_module,
no_output: self.no_output,
error_cycle: self.error_cycle,
headers: self.headers,
dont_use_cow: self.dont_use_cow, //Change this to true to not use cow with./generate.sh for v2 and v3 tests
custom_struct_derive: self.custom_struct_derive.clone(),
custom_repr: self.custom_repr.clone(),
custom_rpc_generator: Box::new(|_, _| Ok(())),
custom_includes: Vec::new(),
owned: self.owned,
nostd: self.nostd,
hashbrown: self.hashbrown,
gen_info: self.gen_info,
add_deprecated_fields: self.add_deprecated_fields,
}
})
.collect()
}
}
|
/// // Find all *.proto files in the `in_dir` and add them to the list of files
/// let mut protos = Vec::new();
/// let proto_ext = Some(Path::new("proto").as_os_str());
/// for entry in WalkDir::new(&in_dir) {
|
random_line_split
|
lib.rs
|
pub mod errors;
mod keywords;
mod parser;
mod scc;
pub mod types;
use errors::{Error, Result};
use std::path::{Path, PathBuf};
use types::Config;
/// A builder for [Config]
///
/// # Example build.rs
///
/// ```rust,no_run
/// use pb_rs::{types::FileDescriptor, ConfigBuilder};
/// use std::path::{Path, PathBuf};
/// use walkdir::WalkDir;
///
/// fn main() {
/// let out_dir = std::env::var("OUT_DIR").unwrap();
/// let out_dir = Path::new(&out_dir).join("protos");
///
/// let in_dir = PathBuf::from(::std::env::var("CARGO_MANIFEST_DIR").unwrap()).join("protos");
/// // Re-run this build.rs if the protos dir changes (i.e. a new file is added)
/// println!("cargo:rerun-if-changed={}", in_dir.to_str().unwrap());
///
/// // Find all *.proto files in the `in_dir` and add them to the list of files
/// let mut protos = Vec::new();
/// let proto_ext = Some(Path::new("proto").as_os_str());
/// for entry in WalkDir::new(&in_dir) {
/// let path = entry.unwrap().into_path();
/// if path.extension() == proto_ext {
/// // Re-run this build.rs if any of the files in the protos dir change
/// println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
/// protos.push(path);
/// }
/// }
///
/// // Delete all old generated files before re-generating new ones
/// if out_dir.exists() {
/// std::fs::remove_dir_all(&out_dir).unwrap();
/// }
/// std::fs::DirBuilder::new().create(&out_dir).unwrap();
/// let config_builder = ConfigBuilder::new(&protos, None, Some(&out_dir), &[in_dir]).unwrap();
/// FileDescriptor::run(&config_builder.build()).unwrap()
/// }
/// ```
#[derive(Debug, Default)]
pub struct ConfigBuilder {
in_files: Vec<PathBuf>,
out_file: Option<PathBuf>,
include_paths: Vec<PathBuf>,
single_module: bool,
no_output: bool,
error_cycle: bool,
headers: bool,
dont_use_cow: bool,
custom_struct_derive: Vec<String>,
custom_repr: Option<String>,
owned: bool,
nostd: bool,
hashbrown: bool,
gen_info: bool,
add_deprecated_fields: bool,
}
impl ConfigBuilder {
pub fn new<P: AsRef<Path>>(
in_files: &[P],
output: Option<&P>,
output_dir: Option<&P>,
include_paths: &[P],
) -> Result<ConfigBuilder> {
let in_files = in_files
.iter()
.map(|f| f.as_ref().into())
.collect::<Vec<PathBuf>>();
let output = output.map(|f| f.as_ref().into());
let output_dir: Option<PathBuf> = output_dir.map(|f| f.as_ref().into());
let mut include_paths = include_paths
.iter()
.map(|f| f.as_ref().into())
.collect::<Vec<PathBuf>>();
if in_files.is_empty() {
return Err(Error::NoProto);
}
for f in &in_files {
if!f.exists() {
return Err(Error::InputFile(format!("{}", f.display())));
}
}
let out_file = match (output, output_dir) {
(Some(_), None) if in_files.len() > 1 => return Err(Error::OutputMultipleInputs),
(Some(output), None) => Some(output),
(None, Some(output_dir)) => {
if!output_dir.is_dir() {
return Err(Error::OutputDirectory(format!("{}", output_dir.display())));
}
Some(output_dir)
}
(Some(_), Some(_)) => return Err(Error::OutputAndOutputDir),
(None, None) => None,
};
let default = PathBuf::from(".");
if include_paths.is_empty() ||!include_paths.contains(&default) {
include_paths.push(default);
}
Ok(ConfigBuilder {
in_files,
out_file,
include_paths,
headers: true,
..Default::default()
})
}
/// Omit generation of modules for each package when there is only one package
pub fn single_module(mut self, val: bool) -> Self {
self.single_module = val;
self
}
/// Show enums and messages in this.proto file, including those imported. No code generated.
/// `no_output` should probably only be used by the pb-rs cli.
pub fn no_output(mut self, val: bool) -> Self {
self.no_output = val;
self
}
/// Error out if recursive messages do not have optional fields
pub fn error_cycle(mut self, val: bool) -> Self {
self.error_cycle = val;
self
}
/// Enable module comments and module attributes in generated file (default = true)
pub fn headers(mut self, val: bool) -> Self {
self.headers = val;
self
}
/// Add custom values to `#[derive(...)]` at the beginning of every structure
pub fn custom_struct_derive(mut self, val: Vec<String>) -> Self {
self.custom_struct_derive = val;
self
}
/// Add custom values to `#[repr(...)]` at the beginning of every structure
pub fn custom_repr(mut self, val: Option<String>) -> Self {
self.custom_repr = val;
self
}
/// Use `Cow<_,_>` for Strings and Bytes
pub fn dont_use_cow(mut self, val: bool) -> Self {
self.dont_use_cow = val;
self
}
/// Generate Owned structs when the proto struct has a lifetime
pub fn owned(mut self, val: bool) -> Self {
self.owned = val;
self
}
/// Generate `#![no_std]` compliant code
pub fn nostd(mut self, val: bool) -> Self {
self.nostd = val;
self
}
/// Use hashbrown as `HashMap` implementation instead of [std::collections::HashMap] or
/// [alloc::collections::BTreeMap](https://doc.rust-lang.org/alloc/collections/btree_map/struct.BTreeMap.html)
/// in a `no_std` environment
pub fn hashbrown(mut self, val: bool) -> Self
|
/// Generate `MessageInfo` implementations
pub fn gen_info(mut self, val: bool) -> Self {
self.gen_info = val;
self
}
/// Add deprecated fields and mark them as `#[deprecated]`
pub fn add_deprecated_fields(mut self, val: bool) -> Self {
self.add_deprecated_fields = val;
self
}
/// Build [Config] from this `ConfigBuilder`
pub fn build(self) -> Vec<Config> {
self.in_files
.iter()
.map(|in_file| {
let mut out_file = in_file.with_extension("rs");
if let Some(ref ofile) = self.out_file {
if ofile.is_dir() {
out_file = ofile.join(out_file.file_name().unwrap());
} else {
out_file = ofile.into();
}
}
Config {
in_file: in_file.to_owned(),
out_file,
import_search_path: self.include_paths.clone(),
single_module: self.single_module,
no_output: self.no_output,
error_cycle: self.error_cycle,
headers: self.headers,
dont_use_cow: self.dont_use_cow, //Change this to true to not use cow with./generate.sh for v2 and v3 tests
custom_struct_derive: self.custom_struct_derive.clone(),
custom_repr: self.custom_repr.clone(),
custom_rpc_generator: Box::new(|_, _| Ok(())),
custom_includes: Vec::new(),
owned: self.owned,
nostd: self.nostd,
hashbrown: self.hashbrown,
gen_info: self.gen_info,
add_deprecated_fields: self.add_deprecated_fields,
}
})
.collect()
}
}
|
{
self.hashbrown = val;
self
}
|
identifier_body
|
vector_reader.rs
|
use ::{Context, Ptr};
use ::lang::{Value, Object, Scope, List, Vector};
use super::reader::Reader;
pub fn
|
(context: &Context, scope: Ptr<Object<Scope>>, args: Ptr<Object<List>>) -> Ptr<Value> {
let mut reader = args.first(context).downcast::<Object<Reader>>().unwrap();
let ch = reader.peek(0);
if ch == '[' {
reader.read();
let mut vector = context.gc.new_object(context.VectorType, Vector::new(context));
loop {
let ch = reader.peek(0);
if ch == ']' {
reader.read();
break;
} else {
let ret_list = reader.next(context, scope);
let first = ret_list.first(context);
if first.typ() == context.BooleanType && first.downcast::<Object<bool>>().unwrap().value() == &true {
vector.push_mut(context, ret_list.last(context));
} else {
break;
}
}
}
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.true_value.as_value());
ret_list.push_back_mut(context, vector.as_value());
ret_list.as_value()
} else {
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.false_value.as_value());
ret_list.as_value()
}
}
|
vector_reader
|
identifier_name
|
vector_reader.rs
|
use ::{Context, Ptr};
use ::lang::{Value, Object, Scope, List, Vector};
use super::reader::Reader;
pub fn vector_reader(context: &Context, scope: Ptr<Object<Scope>>, args: Ptr<Object<List>>) -> Ptr<Value>
|
if first.typ() == context.BooleanType && first.downcast::<Object<bool>>().unwrap().value() == &true {
vector.push_mut(context, ret_list.last(context));
} else {
break;
}
}
}
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.true_value.as_value());
ret_list.push_back_mut(context, vector.as_value());
ret_list.as_value()
} else {
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.false_value.as_value());
ret_list.as_value()
}
}
|
{
let mut reader = args.first(context).downcast::<Object<Reader>>().unwrap();
let ch = reader.peek(0);
if ch == '[' {
reader.read();
let mut vector = context.gc.new_object(context.VectorType, Vector::new(context));
loop {
let ch = reader.peek(0);
if ch == ']' {
reader.read();
break;
} else {
let ret_list = reader.next(context, scope);
let first = ret_list.first(context);
|
identifier_body
|
vector_reader.rs
|
use ::{Context, Ptr};
|
let mut reader = args.first(context).downcast::<Object<Reader>>().unwrap();
let ch = reader.peek(0);
if ch == '[' {
reader.read();
let mut vector = context.gc.new_object(context.VectorType, Vector::new(context));
loop {
let ch = reader.peek(0);
if ch == ']' {
reader.read();
break;
} else {
let ret_list = reader.next(context, scope);
let first = ret_list.first(context);
if first.typ() == context.BooleanType && first.downcast::<Object<bool>>().unwrap().value() == &true {
vector.push_mut(context, ret_list.last(context));
} else {
break;
}
}
}
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.true_value.as_value());
ret_list.push_back_mut(context, vector.as_value());
ret_list.as_value()
} else {
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.false_value.as_value());
ret_list.as_value()
}
}
|
use ::lang::{Value, Object, Scope, List, Vector};
use super::reader::Reader;
pub fn vector_reader(context: &Context, scope: Ptr<Object<Scope>>, args: Ptr<Object<List>>) -> Ptr<Value> {
|
random_line_split
|
vector_reader.rs
|
use ::{Context, Ptr};
use ::lang::{Value, Object, Scope, List, Vector};
use super::reader::Reader;
pub fn vector_reader(context: &Context, scope: Ptr<Object<Scope>>, args: Ptr<Object<List>>) -> Ptr<Value> {
let mut reader = args.first(context).downcast::<Object<Reader>>().unwrap();
let ch = reader.peek(0);
if ch == '[' {
reader.read();
let mut vector = context.gc.new_object(context.VectorType, Vector::new(context));
loop {
let ch = reader.peek(0);
if ch == ']' {
reader.read();
break;
} else {
let ret_list = reader.next(context, scope);
let first = ret_list.first(context);
if first.typ() == context.BooleanType && first.downcast::<Object<bool>>().unwrap().value() == &true
|
else {
break;
}
}
}
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.true_value.as_value());
ret_list.push_back_mut(context, vector.as_value());
ret_list.as_value()
} else {
let mut ret_list = context.gc.new_object(context.ListType, List::new(context));
ret_list.push_back_mut(context, context.false_value.as_value());
ret_list.as_value()
}
}
|
{
vector.push_mut(context, ret_list.last(context));
}
|
conditional_block
|
mod.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tests for parsing and serialization of values/properties
use cssparser::{Parser, ParserInput};
use style::context::QuirksMode;
use style::parser::ParserContext;
use style::stylesheets::{CssRuleType, Origin};
use style_traits::{ParsingMode, ParseError};
fn parse<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>>
where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>> {
let mut input = ParserInput::new(s);
parse_input(f, &mut input)
}
fn parse_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>>
where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> {
let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap();
let context = ParserContext::new(Origin::Author, &url, Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks);
let mut parser = Parser::new(input);
f(&context, &mut parser)
}
fn parse_entirely<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>>
where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>>
|
fn parse_entirely_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>>
where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> {
parse_input(|context, parser| parser.parse_entirely(|p| f(context, p)), input)
}
// This is a macro so that the file/line information
// is preserved in the panic
macro_rules! assert_roundtrip_with_context {
($fun:expr, $string:expr) => {
assert_roundtrip_with_context!($fun, $string, $string);
};
($fun:expr, $input:expr, $output:expr) => {{
let mut input = ::cssparser::ParserInput::new($input);
let serialized = super::parse_input(|context, i| {
let parsed = $fun(context, i)
.expect(&format!("Failed to parse {}", $input));
let serialized = ToCss::to_css_string(&parsed);
assert_eq!(serialized, $output);
Ok(serialized)
}, &mut input).unwrap();
let mut input = ::cssparser::ParserInput::new(&serialized);
let unwrapped = super::parse_input(|context, i| {
let re_parsed = $fun(context, i)
.expect(&format!("Failed to parse serialization {}", $input));
let re_serialized = ToCss::to_css_string(&re_parsed);
assert_eq!(serialized, re_serialized);
Ok(())
}, &mut input).unwrap();
unwrapped
}}
}
macro_rules! assert_roundtrip {
($fun:expr, $string:expr) => {
assert_roundtrip!($fun, $string, $string);
};
($fun:expr, $input:expr, $output:expr) => {
let mut input = ParserInput::new($input);
let mut parser = Parser::new(&mut input);
let parsed = $fun(&mut parser)
.expect(&format!("Failed to parse {}", $input));
let serialized = ToCss::to_css_string(&parsed);
assert_eq!(serialized, $output);
let mut input = ParserInput::new(&serialized);
let mut parser = Parser::new(&mut input);
let re_parsed = $fun(&mut parser)
.expect(&format!("Failed to parse serialization {}", $input));
let re_serialized = ToCss::to_css_string(&re_parsed);
assert_eq!(serialized, re_serialized)
}
}
macro_rules! assert_parser_exhausted {
($fun:expr, $string:expr, $should_exhausted:expr) => {{
parse(|context, input| {
let parsed = $fun(context, input);
assert_eq!(parsed.is_ok(), true);
assert_eq!(input.is_exhausted(), $should_exhausted);
Ok(())
}, $string).unwrap()
}}
}
macro_rules! parse_longhand {
($name:ident, $s:expr) => {
parse($name::parse, $s).unwrap()
};
}
mod animation;
mod background;
mod border;
mod box_;
mod column;
mod effects;
mod image;
mod inherited_text;
mod length;
mod outline;
mod position;
mod selectors;
mod supports;
mod text_overflow;
mod transition_duration;
mod transition_timing_function;
mod value;
|
{
let mut input = ParserInput::new(s);
parse_entirely_input(f, &mut input)
}
|
identifier_body
|
mod.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tests for parsing and serialization of values/properties
use cssparser::{Parser, ParserInput};
use style::context::QuirksMode;
use style::parser::ParserContext;
use style::stylesheets::{CssRuleType, Origin};
use style_traits::{ParsingMode, ParseError};
fn
|
<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>>
where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>> {
let mut input = ParserInput::new(s);
parse_input(f, &mut input)
}
fn parse_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>>
where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> {
let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap();
let context = ParserContext::new(Origin::Author, &url, Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks);
let mut parser = Parser::new(input);
f(&context, &mut parser)
}
fn parse_entirely<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>>
where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>> {
let mut input = ParserInput::new(s);
parse_entirely_input(f, &mut input)
}
fn parse_entirely_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>>
where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> {
parse_input(|context, parser| parser.parse_entirely(|p| f(context, p)), input)
}
// This is a macro so that the file/line information
// is preserved in the panic
macro_rules! assert_roundtrip_with_context {
($fun:expr, $string:expr) => {
assert_roundtrip_with_context!($fun, $string, $string);
};
($fun:expr, $input:expr, $output:expr) => {{
let mut input = ::cssparser::ParserInput::new($input);
let serialized = super::parse_input(|context, i| {
let parsed = $fun(context, i)
.expect(&format!("Failed to parse {}", $input));
let serialized = ToCss::to_css_string(&parsed);
assert_eq!(serialized, $output);
Ok(serialized)
}, &mut input).unwrap();
let mut input = ::cssparser::ParserInput::new(&serialized);
let unwrapped = super::parse_input(|context, i| {
let re_parsed = $fun(context, i)
.expect(&format!("Failed to parse serialization {}", $input));
let re_serialized = ToCss::to_css_string(&re_parsed);
assert_eq!(serialized, re_serialized);
Ok(())
}, &mut input).unwrap();
unwrapped
}}
}
macro_rules! assert_roundtrip {
($fun:expr, $string:expr) => {
assert_roundtrip!($fun, $string, $string);
};
($fun:expr, $input:expr, $output:expr) => {
let mut input = ParserInput::new($input);
let mut parser = Parser::new(&mut input);
let parsed = $fun(&mut parser)
.expect(&format!("Failed to parse {}", $input));
let serialized = ToCss::to_css_string(&parsed);
assert_eq!(serialized, $output);
let mut input = ParserInput::new(&serialized);
let mut parser = Parser::new(&mut input);
let re_parsed = $fun(&mut parser)
.expect(&format!("Failed to parse serialization {}", $input));
let re_serialized = ToCss::to_css_string(&re_parsed);
assert_eq!(serialized, re_serialized)
}
}
macro_rules! assert_parser_exhausted {
($fun:expr, $string:expr, $should_exhausted:expr) => {{
parse(|context, input| {
let parsed = $fun(context, input);
assert_eq!(parsed.is_ok(), true);
assert_eq!(input.is_exhausted(), $should_exhausted);
Ok(())
}, $string).unwrap()
}}
}
macro_rules! parse_longhand {
($name:ident, $s:expr) => {
parse($name::parse, $s).unwrap()
};
}
mod animation;
mod background;
mod border;
mod box_;
mod column;
mod effects;
mod image;
mod inherited_text;
mod length;
mod outline;
mod position;
mod selectors;
mod supports;
mod text_overflow;
mod transition_duration;
mod transition_timing_function;
mod value;
|
parse
|
identifier_name
|
mod.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tests for parsing and serialization of values/properties
use cssparser::{Parser, ParserInput};
use style::context::QuirksMode;
use style::parser::ParserContext;
use style::stylesheets::{CssRuleType, Origin};
use style_traits::{ParsingMode, ParseError};
fn parse<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>>
where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>> {
let mut input = ParserInput::new(s);
parse_input(f, &mut input)
}
fn parse_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>>
where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> {
let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap();
|
let context = ParserContext::new(Origin::Author, &url, Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks);
let mut parser = Parser::new(input);
f(&context, &mut parser)
}
fn parse_entirely<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>>
where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>> {
let mut input = ParserInput::new(s);
parse_entirely_input(f, &mut input)
}
fn parse_entirely_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>>
where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> {
parse_input(|context, parser| parser.parse_entirely(|p| f(context, p)), input)
}
// This is a macro so that the file/line information
// is preserved in the panic
macro_rules! assert_roundtrip_with_context {
($fun:expr, $string:expr) => {
assert_roundtrip_with_context!($fun, $string, $string);
};
($fun:expr, $input:expr, $output:expr) => {{
let mut input = ::cssparser::ParserInput::new($input);
let serialized = super::parse_input(|context, i| {
let parsed = $fun(context, i)
.expect(&format!("Failed to parse {}", $input));
let serialized = ToCss::to_css_string(&parsed);
assert_eq!(serialized, $output);
Ok(serialized)
}, &mut input).unwrap();
let mut input = ::cssparser::ParserInput::new(&serialized);
let unwrapped = super::parse_input(|context, i| {
let re_parsed = $fun(context, i)
.expect(&format!("Failed to parse serialization {}", $input));
let re_serialized = ToCss::to_css_string(&re_parsed);
assert_eq!(serialized, re_serialized);
Ok(())
}, &mut input).unwrap();
unwrapped
}}
}
macro_rules! assert_roundtrip {
($fun:expr, $string:expr) => {
assert_roundtrip!($fun, $string, $string);
};
($fun:expr, $input:expr, $output:expr) => {
let mut input = ParserInput::new($input);
let mut parser = Parser::new(&mut input);
let parsed = $fun(&mut parser)
.expect(&format!("Failed to parse {}", $input));
let serialized = ToCss::to_css_string(&parsed);
assert_eq!(serialized, $output);
let mut input = ParserInput::new(&serialized);
let mut parser = Parser::new(&mut input);
let re_parsed = $fun(&mut parser)
.expect(&format!("Failed to parse serialization {}", $input));
let re_serialized = ToCss::to_css_string(&re_parsed);
assert_eq!(serialized, re_serialized)
}
}
macro_rules! assert_parser_exhausted {
($fun:expr, $string:expr, $should_exhausted:expr) => {{
parse(|context, input| {
let parsed = $fun(context, input);
assert_eq!(parsed.is_ok(), true);
assert_eq!(input.is_exhausted(), $should_exhausted);
Ok(())
}, $string).unwrap()
}}
}
macro_rules! parse_longhand {
($name:ident, $s:expr) => {
parse($name::parse, $s).unwrap()
};
}
mod animation;
mod background;
mod border;
mod box_;
mod column;
mod effects;
mod image;
mod inherited_text;
mod length;
mod outline;
mod position;
mod selectors;
mod supports;
mod text_overflow;
mod transition_duration;
mod transition_timing_function;
mod value;
|
random_line_split
|
|
expand.rs
|
// This file is part of the uutils coreutils package.
//
// (c) Virgile Andreani <[email protected]>
// (c) kwantam <[email protected]>
// * 2015-04-28 ~ updated to work with both UTF-8 and non-UTF-8 encodings
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) ctype cwidth iflag nbytes nspaces nums tspaces uflag
#[macro_use]
extern crate uucore;
use clap::{crate_version, App, AppSettings, Arg, ArgMatches};
use std::fs::File;
use std::io::{stdin, stdout, BufRead, BufReader, BufWriter, Read, Write};
use std::str::from_utf8;
use unicode_width::UnicodeWidthChar;
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult};
use uucore::format_usage;
static ABOUT: &str = "Convert tabs in each FILE to spaces, writing to standard output.
With no FILE, or when FILE is -, read standard input.";
const USAGE: &str = "{} [OPTION]... [FILE]...";
pub mod options {
pub static TABS: &str = "tabs";
pub static INITIAL: &str = "initial";
pub static NO_UTF8: &str = "no-utf8";
pub static FILES: &str = "FILES";
}
static LONG_HELP: &str = "";
static DEFAULT_TABSTOP: usize = 8;
/// The mode to use when replacing tabs beyond the last one specified in
/// the `--tabs` argument.
enum RemainingMode {
None,
Slash,
Plus,
}
/// Decide whether the character is either a space or a comma.
///
/// # Examples
///
/// ```rust,ignore
/// assert!(is_space_or_comma(' '))
/// assert!(is_space_or_comma(','))
/// assert!(!is_space_or_comma('a'))
/// ```
fn is_space_or_comma(c: char) -> bool {
c =='' || c == ','
}
/// Parse a list of tabstops from a `--tabs` argument.
///
/// This function returns both the vector of numbers appearing in the
/// comma- or space-separated list, and also an optional mode, specified
/// by either a "/" or a "+" character appearing before the final number
/// in the list. This mode defines the strategy to use for computing the
/// number of spaces to use for columns beyond the end of the tab stop
/// list specified here.
fn tabstops_parse(s: &str) -> (RemainingMode, Vec<usize>) {
// Leading commas and spaces are ignored.
let s = s.trim_start_matches(is_space_or_comma);
// If there were only commas and spaces in the string, just use the
// default tabstops.
if s.is_empty() {
return (RemainingMode::None, vec![DEFAULT_TABSTOP]);
}
let mut nums = vec![];
let mut remaining_mode = RemainingMode::None;
for word in s.split(is_space_or_comma) {
let bytes = word.as_bytes();
for i in 0..bytes.len() {
match bytes[i] {
b'+' => {
remaining_mode = RemainingMode::Plus;
}
b'/' => {
remaining_mode = RemainingMode::Slash;
}
_ => {
// Parse a number from the byte sequence.
let num = from_utf8(&bytes[i..]).unwrap().parse::<usize>().unwrap();
// Tab size must be positive.
if num == 0 {
crash!(1, "{}\n", "tab size cannot be 0");
}
// Tab sizes must be ascending.
if let Some(last_stop) = nums.last() {
if *last_stop >= num {
crash!(1, "tab sizes must be ascending");
}
}
// Append this tab stop to the list of all tabstops.
nums.push(num);
break;
}
}
}
}
// If no numbers could be parsed (for example, if `s` were "+,+,+"),
// then just use the default tabstops.
if nums.is_empty() {
nums = vec![DEFAULT_TABSTOP];
}
(remaining_mode, nums)
}
struct Options {
files: Vec<String>,
tabstops: Vec<usize>,
tspaces: String,
iflag: bool,
uflag: bool,
/// Strategy for expanding tabs for columns beyond those specified
/// in `tabstops`.
remaining_mode: RemainingMode,
}
impl Options {
fn new(matches: &ArgMatches) -> Self {
let (remaining_mode, tabstops) = match matches.value_of(options::TABS) {
Some(s) => tabstops_parse(s),
None => (RemainingMode::None, vec![DEFAULT_TABSTOP]),
};
let iflag = matches.is_present(options::INITIAL);
let uflag =!matches.is_present(options::NO_UTF8);
// avoid allocations when dumping out long sequences of spaces
// by precomputing the longest string of spaces we will ever need
let nspaces = tabstops
.iter()
.scan(0, |pr, &it| {
let ret = Some(it - *pr);
*pr = it;
ret
})
.max()
.unwrap(); // length of tabstops is guaranteed >= 1
let tspaces = " ".repeat(nspaces);
let files: Vec<String> = match matches.values_of(options::FILES) {
Some(s) => s.map(|v| v.to_string()).collect(),
None => vec!["-".to_owned()],
};
Self {
files,
tabstops,
tspaces,
iflag,
uflag,
remaining_mode,
}
}
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let matches = uu_app().get_matches_from(args);
expand(&Options::new(&matches)).map_err_context(|| "failed to write output".to_string())
}
pub fn uu_app<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(crate_version!())
.about(ABOUT)
.after_help(LONG_HELP)
.override_usage(format_usage(USAGE))
.setting(AppSettings::InferLongArgs)
.arg(
Arg::new(options::INITIAL)
.long(options::INITIAL)
.short('i')
.help("do not convert tabs after non blanks"),
)
.arg(
Arg::new(options::TABS)
.long(options::TABS)
.short('t')
.value_name("N, LIST")
.takes_value(true)
.help("have tabs N characters apart, not 8 or use comma separated list of explicit tab positions"),
)
.arg(
Arg::new(options::NO_UTF8)
.long(options::NO_UTF8)
.short('U')
.help("interpret input file as 8-bit ASCII rather than UTF-8"),
).arg(
Arg::new(options::FILES)
.multiple_occurrences(true)
.hide(true)
.takes_value(true)
)
}
fn open(path: &str) -> BufReader<Box<dyn Read +'static>> {
let file_buf;
if path == "-" {
BufReader::new(Box::new(stdin()) as Box<dyn Read>)
} else {
file_buf = match File::open(path) {
Ok(a) => a,
Err(e) => crash!(1, "{}: {}\n", path.maybe_quote(), e),
};
BufReader::new(Box::new(file_buf) as Box<dyn Read>)
}
}
/// Compute the number of spaces to the next tabstop.
///
/// `tabstops` is the sequence of tabstop locations.
///
/// `col` is the index of the current cursor in the line being written.
///
/// If `remaining_mode` is [`RemainingMode::Plus`], then the last entry
/// in the `tabstops` slice is interpreted as a relative number of
/// spaces, which this function will return for every input value of
/// `col` beyond the end of the second-to-last element of `tabstops`.
///
/// If `remaining_mode` is [`RemainingMode::Plus`], then the last entry
/// in the `tabstops` slice is interpreted as a relative number of
/// spaces, which this function will return for every input value of
/// `col` beyond the end of the second-to-last element of `tabstops`.
fn next_tabstop(tabstops: &[usize], col: usize, remaining_mode: &RemainingMode) -> usize {
let num_tabstops = tabstops.len();
match remaining_mode {
RemainingMode::Plus => match tabstops[0..num_tabstops - 1].iter().find(|&&t| t > col) {
Some(t) => t - col,
None => tabstops[num_tabstops - 1] - 1,
},
RemainingMode::Slash => match tabstops[0..num_tabstops - 1].iter().find(|&&t| t > col) {
Some(t) => t - col,
None => tabstops[num_tabstops - 1] - col % tabstops[num_tabstops - 1],
},
RemainingMode::None => {
if num_tabstops == 1 {
tabstops[0] - col % tabstops[0]
} else {
match tabstops.iter().find(|&&t| t > col) {
Some(t) => t - col,
None => 1,
}
}
}
}
}
#[derive(PartialEq, Eq, Debug)]
enum CharType {
Backspace,
Tab,
Other,
}
fn expand(options: &Options) -> std::io::Result<()>
|
let nbytes = char::from(buf[byte]).len_utf8();
if byte + nbytes > buf.len() {
// don't overrun buffer because of invalid UTF-8
(Other, 1, 1)
} else if let Ok(t) = from_utf8(&buf[byte..byte + nbytes]) {
match t.chars().next() {
Some('\t') => (Tab, 0, nbytes),
Some('\x08') => (Backspace, 0, nbytes),
Some(c) => (Other, UnicodeWidthChar::width(c).unwrap_or(0), nbytes),
None => {
// no valid char at start of t, so take 1 byte
(Other, 1, 1)
}
}
} else {
(Other, 1, 1) // implicit assumption: non-UTF-8 char is 1 col wide
}
} else {
(
match buf[byte] {
// always take exactly 1 byte in strict ASCII mode
0x09 => Tab,
0x08 => Backspace,
_ => Other,
},
1,
1,
)
};
// figure out how many columns this char takes up
match ctype {
Tab => {
// figure out how many spaces to the next tabstop
let nts = next_tabstop(ts, col, &options.remaining_mode);
col += nts;
// now dump out either spaces if we're expanding, or a literal tab if we're not
if init ||!options.iflag {
if nts <= options.tspaces.len() {
output.write_all(options.tspaces[..nts].as_bytes())?;
} else {
output.write_all(" ".repeat(nts).as_bytes())?;
};
} else {
output.write_all(&buf[byte..byte + nbytes])?;
}
}
_ => {
col = if ctype == Other {
col + cwidth
} else if col > 0 {
col - 1
} else {
0
};
// if we're writing anything other than a space, then we're
// done with the line's leading spaces
if buf[byte]!= 0x20 {
init = false;
}
output.write_all(&buf[byte..byte + nbytes])?;
}
}
byte += nbytes; // advance the pointer
}
output.flush()?;
buf.truncate(0); // clear the buffer
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::next_tabstop;
use super::RemainingMode;
#[test]
fn test_next_tabstop_remaining_mode_none() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::None), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::None), 2);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::None), 1);
}
#[test]
fn test_next_tabstop_remaining_mode_plus() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Plus), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Plus), 4);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Plus), 4);
}
#[test]
fn test_next_tabstop_remaining_mode_slash() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Slash), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Slash), 2);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Slash), 4);
}
}
|
{
use self::CharType::*;
let mut output = BufWriter::new(stdout());
let ts = options.tabstops.as_ref();
let mut buf = Vec::new();
for file in &options.files {
let mut fh = open(file);
while match fh.read_until(b'\n', &mut buf) {
Ok(s) => s > 0,
Err(_) => buf.is_empty(),
} {
let mut col = 0;
let mut byte = 0;
let mut init = true;
while byte < buf.len() {
let (ctype, cwidth, nbytes) = if options.uflag {
|
identifier_body
|
expand.rs
|
// This file is part of the uutils coreutils package.
//
// (c) Virgile Andreani <[email protected]>
// (c) kwantam <[email protected]>
// * 2015-04-28 ~ updated to work with both UTF-8 and non-UTF-8 encodings
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) ctype cwidth iflag nbytes nspaces nums tspaces uflag
#[macro_use]
extern crate uucore;
use clap::{crate_version, App, AppSettings, Arg, ArgMatches};
use std::fs::File;
use std::io::{stdin, stdout, BufRead, BufReader, BufWriter, Read, Write};
use std::str::from_utf8;
use unicode_width::UnicodeWidthChar;
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult};
use uucore::format_usage;
static ABOUT: &str = "Convert tabs in each FILE to spaces, writing to standard output.
With no FILE, or when FILE is -, read standard input.";
const USAGE: &str = "{} [OPTION]... [FILE]...";
pub mod options {
pub static TABS: &str = "tabs";
pub static INITIAL: &str = "initial";
pub static NO_UTF8: &str = "no-utf8";
pub static FILES: &str = "FILES";
}
static LONG_HELP: &str = "";
static DEFAULT_TABSTOP: usize = 8;
/// The mode to use when replacing tabs beyond the last one specified in
/// the `--tabs` argument.
enum RemainingMode {
None,
Slash,
Plus,
}
/// Decide whether the character is either a space or a comma.
///
/// # Examples
///
|
/// ```rust,ignore
/// assert!(is_space_or_comma(' '))
/// assert!(is_space_or_comma(','))
/// assert!(!is_space_or_comma('a'))
/// ```
fn is_space_or_comma(c: char) -> bool {
c =='' || c == ','
}
/// Parse a list of tabstops from a `--tabs` argument.
///
/// This function returns both the vector of numbers appearing in the
/// comma- or space-separated list, and also an optional mode, specified
/// by either a "/" or a "+" character appearing before the final number
/// in the list. This mode defines the strategy to use for computing the
/// number of spaces to use for columns beyond the end of the tab stop
/// list specified here.
fn tabstops_parse(s: &str) -> (RemainingMode, Vec<usize>) {
// Leading commas and spaces are ignored.
let s = s.trim_start_matches(is_space_or_comma);
// If there were only commas and spaces in the string, just use the
// default tabstops.
if s.is_empty() {
return (RemainingMode::None, vec![DEFAULT_TABSTOP]);
}
let mut nums = vec![];
let mut remaining_mode = RemainingMode::None;
for word in s.split(is_space_or_comma) {
let bytes = word.as_bytes();
for i in 0..bytes.len() {
match bytes[i] {
b'+' => {
remaining_mode = RemainingMode::Plus;
}
b'/' => {
remaining_mode = RemainingMode::Slash;
}
_ => {
// Parse a number from the byte sequence.
let num = from_utf8(&bytes[i..]).unwrap().parse::<usize>().unwrap();
// Tab size must be positive.
if num == 0 {
crash!(1, "{}\n", "tab size cannot be 0");
}
// Tab sizes must be ascending.
if let Some(last_stop) = nums.last() {
if *last_stop >= num {
crash!(1, "tab sizes must be ascending");
}
}
// Append this tab stop to the list of all tabstops.
nums.push(num);
break;
}
}
}
}
// If no numbers could be parsed (for example, if `s` were "+,+,+"),
// then just use the default tabstops.
if nums.is_empty() {
nums = vec![DEFAULT_TABSTOP];
}
(remaining_mode, nums)
}
struct Options {
files: Vec<String>,
tabstops: Vec<usize>,
tspaces: String,
iflag: bool,
uflag: bool,
/// Strategy for expanding tabs for columns beyond those specified
/// in `tabstops`.
remaining_mode: RemainingMode,
}
impl Options {
fn new(matches: &ArgMatches) -> Self {
let (remaining_mode, tabstops) = match matches.value_of(options::TABS) {
Some(s) => tabstops_parse(s),
None => (RemainingMode::None, vec![DEFAULT_TABSTOP]),
};
let iflag = matches.is_present(options::INITIAL);
let uflag =!matches.is_present(options::NO_UTF8);
// avoid allocations when dumping out long sequences of spaces
// by precomputing the longest string of spaces we will ever need
let nspaces = tabstops
.iter()
.scan(0, |pr, &it| {
let ret = Some(it - *pr);
*pr = it;
ret
})
.max()
.unwrap(); // length of tabstops is guaranteed >= 1
let tspaces = " ".repeat(nspaces);
let files: Vec<String> = match matches.values_of(options::FILES) {
Some(s) => s.map(|v| v.to_string()).collect(),
None => vec!["-".to_owned()],
};
Self {
files,
tabstops,
tspaces,
iflag,
uflag,
remaining_mode,
}
}
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let matches = uu_app().get_matches_from(args);
expand(&Options::new(&matches)).map_err_context(|| "failed to write output".to_string())
}
pub fn uu_app<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(crate_version!())
.about(ABOUT)
.after_help(LONG_HELP)
.override_usage(format_usage(USAGE))
.setting(AppSettings::InferLongArgs)
.arg(
Arg::new(options::INITIAL)
.long(options::INITIAL)
.short('i')
.help("do not convert tabs after non blanks"),
)
.arg(
Arg::new(options::TABS)
.long(options::TABS)
.short('t')
.value_name("N, LIST")
.takes_value(true)
.help("have tabs N characters apart, not 8 or use comma separated list of explicit tab positions"),
)
.arg(
Arg::new(options::NO_UTF8)
.long(options::NO_UTF8)
.short('U')
.help("interpret input file as 8-bit ASCII rather than UTF-8"),
).arg(
Arg::new(options::FILES)
.multiple_occurrences(true)
.hide(true)
.takes_value(true)
)
}
fn open(path: &str) -> BufReader<Box<dyn Read +'static>> {
let file_buf;
if path == "-" {
BufReader::new(Box::new(stdin()) as Box<dyn Read>)
} else {
file_buf = match File::open(path) {
Ok(a) => a,
Err(e) => crash!(1, "{}: {}\n", path.maybe_quote(), e),
};
BufReader::new(Box::new(file_buf) as Box<dyn Read>)
}
}
/// Compute the number of spaces to the next tabstop.
///
/// `tabstops` is the sequence of tabstop locations.
///
/// `col` is the index of the current cursor in the line being written.
///
/// If `remaining_mode` is [`RemainingMode::Plus`], then the last entry
/// in the `tabstops` slice is interpreted as a relative number of
/// spaces, which this function will return for every input value of
/// `col` beyond the end of the second-to-last element of `tabstops`.
///
/// If `remaining_mode` is [`RemainingMode::Plus`], then the last entry
/// in the `tabstops` slice is interpreted as a relative number of
/// spaces, which this function will return for every input value of
/// `col` beyond the end of the second-to-last element of `tabstops`.
fn next_tabstop(tabstops: &[usize], col: usize, remaining_mode: &RemainingMode) -> usize {
let num_tabstops = tabstops.len();
match remaining_mode {
RemainingMode::Plus => match tabstops[0..num_tabstops - 1].iter().find(|&&t| t > col) {
Some(t) => t - col,
None => tabstops[num_tabstops - 1] - 1,
},
RemainingMode::Slash => match tabstops[0..num_tabstops - 1].iter().find(|&&t| t > col) {
Some(t) => t - col,
None => tabstops[num_tabstops - 1] - col % tabstops[num_tabstops - 1],
},
RemainingMode::None => {
if num_tabstops == 1 {
tabstops[0] - col % tabstops[0]
} else {
match tabstops.iter().find(|&&t| t > col) {
Some(t) => t - col,
None => 1,
}
}
}
}
}
#[derive(PartialEq, Eq, Debug)]
enum CharType {
Backspace,
Tab,
Other,
}
fn expand(options: &Options) -> std::io::Result<()> {
use self::CharType::*;
let mut output = BufWriter::new(stdout());
let ts = options.tabstops.as_ref();
let mut buf = Vec::new();
for file in &options.files {
let mut fh = open(file);
while match fh.read_until(b'\n', &mut buf) {
Ok(s) => s > 0,
Err(_) => buf.is_empty(),
} {
let mut col = 0;
let mut byte = 0;
let mut init = true;
while byte < buf.len() {
let (ctype, cwidth, nbytes) = if options.uflag {
let nbytes = char::from(buf[byte]).len_utf8();
if byte + nbytes > buf.len() {
// don't overrun buffer because of invalid UTF-8
(Other, 1, 1)
} else if let Ok(t) = from_utf8(&buf[byte..byte + nbytes]) {
match t.chars().next() {
Some('\t') => (Tab, 0, nbytes),
Some('\x08') => (Backspace, 0, nbytes),
Some(c) => (Other, UnicodeWidthChar::width(c).unwrap_or(0), nbytes),
None => {
// no valid char at start of t, so take 1 byte
(Other, 1, 1)
}
}
} else {
(Other, 1, 1) // implicit assumption: non-UTF-8 char is 1 col wide
}
} else {
(
match buf[byte] {
// always take exactly 1 byte in strict ASCII mode
0x09 => Tab,
0x08 => Backspace,
_ => Other,
},
1,
1,
)
};
// figure out how many columns this char takes up
match ctype {
Tab => {
// figure out how many spaces to the next tabstop
let nts = next_tabstop(ts, col, &options.remaining_mode);
col += nts;
// now dump out either spaces if we're expanding, or a literal tab if we're not
if init ||!options.iflag {
if nts <= options.tspaces.len() {
output.write_all(options.tspaces[..nts].as_bytes())?;
} else {
output.write_all(" ".repeat(nts).as_bytes())?;
};
} else {
output.write_all(&buf[byte..byte + nbytes])?;
}
}
_ => {
col = if ctype == Other {
col + cwidth
} else if col > 0 {
col - 1
} else {
0
};
// if we're writing anything other than a space, then we're
// done with the line's leading spaces
if buf[byte]!= 0x20 {
init = false;
}
output.write_all(&buf[byte..byte + nbytes])?;
}
}
byte += nbytes; // advance the pointer
}
output.flush()?;
buf.truncate(0); // clear the buffer
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::next_tabstop;
use super::RemainingMode;
#[test]
fn test_next_tabstop_remaining_mode_none() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::None), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::None), 2);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::None), 1);
}
#[test]
fn test_next_tabstop_remaining_mode_plus() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Plus), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Plus), 4);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Plus), 4);
}
#[test]
fn test_next_tabstop_remaining_mode_slash() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Slash), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Slash), 2);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Slash), 4);
}
}
|
random_line_split
|
|
expand.rs
|
// This file is part of the uutils coreutils package.
//
// (c) Virgile Andreani <[email protected]>
// (c) kwantam <[email protected]>
// * 2015-04-28 ~ updated to work with both UTF-8 and non-UTF-8 encodings
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) ctype cwidth iflag nbytes nspaces nums tspaces uflag
#[macro_use]
extern crate uucore;
use clap::{crate_version, App, AppSettings, Arg, ArgMatches};
use std::fs::File;
use std::io::{stdin, stdout, BufRead, BufReader, BufWriter, Read, Write};
use std::str::from_utf8;
use unicode_width::UnicodeWidthChar;
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult};
use uucore::format_usage;
static ABOUT: &str = "Convert tabs in each FILE to spaces, writing to standard output.
With no FILE, or when FILE is -, read standard input.";
const USAGE: &str = "{} [OPTION]... [FILE]...";
pub mod options {
pub static TABS: &str = "tabs";
pub static INITIAL: &str = "initial";
pub static NO_UTF8: &str = "no-utf8";
pub static FILES: &str = "FILES";
}
static LONG_HELP: &str = "";
static DEFAULT_TABSTOP: usize = 8;
/// The mode to use when replacing tabs beyond the last one specified in
/// the `--tabs` argument.
enum RemainingMode {
None,
Slash,
Plus,
}
/// Decide whether the character is either a space or a comma.
///
/// # Examples
///
/// ```rust,ignore
/// assert!(is_space_or_comma(' '))
/// assert!(is_space_or_comma(','))
/// assert!(!is_space_or_comma('a'))
/// ```
fn is_space_or_comma(c: char) -> bool {
c =='' || c == ','
}
/// Parse a list of tabstops from a `--tabs` argument.
///
/// This function returns both the vector of numbers appearing in the
/// comma- or space-separated list, and also an optional mode, specified
/// by either a "/" or a "+" character appearing before the final number
/// in the list. This mode defines the strategy to use for computing the
/// number of spaces to use for columns beyond the end of the tab stop
/// list specified here.
fn tabstops_parse(s: &str) -> (RemainingMode, Vec<usize>) {
// Leading commas and spaces are ignored.
let s = s.trim_start_matches(is_space_or_comma);
// If there were only commas and spaces in the string, just use the
// default tabstops.
if s.is_empty() {
return (RemainingMode::None, vec![DEFAULT_TABSTOP]);
}
let mut nums = vec![];
let mut remaining_mode = RemainingMode::None;
for word in s.split(is_space_or_comma) {
let bytes = word.as_bytes();
for i in 0..bytes.len() {
match bytes[i] {
b'+' => {
remaining_mode = RemainingMode::Plus;
}
b'/' => {
remaining_mode = RemainingMode::Slash;
}
_ => {
// Parse a number from the byte sequence.
let num = from_utf8(&bytes[i..]).unwrap().parse::<usize>().unwrap();
// Tab size must be positive.
if num == 0 {
crash!(1, "{}\n", "tab size cannot be 0");
}
// Tab sizes must be ascending.
if let Some(last_stop) = nums.last() {
if *last_stop >= num {
crash!(1, "tab sizes must be ascending");
}
}
// Append this tab stop to the list of all tabstops.
nums.push(num);
break;
}
}
}
}
// If no numbers could be parsed (for example, if `s` were "+,+,+"),
// then just use the default tabstops.
if nums.is_empty() {
nums = vec![DEFAULT_TABSTOP];
}
(remaining_mode, nums)
}
struct Options {
files: Vec<String>,
tabstops: Vec<usize>,
tspaces: String,
iflag: bool,
uflag: bool,
/// Strategy for expanding tabs for columns beyond those specified
/// in `tabstops`.
remaining_mode: RemainingMode,
}
impl Options {
    /// Build an [`Options`] from parsed clap argument matches.
    fn new(matches: &ArgMatches) -> Self {
        // Parse `--tabs` if given, otherwise fall back to the single
        // default tab stop.
        let (remaining_mode, tabstops) = match matches.value_of(options::TABS) {
            Some(s) => tabstops_parse(s),
            None => (RemainingMode::None, vec![DEFAULT_TABSTOP]),
        };
        let iflag = matches.is_present(options::INITIAL);
        let uflag =!matches.is_present(options::NO_UTF8);
        // avoid allocations when dumping out long sequences of spaces
        // by precomputing the longest string of spaces we will ever need
        let nspaces = tabstops
            .iter()
            .scan(0, |pr, &it| {
                // Yield the gap between this tab stop and the previous one.
                let ret = Some(it - *pr);
                *pr = it;
                ret
            })
            .max()
            .unwrap(); // length of tabstops is guaranteed >= 1
        let tspaces = " ".repeat(nspaces);
        // No positional arguments means "read from standard input".
        let files: Vec<String> = match matches.values_of(options::FILES) {
            Some(s) => s.map(|v| v.to_string()).collect(),
            None => vec!["-".to_owned()],
        };
        Self {
            files,
            tabstops,
            tspaces,
            iflag,
            uflag,
            remaining_mode,
        }
    }
}
#[uucore::main]
/// Utility entry point: parse CLI arguments and expand tabs in every
/// input file, attaching context to any write failure.
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
    let matches = uu_app().get_matches_from(args);
    expand(&Options::new(&matches)).map_err_context(|| "failed to write output".to_string())
}
pub fn
|
<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(crate_version!())
.about(ABOUT)
.after_help(LONG_HELP)
.override_usage(format_usage(USAGE))
.setting(AppSettings::InferLongArgs)
.arg(
Arg::new(options::INITIAL)
.long(options::INITIAL)
.short('i')
.help("do not convert tabs after non blanks"),
)
.arg(
Arg::new(options::TABS)
.long(options::TABS)
.short('t')
.value_name("N, LIST")
.takes_value(true)
.help("have tabs N characters apart, not 8 or use comma separated list of explicit tab positions"),
)
.arg(
Arg::new(options::NO_UTF8)
.long(options::NO_UTF8)
.short('U')
.help("interpret input file as 8-bit ASCII rather than UTF-8"),
).arg(
Arg::new(options::FILES)
.multiple_occurrences(true)
.hide(true)
.takes_value(true)
)
}
/// Open `path` for buffered reading; "-" selects standard input.
///
/// Exits the process (via `crash!`) with a diagnostic if the file
/// cannot be opened.
fn open(path: &str) -> BufReader<Box<dyn Read +'static>> {
    let file_buf;
    if path == "-" {
        BufReader::new(Box::new(stdin()) as Box<dyn Read>)
    } else {
        file_buf = match File::open(path) {
            Ok(a) => a,
            Err(e) => crash!(1, "{}: {}\n", path.maybe_quote(), e),
        };
        BufReader::new(Box::new(file_buf) as Box<dyn Read>)
    }
}
/// Compute the number of spaces from `col` to the next tabstop.
///
/// `tabstops` is the sequence of tabstop locations and `col` is the
/// index of the current cursor in the line being written.
///
/// If `remaining_mode` is [`RemainingMode::Plus`], the last entry in
/// `tabstops` is treated as a relative increment: once `col` is past
/// the second-to-last explicit stop, that increment minus one is
/// returned for every column.
///
/// If `remaining_mode` is [`RemainingMode::Slash`], the last entry is
/// treated as the period of repeating tabstops used for all columns
/// beyond the second-to-last explicit stop.
///
/// With [`RemainingMode::None`], a single entry acts as a repeating
/// tab width; otherwise columns past the final explicit stop advance
/// one space at a time.
fn next_tabstop(tabstops: &[usize], col: usize, remaining_mode: &RemainingMode) -> usize {
    let last = tabstops[tabstops.len() - 1];
    // Distance from `col` to the first stop in `stops` strictly beyond
    // it, if any.
    let gap_to_next = |stops: &[usize]| stops.iter().find(|&&t| t > col).map(|&t| t - col);
    match remaining_mode {
        RemainingMode::Plus => {
            gap_to_next(&tabstops[..tabstops.len() - 1]).unwrap_or(last - 1)
        }
        RemainingMode::Slash => {
            gap_to_next(&tabstops[..tabstops.len() - 1]).unwrap_or_else(|| last - col % last)
        }
        RemainingMode::None => {
            if tabstops.len() == 1 {
                last - col % last
            } else {
                gap_to_next(tabstops).unwrap_or(1)
            }
        }
    }
}
/// Classification of one input character for column accounting.
#[derive(PartialEq, Eq, Debug)]
enum CharType {
    /// Backspace (0x08): moves the cursor one column to the left.
    Backspace,
    /// Tab (0x09): expanded to spaces up to the next tab stop.
    Tab,
    /// Any other character.
    Other,
}
/// Read each input file and write it to standard output with tabs
/// expanded according to `options`.
///
/// # Errors
///
/// Returns any I/O error raised while writing or flushing the output.
fn expand(options: &Options) -> std::io::Result<()> {
    use self::CharType::*;
    let mut output = BufWriter::new(stdout());
    let ts = options.tabstops.as_ref();
    let mut buf = Vec::new();
    for file in &options.files {
        let mut fh = open(file);
        // Read one newline-terminated chunk at a time.
        // NOTE(review): the `Err(_) => buf.is_empty()` arm keeps looping
        // after a read error only when nothing was read — confirm this
        // retry behavior is intentional.
        while match fh.read_until(b'\n', &mut buf) {
            Ok(s) => s > 0,
            Err(_) => buf.is_empty(),
        } {
            let mut col = 0; // current display column
            let mut byte = 0; // current byte offset into `buf`
            let mut init = true; // still within the line's leading blanks?
            while byte < buf.len() {
                // Classify the next character, yielding its type, its
                // display width in columns, and its encoded byte length.
                let (ctype, cwidth, nbytes) = if options.uflag {
                    let nbytes = char::from(buf[byte]).len_utf8();
                    if byte + nbytes > buf.len() {
                        // don't overrun buffer because of invalid UTF-8
                        (Other, 1, 1)
                    } else if let Ok(t) = from_utf8(&buf[byte..byte + nbytes]) {
                        match t.chars().next() {
                            Some('\t') => (Tab, 0, nbytes),
                            Some('\x08') => (Backspace, 0, nbytes),
                            Some(c) => (Other, UnicodeWidthChar::width(c).unwrap_or(0), nbytes),
                            None => {
                                // no valid char at start of t, so take 1 byte
                                (Other, 1, 1)
                            }
                        }
                    } else {
                        (Other, 1, 1) // implicit assumption: non-UTF-8 char is 1 col wide
                    }
                } else {
                    (
                        match buf[byte] {
                            // always take exactly 1 byte in strict ASCII mode
                            0x09 => Tab,
                            0x08 => Backspace,
                            _ => Other,
                        },
                        1,
                        1,
                    )
                };
                // figure out how many columns this char takes up
                match ctype {
                    Tab => {
                        // figure out how many spaces to the next tabstop
                        let nts = next_tabstop(ts, col, &options.remaining_mode);
                        col += nts;
                        // now dump out either spaces if we're expanding, or a literal tab if we're not
                        if init ||!options.iflag {
                            // Reuse the precomputed space string when it
                            // is long enough; otherwise allocate.
                            if nts <= options.tspaces.len() {
                                output.write_all(options.tspaces[..nts].as_bytes())?;
                            } else {
                                output.write_all(" ".repeat(nts).as_bytes())?;
                            };
                        } else {
                            output.write_all(&buf[byte..byte + nbytes])?;
                        }
                    }
                    _ => {
                        // Backspace moves the column left (never below
                        // zero); other characters advance by their
                        // display width.
                        col = if ctype == Other {
                            col + cwidth
                        } else if col > 0 {
                            col - 1
                        } else {
                            0
                        };
                        // if we're writing anything other than a space, then we're
                        // done with the line's leading spaces
                        if buf[byte]!= 0x20 {
                            init = false;
                        }
                        output.write_all(&buf[byte..byte + nbytes])?;
                    }
                }
                byte += nbytes; // advance the pointer
            }
            output.flush()?;
            buf.truncate(0); // clear the buffer
        }
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::next_tabstop;
    use super::RemainingMode;
    // Each test probes a column before the first stop, between stops,
    // and past the last explicit stop for one remaining-mode strategy.
    #[test]
    fn test_next_tabstop_remaining_mode_none() {
        assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::None), 1);
        assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::None), 2);
        assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::None), 1);
    }
    #[test]
    fn test_next_tabstop_remaining_mode_plus() {
        assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Plus), 1);
        assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Plus), 4);
        assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Plus), 4);
    }
    #[test]
    fn test_next_tabstop_remaining_mode_slash() {
        assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Slash), 1);
        assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Slash), 2);
        assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Slash), 4);
    }
}
|
uu_app
|
identifier_name
|
oestexturefloat.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use super::{constants as webgl, WebGLExtension, WebGLExtensionSpec, WebGLExtensions};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::{TexFormat, WebGLVersion};
use dom_struct::dom_struct;
#[dom_struct]
pub struct OESTextureFloat {
reflector_: Reflector,
}
impl OESTextureFloat {
fn new_inherited() -> OESTextureFloat {
Self {
reflector_: Reflector::new(),
}
}
}
impl WebGLExtension for OESTextureFloat {
type Extension = OESTextureFloat;
fn new(ctx: &WebGLRenderingContext) -> DomRoot<OESTextureFloat> {
reflect_dom_object(Box::new(OESTextureFloat::new_inherited()), &*ctx.global())
}
fn spec() -> WebGLExtensionSpec {
WebGLExtensionSpec::Specific(WebGLVersion::WebGL1)
}
fn is_supported(ext: &WebGLExtensions) -> bool
|
fn enable(ext: &WebGLExtensions) {
ext.enable_tex_type(webgl::FLOAT);
ext.add_effective_tex_internal_format(TexFormat::RGBA, webgl::FLOAT, TexFormat::RGBA32f);
ext.add_effective_tex_internal_format(TexFormat::RGB, webgl::FLOAT, TexFormat::RGB32f);
ext.add_effective_tex_internal_format(
TexFormat::Luminance,
webgl::FLOAT,
TexFormat::Luminance32f,
);
ext.add_effective_tex_internal_format(TexFormat::Alpha, webgl::FLOAT, TexFormat::Alpha32f);
ext.add_effective_tex_internal_format(
TexFormat::LuminanceAlpha,
webgl::FLOAT,
TexFormat::LuminanceAlpha32f,
);
}
fn name() -> &'static str {
"OES_texture_float"
}
}
|
{
ext.supports_any_gl_extension(&[
"GL_OES_texture_float",
"GL_ARB_texture_float",
"GL_EXT_color_buffer_float",
])
}
|
identifier_body
|
oestexturefloat.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use super::{constants as webgl, WebGLExtension, WebGLExtensionSpec, WebGLExtensions};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::{TexFormat, WebGLVersion};
use dom_struct::dom_struct;
#[dom_struct]
pub struct OESTextureFloat {
reflector_: Reflector,
}
impl OESTextureFloat {
fn new_inherited() -> OESTextureFloat {
Self {
reflector_: Reflector::new(),
}
}
}
impl WebGLExtension for OESTextureFloat {
type Extension = OESTextureFloat;
fn
|
(ctx: &WebGLRenderingContext) -> DomRoot<OESTextureFloat> {
reflect_dom_object(Box::new(OESTextureFloat::new_inherited()), &*ctx.global())
}
fn spec() -> WebGLExtensionSpec {
WebGLExtensionSpec::Specific(WebGLVersion::WebGL1)
}
fn is_supported(ext: &WebGLExtensions) -> bool {
ext.supports_any_gl_extension(&[
"GL_OES_texture_float",
"GL_ARB_texture_float",
"GL_EXT_color_buffer_float",
])
}
fn enable(ext: &WebGLExtensions) {
ext.enable_tex_type(webgl::FLOAT);
ext.add_effective_tex_internal_format(TexFormat::RGBA, webgl::FLOAT, TexFormat::RGBA32f);
ext.add_effective_tex_internal_format(TexFormat::RGB, webgl::FLOAT, TexFormat::RGB32f);
ext.add_effective_tex_internal_format(
TexFormat::Luminance,
webgl::FLOAT,
TexFormat::Luminance32f,
);
ext.add_effective_tex_internal_format(TexFormat::Alpha, webgl::FLOAT, TexFormat::Alpha32f);
ext.add_effective_tex_internal_format(
TexFormat::LuminanceAlpha,
webgl::FLOAT,
TexFormat::LuminanceAlpha32f,
);
}
fn name() -> &'static str {
"OES_texture_float"
}
}
|
new
|
identifier_name
|
oestexturefloat.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use super::{constants as webgl, WebGLExtension, WebGLExtensionSpec, WebGLExtensions};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::{TexFormat, WebGLVersion};
use dom_struct::dom_struct;
#[dom_struct]
pub struct OESTextureFloat {
|
}
impl OESTextureFloat {
fn new_inherited() -> OESTextureFloat {
Self {
reflector_: Reflector::new(),
}
}
}
impl WebGLExtension for OESTextureFloat {
type Extension = OESTextureFloat;
fn new(ctx: &WebGLRenderingContext) -> DomRoot<OESTextureFloat> {
reflect_dom_object(Box::new(OESTextureFloat::new_inherited()), &*ctx.global())
}
fn spec() -> WebGLExtensionSpec {
WebGLExtensionSpec::Specific(WebGLVersion::WebGL1)
}
fn is_supported(ext: &WebGLExtensions) -> bool {
ext.supports_any_gl_extension(&[
"GL_OES_texture_float",
"GL_ARB_texture_float",
"GL_EXT_color_buffer_float",
])
}
fn enable(ext: &WebGLExtensions) {
ext.enable_tex_type(webgl::FLOAT);
ext.add_effective_tex_internal_format(TexFormat::RGBA, webgl::FLOAT, TexFormat::RGBA32f);
ext.add_effective_tex_internal_format(TexFormat::RGB, webgl::FLOAT, TexFormat::RGB32f);
ext.add_effective_tex_internal_format(
TexFormat::Luminance,
webgl::FLOAT,
TexFormat::Luminance32f,
);
ext.add_effective_tex_internal_format(TexFormat::Alpha, webgl::FLOAT, TexFormat::Alpha32f);
ext.add_effective_tex_internal_format(
TexFormat::LuminanceAlpha,
webgl::FLOAT,
TexFormat::LuminanceAlpha32f,
);
}
fn name() -> &'static str {
"OES_texture_float"
}
}
|
reflector_: Reflector,
|
random_line_split
|
schema.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! This module provides traits that define the behavior of a schema and its associated key and
//! value types, along with helpers to define a new schema with ease.
use crate::ColumnFamilyName;
use anyhow::Result;
use std::fmt::Debug;
/// Macro for defining a SchemaDB schema.
///
/// `define_schema!` allows a schema to be defined in the following syntax:
/// ```
/// use anyhow::Result;
/// use schemadb::{
/// define_schema,
/// schema::{KeyCodec, SeekKeyCodec, ValueCodec},
/// };
///
/// // Define key type and value type for a schema with derived traits (Clone, Debug, Eq, PartialEq)
/// #[derive(Clone, Debug, Eq, PartialEq)]
/// pub struct Key;
/// #[derive(Clone, Debug, Eq, PartialEq)]
/// pub struct Value;
///
/// // Implement KeyCodec/ValueCodec traits for key and value types
/// impl KeyCodec<ExampleSchema> for Key {
/// fn encode_key(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
///
/// fn decode_key(data: &[u8]) -> Result<Self> {
/// Ok(Key)
/// }
/// }
///
/// impl ValueCodec<ExampleSchema> for Value {
/// fn encode_value(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
///
|
/// Ok(Value)
/// }
/// }
///
/// // And finally define a schema type and associate it with key and value types, as well as the
/// // column family name, by generating code that implements the `Schema` trait for the type.
/// define_schema!(ExampleSchema, Key, Value, "exmaple_cf_name");
///
/// // SeekKeyCodec is automatically implemented for KeyCodec,
/// // so you can seek an iterator with the Key type:
/// // iter.seek(&Key);
///
/// // Or if seek-by-prefix is desired, you can implement your own SeekKey
/// #[derive(Clone, Eq, PartialEq, Debug)]
/// pub struct PrefixSeekKey;
///
/// impl SeekKeyCodec<ExampleSchema> for PrefixSeekKey {
/// fn encode_seek_key(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
/// }
/// // and seek like this:
/// // iter.seek(&PrefixSeekKey);
/// ```
#[macro_export]
macro_rules! define_schema {
($schema_type: ident, $key_type: ty, $value_type: ty, $cf_name: expr) => {
pub(crate) struct $schema_type;
impl $crate::schema::Schema for $schema_type {
const COLUMN_FAMILY_NAME: $crate::ColumnFamilyName = $cf_name;
type Key = $key_type;
type Value = $value_type;
}
};
}
/// This trait defines a type that can serve as a [`Schema::Key`].
pub trait KeyCodec<S: Schema +?Sized>: Sized + PartialEq + Debug {
/// Converts `self` to bytes to be stored in DB.
fn encode_key(&self) -> Result<Vec<u8>>;
/// Converts bytes fetched from DB to `Self`.
fn decode_key(data: &[u8]) -> Result<Self>;
}
/// This trait defines a type that can serve as a [`Schema::Value`].
pub trait ValueCodec<S: Schema +?Sized>: Sized + PartialEq + Debug {
/// Converts `self` to bytes to be stored in DB.
fn encode_value(&self) -> Result<Vec<u8>>;
/// Converts bytes fetched from DB to `Self`.
fn decode_value(data: &[u8]) -> Result<Self>;
}
/// This defines a type that can be used to seek a [`SchemaIterator`](crate::SchemaIterator), via
/// interfaces like [`seek`](crate::SchemaIterator::seek).
pub trait SeekKeyCodec<S: Schema +?Sized>: Sized {
/// Converts `self` to bytes which is used to seek the underlying raw iterator.
fn encode_seek_key(&self) -> Result<Vec<u8>>;
}
/// All keys can automatically be used as seek keys.
impl<S, K> SeekKeyCodec<S> for K
where
S: Schema,
K: KeyCodec<S>,
{
/// Delegates to [`KeyCodec::encode_key`].
fn encode_seek_key(&self) -> Result<Vec<u8>> {
<K as KeyCodec<S>>::encode_key(&self)
}
}
/// This trait defines a schema: an association of a column family name, the key type and the value
/// type.
pub trait Schema {
/// The column family name associated with this struct.
/// Note: all schemas within the same SchemaDB must have distinct column family names.
const COLUMN_FAMILY_NAME: ColumnFamilyName;
/// Type of the key.
type Key: KeyCodec<Self>;
/// Type of the value.
type Value: ValueCodec<Self>;
}
/// Helper used in tests to assert a (key, value) pair for a certain [`Schema`] is able to convert
/// to bytes and convert back.
///
/// Panics (via `expect` / `assert_eq!`) if encoding or decoding fails,
/// or if the round-tripped key or value differs from the original.
pub fn assert_encode_decode<S: Schema>(key: &S::Key, value: &S::Value) {
    {
        // Round-trip the key through its byte encoding.
        let encoded = key.encode_key().expect("Encoding key should work.");
        let decoded = S::Key::decode_key(&encoded).expect("Decoding key should work.");
        assert_eq!(*key, decoded);
    }
    {
        // Round-trip the value through its byte encoding.
        let encoded = value.encode_value().expect("Encoding value should work.");
        let decoded = S::Value::decode_value(&encoded).expect("Decoding value should work.");
        assert_eq!(*value, decoded);
    }
}
|
/// fn decode_value(data: &[u8]) -> Result<Self> {
|
random_line_split
|
schema.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! This module provides traits that define the behavior of a schema and its associated key and
//! value types, along with helpers to define a new schema with ease.
use crate::ColumnFamilyName;
use anyhow::Result;
use std::fmt::Debug;
/// Macro for defining a SchemaDB schema.
///
/// `define_schema!` allows a schema to be defined in the following syntax:
/// ```
/// use anyhow::Result;
/// use schemadb::{
/// define_schema,
/// schema::{KeyCodec, SeekKeyCodec, ValueCodec},
/// };
///
/// // Define key type and value type for a schema with derived traits (Clone, Debug, Eq, PartialEq)
/// #[derive(Clone, Debug, Eq, PartialEq)]
/// pub struct Key;
/// #[derive(Clone, Debug, Eq, PartialEq)]
/// pub struct Value;
///
/// // Implement KeyCodec/ValueCodec traits for key and value types
/// impl KeyCodec<ExampleSchema> for Key {
/// fn encode_key(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
///
/// fn decode_key(data: &[u8]) -> Result<Self> {
/// Ok(Key)
/// }
/// }
///
/// impl ValueCodec<ExampleSchema> for Value {
/// fn encode_value(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
///
/// fn decode_value(data: &[u8]) -> Result<Self> {
/// Ok(Value)
/// }
/// }
///
/// // And finally define a schema type and associate it with key and value types, as well as the
/// // column family name, by generating code that implements the `Schema` trait for the type.
/// define_schema!(ExampleSchema, Key, Value, "exmaple_cf_name");
///
/// // SeekKeyCodec is automatically implemented for KeyCodec,
/// // so you can seek an iterator with the Key type:
/// // iter.seek(&Key);
///
/// // Or if seek-by-prefix is desired, you can implement your own SeekKey
/// #[derive(Clone, Eq, PartialEq, Debug)]
/// pub struct PrefixSeekKey;
///
/// impl SeekKeyCodec<ExampleSchema> for PrefixSeekKey {
/// fn encode_seek_key(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
/// }
/// // and seek like this:
/// // iter.seek(&PrefixSeekKey);
/// ```
#[macro_export]
macro_rules! define_schema {
($schema_type: ident, $key_type: ty, $value_type: ty, $cf_name: expr) => {
pub(crate) struct $schema_type;
impl $crate::schema::Schema for $schema_type {
const COLUMN_FAMILY_NAME: $crate::ColumnFamilyName = $cf_name;
type Key = $key_type;
type Value = $value_type;
}
};
}
/// This trait defines a type that can serve as a [`Schema::Key`].
pub trait KeyCodec<S: Schema +?Sized>: Sized + PartialEq + Debug {
/// Converts `self` to bytes to be stored in DB.
fn encode_key(&self) -> Result<Vec<u8>>;
/// Converts bytes fetched from DB to `Self`.
fn decode_key(data: &[u8]) -> Result<Self>;
}
/// This trait defines a type that can serve as a [`Schema::Value`].
pub trait ValueCodec<S: Schema +?Sized>: Sized + PartialEq + Debug {
/// Converts `self` to bytes to be stored in DB.
fn encode_value(&self) -> Result<Vec<u8>>;
/// Converts bytes fetched from DB to `Self`.
fn decode_value(data: &[u8]) -> Result<Self>;
}
/// This defines a type that can be used to seek a [`SchemaIterator`](crate::SchemaIterator), via
/// interfaces like [`seek`](crate::SchemaIterator::seek).
pub trait SeekKeyCodec<S: Schema +?Sized>: Sized {
/// Converts `self` to bytes which is used to seek the underlying raw iterator.
fn encode_seek_key(&self) -> Result<Vec<u8>>;
}
/// All keys can automatically be used as seek keys.
impl<S, K> SeekKeyCodec<S> for K
where
S: Schema,
K: KeyCodec<S>,
{
/// Delegates to [`KeyCodec::encode_key`].
fn encode_seek_key(&self) -> Result<Vec<u8>> {
<K as KeyCodec<S>>::encode_key(&self)
}
}
/// This trait defines a schema: an association of a column family name, the key type and the value
/// type.
pub trait Schema {
/// The column family name associated with this struct.
/// Note: all schemas within the same SchemaDB must have distinct column family names.
const COLUMN_FAMILY_NAME: ColumnFamilyName;
/// Type of the key.
type Key: KeyCodec<Self>;
/// Type of the value.
type Value: ValueCodec<Self>;
}
/// Helper used in tests to assert a (key, value) pair for a certain [`Schema`] is able to convert
/// to bytes and convert back.
pub fn assert_encode_decode<S: Schema>(key: &S::Key, value: &S::Value)
|
{
{
let encoded = key.encode_key().expect("Encoding key should work.");
let decoded = S::Key::decode_key(&encoded).expect("Decoding key should work.");
assert_eq!(*key, decoded);
}
{
let encoded = value.encode_value().expect("Encoding value should work.");
let decoded = S::Value::decode_value(&encoded).expect("Decoding value should work.");
assert_eq!(*value, decoded);
}
}
|
identifier_body
|
|
schema.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! This module provides traits that define the behavior of a schema and its associated key and
//! value types, along with helpers to define a new schema with ease.
use crate::ColumnFamilyName;
use anyhow::Result;
use std::fmt::Debug;
/// Macro for defining a SchemaDB schema.
///
/// `define_schema!` allows a schema to be defined in the following syntax:
/// ```
/// use anyhow::Result;
/// use schemadb::{
/// define_schema,
/// schema::{KeyCodec, SeekKeyCodec, ValueCodec},
/// };
///
/// // Define key type and value type for a schema with derived traits (Clone, Debug, Eq, PartialEq)
/// #[derive(Clone, Debug, Eq, PartialEq)]
/// pub struct Key;
/// #[derive(Clone, Debug, Eq, PartialEq)]
/// pub struct Value;
///
/// // Implement KeyCodec/ValueCodec traits for key and value types
/// impl KeyCodec<ExampleSchema> for Key {
/// fn encode_key(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
///
/// fn decode_key(data: &[u8]) -> Result<Self> {
/// Ok(Key)
/// }
/// }
///
/// impl ValueCodec<ExampleSchema> for Value {
/// fn encode_value(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
///
/// fn decode_value(data: &[u8]) -> Result<Self> {
/// Ok(Value)
/// }
/// }
///
/// // And finally define a schema type and associate it with key and value types, as well as the
/// // column family name, by generating code that implements the `Schema` trait for the type.
/// define_schema!(ExampleSchema, Key, Value, "exmaple_cf_name");
///
/// // SeekKeyCodec is automatically implemented for KeyCodec,
/// // so you can seek an iterator with the Key type:
/// // iter.seek(&Key);
///
/// // Or if seek-by-prefix is desired, you can implement your own SeekKey
/// #[derive(Clone, Eq, PartialEq, Debug)]
/// pub struct PrefixSeekKey;
///
/// impl SeekKeyCodec<ExampleSchema> for PrefixSeekKey {
/// fn encode_seek_key(&self) -> Result<Vec<u8>> {
/// Ok(vec![])
/// }
/// }
/// // and seek like this:
/// // iter.seek(&PrefixSeekKey);
/// ```
#[macro_export]
macro_rules! define_schema {
($schema_type: ident, $key_type: ty, $value_type: ty, $cf_name: expr) => {
pub(crate) struct $schema_type;
impl $crate::schema::Schema for $schema_type {
const COLUMN_FAMILY_NAME: $crate::ColumnFamilyName = $cf_name;
type Key = $key_type;
type Value = $value_type;
}
};
}
/// This trait defines a type that can serve as a [`Schema::Key`].
pub trait KeyCodec<S: Schema +?Sized>: Sized + PartialEq + Debug {
/// Converts `self` to bytes to be stored in DB.
fn encode_key(&self) -> Result<Vec<u8>>;
/// Converts bytes fetched from DB to `Self`.
fn decode_key(data: &[u8]) -> Result<Self>;
}
/// This trait defines a type that can serve as a [`Schema::Value`].
pub trait ValueCodec<S: Schema +?Sized>: Sized + PartialEq + Debug {
/// Converts `self` to bytes to be stored in DB.
fn encode_value(&self) -> Result<Vec<u8>>;
/// Converts bytes fetched from DB to `Self`.
fn decode_value(data: &[u8]) -> Result<Self>;
}
/// This defines a type that can be used to seek a [`SchemaIterator`](crate::SchemaIterator), via
/// interfaces like [`seek`](crate::SchemaIterator::seek).
pub trait SeekKeyCodec<S: Schema +?Sized>: Sized {
/// Converts `self` to bytes which is used to seek the underlying raw iterator.
fn encode_seek_key(&self) -> Result<Vec<u8>>;
}
/// All keys can automatically be used as seek keys.
impl<S, K> SeekKeyCodec<S> for K
where
S: Schema,
K: KeyCodec<S>,
{
/// Delegates to [`KeyCodec::encode_key`].
fn encode_seek_key(&self) -> Result<Vec<u8>> {
<K as KeyCodec<S>>::encode_key(&self)
}
}
/// This trait defines a schema: an association of a column family name, the key type and the value
/// type.
pub trait Schema {
/// The column family name associated with this struct.
/// Note: all schemas within the same SchemaDB must have distinct column family names.
const COLUMN_FAMILY_NAME: ColumnFamilyName;
/// Type of the key.
type Key: KeyCodec<Self>;
/// Type of the value.
type Value: ValueCodec<Self>;
}
/// Helper used in tests to assert a (key, value) pair for a certain [`Schema`] is able to convert
/// to bytes and convert back.
pub fn
|
<S: Schema>(key: &S::Key, value: &S::Value) {
{
let encoded = key.encode_key().expect("Encoding key should work.");
let decoded = S::Key::decode_key(&encoded).expect("Decoding key should work.");
assert_eq!(*key, decoded);
}
{
let encoded = value.encode_value().expect("Encoding value should work.");
let decoded = S::Value::decode_value(&encoded).expect("Decoding value should work.");
assert_eq!(*value, decoded);
}
}
|
assert_encode_decode
|
identifier_name
|
uninstall.rs
|
use cargo::ops;
use cargo::util::{CliResult, Config};
#[derive(RustcDecodable)]
pub struct Options {
flag_bin: Vec<String>,
flag_root: Option<String>,
flag_verbose: u32,
flag_quiet: Option<bool>,
flag_color: Option<String>,
flag_frozen: bool,
flag_locked: bool,
arg_spec: String,
}
pub const USAGE: &'static str = "
Remove a Rust binary
Usage:
cargo uninstall [options] <spec>
cargo uninstall (-h | --help)
Options:
-h, --help Print this message
--root DIR Directory to uninstall packages from
--bin NAME Only uninstall the binary NAME
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
-q, --quiet Less output printed to stdout
--color WHEN Coloring: auto, always, never
--frozen Require Cargo.lock and cache are up to date
--locked Require Cargo.lock is up to date
The argument SPEC is a package id specification (see `cargo help pkgid`) to
specify which crate should be uninstalled. By default all binaries are
uninstalled for a crate but the `--bin` and `--example` flags can be used to
only uninstall particular binaries.
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
config.configure(options.flag_verbose,
options.flag_quiet,
&options.flag_color,
options.flag_frozen,
options.flag_locked)?;
|
Ok(None)
}
|
let root = options.flag_root.as_ref().map(|s| &s[..]);
ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)?;
|
random_line_split
|
uninstall.rs
|
use cargo::ops;
use cargo::util::{CliResult, Config};
#[derive(RustcDecodable)]
pub struct Options {
flag_bin: Vec<String>,
flag_root: Option<String>,
flag_verbose: u32,
flag_quiet: Option<bool>,
flag_color: Option<String>,
flag_frozen: bool,
flag_locked: bool,
arg_spec: String,
}
pub const USAGE: &'static str = "
Remove a Rust binary
Usage:
cargo uninstall [options] <spec>
cargo uninstall (-h | --help)
Options:
-h, --help Print this message
--root DIR Directory to uninstall packages from
--bin NAME Only uninstall the binary NAME
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
-q, --quiet Less output printed to stdout
--color WHEN Coloring: auto, always, never
--frozen Require Cargo.lock and cache are up to date
--locked Require Cargo.lock is up to date
The argument SPEC is a package id specification (see `cargo help pkgid`) to
specify which crate should be uninstalled. By default all binaries are
uninstalled for a crate but the `--bin` and `--example` flags can be used to
only uninstall particular binaries.
";
pub fn
|
(options: Options, config: &Config) -> CliResult<Option<()>> {
config.configure(options.flag_verbose,
options.flag_quiet,
&options.flag_color,
options.flag_frozen,
options.flag_locked)?;
let root = options.flag_root.as_ref().map(|s| &s[..]);
ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)?;
Ok(None)
}
|
execute
|
identifier_name
|
string.rs
|
use {ADDRESS_PATTERN, AddressLengthType};
use patterns::{Pattern, Patterns, parse_patterns};
use std::borrow::Borrow;
use std::sync::{Arc, Mutex};
use clap::ArgMatches;
use generic_array::GenericArray;
use termcolor::BufferWriter;
use rayon::prelude::*;
impl Pattern for String {
    /// A string pattern matches any address that starts with it.
    fn matches(&self, string: &str) -> bool {
        string.starts_with(self)
    }
    /// Lowercase the input and validate it against `ADDRESS_PATTERN`,
    /// rejecting patterns containing characters that cannot occur in
    /// an address.
    fn parse<T: AsRef<str>>(string: T) -> Result<Self, String> {
        let string = string.as_ref().to_lowercase();
        if!ADDRESS_PATTERN.is_match(&string) {
            return Err("Pattern contains invalid characters".to_string());
        }
        return Ok(string);
    }
}
pub struct StringPatterns {
// Strings of length `n` are in the `n-1`th index of this array
sorted_vecs: GenericArray<Option<Vec<String>>, AddressLengthType>,
}
|
impl StringPatterns {
pub fn new(buffer_writer: Arc<Mutex<BufferWriter>>, matches: &ArgMatches) -> StringPatterns {
let patterns = parse_patterns::<String>(buffer_writer, matches);
let patterns_by_len: Arc<GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType>> = Arc::new(arr![Mutex<Option<Vec<String>>>; Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None)]);
patterns.par_iter()
.for_each(|pattern| {
let patterns_by_len_borrowed: &GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType> = patterns_by_len.borrow();
let mut vec = patterns_by_len_borrowed[pattern.len() - 1].lock().expect("Something panicked somewhere, oops. Please report this incident to the author.");
let vec = vec.get_or_insert_with(Vec::new);
vec.push(pattern.clone());
});
let patterns_by_len_borrowed: GenericArray<Mutex<Option<Vec<String>>>,
AddressLengthType> =
Arc::try_unwrap(patterns_by_len)
.unwrap_or_else(|_| panic!("Couldn't unwrap petterns."));
let sorted_vecs = patterns_by_len_borrowed.map(|item| {
let item: Option<Vec<String>> = item.into_inner().unwrap();
item.map(|mut vec| {
vec.sort();
vec.dedup();
vec
})
});
StringPatterns { sorted_vecs }
}
}
impl Patterns for StringPatterns {
fn contains(&self, address: &String) -> bool {
// Try match from shortest to longest patterns
for (index, option_vec) in self.sorted_vecs.iter().enumerate() {
if let &Some(ref vec) = option_vec {
let pattern_len = index + 1;
let target_address_slice = &address[0..pattern_len];
if vec.binary_search_by(|item| item.as_str().cmp(target_address_slice))
.is_ok() {
return true;
}
}
}
return false;
}
fn len(&self) -> usize {
self.sorted_vecs
.par_iter()
.filter(|opt| opt.is_some())
.map(|opt| opt.as_ref().unwrap().len())
.sum()
}
}
|
random_line_split
|
|
string.rs
|
use {ADDRESS_PATTERN, AddressLengthType};
use patterns::{Pattern, Patterns, parse_patterns};
use std::borrow::Borrow;
use std::sync::{Arc, Mutex};
use clap::ArgMatches;
use generic_array::GenericArray;
use termcolor::BufferWriter;
use rayon::prelude::*;
impl Pattern for String {
fn matches(&self, string: &str) -> bool {
string.starts_with(self)
}
fn parse<T: AsRef<str>>(string: T) -> Result<Self, String> {
let string = string.as_ref().to_lowercase();
if!ADDRESS_PATTERN.is_match(&string) {
return Err("Pattern contains invalid characters".to_string());
}
return Ok(string);
}
}
pub struct StringPatterns {
// Strings of length `n` are in the `n-1`th index of this array
sorted_vecs: GenericArray<Option<Vec<String>>, AddressLengthType>,
}
impl StringPatterns {
pub fn new(buffer_writer: Arc<Mutex<BufferWriter>>, matches: &ArgMatches) -> StringPatterns
|
item.map(|mut vec| {
vec.sort();
vec.dedup();
vec
})
});
StringPatterns { sorted_vecs }
}
}
impl Patterns for StringPatterns {
fn contains(&self, address: &String) -> bool {
// Try match from shortest to longest patterns
for (index, option_vec) in self.sorted_vecs.iter().enumerate() {
if let &Some(ref vec) = option_vec {
let pattern_len = index + 1;
let target_address_slice = &address[0..pattern_len];
if vec.binary_search_by(|item| item.as_str().cmp(target_address_slice))
.is_ok() {
return true;
}
}
}
return false;
}
fn len(&self) -> usize {
self.sorted_vecs
.par_iter()
.filter(|opt| opt.is_some())
.map(|opt| opt.as_ref().unwrap().len())
.sum()
}
}
|
{
let patterns = parse_patterns::<String>(buffer_writer, matches);
let patterns_by_len: Arc<GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType>> = Arc::new(arr![Mutex<Option<Vec<String>>>; Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None)]);
patterns.par_iter()
.for_each(|pattern| {
let patterns_by_len_borrowed: &GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType> = patterns_by_len.borrow();
let mut vec = patterns_by_len_borrowed[pattern.len() - 1].lock().expect("Something panicked somewhere, oops. Please report this incident to the author.");
let vec = vec.get_or_insert_with(Vec::new);
vec.push(pattern.clone());
});
let patterns_by_len_borrowed: GenericArray<Mutex<Option<Vec<String>>>,
AddressLengthType> =
Arc::try_unwrap(patterns_by_len)
.unwrap_or_else(|_| panic!("Couldn't unwrap petterns."));
let sorted_vecs = patterns_by_len_borrowed.map(|item| {
let item: Option<Vec<String>> = item.into_inner().unwrap();
|
identifier_body
|
string.rs
|
use {ADDRESS_PATTERN, AddressLengthType};
use patterns::{Pattern, Patterns, parse_patterns};
use std::borrow::Borrow;
use std::sync::{Arc, Mutex};
use clap::ArgMatches;
use generic_array::GenericArray;
use termcolor::BufferWriter;
use rayon::prelude::*;
impl Pattern for String {
fn matches(&self, string: &str) -> bool {
string.starts_with(self)
}
fn parse<T: AsRef<str>>(string: T) -> Result<Self, String> {
let string = string.as_ref().to_lowercase();
if!ADDRESS_PATTERN.is_match(&string) {
return Err("Pattern contains invalid characters".to_string());
}
return Ok(string);
}
}
pub struct StringPatterns {
// Strings of length `n` are in the `n-1`th index of this array
sorted_vecs: GenericArray<Option<Vec<String>>, AddressLengthType>,
}
impl StringPatterns {
pub fn
|
(buffer_writer: Arc<Mutex<BufferWriter>>, matches: &ArgMatches) -> StringPatterns {
let patterns = parse_patterns::<String>(buffer_writer, matches);
let patterns_by_len: Arc<GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType>> = Arc::new(arr![Mutex<Option<Vec<String>>>; Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None)]);
patterns.par_iter()
.for_each(|pattern| {
let patterns_by_len_borrowed: &GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType> = patterns_by_len.borrow();
let mut vec = patterns_by_len_borrowed[pattern.len() - 1].lock().expect("Something panicked somewhere, oops. Please report this incident to the author.");
let vec = vec.get_or_insert_with(Vec::new);
vec.push(pattern.clone());
});
let patterns_by_len_borrowed: GenericArray<Mutex<Option<Vec<String>>>,
AddressLengthType> =
Arc::try_unwrap(patterns_by_len)
.unwrap_or_else(|_| panic!("Couldn't unwrap petterns."));
let sorted_vecs = patterns_by_len_borrowed.map(|item| {
let item: Option<Vec<String>> = item.into_inner().unwrap();
item.map(|mut vec| {
vec.sort();
vec.dedup();
vec
})
});
StringPatterns { sorted_vecs }
}
}
impl Patterns for StringPatterns {
fn contains(&self, address: &String) -> bool {
// Try match from shortest to longest patterns
for (index, option_vec) in self.sorted_vecs.iter().enumerate() {
if let &Some(ref vec) = option_vec {
let pattern_len = index + 1;
let target_address_slice = &address[0..pattern_len];
if vec.binary_search_by(|item| item.as_str().cmp(target_address_slice))
.is_ok() {
return true;
}
}
}
return false;
}
fn len(&self) -> usize {
self.sorted_vecs
.par_iter()
.filter(|opt| opt.is_some())
.map(|opt| opt.as_ref().unwrap().len())
.sum()
}
}
|
new
|
identifier_name
|
string.rs
|
use {ADDRESS_PATTERN, AddressLengthType};
use patterns::{Pattern, Patterns, parse_patterns};
use std::borrow::Borrow;
use std::sync::{Arc, Mutex};
use clap::ArgMatches;
use generic_array::GenericArray;
use termcolor::BufferWriter;
use rayon::prelude::*;
impl Pattern for String {
fn matches(&self, string: &str) -> bool {
string.starts_with(self)
}
fn parse<T: AsRef<str>>(string: T) -> Result<Self, String> {
let string = string.as_ref().to_lowercase();
if!ADDRESS_PATTERN.is_match(&string) {
return Err("Pattern contains invalid characters".to_string());
}
return Ok(string);
}
}
pub struct StringPatterns {
// Strings of length `n` are in the `n-1`th index of this array
sorted_vecs: GenericArray<Option<Vec<String>>, AddressLengthType>,
}
impl StringPatterns {
pub fn new(buffer_writer: Arc<Mutex<BufferWriter>>, matches: &ArgMatches) -> StringPatterns {
let patterns = parse_patterns::<String>(buffer_writer, matches);
let patterns_by_len: Arc<GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType>> = Arc::new(arr![Mutex<Option<Vec<String>>>; Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None), Mutex::new(None)]);
patterns.par_iter()
.for_each(|pattern| {
let patterns_by_len_borrowed: &GenericArray<Mutex<Option<Vec<String>>>, AddressLengthType> = patterns_by_len.borrow();
let mut vec = patterns_by_len_borrowed[pattern.len() - 1].lock().expect("Something panicked somewhere, oops. Please report this incident to the author.");
let vec = vec.get_or_insert_with(Vec::new);
vec.push(pattern.clone());
});
let patterns_by_len_borrowed: GenericArray<Mutex<Option<Vec<String>>>,
AddressLengthType> =
Arc::try_unwrap(patterns_by_len)
.unwrap_or_else(|_| panic!("Couldn't unwrap petterns."));
let sorted_vecs = patterns_by_len_borrowed.map(|item| {
let item: Option<Vec<String>> = item.into_inner().unwrap();
item.map(|mut vec| {
vec.sort();
vec.dedup();
vec
})
});
StringPatterns { sorted_vecs }
}
}
impl Patterns for StringPatterns {
fn contains(&self, address: &String) -> bool {
// Try match from shortest to longest patterns
for (index, option_vec) in self.sorted_vecs.iter().enumerate() {
if let &Some(ref vec) = option_vec
|
}
return false;
}
fn len(&self) -> usize {
self.sorted_vecs
.par_iter()
.filter(|opt| opt.is_some())
.map(|opt| opt.as_ref().unwrap().len())
.sum()
}
}
|
{
let pattern_len = index + 1;
let target_address_slice = &address[0..pattern_len];
if vec.binary_search_by(|item| item.as_str().cmp(target_address_slice))
.is_ok() {
return true;
}
}
|
conditional_block
|
stylesheets.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::iter::Iterator;
use std::ascii::StrAsciiExt;
use extra::url::Url;
use encoding::EncodingRef;
use cssparser::{decode_stylesheet_bytes, tokenize, parse_stylesheet_rules, ToCss};
use cssparser::ast::*;
use selectors;
use properties;
use errors::{ErrorLoggerIterator, log_css_error};
use namespaces::{NamespaceMap, parse_namespace_rule};
use media_queries::{MediaRule, parse_media_rule};
use media_queries;
pub struct Stylesheet {
/// List of rules in the order they were found (important for
/// cascading order)
rules: ~[CSSRule],
namespaces: NamespaceMap,
encoding: EncodingRef,
base_url: Url,
}
pub enum CSSRule {
CSSStyleRule(StyleRule),
CSSMediaRule(MediaRule),
}
pub struct StyleRule {
selectors: ~[selectors::Selector],
declarations: properties::PropertyDeclarationBlock,
}
impl Stylesheet {
pub fn from_bytes_iter<I: Iterator<~[u8]>>(
mut input: I, base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>) -> Stylesheet {
let mut bytes = ~[];
// TODO: incremental decoding and tokinization/parsing
for chunk in input {
bytes.push_all(chunk)
}
Stylesheet::from_bytes(bytes, base_url, protocol_encoding_label, environment_encoding)
}
pub fn from_bytes(
bytes: &[u8], base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>) -> Stylesheet {
let (string, used_encoding) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Stylesheet::from_str(string, base_url, used_encoding)
}
pub fn from_str(css: &str, base_url: Url, encoding: EncodingRef) -> Stylesheet {
static STATE_CHARSET: uint = 1;
static STATE_IMPORTS: uint = 2;
static STATE_NAMESPACES: uint = 3;
static STATE_BODY: uint = 4;
let mut state: uint = STATE_CHARSET;
let mut rules = ~[];
let mut namespaces = NamespaceMap::new();
for rule in ErrorLoggerIterator(parse_stylesheet_rules(tokenize(css))) {
let next_state; // Unitialized to force each branch to set it.
match rule {
QualifiedRule(rule) => {
next_state = STATE_BODY;
parse_style_rule(rule, &mut rules, &namespaces)
},
AtRule(rule) => {
let lower_name = rule.name.to_ascii_lower();
match lower_name.as_slice() {
"charset" => {
if state > STATE_CHARSET {
log_css_error(rule.location, "@charset must be the first rule")
}
// Valid @charset rules are just ignored
next_state = STATE_IMPORTS;
},
"import" => {
if state > STATE_IMPORTS {
next_state = state;
log_css_error(rule.location,
"@import must be before any rule but @charset")
} else {
next_state = STATE_IMPORTS;
// TODO: support @import
log_css_error(rule.location, "@import is not supported yet")
}
},
"namespace" => {
if state > STATE_NAMESPACES {
next_state = state;
log_css_error(
rule.location,
"@namespace must be before any rule but @charset and @import"
)
} else {
next_state = STATE_NAMESPACES;
parse_namespace_rule(rule, &mut namespaces)
}
},
_ => {
next_state = STATE_BODY;
parse_nested_at_rule(lower_name, rule, &mut rules, &namespaces)
},
}
},
}
state = next_state;
}
Stylesheet{ rules: rules, namespaces: namespaces, encoding: encoding, base_url: base_url }
}
}
pub fn parse_style_rule(rule: QualifiedRule, parent_rules: &mut ~[CSSRule],
namespaces: &NamespaceMap) {
let QualifiedRule{location: location, prelude: prelude, block: block} = rule;
// FIXME: avoid doing this for valid selectors
let serialized = prelude.iter().to_css();
match selectors::parse_selector_list(prelude, namespaces) {
Some(selectors) => parent_rules.push(CSSStyleRule(StyleRule{
selectors: selectors,
declarations: properties::parse_property_declaration_list(block.move_iter())
})),
None => log_css_error(location, format!(
"Invalid/unsupported selector: {}", serialized)),
}
}
// lower_name is passed explicitly to avoid computing it twice.
pub fn parse_nested_at_rule(lower_name: &str, rule: AtRule,
parent_rules: &mut ~[CSSRule], namespaces: &NamespaceMap) {
match lower_name {
"media" => parse_media_rule(rule, parent_rules, namespaces),
_ => log_css_error(rule.location, format!("Unsupported at-rule: @{:s}", lower_name))
}
}
pub fn
|
<'a>(rules: &[CSSRule], device: &media_queries::Device,
callback: |&StyleRule|) {
for rule in rules.iter() {
match *rule {
CSSStyleRule(ref rule) => callback(rule),
CSSMediaRule(ref rule) => if rule.media_queries.evaluate(device) {
iter_style_rules(rule.rules.as_slice(), device, |s| callback(s))
}
}
}
}
|
iter_style_rules
|
identifier_name
|
stylesheets.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::iter::Iterator;
use std::ascii::StrAsciiExt;
use extra::url::Url;
use encoding::EncodingRef;
use cssparser::{decode_stylesheet_bytes, tokenize, parse_stylesheet_rules, ToCss};
use cssparser::ast::*;
use selectors;
use properties;
use errors::{ErrorLoggerIterator, log_css_error};
use namespaces::{NamespaceMap, parse_namespace_rule};
use media_queries::{MediaRule, parse_media_rule};
use media_queries;
pub struct Stylesheet {
/// List of rules in the order they were found (important for
/// cascading order)
rules: ~[CSSRule],
namespaces: NamespaceMap,
encoding: EncodingRef,
base_url: Url,
}
pub enum CSSRule {
CSSStyleRule(StyleRule),
CSSMediaRule(MediaRule),
}
pub struct StyleRule {
selectors: ~[selectors::Selector],
declarations: properties::PropertyDeclarationBlock,
}
impl Stylesheet {
pub fn from_bytes_iter<I: Iterator<~[u8]>>(
mut input: I, base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>) -> Stylesheet {
let mut bytes = ~[];
// TODO: incremental decoding and tokinization/parsing
for chunk in input {
bytes.push_all(chunk)
}
Stylesheet::from_bytes(bytes, base_url, protocol_encoding_label, environment_encoding)
}
pub fn from_bytes(
bytes: &[u8], base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>) -> Stylesheet {
let (string, used_encoding) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Stylesheet::from_str(string, base_url, used_encoding)
}
pub fn from_str(css: &str, base_url: Url, encoding: EncodingRef) -> Stylesheet {
static STATE_CHARSET: uint = 1;
static STATE_IMPORTS: uint = 2;
static STATE_NAMESPACES: uint = 3;
static STATE_BODY: uint = 4;
let mut state: uint = STATE_CHARSET;
let mut rules = ~[];
let mut namespaces = NamespaceMap::new();
for rule in ErrorLoggerIterator(parse_stylesheet_rules(tokenize(css))) {
let next_state; // Unitialized to force each branch to set it.
match rule {
QualifiedRule(rule) => {
next_state = STATE_BODY;
parse_style_rule(rule, &mut rules, &namespaces)
},
AtRule(rule) => {
let lower_name = rule.name.to_ascii_lower();
match lower_name.as_slice() {
"charset" => {
if state > STATE_CHARSET {
log_css_error(rule.location, "@charset must be the first rule")
}
// Valid @charset rules are just ignored
next_state = STATE_IMPORTS;
},
"import" => {
if state > STATE_IMPORTS {
next_state = state;
log_css_error(rule.location,
"@import must be before any rule but @charset")
} else {
next_state = STATE_IMPORTS;
// TODO: support @import
log_css_error(rule.location, "@import is not supported yet")
}
},
"namespace" => {
if state > STATE_NAMESPACES
|
else {
next_state = STATE_NAMESPACES;
parse_namespace_rule(rule, &mut namespaces)
}
},
_ => {
next_state = STATE_BODY;
parse_nested_at_rule(lower_name, rule, &mut rules, &namespaces)
},
}
},
}
state = next_state;
}
Stylesheet{ rules: rules, namespaces: namespaces, encoding: encoding, base_url: base_url }
}
}
pub fn parse_style_rule(rule: QualifiedRule, parent_rules: &mut ~[CSSRule],
namespaces: &NamespaceMap) {
let QualifiedRule{location: location, prelude: prelude, block: block} = rule;
// FIXME: avoid doing this for valid selectors
let serialized = prelude.iter().to_css();
match selectors::parse_selector_list(prelude, namespaces) {
Some(selectors) => parent_rules.push(CSSStyleRule(StyleRule{
selectors: selectors,
declarations: properties::parse_property_declaration_list(block.move_iter())
})),
None => log_css_error(location, format!(
"Invalid/unsupported selector: {}", serialized)),
}
}
// lower_name is passed explicitly to avoid computing it twice.
pub fn parse_nested_at_rule(lower_name: &str, rule: AtRule,
parent_rules: &mut ~[CSSRule], namespaces: &NamespaceMap) {
match lower_name {
"media" => parse_media_rule(rule, parent_rules, namespaces),
_ => log_css_error(rule.location, format!("Unsupported at-rule: @{:s}", lower_name))
}
}
pub fn iter_style_rules<'a>(rules: &[CSSRule], device: &media_queries::Device,
callback: |&StyleRule|) {
for rule in rules.iter() {
match *rule {
CSSStyleRule(ref rule) => callback(rule),
CSSMediaRule(ref rule) => if rule.media_queries.evaluate(device) {
iter_style_rules(rule.rules.as_slice(), device, |s| callback(s))
}
}
}
}
|
{
next_state = state;
log_css_error(
rule.location,
"@namespace must be before any rule but @charset and @import"
)
}
|
conditional_block
|
stylesheets.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::iter::Iterator;
use std::ascii::StrAsciiExt;
use extra::url::Url;
use encoding::EncodingRef;
use cssparser::{decode_stylesheet_bytes, tokenize, parse_stylesheet_rules, ToCss};
use cssparser::ast::*;
use selectors;
use properties;
use errors::{ErrorLoggerIterator, log_css_error};
use namespaces::{NamespaceMap, parse_namespace_rule};
use media_queries::{MediaRule, parse_media_rule};
use media_queries;
pub struct Stylesheet {
/// List of rules in the order they were found (important for
/// cascading order)
rules: ~[CSSRule],
namespaces: NamespaceMap,
encoding: EncodingRef,
base_url: Url,
}
pub enum CSSRule {
CSSStyleRule(StyleRule),
CSSMediaRule(MediaRule),
}
pub struct StyleRule {
selectors: ~[selectors::Selector],
declarations: properties::PropertyDeclarationBlock,
}
impl Stylesheet {
pub fn from_bytes_iter<I: Iterator<~[u8]>>(
mut input: I, base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>) -> Stylesheet {
let mut bytes = ~[];
// TODO: incremental decoding and tokinization/parsing
for chunk in input {
bytes.push_all(chunk)
}
Stylesheet::from_bytes(bytes, base_url, protocol_encoding_label, environment_encoding)
}
pub fn from_bytes(
bytes: &[u8], base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>) -> Stylesheet {
let (string, used_encoding) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Stylesheet::from_str(string, base_url, used_encoding)
}
pub fn from_str(css: &str, base_url: Url, encoding: EncodingRef) -> Stylesheet {
static STATE_CHARSET: uint = 1;
static STATE_IMPORTS: uint = 2;
static STATE_NAMESPACES: uint = 3;
static STATE_BODY: uint = 4;
let mut state: uint = STATE_CHARSET;
let mut rules = ~[];
let mut namespaces = NamespaceMap::new();
for rule in ErrorLoggerIterator(parse_stylesheet_rules(tokenize(css))) {
let next_state; // Unitialized to force each branch to set it.
match rule {
QualifiedRule(rule) => {
next_state = STATE_BODY;
parse_style_rule(rule, &mut rules, &namespaces)
},
AtRule(rule) => {
let lower_name = rule.name.to_ascii_lower();
match lower_name.as_slice() {
"charset" => {
if state > STATE_CHARSET {
log_css_error(rule.location, "@charset must be the first rule")
}
// Valid @charset rules are just ignored
next_state = STATE_IMPORTS;
},
"import" => {
if state > STATE_IMPORTS {
next_state = state;
log_css_error(rule.location,
"@import must be before any rule but @charset")
} else {
next_state = STATE_IMPORTS;
// TODO: support @import
log_css_error(rule.location, "@import is not supported yet")
}
},
"namespace" => {
if state > STATE_NAMESPACES {
next_state = state;
log_css_error(
rule.location,
"@namespace must be before any rule but @charset and @import"
)
} else {
next_state = STATE_NAMESPACES;
parse_namespace_rule(rule, &mut namespaces)
}
},
_ => {
|
parse_nested_at_rule(lower_name, rule, &mut rules, &namespaces)
},
}
},
}
state = next_state;
}
Stylesheet{ rules: rules, namespaces: namespaces, encoding: encoding, base_url: base_url }
}
}
pub fn parse_style_rule(rule: QualifiedRule, parent_rules: &mut ~[CSSRule],
namespaces: &NamespaceMap) {
let QualifiedRule{location: location, prelude: prelude, block: block} = rule;
// FIXME: avoid doing this for valid selectors
let serialized = prelude.iter().to_css();
match selectors::parse_selector_list(prelude, namespaces) {
Some(selectors) => parent_rules.push(CSSStyleRule(StyleRule{
selectors: selectors,
declarations: properties::parse_property_declaration_list(block.move_iter())
})),
None => log_css_error(location, format!(
"Invalid/unsupported selector: {}", serialized)),
}
}
// lower_name is passed explicitly to avoid computing it twice.
pub fn parse_nested_at_rule(lower_name: &str, rule: AtRule,
parent_rules: &mut ~[CSSRule], namespaces: &NamespaceMap) {
match lower_name {
"media" => parse_media_rule(rule, parent_rules, namespaces),
_ => log_css_error(rule.location, format!("Unsupported at-rule: @{:s}", lower_name))
}
}
pub fn iter_style_rules<'a>(rules: &[CSSRule], device: &media_queries::Device,
callback: |&StyleRule|) {
for rule in rules.iter() {
match *rule {
CSSStyleRule(ref rule) => callback(rule),
CSSMediaRule(ref rule) => if rule.media_queries.evaluate(device) {
iter_style_rules(rule.rules.as_slice(), device, |s| callback(s))
}
}
}
}
|
next_state = STATE_BODY;
|
random_line_split
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::ToCss;
use parser::SelectorImpl;
use std::ascii::AsciiExt;
use std::fmt;
#[derive(Clone, Eq, PartialEq)]
pub struct AttrSelectorWithNamespace<Impl: SelectorImpl> {
pub namespace: NamespaceConstraint<(Impl::NamespacePrefix, Impl::NamespaceUrl)>,
pub local_name: Impl::LocalName,
pub local_name_lower: Impl::LocalName,
pub operation: ParsedAttrSelectorOperation<Impl::AttrValue>,
pub never_matches: bool,
}
impl<Impl: SelectorImpl> AttrSelectorWithNamespace<Impl> {
pub fn namespace(&self) -> NamespaceConstraint<&Impl::NamespaceUrl> {
match self.namespace {
NamespaceConstraint::Any => NamespaceConstraint::Any,
NamespaceConstraint::Specific((_, ref url)) => {
NamespaceConstraint::Specific(url)
}
}
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum NamespaceConstraint<NamespaceUrl> {
Any,
/// Empty string for no namespace
Specific(NamespaceUrl),
}
#[derive(Clone, Eq, PartialEq)]
pub enum ParsedAttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: ParsedCaseSensitivity,
expected_value: AttrValue,
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum AttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: CaseSensitivity,
expected_value: AttrValue,
}
}
impl<AttrValue> AttrSelectorOperation<AttrValue> {
pub fn eval_str(&self, element_attr_value: &str) -> bool where AttrValue: AsRef<str> {
match *self {
AttrSelectorOperation::Exists => true,
AttrSelectorOperation::WithValue { operator, case_sensitivity, ref expected_value } => {
operator.eval_str(element_attr_value, expected_value.as_ref(), case_sensitivity)
}
}
}
}
#[derive(Clone, Copy, Eq, PartialEq)]
pub enum AttrSelectorOperator {
Equal,
Includes,
DashMatch,
Prefix,
Substring,
Suffix,
}
impl ToCss for AttrSelectorOperator {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
// https://drafts.csswg.org/cssom/#serializing-selectors
// See "attribute selector".
dest.write_str(match *self {
AttrSelectorOperator::Equal => "=",
AttrSelectorOperator::Includes => "~=",
AttrSelectorOperator::DashMatch => "|=",
AttrSelectorOperator::Prefix => "^=",
AttrSelectorOperator::Substring => "*=",
AttrSelectorOperator::Suffix => "$=",
})
}
}
impl AttrSelectorOperator {
pub fn eval_str(self, element_attr_value: &str, attr_selector_value: &str,
case_sensitivity: CaseSensitivity) -> bool {
let e = element_attr_value.as_bytes();
let s = attr_selector_value.as_bytes();
let case = case_sensitivity;
match self {
AttrSelectorOperator::Equal => {
case.eq(e, s)
}
AttrSelectorOperator::Prefix => {
e.len() >= s.len() && case.eq(&e[..s.len()], s)
}
AttrSelectorOperator::Suffix => {
e.len() >= s.len() && case.eq(&e[(e.len() - s.len())..], s)
}
AttrSelectorOperator::Substring => {
case.contains(element_attr_value, attr_selector_value)
}
AttrSelectorOperator::Includes => {
element_attr_value.split(SELECTOR_WHITESPACE)
.any(|part| case.eq(part.as_bytes(), s))
}
AttrSelectorOperator::DashMatch => {
case.eq(e, s) || (
e.get(s.len()) == Some(&b'-') &&
case.eq(&e[..s.len()], s)
)
}
}
}
}
/// The definition of whitespace per CSS Selectors Level 3 § 4.
pub static SELECTOR_WHITESPACE: &'static [char] = &[' ', '\t', '\n', '\r', '\x0C'];
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ParsedCaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument,
}
impl ParsedCaseSensitivity {
pub fn to_unconditional(self, is_html_element_in_html_document: bool) -> CaseSensitivity {
match self {
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument
if is_html_element_in_html_document => {
CaseSensitivity::AsciiCaseInsensitive
}
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument => {
CaseSensitivity::CaseSensitive
}
ParsedCaseSensitivity::CaseSensitive => CaseSensitivity::CaseSensitive,
ParsedCaseSensitivity::AsciiCaseInsensitive => CaseSensitivity::AsciiCaseInsensitive,
}
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum CaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
}
impl CaseSensitivity {
pub fn eq(self, a: &[u8], b: &[u8]) -> bool {
match self {
CaseSensitivity::CaseSensitive => a == b,
CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b),
}
}
pub fn contains(self, haystack: &str, needle: &str) -> bool {
match self {
CaseSensitivity::CaseSensitive => haystack.contains(needle),
CaseSensitivity::AsciiCaseInsensitive => {
if let Some((&n_first_byte, n_rest)) = needle.as_bytes().split_first() {
haystack.bytes().enumerate().any(|(i, byte)| {
if!byte.eq_ignore_ascii_case(&n_first_byte) {
return false
}
let after_this_byte = &haystack.as_bytes()[i + 1..];
match after_this_byte.get(..n_rest.len()) {
None => false,
Some(haystack_slice) => {
haystack_slice.eq_ignore_ascii_case(n_rest)
}
}
})
} else {
// any_str.contains("") == true,
// though these cases should be handled with *NeverMatches and never go here.
true
}
}
}
|
}
|
}
|
random_line_split
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::ToCss;
use parser::SelectorImpl;
use std::ascii::AsciiExt;
use std::fmt;
#[derive(Clone, Eq, PartialEq)]
pub struct AttrSelectorWithNamespace<Impl: SelectorImpl> {
pub namespace: NamespaceConstraint<(Impl::NamespacePrefix, Impl::NamespaceUrl)>,
pub local_name: Impl::LocalName,
pub local_name_lower: Impl::LocalName,
pub operation: ParsedAttrSelectorOperation<Impl::AttrValue>,
pub never_matches: bool,
}
impl<Impl: SelectorImpl> AttrSelectorWithNamespace<Impl> {
pub fn namespace(&self) -> NamespaceConstraint<&Impl::NamespaceUrl> {
match self.namespace {
NamespaceConstraint::Any => NamespaceConstraint::Any,
NamespaceConstraint::Specific((_, ref url)) => {
NamespaceConstraint::Specific(url)
}
}
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum NamespaceConstraint<NamespaceUrl> {
Any,
/// Empty string for no namespace
Specific(NamespaceUrl),
}
#[derive(Clone, Eq, PartialEq)]
pub enum ParsedAttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: ParsedCaseSensitivity,
expected_value: AttrValue,
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum AttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: CaseSensitivity,
expected_value: AttrValue,
}
}
impl<AttrValue> AttrSelectorOperation<AttrValue> {
pub fn eval_str(&self, element_attr_value: &str) -> bool where AttrValue: AsRef<str> {
match *self {
AttrSelectorOperation::Exists => true,
AttrSelectorOperation::WithValue { operator, case_sensitivity, ref expected_value } => {
operator.eval_str(element_attr_value, expected_value.as_ref(), case_sensitivity)
}
}
}
}
#[derive(Clone, Copy, Eq, PartialEq)]
pub enum AttrSelectorOperator {
Equal,
Includes,
DashMatch,
Prefix,
Substring,
Suffix,
}
impl ToCss for AttrSelectorOperator {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
// https://drafts.csswg.org/cssom/#serializing-selectors
// See "attribute selector".
dest.write_str(match *self {
AttrSelectorOperator::Equal => "=",
AttrSelectorOperator::Includes => "~=",
AttrSelectorOperator::DashMatch => "|=",
AttrSelectorOperator::Prefix => "^=",
AttrSelectorOperator::Substring => "*=",
AttrSelectorOperator::Suffix => "$=",
})
}
}
impl AttrSelectorOperator {
pub fn eval_str(self, element_attr_value: &str, attr_selector_value: &str,
case_sensitivity: CaseSensitivity) -> bool {
let e = element_attr_value.as_bytes();
let s = attr_selector_value.as_bytes();
let case = case_sensitivity;
match self {
AttrSelectorOperator::Equal => {
case.eq(e, s)
}
AttrSelectorOperator::Prefix => {
e.len() >= s.len() && case.eq(&e[..s.len()], s)
}
AttrSelectorOperator::Suffix => {
e.len() >= s.len() && case.eq(&e[(e.len() - s.len())..], s)
}
AttrSelectorOperator::Substring => {
case.contains(element_attr_value, attr_selector_value)
}
AttrSelectorOperator::Includes => {
element_attr_value.split(SELECTOR_WHITESPACE)
.any(|part| case.eq(part.as_bytes(), s))
}
AttrSelectorOperator::DashMatch => {
case.eq(e, s) || (
e.get(s.len()) == Some(&b'-') &&
case.eq(&e[..s.len()], s)
)
}
}
}
}
/// The definition of whitespace per CSS Selectors Level 3 § 4.
pub static SELECTOR_WHITESPACE: &'static [char] = &[' ', '\t', '\n', '\r', '\x0C'];
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ParsedCaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument,
}
impl ParsedCaseSensitivity {
pub fn to_unconditional(self, is_html_element_in_html_document: bool) -> CaseSensitivity {
match self {
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument
if is_html_element_in_html_document => {
CaseSensitivity::AsciiCaseInsensitive
}
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument => {
CaseSensitivity::CaseSensitive
}
ParsedCaseSensitivity::CaseSensitive => CaseSensitivity::CaseSensitive,
ParsedCaseSensitivity::AsciiCaseInsensitive => CaseSensitivity::AsciiCaseInsensitive,
}
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum CaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
}
impl CaseSensitivity {
pub fn eq(self, a: &[u8], b: &[u8]) -> bool {
match self {
CaseSensitivity::CaseSensitive => a == b,
CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b),
}
}
pub fn contains(self, haystack: &str, needle: &str) -> bool {
match self {
CaseSensitivity::CaseSensitive => haystack.contains(needle),
CaseSensitivity::AsciiCaseInsensitive => {
if let Some((&n_first_byte, n_rest)) = needle.as_bytes().split_first() {
haystack.bytes().enumerate().any(|(i, byte)| {
if!byte.eq_ignore_ascii_case(&n_first_byte) {
return false
}
let after_this_byte = &haystack.as_bytes()[i + 1..];
match after_this_byte.get(..n_rest.len()) {
None => false,
Some(haystack_slice) => {
|
}
})
} else {
// any_str.contains("") == true,
// though these cases should be handled with *NeverMatches and never go here.
true
}
}
}
}
}
|
haystack_slice.eq_ignore_ascii_case(n_rest)
}
|
conditional_block
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::ToCss;
use parser::SelectorImpl;
use std::ascii::AsciiExt;
use std::fmt;
#[derive(Clone, Eq, PartialEq)]
pub struct AttrSelectorWithNamespace<Impl: SelectorImpl> {
pub namespace: NamespaceConstraint<(Impl::NamespacePrefix, Impl::NamespaceUrl)>,
pub local_name: Impl::LocalName,
pub local_name_lower: Impl::LocalName,
pub operation: ParsedAttrSelectorOperation<Impl::AttrValue>,
pub never_matches: bool,
}
impl<Impl: SelectorImpl> AttrSelectorWithNamespace<Impl> {
pub fn namespace(&self) -> NamespaceConstraint<&Impl::NamespaceUrl> {
match self.namespace {
NamespaceConstraint::Any => NamespaceConstraint::Any,
NamespaceConstraint::Specific((_, ref url)) => {
NamespaceConstraint::Specific(url)
}
}
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum NamespaceConstraint<NamespaceUrl> {
Any,
/// Empty string for no namespace
Specific(NamespaceUrl),
}
#[derive(Clone, Eq, PartialEq)]
pub enum ParsedAttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: ParsedCaseSensitivity,
expected_value: AttrValue,
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum AttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: CaseSensitivity,
expected_value: AttrValue,
}
}
impl<AttrValue> AttrSelectorOperation<AttrValue> {
pub fn eval_str(&self, element_attr_value: &str) -> bool where AttrValue: AsRef<str> {
match *self {
AttrSelectorOperation::Exists => true,
AttrSelectorOperation::WithValue { operator, case_sensitivity, ref expected_value } => {
operator.eval_str(element_attr_value, expected_value.as_ref(), case_sensitivity)
}
}
}
}
#[derive(Clone, Copy, Eq, PartialEq)]
pub enum AttrSelectorOperator {
Equal,
Includes,
DashMatch,
Prefix,
Substring,
Suffix,
}
impl ToCss for AttrSelectorOperator {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
// https://drafts.csswg.org/cssom/#serializing-selectors
// See "attribute selector".
dest.write_str(match *self {
AttrSelectorOperator::Equal => "=",
AttrSelectorOperator::Includes => "~=",
AttrSelectorOperator::DashMatch => "|=",
AttrSelectorOperator::Prefix => "^=",
AttrSelectorOperator::Substring => "*=",
AttrSelectorOperator::Suffix => "$=",
})
}
}
impl AttrSelectorOperator {
pub fn eval_str(self, element_attr_value: &str, attr_selector_value: &str,
case_sensitivity: CaseSensitivity) -> bool
|
}
AttrSelectorOperator::DashMatch => {
case.eq(e, s) || (
e.get(s.len()) == Some(&b'-') &&
case.eq(&e[..s.len()], s)
)
}
}
}
}
/// The definition of whitespace per CSS Selectors Level 3 § 4.
pub static SELECTOR_WHITESPACE: &'static [char] = &[' ', '\t', '\n', '\r', '\x0C'];
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ParsedCaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument,
}
impl ParsedCaseSensitivity {
pub fn to_unconditional(self, is_html_element_in_html_document: bool) -> CaseSensitivity {
match self {
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument
if is_html_element_in_html_document => {
CaseSensitivity::AsciiCaseInsensitive
}
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument => {
CaseSensitivity::CaseSensitive
}
ParsedCaseSensitivity::CaseSensitive => CaseSensitivity::CaseSensitive,
ParsedCaseSensitivity::AsciiCaseInsensitive => CaseSensitivity::AsciiCaseInsensitive,
}
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum CaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
}
impl CaseSensitivity {
pub fn eq(self, a: &[u8], b: &[u8]) -> bool {
match self {
CaseSensitivity::CaseSensitive => a == b,
CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b),
}
}
pub fn contains(self, haystack: &str, needle: &str) -> bool {
match self {
CaseSensitivity::CaseSensitive => haystack.contains(needle),
CaseSensitivity::AsciiCaseInsensitive => {
if let Some((&n_first_byte, n_rest)) = needle.as_bytes().split_first() {
haystack.bytes().enumerate().any(|(i, byte)| {
if!byte.eq_ignore_ascii_case(&n_first_byte) {
return false
}
let after_this_byte = &haystack.as_bytes()[i + 1..];
match after_this_byte.get(..n_rest.len()) {
None => false,
Some(haystack_slice) => {
haystack_slice.eq_ignore_ascii_case(n_rest)
}
}
})
} else {
// any_str.contains("") == true,
// though these cases should be handled with *NeverMatches and never go here.
true
}
}
}
}
}
|
{
let e = element_attr_value.as_bytes();
let s = attr_selector_value.as_bytes();
let case = case_sensitivity;
match self {
AttrSelectorOperator::Equal => {
case.eq(e, s)
}
AttrSelectorOperator::Prefix => {
e.len() >= s.len() && case.eq(&e[..s.len()], s)
}
AttrSelectorOperator::Suffix => {
e.len() >= s.len() && case.eq(&e[(e.len() - s.len())..], s)
}
AttrSelectorOperator::Substring => {
case.contains(element_attr_value, attr_selector_value)
}
AttrSelectorOperator::Includes => {
element_attr_value.split(SELECTOR_WHITESPACE)
.any(|part| case.eq(part.as_bytes(), s))
|
identifier_body
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::ToCss;
use parser::SelectorImpl;
use std::ascii::AsciiExt;
use std::fmt;
#[derive(Clone, Eq, PartialEq)]
pub struct
|
<Impl: SelectorImpl> {
pub namespace: NamespaceConstraint<(Impl::NamespacePrefix, Impl::NamespaceUrl)>,
pub local_name: Impl::LocalName,
pub local_name_lower: Impl::LocalName,
pub operation: ParsedAttrSelectorOperation<Impl::AttrValue>,
pub never_matches: bool,
}
impl<Impl: SelectorImpl> AttrSelectorWithNamespace<Impl> {
pub fn namespace(&self) -> NamespaceConstraint<&Impl::NamespaceUrl> {
match self.namespace {
NamespaceConstraint::Any => NamespaceConstraint::Any,
NamespaceConstraint::Specific((_, ref url)) => {
NamespaceConstraint::Specific(url)
}
}
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum NamespaceConstraint<NamespaceUrl> {
Any,
/// Empty string for no namespace
Specific(NamespaceUrl),
}
#[derive(Clone, Eq, PartialEq)]
pub enum ParsedAttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: ParsedCaseSensitivity,
expected_value: AttrValue,
}
}
#[derive(Clone, Eq, PartialEq)]
pub enum AttrSelectorOperation<AttrValue> {
Exists,
WithValue {
operator: AttrSelectorOperator,
case_sensitivity: CaseSensitivity,
expected_value: AttrValue,
}
}
impl<AttrValue> AttrSelectorOperation<AttrValue> {
pub fn eval_str(&self, element_attr_value: &str) -> bool where AttrValue: AsRef<str> {
match *self {
AttrSelectorOperation::Exists => true,
AttrSelectorOperation::WithValue { operator, case_sensitivity, ref expected_value } => {
operator.eval_str(element_attr_value, expected_value.as_ref(), case_sensitivity)
}
}
}
}
#[derive(Clone, Copy, Eq, PartialEq)]
pub enum AttrSelectorOperator {
Equal,
Includes,
DashMatch,
Prefix,
Substring,
Suffix,
}
impl ToCss for AttrSelectorOperator {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
// https://drafts.csswg.org/cssom/#serializing-selectors
// See "attribute selector".
dest.write_str(match *self {
AttrSelectorOperator::Equal => "=",
AttrSelectorOperator::Includes => "~=",
AttrSelectorOperator::DashMatch => "|=",
AttrSelectorOperator::Prefix => "^=",
AttrSelectorOperator::Substring => "*=",
AttrSelectorOperator::Suffix => "$=",
})
}
}
impl AttrSelectorOperator {
pub fn eval_str(self, element_attr_value: &str, attr_selector_value: &str,
case_sensitivity: CaseSensitivity) -> bool {
let e = element_attr_value.as_bytes();
let s = attr_selector_value.as_bytes();
let case = case_sensitivity;
match self {
AttrSelectorOperator::Equal => {
case.eq(e, s)
}
AttrSelectorOperator::Prefix => {
e.len() >= s.len() && case.eq(&e[..s.len()], s)
}
AttrSelectorOperator::Suffix => {
e.len() >= s.len() && case.eq(&e[(e.len() - s.len())..], s)
}
AttrSelectorOperator::Substring => {
case.contains(element_attr_value, attr_selector_value)
}
AttrSelectorOperator::Includes => {
element_attr_value.split(SELECTOR_WHITESPACE)
.any(|part| case.eq(part.as_bytes(), s))
}
AttrSelectorOperator::DashMatch => {
case.eq(e, s) || (
e.get(s.len()) == Some(&b'-') &&
case.eq(&e[..s.len()], s)
)
}
}
}
}
/// The definition of whitespace per CSS Selectors Level 3 § 4.
pub static SELECTOR_WHITESPACE: &'static [char] = &[' ', '\t', '\n', '\r', '\x0C'];
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ParsedCaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument,
}
impl ParsedCaseSensitivity {
pub fn to_unconditional(self, is_html_element_in_html_document: bool) -> CaseSensitivity {
match self {
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument
if is_html_element_in_html_document => {
CaseSensitivity::AsciiCaseInsensitive
}
ParsedCaseSensitivity::AsciiCaseInsensitiveIfInHtmlElementInHtmlDocument => {
CaseSensitivity::CaseSensitive
}
ParsedCaseSensitivity::CaseSensitive => CaseSensitivity::CaseSensitive,
ParsedCaseSensitivity::AsciiCaseInsensitive => CaseSensitivity::AsciiCaseInsensitive,
}
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum CaseSensitivity {
CaseSensitive,
AsciiCaseInsensitive,
}
impl CaseSensitivity {
pub fn eq(self, a: &[u8], b: &[u8]) -> bool {
match self {
CaseSensitivity::CaseSensitive => a == b,
CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b),
}
}
pub fn contains(self, haystack: &str, needle: &str) -> bool {
match self {
CaseSensitivity::CaseSensitive => haystack.contains(needle),
CaseSensitivity::AsciiCaseInsensitive => {
if let Some((&n_first_byte, n_rest)) = needle.as_bytes().split_first() {
haystack.bytes().enumerate().any(|(i, byte)| {
if!byte.eq_ignore_ascii_case(&n_first_byte) {
return false
}
let after_this_byte = &haystack.as_bytes()[i + 1..];
match after_this_byte.get(..n_rest.len()) {
None => false,
Some(haystack_slice) => {
haystack_slice.eq_ignore_ascii_case(n_rest)
}
}
})
} else {
// any_str.contains("") == true,
// though these cases should be handled with *NeverMatches and never go here.
true
}
}
}
}
}
|
AttrSelectorWithNamespace
|
identifier_name
|
main.rs
|
#![feature(slice_patterns,convert)]
use std::collections::HashSet;
extern crate adventutils;
extern crate permutohedron;
#[derive(Clone)]
struct Entry {
pub subject: String,
pub other: String,
pub happiness: isize
}
fn main() {
let input = adventutils::read_input_file();
let mut entries : Vec<_> = input.lines().map(&parse_line).collect();
let result = calculate_best_happiness(entries.clone());
println!("Part 1 Max happiness is: {}", result);
// Add myself to the list
let people : Vec<_> = get_distinct_people(&entries);
for person in people {
entries.push(Entry {
subject: person.to_string(),
other: "Andrew".to_string(),
happiness: 0
});
entries.push(Entry {
subject: "Andrew".to_string(),
other: person.to_string(),
happiness: 0
});
}
let result = calculate_best_happiness(entries);
println!("Part 2 Max happiness is: {}", result);
}
fn get_distinct_people(entries: &Vec<Entry>) -> Vec<String> {
let people_hash : HashSet<_> = entries.iter().map(|e| e.subject.clone()).collect();
people_hash.iter().cloned().collect()
}
fn calculate_best_happiness(entries: Vec<Entry>) -> isize {
let mut people = get_distinct_people(&entries);
// Permute all the people
let permutations = permutohedron::Heap::new(&mut people);
permutations
.map(|p| calculate_happiness(&p, &entries))
.max()
.unwrap()
}
fn calculate_happiness(order: &Vec<String>, entries: &Vec<Entry>) -> isize {
let mut total = 0;
for (idx, person) in order.iter().enumerate() {
let neighbor = if idx < order.len() - 1 {
&order[idx + 1]
} else {
&order[0]
};
total += entries.iter().find(|e| e.subject == *person && e.other == *neighbor).unwrap().happiness;
total += entries.iter().find(|e| e.other == *person && e.subject == *neighbor).unwrap().happiness;
}
total
}
fn
|
(line: &str) -> Entry {
let tokens : Vec<_> = line.split_whitespace().collect();
match tokens.as_slice() {
[ subject, "would", "gain", happiness, "happiness", "units", "by", "sitting", "next", "to", other ] => Entry {
subject: subject.into(),
other: other.trim_right_matches('.').into(),
happiness: isize::from_str_radix(happiness, 10).unwrap()
},
[ subject, "would", "lose", happiness, "happiness", "units", "by", "sitting", "next", "to", other ] => Entry {
subject: subject.into(),
other: other.trim_right_matches('.').into(),
happiness: -(isize::from_str_radix(happiness, 10).unwrap())
},
_ => panic!("Unexpected line: {}", line)
}
}
|
parse_line
|
identifier_name
|
main.rs
|
#![feature(slice_patterns,convert)]
use std::collections::HashSet;
|
#[derive(Clone)]
struct Entry {
pub subject: String,
pub other: String,
pub happiness: isize
}
fn main() {
let input = adventutils::read_input_file();
let mut entries : Vec<_> = input.lines().map(&parse_line).collect();
let result = calculate_best_happiness(entries.clone());
println!("Part 1 Max happiness is: {}", result);
// Add myself to the list
let people : Vec<_> = get_distinct_people(&entries);
for person in people {
entries.push(Entry {
subject: person.to_string(),
other: "Andrew".to_string(),
happiness: 0
});
entries.push(Entry {
subject: "Andrew".to_string(),
other: person.to_string(),
happiness: 0
});
}
let result = calculate_best_happiness(entries);
println!("Part 2 Max happiness is: {}", result);
}
fn get_distinct_people(entries: &Vec<Entry>) -> Vec<String> {
let people_hash : HashSet<_> = entries.iter().map(|e| e.subject.clone()).collect();
people_hash.iter().cloned().collect()
}
fn calculate_best_happiness(entries: Vec<Entry>) -> isize {
let mut people = get_distinct_people(&entries);
// Permute all the people
let permutations = permutohedron::Heap::new(&mut people);
permutations
.map(|p| calculate_happiness(&p, &entries))
.max()
.unwrap()
}
fn calculate_happiness(order: &Vec<String>, entries: &Vec<Entry>) -> isize {
let mut total = 0;
for (idx, person) in order.iter().enumerate() {
let neighbor = if idx < order.len() - 1 {
&order[idx + 1]
} else {
&order[0]
};
total += entries.iter().find(|e| e.subject == *person && e.other == *neighbor).unwrap().happiness;
total += entries.iter().find(|e| e.other == *person && e.subject == *neighbor).unwrap().happiness;
}
total
}
fn parse_line(line: &str) -> Entry {
let tokens : Vec<_> = line.split_whitespace().collect();
match tokens.as_slice() {
[ subject, "would", "gain", happiness, "happiness", "units", "by", "sitting", "next", "to", other ] => Entry {
subject: subject.into(),
other: other.trim_right_matches('.').into(),
happiness: isize::from_str_radix(happiness, 10).unwrap()
},
[ subject, "would", "lose", happiness, "happiness", "units", "by", "sitting", "next", "to", other ] => Entry {
subject: subject.into(),
other: other.trim_right_matches('.').into(),
happiness: -(isize::from_str_radix(happiness, 10).unwrap())
},
_ => panic!("Unexpected line: {}", line)
}
}
|
extern crate adventutils;
extern crate permutohedron;
|
random_line_split
|
gui.rs
|
// See LICENSE file for copyright and license details.
use std::collections::{HashMap};
use cgmath::{Vector3};
use common::types::{ZInt, Size2, ZFloat};
use zgl::types::{ScreenPos, MatId};
use zgl::shader::{Shader};
use zgl::font_stash::{FontStash};
use zgl::mesh::{Mesh};
use zgl::zgl::{Zgl};
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct ButtonId {pub id: ZInt}
pub struct Button {
pos: ScreenPos,
size: Size2,
mesh: Mesh,
}
impl Button {
pub fn new(
zgl: &Zgl,
win_size: &Size2,
label: &str,
font_stash: &mut FontStash,
pos: ScreenPos,
) -> Button {
let text_size = (win_size.h as ZFloat) / 400.0; // TODO: 400?
let (_, size) = font_stash.get_text_size(zgl, label);
Button {
pos: pos,
size: Size2 {
w: (size.w as ZFloat * text_size) as ZInt,
h: (size.h as ZFloat * text_size) as ZInt,
},
mesh: font_stash.get_mesh(zgl, label, text_size, false),
}
}
pub fn draw(&self, zgl: &Zgl, shader: &Shader) {
self.mesh.draw(zgl, shader);
}
pub fn pos(&self) -> &ScreenPos {
&self.pos
}
pub fn size(&self) -> &Size2 {
&self.size
}
}
pub struct ButtonManager {
buttons: HashMap<ButtonId, Button>,
last_id: ButtonId,
}
impl ButtonManager {
pub fn new() -> ButtonManager {
ButtonManager {
buttons: HashMap::new(),
last_id: ButtonId{id: 0},
}
}
pub fn buttons(&self) -> &HashMap<ButtonId, Button> {
&self.buttons
}
pub fn add_button(&mut self, button: Button) -> ButtonId {
let id = self.last_id.clone();
self.buttons.insert(id.clone(), button);
self.last_id.id += 1;
id
}
// TODO: context: &Context
pub fn get_clicked_button_id(
&self,
mouse_pos: &ScreenPos,
win_size: &Size2,
) -> Option<ButtonId>
|
// TODO: context: &Context
pub fn draw(
&self,
zgl: &Zgl,
win_size: &Size2,
shader: &Shader,
mvp_mat_id: &MatId,
) {
let m = zgl.get_2d_screen_matrix(win_size);
for (_, button) in self.buttons() {
let text_offset = Vector3 {
x: button.pos().v.x as ZFloat,
y: button.pos().v.y as ZFloat,
z: 0.0,
};
shader.set_uniform_mat4f(
zgl, mvp_mat_id, &zgl.tr(m, &text_offset));
button.draw(zgl, shader);
}
}
}
// vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:
|
{
let x = mouse_pos.v.x;
let y = win_size.h - mouse_pos.v.y;
for (id, button) in self.buttons() {
if x >= button.pos().v.x
&& x <= button.pos().v.x + button.size().w
&& y >= button.pos().v.y
&& y <= button.pos().v.y + button.size().h
{
return Some(id.clone());
}
}
None
}
|
identifier_body
|
gui.rs
|
// See LICENSE file for copyright and license details.
use std::collections::{HashMap};
use cgmath::{Vector3};
use common::types::{ZInt, Size2, ZFloat};
use zgl::types::{ScreenPos, MatId};
use zgl::shader::{Shader};
use zgl::font_stash::{FontStash};
use zgl::mesh::{Mesh};
use zgl::zgl::{Zgl};
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct ButtonId {pub id: ZInt}
pub struct Button {
pos: ScreenPos,
size: Size2,
mesh: Mesh,
}
impl Button {
pub fn
|
(
zgl: &Zgl,
win_size: &Size2,
label: &str,
font_stash: &mut FontStash,
pos: ScreenPos,
) -> Button {
let text_size = (win_size.h as ZFloat) / 400.0; // TODO: 400?
let (_, size) = font_stash.get_text_size(zgl, label);
Button {
pos: pos,
size: Size2 {
w: (size.w as ZFloat * text_size) as ZInt,
h: (size.h as ZFloat * text_size) as ZInt,
},
mesh: font_stash.get_mesh(zgl, label, text_size, false),
}
}
pub fn draw(&self, zgl: &Zgl, shader: &Shader) {
self.mesh.draw(zgl, shader);
}
pub fn pos(&self) -> &ScreenPos {
&self.pos
}
pub fn size(&self) -> &Size2 {
&self.size
}
}
pub struct ButtonManager {
buttons: HashMap<ButtonId, Button>,
last_id: ButtonId,
}
impl ButtonManager {
pub fn new() -> ButtonManager {
ButtonManager {
buttons: HashMap::new(),
last_id: ButtonId{id: 0},
}
}
pub fn buttons(&self) -> &HashMap<ButtonId, Button> {
&self.buttons
}
pub fn add_button(&mut self, button: Button) -> ButtonId {
let id = self.last_id.clone();
self.buttons.insert(id.clone(), button);
self.last_id.id += 1;
id
}
// TODO: context: &Context
pub fn get_clicked_button_id(
&self,
mouse_pos: &ScreenPos,
win_size: &Size2,
) -> Option<ButtonId> {
let x = mouse_pos.v.x;
let y = win_size.h - mouse_pos.v.y;
for (id, button) in self.buttons() {
if x >= button.pos().v.x
&& x <= button.pos().v.x + button.size().w
&& y >= button.pos().v.y
&& y <= button.pos().v.y + button.size().h
{
return Some(id.clone());
}
}
None
}
// TODO: context: &Context
pub fn draw(
&self,
zgl: &Zgl,
win_size: &Size2,
shader: &Shader,
mvp_mat_id: &MatId,
) {
let m = zgl.get_2d_screen_matrix(win_size);
for (_, button) in self.buttons() {
let text_offset = Vector3 {
x: button.pos().v.x as ZFloat,
y: button.pos().v.y as ZFloat,
z: 0.0,
};
shader.set_uniform_mat4f(
zgl, mvp_mat_id, &zgl.tr(m, &text_offset));
button.draw(zgl, shader);
}
}
}
// vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:
|
new
|
identifier_name
|
gui.rs
|
// See LICENSE file for copyright and license details.
use std::collections::{HashMap};
use cgmath::{Vector3};
use common::types::{ZInt, Size2, ZFloat};
use zgl::types::{ScreenPos, MatId};
use zgl::shader::{Shader};
use zgl::font_stash::{FontStash};
use zgl::mesh::{Mesh};
use zgl::zgl::{Zgl};
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct ButtonId {pub id: ZInt}
pub struct Button {
pos: ScreenPos,
size: Size2,
mesh: Mesh,
}
impl Button {
pub fn new(
zgl: &Zgl,
win_size: &Size2,
label: &str,
font_stash: &mut FontStash,
pos: ScreenPos,
) -> Button {
let text_size = (win_size.h as ZFloat) / 400.0; // TODO: 400?
let (_, size) = font_stash.get_text_size(zgl, label);
Button {
pos: pos,
size: Size2 {
w: (size.w as ZFloat * text_size) as ZInt,
h: (size.h as ZFloat * text_size) as ZInt,
},
mesh: font_stash.get_mesh(zgl, label, text_size, false),
}
}
pub fn draw(&self, zgl: &Zgl, shader: &Shader) {
self.mesh.draw(zgl, shader);
}
pub fn pos(&self) -> &ScreenPos {
&self.pos
}
pub fn size(&self) -> &Size2 {
&self.size
}
}
pub struct ButtonManager {
buttons: HashMap<ButtonId, Button>,
last_id: ButtonId,
}
impl ButtonManager {
pub fn new() -> ButtonManager {
ButtonManager {
buttons: HashMap::new(),
last_id: ButtonId{id: 0},
}
}
pub fn buttons(&self) -> &HashMap<ButtonId, Button> {
&self.buttons
}
pub fn add_button(&mut self, button: Button) -> ButtonId {
let id = self.last_id.clone();
self.buttons.insert(id.clone(), button);
|
}
// TODO: context: &Context
pub fn get_clicked_button_id(
&self,
mouse_pos: &ScreenPos,
win_size: &Size2,
) -> Option<ButtonId> {
let x = mouse_pos.v.x;
let y = win_size.h - mouse_pos.v.y;
for (id, button) in self.buttons() {
if x >= button.pos().v.x
&& x <= button.pos().v.x + button.size().w
&& y >= button.pos().v.y
&& y <= button.pos().v.y + button.size().h
{
return Some(id.clone());
}
}
None
}
// TODO: context: &Context
pub fn draw(
&self,
zgl: &Zgl,
win_size: &Size2,
shader: &Shader,
mvp_mat_id: &MatId,
) {
let m = zgl.get_2d_screen_matrix(win_size);
for (_, button) in self.buttons() {
let text_offset = Vector3 {
x: button.pos().v.x as ZFloat,
y: button.pos().v.y as ZFloat,
z: 0.0,
};
shader.set_uniform_mat4f(
zgl, mvp_mat_id, &zgl.tr(m, &text_offset));
button.draw(zgl, shader);
}
}
}
// vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:
|
self.last_id.id += 1;
id
|
random_line_split
|
gui.rs
|
// See LICENSE file for copyright and license details.
use std::collections::{HashMap};
use cgmath::{Vector3};
use common::types::{ZInt, Size2, ZFloat};
use zgl::types::{ScreenPos, MatId};
use zgl::shader::{Shader};
use zgl::font_stash::{FontStash};
use zgl::mesh::{Mesh};
use zgl::zgl::{Zgl};
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct ButtonId {pub id: ZInt}
pub struct Button {
pos: ScreenPos,
size: Size2,
mesh: Mesh,
}
impl Button {
pub fn new(
zgl: &Zgl,
win_size: &Size2,
label: &str,
font_stash: &mut FontStash,
pos: ScreenPos,
) -> Button {
let text_size = (win_size.h as ZFloat) / 400.0; // TODO: 400?
let (_, size) = font_stash.get_text_size(zgl, label);
Button {
pos: pos,
size: Size2 {
w: (size.w as ZFloat * text_size) as ZInt,
h: (size.h as ZFloat * text_size) as ZInt,
},
mesh: font_stash.get_mesh(zgl, label, text_size, false),
}
}
pub fn draw(&self, zgl: &Zgl, shader: &Shader) {
self.mesh.draw(zgl, shader);
}
pub fn pos(&self) -> &ScreenPos {
&self.pos
}
pub fn size(&self) -> &Size2 {
&self.size
}
}
pub struct ButtonManager {
buttons: HashMap<ButtonId, Button>,
last_id: ButtonId,
}
impl ButtonManager {
pub fn new() -> ButtonManager {
ButtonManager {
buttons: HashMap::new(),
last_id: ButtonId{id: 0},
}
}
pub fn buttons(&self) -> &HashMap<ButtonId, Button> {
&self.buttons
}
pub fn add_button(&mut self, button: Button) -> ButtonId {
let id = self.last_id.clone();
self.buttons.insert(id.clone(), button);
self.last_id.id += 1;
id
}
// TODO: context: &Context
pub fn get_clicked_button_id(
&self,
mouse_pos: &ScreenPos,
win_size: &Size2,
) -> Option<ButtonId> {
let x = mouse_pos.v.x;
let y = win_size.h - mouse_pos.v.y;
for (id, button) in self.buttons() {
if x >= button.pos().v.x
&& x <= button.pos().v.x + button.size().w
&& y >= button.pos().v.y
&& y <= button.pos().v.y + button.size().h
|
}
None
}
// TODO: context: &Context
pub fn draw(
&self,
zgl: &Zgl,
win_size: &Size2,
shader: &Shader,
mvp_mat_id: &MatId,
) {
let m = zgl.get_2d_screen_matrix(win_size);
for (_, button) in self.buttons() {
let text_offset = Vector3 {
x: button.pos().v.x as ZFloat,
y: button.pos().v.y as ZFloat,
z: 0.0,
};
shader.set_uniform_mat4f(
zgl, mvp_mat_id, &zgl.tr(m, &text_offset));
button.draw(zgl, shader);
}
}
}
// vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:
|
{
return Some(id.clone());
}
|
conditional_block
|
args.rs
|
// Copyright (c) 2018 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use structopt::StructOpt;
use termcolor::{self as tc, WriteColor};
use button::Error;
use crate::cmd::Command;
use crate::opts::{ColorChoice, GlobalOpts};
#[derive(StructOpt, Debug)]
pub struct Args {
#[structopt(flatten)]
global: GlobalOpts,
#[structopt(subcommand)]
cmd: Command,
}
/// Displays an error and its list of causes.
pub fn display_error(
error: Error,
color: ColorChoice,
) -> Result<(), io::Error> {
let mut red = tc::ColorSpec::new();
red.set_fg(Some(tc::Color::Red));
red.set_bold(true);
let mut stdout = tc::StandardStream::stdout(color.into());
let mut causes = error.iter_chain();
// Primary error.
if let Some(cause) = causes.next() {
stdout.set_color(&red)?;
write!(&mut stdout, " Error")?;
stdout.reset()?;
writeln!(&mut stdout, ": {}", cause)?;
}
// Rest of the causes.
for cause in causes {
stdout.set_color(&red)?;
write!(&mut stdout, "Caused by")?;
stdout.reset()?;
|
writeln!(&mut stdout, "{}", error.backtrace())?;
Ok(())
}
impl Args {
// Delegate to a subcommand. If any errors occur, print out the error and
// its chain of causes.
pub fn main(self) -> i32 {
if let Err(error) = self.cmd.main(&self.global) {
let _ = display_error(error, self.global.color);
1
} else {
0
}
}
}
|
writeln!(&mut stdout, ": {}", cause)?;
}
|
random_line_split
|
args.rs
|
// Copyright (c) 2018 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use structopt::StructOpt;
use termcolor::{self as tc, WriteColor};
use button::Error;
use crate::cmd::Command;
use crate::opts::{ColorChoice, GlobalOpts};
#[derive(StructOpt, Debug)]
pub struct Args {
#[structopt(flatten)]
global: GlobalOpts,
#[structopt(subcommand)]
cmd: Command,
}
/// Displays an error and its list of causes.
pub fn display_error(
error: Error,
color: ColorChoice,
) -> Result<(), io::Error> {
let mut red = tc::ColorSpec::new();
red.set_fg(Some(tc::Color::Red));
red.set_bold(true);
let mut stdout = tc::StandardStream::stdout(color.into());
let mut causes = error.iter_chain();
// Primary error.
if let Some(cause) = causes.next() {
stdout.set_color(&red)?;
write!(&mut stdout, " Error")?;
stdout.reset()?;
writeln!(&mut stdout, ": {}", cause)?;
}
// Rest of the causes.
for cause in causes {
stdout.set_color(&red)?;
write!(&mut stdout, "Caused by")?;
stdout.reset()?;
writeln!(&mut stdout, ": {}", cause)?;
}
writeln!(&mut stdout, "{}", error.backtrace())?;
Ok(())
}
impl Args {
// Delegate to a subcommand. If any errors occur, print out the error and
// its chain of causes.
pub fn main(self) -> i32 {
if let Err(error) = self.cmd.main(&self.global)
|
else {
0
}
}
}
|
{
let _ = display_error(error, self.global.color);
1
}
|
conditional_block
|
args.rs
|
// Copyright (c) 2018 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use structopt::StructOpt;
use termcolor::{self as tc, WriteColor};
use button::Error;
use crate::cmd::Command;
use crate::opts::{ColorChoice, GlobalOpts};
#[derive(StructOpt, Debug)]
pub struct Args {
#[structopt(flatten)]
global: GlobalOpts,
#[structopt(subcommand)]
cmd: Command,
}
/// Displays an error and its list of causes.
pub fn display_error(
error: Error,
color: ColorChoice,
) -> Result<(), io::Error> {
let mut red = tc::ColorSpec::new();
red.set_fg(Some(tc::Color::Red));
red.set_bold(true);
let mut stdout = tc::StandardStream::stdout(color.into());
let mut causes = error.iter_chain();
// Primary error.
if let Some(cause) = causes.next() {
stdout.set_color(&red)?;
write!(&mut stdout, " Error")?;
stdout.reset()?;
writeln!(&mut stdout, ": {}", cause)?;
}
// Rest of the causes.
for cause in causes {
stdout.set_color(&red)?;
write!(&mut stdout, "Caused by")?;
stdout.reset()?;
writeln!(&mut stdout, ": {}", cause)?;
}
writeln!(&mut stdout, "{}", error.backtrace())?;
Ok(())
}
impl Args {
// Delegate to a subcommand. If any errors occur, print out the error and
// its chain of causes.
pub fn
|
(self) -> i32 {
if let Err(error) = self.cmd.main(&self.global) {
let _ = display_error(error, self.global.color);
1
} else {
0
}
}
}
|
main
|
identifier_name
|
mod.rs
|
#![cfg(feature = "window")]
#[phase(plugin)]
extern crate gl_generator;
use glutin;
#[cfg(not(target_os = "android"))]
mod gl {
generate_gl_bindings! {
api: "gl",
profile: "core",
version: "1.1",
generator: "struct"
}
}
#[cfg(target_os = "android")]
mod gl {
pub use self::Gles1 as Gl;
generate_gl_bindings! {
api: "gles1",
profile: "core",
version: "1.1",
generator: "struct"
}
}
pub struct Context {
gl: gl::Gl
}
pub fn load(window: &glutin::Window) -> Context {
let gl = gl::Gl::load_with(|symbol| window.get_proc_address(symbol));
let version = {
use std::c_str::CString;
unsafe { CString::new(gl.GetString(gl::VERSION) as *const i8, false) }
};
println!("OpenGL version {}", version.as_str().unwrap());
Context { gl: gl }
}
impl Context {
#[cfg(not(target_os = "android"))]
pub fn draw_frame(&self, color: (f32, f32, f32, f32)) {
self.gl.ClearColor(color.0, color.1, color.2, color.3);
self.gl.Clear(gl::COLOR_BUFFER_BIT);
self.gl.Begin(gl::TRIANGLES);
self.gl.Color3f(1.0, 0.0, 0.0);
self.gl.Vertex2f(-0.5, -0.5);
|
self.gl.Vertex2f(0.0, 0.5);
self.gl.Color3f(0.0, 0.0, 1.0);
self.gl.Vertex2f(0.5, -0.5);
self.gl.End();
}
#[cfg(target_os = "android")]
pub fn draw_frame(&self, color: (f32, f32, f32, f32)) {
self.gl.ClearColor(color.0, color.1, color.2, color.3);
self.gl.Clear(gl::COLOR_BUFFER_BIT);
self.gl.EnableClientState(gl::VERTEX_ARRAY);
self.gl.EnableClientState(gl::COLOR_ARRAY);
unsafe {
use std::mem;
self.gl.VertexPointer(2, gl::FLOAT, (mem::size_of::<f32>() * 5) as i32,
mem::transmute(VERTEX_DATA.as_slice().as_ptr()));
self.gl.ColorPointer(3, gl::FLOAT, (mem::size_of::<f32>() * 5) as i32,
mem::transmute(VERTEX_DATA.as_slice().as_ptr().offset(2)));
}
self.gl.DrawArrays(gl::TRIANGLES, 0, 3);
self.gl.DisableClientState(gl::VERTEX_ARRAY);
self.gl.DisableClientState(gl::COLOR_ARRAY);
}
}
#[cfg(target_os = "android")]
static VERTEX_DATA: [f32,..15] = [
-0.5, -0.5, 1.0, 0.0, 0.0,
0.0, 0.5, 0.0, 1.0, 0.0,
0.5, -0.5, 0.0, 0.0, 1.0
];
|
self.gl.Color3f(0.0, 1.0, 0.0);
|
random_line_split
|
mod.rs
|
#![cfg(feature = "window")]
#[phase(plugin)]
extern crate gl_generator;
use glutin;
#[cfg(not(target_os = "android"))]
mod gl {
generate_gl_bindings! {
api: "gl",
profile: "core",
version: "1.1",
generator: "struct"
}
}
#[cfg(target_os = "android")]
mod gl {
pub use self::Gles1 as Gl;
generate_gl_bindings! {
api: "gles1",
profile: "core",
version: "1.1",
generator: "struct"
}
}
pub struct Context {
gl: gl::Gl
}
pub fn load(window: &glutin::Window) -> Context {
let gl = gl::Gl::load_with(|symbol| window.get_proc_address(symbol));
let version = {
use std::c_str::CString;
unsafe { CString::new(gl.GetString(gl::VERSION) as *const i8, false) }
};
println!("OpenGL version {}", version.as_str().unwrap());
Context { gl: gl }
}
impl Context {
#[cfg(not(target_os = "android"))]
pub fn draw_frame(&self, color: (f32, f32, f32, f32)) {
self.gl.ClearColor(color.0, color.1, color.2, color.3);
self.gl.Clear(gl::COLOR_BUFFER_BIT);
self.gl.Begin(gl::TRIANGLES);
self.gl.Color3f(1.0, 0.0, 0.0);
self.gl.Vertex2f(-0.5, -0.5);
self.gl.Color3f(0.0, 1.0, 0.0);
self.gl.Vertex2f(0.0, 0.5);
self.gl.Color3f(0.0, 0.0, 1.0);
self.gl.Vertex2f(0.5, -0.5);
self.gl.End();
}
#[cfg(target_os = "android")]
pub fn draw_frame(&self, color: (f32, f32, f32, f32))
|
}
#[cfg(target_os = "android")]
static VERTEX_DATA: [f32,..15] = [
-0.5, -0.5, 1.0, 0.0, 0.0,
0.0, 0.5, 0.0, 1.0, 0.0,
0.5, -0.5, 0.0, 0.0, 1.0
];
|
{
self.gl.ClearColor(color.0, color.1, color.2, color.3);
self.gl.Clear(gl::COLOR_BUFFER_BIT);
self.gl.EnableClientState(gl::VERTEX_ARRAY);
self.gl.EnableClientState(gl::COLOR_ARRAY);
unsafe {
use std::mem;
self.gl.VertexPointer(2, gl::FLOAT, (mem::size_of::<f32>() * 5) as i32,
mem::transmute(VERTEX_DATA.as_slice().as_ptr()));
self.gl.ColorPointer(3, gl::FLOAT, (mem::size_of::<f32>() * 5) as i32,
mem::transmute(VERTEX_DATA.as_slice().as_ptr().offset(2)));
}
self.gl.DrawArrays(gl::TRIANGLES, 0, 3);
self.gl.DisableClientState(gl::VERTEX_ARRAY);
self.gl.DisableClientState(gl::COLOR_ARRAY);
}
|
identifier_body
|
mod.rs
|
#![cfg(feature = "window")]
#[phase(plugin)]
extern crate gl_generator;
use glutin;
#[cfg(not(target_os = "android"))]
mod gl {
generate_gl_bindings! {
api: "gl",
profile: "core",
version: "1.1",
generator: "struct"
}
}
#[cfg(target_os = "android")]
mod gl {
pub use self::Gles1 as Gl;
generate_gl_bindings! {
api: "gles1",
profile: "core",
version: "1.1",
generator: "struct"
}
}
pub struct Context {
gl: gl::Gl
}
pub fn
|
(window: &glutin::Window) -> Context {
let gl = gl::Gl::load_with(|symbol| window.get_proc_address(symbol));
let version = {
use std::c_str::CString;
unsafe { CString::new(gl.GetString(gl::VERSION) as *const i8, false) }
};
println!("OpenGL version {}", version.as_str().unwrap());
Context { gl: gl }
}
impl Context {
#[cfg(not(target_os = "android"))]
pub fn draw_frame(&self, color: (f32, f32, f32, f32)) {
self.gl.ClearColor(color.0, color.1, color.2, color.3);
self.gl.Clear(gl::COLOR_BUFFER_BIT);
self.gl.Begin(gl::TRIANGLES);
self.gl.Color3f(1.0, 0.0, 0.0);
self.gl.Vertex2f(-0.5, -0.5);
self.gl.Color3f(0.0, 1.0, 0.0);
self.gl.Vertex2f(0.0, 0.5);
self.gl.Color3f(0.0, 0.0, 1.0);
self.gl.Vertex2f(0.5, -0.5);
self.gl.End();
}
#[cfg(target_os = "android")]
pub fn draw_frame(&self, color: (f32, f32, f32, f32)) {
self.gl.ClearColor(color.0, color.1, color.2, color.3);
self.gl.Clear(gl::COLOR_BUFFER_BIT);
self.gl.EnableClientState(gl::VERTEX_ARRAY);
self.gl.EnableClientState(gl::COLOR_ARRAY);
unsafe {
use std::mem;
self.gl.VertexPointer(2, gl::FLOAT, (mem::size_of::<f32>() * 5) as i32,
mem::transmute(VERTEX_DATA.as_slice().as_ptr()));
self.gl.ColorPointer(3, gl::FLOAT, (mem::size_of::<f32>() * 5) as i32,
mem::transmute(VERTEX_DATA.as_slice().as_ptr().offset(2)));
}
self.gl.DrawArrays(gl::TRIANGLES, 0, 3);
self.gl.DisableClientState(gl::VERTEX_ARRAY);
self.gl.DisableClientState(gl::COLOR_ARRAY);
}
}
#[cfg(target_os = "android")]
static VERTEX_DATA: [f32,..15] = [
-0.5, -0.5, 1.0, 0.0, 0.0,
0.0, 0.5, 0.0, 1.0, 0.0,
0.5, -0.5, 0.0, 0.0, 1.0
];
|
load
|
identifier_name
|
array_const_index-1.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(stable_features)]
#![feature(const_indexing)]
fn
|
() {
const ARR: [i32; 6] = [42, 43, 44, 45, 46, 47];
const IDX: usize = 3;
const VAL: i32 = ARR[IDX];
const BLUB: [i32; (ARR[0] - 41) as usize] = [5];
}
|
main
|
identifier_name
|
array_const_index-1.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(stable_features)]
#![feature(const_indexing)]
fn main()
|
{
const ARR: [i32; 6] = [42, 43, 44, 45, 46, 47];
const IDX: usize = 3;
const VAL: i32 = ARR[IDX];
const BLUB: [i32; (ARR[0] - 41) as usize] = [5];
}
|
identifier_body
|
|
array_const_index-1.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
|
fn main() {
const ARR: [i32; 6] = [42, 43, 44, 45, 46, 47];
const IDX: usize = 3;
const VAL: i32 = ARR[IDX];
const BLUB: [i32; (ARR[0] - 41) as usize] = [5];
}
|
#![allow(dead_code)]
#![allow(stable_features)]
#![feature(const_indexing)]
|
random_line_split
|
callback.rs
|
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use crate::call::server::{RequestContext, UnaryRequestContext};
use crate::call::{BatchContext, Call};
use crate::cq::CompletionQueue;
use crate::server::{self, RequestCallContext};
pub struct Request {
ctx: RequestContext,
}
impl Request {
pub fn new(rc: RequestCallContext) -> Request {
let ctx = RequestContext::new(rc);
Request { ctx }
}
pub fn context(&self) -> &RequestContext {
&self.ctx
}
pub fn resolve(mut self, cq: &CompletionQueue, success: bool)
|
}
pub struct UnaryRequest {
ctx: UnaryRequestContext,
}
impl UnaryRequest {
pub fn new(ctx: RequestContext, rc: RequestCallContext) -> UnaryRequest {
let ctx = UnaryRequestContext::new(ctx, rc);
UnaryRequest { ctx }
}
pub fn batch_ctx(&self) -> &BatchContext {
self.ctx.batch_ctx()
}
pub fn request_ctx(&self) -> &RequestContext {
self.ctx.request_ctx()
}
pub fn resolve(mut self, cq: &CompletionQueue, success: bool) {
let mut rc = self.ctx.take_request_call_context().unwrap();
if!success {
server::request_call(rc, cq);
return;
}
let reader = self.ctx.batch_ctx_mut().recv_message();
self.ctx.handle(&mut rc, cq, reader);
server::request_call(rc, cq);
}
}
/// A callback to wait for status for the aborted rpc call to be sent.
pub struct Abort {
ctx: BatchContext,
_call: Call,
}
impl Abort {
pub fn new(call: Call) -> Abort {
Abort {
ctx: BatchContext::new(),
_call: call,
}
}
pub fn batch_ctx(&self) -> &BatchContext {
&self.ctx
}
}
|
{
let mut rc = self.ctx.take_request_call_context().unwrap();
if !success {
server::request_call(rc, cq);
return;
}
match self.ctx.handle_stream_req(cq, &mut rc) {
Ok(_) => server::request_call(rc, cq),
Err(ctx) => ctx.handle_unary_req(rc, cq),
}
}
|
identifier_body
|
callback.rs
|
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use crate::call::server::{RequestContext, UnaryRequestContext};
use crate::call::{BatchContext, Call};
use crate::cq::CompletionQueue;
use crate::server::{self, RequestCallContext};
pub struct Request {
ctx: RequestContext,
}
impl Request {
pub fn new(rc: RequestCallContext) -> Request {
let ctx = RequestContext::new(rc);
Request { ctx }
}
pub fn context(&self) -> &RequestContext {
&self.ctx
}
|
}
match self.ctx.handle_stream_req(cq, &mut rc) {
Ok(_) => server::request_call(rc, cq),
Err(ctx) => ctx.handle_unary_req(rc, cq),
}
}
}
pub struct UnaryRequest {
ctx: UnaryRequestContext,
}
impl UnaryRequest {
pub fn new(ctx: RequestContext, rc: RequestCallContext) -> UnaryRequest {
let ctx = UnaryRequestContext::new(ctx, rc);
UnaryRequest { ctx }
}
pub fn batch_ctx(&self) -> &BatchContext {
self.ctx.batch_ctx()
}
pub fn request_ctx(&self) -> &RequestContext {
self.ctx.request_ctx()
}
pub fn resolve(mut self, cq: &CompletionQueue, success: bool) {
let mut rc = self.ctx.take_request_call_context().unwrap();
if!success {
server::request_call(rc, cq);
return;
}
let reader = self.ctx.batch_ctx_mut().recv_message();
self.ctx.handle(&mut rc, cq, reader);
server::request_call(rc, cq);
}
}
/// A callback to wait for status for the aborted rpc call to be sent.
pub struct Abort {
ctx: BatchContext,
_call: Call,
}
impl Abort {
pub fn new(call: Call) -> Abort {
Abort {
ctx: BatchContext::new(),
_call: call,
}
}
pub fn batch_ctx(&self) -> &BatchContext {
&self.ctx
}
}
|
pub fn resolve(mut self, cq: &CompletionQueue, success: bool) {
let mut rc = self.ctx.take_request_call_context().unwrap();
if !success {
server::request_call(rc, cq);
return;
|
random_line_split
|
callback.rs
|
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use crate::call::server::{RequestContext, UnaryRequestContext};
use crate::call::{BatchContext, Call};
use crate::cq::CompletionQueue;
use crate::server::{self, RequestCallContext};
pub struct Request {
ctx: RequestContext,
}
impl Request {
pub fn new(rc: RequestCallContext) -> Request {
let ctx = RequestContext::new(rc);
Request { ctx }
}
pub fn context(&self) -> &RequestContext {
&self.ctx
}
pub fn resolve(mut self, cq: &CompletionQueue, success: bool) {
let mut rc = self.ctx.take_request_call_context().unwrap();
if!success {
server::request_call(rc, cq);
return;
}
match self.ctx.handle_stream_req(cq, &mut rc) {
Ok(_) => server::request_call(rc, cq),
Err(ctx) => ctx.handle_unary_req(rc, cq),
}
}
}
pub struct UnaryRequest {
ctx: UnaryRequestContext,
}
impl UnaryRequest {
pub fn new(ctx: RequestContext, rc: RequestCallContext) -> UnaryRequest {
let ctx = UnaryRequestContext::new(ctx, rc);
UnaryRequest { ctx }
}
pub fn
|
(&self) -> &BatchContext {
self.ctx.batch_ctx()
}
pub fn request_ctx(&self) -> &RequestContext {
self.ctx.request_ctx()
}
pub fn resolve(mut self, cq: &CompletionQueue, success: bool) {
let mut rc = self.ctx.take_request_call_context().unwrap();
if!success {
server::request_call(rc, cq);
return;
}
let reader = self.ctx.batch_ctx_mut().recv_message();
self.ctx.handle(&mut rc, cq, reader);
server::request_call(rc, cq);
}
}
/// A callback to wait for status for the aborted rpc call to be sent.
pub struct Abort {
ctx: BatchContext,
_call: Call,
}
impl Abort {
pub fn new(call: Call) -> Abort {
Abort {
ctx: BatchContext::new(),
_call: call,
}
}
pub fn batch_ctx(&self) -> &BatchContext {
&self.ctx
}
}
|
batch_ctx
|
identifier_name
|
lib.rs
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(asm)]
#![feature(macro_rules)]
#![feature(simd)]
#![feature(slicing_syntax)]
extern crate "rustc-serialize" as serialize;
extern crate time;
#[cfg(test)] extern crate test;
pub mod aes;
pub mod aessafe;
pub mod bcrypt;
pub mod bcrypt_pbkdf;
pub mod blake2b;
pub mod blockmodes;
pub mod blowfish;
pub mod buffer;
pub mod chacha20;
|
pub mod curve25519;
pub mod digest;
pub mod ed25519;
pub mod fortuna;
pub mod ghash;
pub mod hmac;
pub mod mac;
pub mod md5;
pub mod pbkdf2;
pub mod poly1305;
pub mod rc4;
pub mod ripemd160;
pub mod salsa20;
pub mod scrypt;
pub mod sha1;
pub mod sha2;
pub mod symmetriccipher;
pub mod util;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub mod aesni;
|
mod cryptoutil;
|
random_line_split
|
specialization_graph.rs
|
use crate::ich::{self, StableHashingContext};
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::fold::TypeFoldable;
use crate::ty::{self, TyCtxt};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorReported;
use rustc_hir::def_id::{DefId, DefIdMap};
use rustc_span::symbol::Ident;
/// A per-trait graph of impls in specialization order. At the moment, this
/// graph forms a tree rooted with the trait itself, with all other nodes
/// representing impls, and parent-child relationships representing
/// specializations.
///
/// The graph provides two key services:
///
/// - Construction. This implicitly checks for overlapping impls (i.e., impls
/// that overlap but where neither specializes the other -- an artifact of the
/// simple "chain" rule.
///
/// - Parent extraction. In particular, the graph can give you the *immediate*
/// parents of a given specializing impl, which is needed for extracting
/// default items amongst other things. In the simple "chain" rule, every impl
/// has at most one parent.
#[derive(TyEncodable, TyDecodable, HashStable, Debug)]
pub struct Graph {
/// All impls have a parent; the "root" impls have as their parent the `def_id`
/// of the trait.
pub parent: DefIdMap<DefId>,
/// The "root" impls are found by looking up the trait's def_id.
pub children: DefIdMap<Children>,
/// Whether an error was emitted while constructing the graph.
pub has_errored: bool,
}
impl Graph {
pub fn new() -> Graph {
Graph { parent: Default::default(), children: Default::default(), has_errored: false }
}
/// The parent of a given impl, which is the `DefId` of the trait when the
/// impl is a "specialization root".
pub fn parent(&self, child: DefId) -> DefId {
*self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {:?}", child))
}
}
/// Children of a given impl, grouped into blanket/non-blanket varieties as is
/// done in `TraitDef`.
#[derive(Default, TyEncodable, TyDecodable, Debug)]
pub struct Children {
// Impls of a trait (or specializations of a given impl). To allow for
// quicker lookup, the impls are indexed by a simplified version of their
// `Self` type: impls with a simplifiable `Self` are stored in
// `nonblanket_impls` keyed by it, while all other impls are stored in
// `blanket_impls`.
//
// A similar division is used within `TraitDef`, but the lists there collect
// together *all* the impls for a trait, and are populated prior to building
// the specialization graph.
/// Impls of the trait.
pub nonblanket_impls: FxHashMap<SimplifiedType, Vec<DefId>>,
/// Blanket impls associated with the trait.
pub blanket_impls: Vec<DefId>,
}
/// A node in the specialization graph is either an impl or a trait
/// definition; either can serve as a source of item definitions.
/// There is always exactly one trait definition node: the root.
#[derive(Debug, Copy, Clone)]
pub enum Node {
Impl(DefId),
Trait(DefId),
}
impl<'tcx> Node {
pub fn is_from_trait(&self) -> bool {
matches!(self, Node::Trait(..))
}
/// Iterate over the items defined directly by the given (impl or trait) node.
pub fn items(&self, tcx: TyCtxt<'tcx>) -> impl 'tcx + Iterator<Item = &'tcx ty::AssocItem> {
tcx.associated_items(self.def_id()).in_definition_order()
}
/// Finds an associated item defined in this node.
///
/// If this returns `None`, the item can potentially still be found in
/// parents of this node.
pub fn item(
&self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
trait_def_id: DefId,
) -> Option<ty::AssocItem>
|
pub fn def_id(&self) -> DefId {
match *self {
Node::Impl(did) => did,
Node::Trait(did) => did,
}
}
}
#[derive(Copy, Clone)]
pub struct Ancestors<'tcx> {
trait_def_id: DefId,
specialization_graph: &'tcx Graph,
current_source: Option<Node>,
}
impl Iterator for Ancestors<'_> {
type Item = Node;
fn next(&mut self) -> Option<Node> {
let cur = self.current_source.take();
if let Some(Node::Impl(cur_impl)) = cur {
let parent = self.specialization_graph.parent(cur_impl);
self.current_source = if parent == self.trait_def_id {
Some(Node::Trait(parent))
} else {
Some(Node::Impl(parent))
};
}
cur
}
}
/// Information about the most specialized definition of an associated item.
pub struct LeafDef {
/// The associated item described by this `LeafDef`.
pub item: ty::AssocItem,
/// The node in the specialization graph containing the definition of `item`.
pub defining_node: Node,
/// The "top-most" (ie. least specialized) specialization graph node that finalized the
/// definition of `item`.
///
/// Example:
///
/// ```
/// trait Tr {
/// fn assoc(&self);
/// }
///
/// impl<T> Tr for T {
/// default fn assoc(&self) {}
/// }
///
/// impl Tr for u8 {}
/// ```
///
/// If we start the leaf definition search at `impl Tr for u8`, that impl will be the
/// `finalizing_node`, while `defining_node` will be the generic impl.
///
/// If the leaf definition search is started at the generic impl, `finalizing_node` will be
/// `None`, since the most specialized impl we found still allows overriding the method
/// (doesn't finalize it).
pub finalizing_node: Option<Node>,
}
impl LeafDef {
/// Returns whether this definition is known to not be further specializable.
pub fn is_final(&self) -> bool {
self.finalizing_node.is_some()
}
}
impl<'tcx> Ancestors<'tcx> {
/// Finds the bottom-most (ie. most specialized) definition of an associated
/// item.
pub fn leaf_def(
mut self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
) -> Option<LeafDef> {
let trait_def_id = self.trait_def_id;
let mut finalizing_node = None;
self.find_map(|node| {
if let Some(item) = node.item(tcx, trait_item_name, trait_item_kind, trait_def_id) {
if finalizing_node.is_none() {
let is_specializable = item.defaultness.is_default()
|| tcx.impl_defaultness(node.def_id()).is_default();
if!is_specializable {
finalizing_node = Some(node);
}
}
Some(LeafDef { item, defining_node: node, finalizing_node })
} else {
// Item not mentioned. This "finalizes" any defaulted item provided by an ancestor.
finalizing_node = Some(node);
None
}
})
}
}
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
///
/// Returns `Err` if an error was reported while building the specialization
/// graph.
pub fn ancestors(
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
start_from_impl: DefId,
) -> Result<Ancestors<'tcx>, ErrorReported> {
let specialization_graph = tcx.specialization_graph_of(trait_def_id);
if specialization_graph.has_errored || tcx.type_of(start_from_impl).references_error() {
Err(ErrorReported)
} else {
Ok(Ancestors {
trait_def_id,
specialization_graph,
current_source: Some(Node::Impl(start_from_impl)),
})
}
}
impl<'a> HashStable<StableHashingContext<'a>> for Children {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let Children { ref nonblanket_impls, ref blanket_impls } = *self;
ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, nonblanket_impls);
}
}
|
{
tcx.associated_items(self.def_id())
.filter_by_name_unhygienic(trait_item_name.name)
.find(move |impl_item| {
trait_item_kind == impl_item.kind
&& tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id)
})
.copied()
}
|
identifier_body
|
specialization_graph.rs
|
use crate::ich::{self, StableHashingContext};
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::fold::TypeFoldable;
use crate::ty::{self, TyCtxt};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorReported;
use rustc_hir::def_id::{DefId, DefIdMap};
use rustc_span::symbol::Ident;
/// A per-trait graph of impls in specialization order. At the moment, this
/// graph forms a tree rooted with the trait itself, with all other nodes
/// representing impls, and parent-child relationships representing
/// specializations.
///
/// The graph provides two key services:
///
/// - Construction. This implicitly checks for overlapping impls (i.e., impls
/// that overlap but where neither specializes the other -- an artifact of the
/// simple "chain" rule.
///
/// - Parent extraction. In particular, the graph can give you the *immediate*
/// parents of a given specializing impl, which is needed for extracting
/// default items amongst other things. In the simple "chain" rule, every impl
/// has at most one parent.
#[derive(TyEncodable, TyDecodable, HashStable, Debug)]
pub struct Graph {
/// All impls have a parent; the "root" impls have as their parent the `def_id`
/// of the trait.
pub parent: DefIdMap<DefId>,
/// The "root" impls are found by looking up the trait's def_id.
pub children: DefIdMap<Children>,
/// Whether an error was emitted while constructing the graph.
pub has_errored: bool,
}
impl Graph {
pub fn new() -> Graph {
Graph { parent: Default::default(), children: Default::default(), has_errored: false }
}
/// The parent of a given impl, which is the `DefId` of the trait when the
/// impl is a "specialization root".
pub fn parent(&self, child: DefId) -> DefId {
*self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {:?}", child))
}
}
/// Children of a given impl, grouped into blanket/non-blanket varieties as is
/// done in `TraitDef`.
#[derive(Default, TyEncodable, TyDecodable, Debug)]
pub struct Children {
// Impls of a trait (or specializations of a given impl). To allow for
// quicker lookup, the impls are indexed by a simplified version of their
// `Self` type: impls with a simplifiable `Self` are stored in
// `nonblanket_impls` keyed by it, while all other impls are stored in
// `blanket_impls`.
//
// A similar division is used within `TraitDef`, but the lists there collect
// together *all* the impls for a trait, and are populated prior to building
// the specialization graph.
/// Impls of the trait.
pub nonblanket_impls: FxHashMap<SimplifiedType, Vec<DefId>>,
/// Blanket impls associated with the trait.
pub blanket_impls: Vec<DefId>,
}
/// A node in the specialization graph is either an impl or a trait
/// definition; either can serve as a source of item definitions.
/// There is always exactly one trait definition node: the root.
#[derive(Debug, Copy, Clone)]
pub enum Node {
Impl(DefId),
Trait(DefId),
}
impl<'tcx> Node {
pub fn is_from_trait(&self) -> bool {
matches!(self, Node::Trait(..))
}
/// Iterate over the items defined directly by the given (impl or trait) node.
pub fn items(&self, tcx: TyCtxt<'tcx>) -> impl 'tcx + Iterator<Item = &'tcx ty::AssocItem> {
tcx.associated_items(self.def_id()).in_definition_order()
}
/// Finds an associated item defined in this node.
///
/// If this returns `None`, the item can potentially still be found in
/// parents of this node.
pub fn item(
&self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
trait_def_id: DefId,
) -> Option<ty::AssocItem> {
tcx.associated_items(self.def_id())
.filter_by_name_unhygienic(trait_item_name.name)
.find(move |impl_item| {
trait_item_kind == impl_item.kind
&& tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id)
})
.copied()
}
pub fn def_id(&self) -> DefId {
match *self {
Node::Impl(did) => did,
Node::Trait(did) => did,
}
}
}
#[derive(Copy, Clone)]
pub struct Ancestors<'tcx> {
trait_def_id: DefId,
specialization_graph: &'tcx Graph,
current_source: Option<Node>,
}
impl Iterator for Ancestors<'_> {
type Item = Node;
fn next(&mut self) -> Option<Node> {
let cur = self.current_source.take();
if let Some(Node::Impl(cur_impl)) = cur {
let parent = self.specialization_graph.parent(cur_impl);
self.current_source = if parent == self.trait_def_id {
Some(Node::Trait(parent))
} else {
Some(Node::Impl(parent))
};
}
cur
}
}
/// Information about the most specialized definition of an associated item.
pub struct LeafDef {
/// The associated item described by this `LeafDef`.
pub item: ty::AssocItem,
/// The node in the specialization graph containing the definition of `item`.
pub defining_node: Node,
/// The "top-most" (ie. least specialized) specialization graph node that finalized the
/// definition of `item`.
///
/// Example:
///
/// ```
/// trait Tr {
/// fn assoc(&self);
/// }
///
/// impl<T> Tr for T {
/// default fn assoc(&self) {}
/// }
///
/// impl Tr for u8 {}
/// ```
///
/// If we start the leaf definition search at `impl Tr for u8`, that impl will be the
/// `finalizing_node`, while `defining_node` will be the generic impl.
///
/// If the leaf definition search is started at the generic impl, `finalizing_node` will be
/// `None`, since the most specialized impl we found still allows overriding the method
/// (doesn't finalize it).
pub finalizing_node: Option<Node>,
}
impl LeafDef {
/// Returns whether this definition is known to not be further specializable.
pub fn is_final(&self) -> bool {
self.finalizing_node.is_some()
}
}
impl<'tcx> Ancestors<'tcx> {
/// Finds the bottom-most (ie. most specialized) definition of an associated
/// item.
pub fn leaf_def(
mut self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
) -> Option<LeafDef> {
let trait_def_id = self.trait_def_id;
let mut finalizing_node = None;
self.find_map(|node| {
if let Some(item) = node.item(tcx, trait_item_name, trait_item_kind, trait_def_id) {
if finalizing_node.is_none()
|
Some(LeafDef { item, defining_node: node, finalizing_node })
} else {
// Item not mentioned. This "finalizes" any defaulted item provided by an ancestor.
finalizing_node = Some(node);
None
}
})
}
}
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
///
/// Returns `Err` if an error was reported while building the specialization
/// graph.
pub fn ancestors(
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
start_from_impl: DefId,
) -> Result<Ancestors<'tcx>, ErrorReported> {
let specialization_graph = tcx.specialization_graph_of(trait_def_id);
if specialization_graph.has_errored || tcx.type_of(start_from_impl).references_error() {
Err(ErrorReported)
} else {
Ok(Ancestors {
trait_def_id,
specialization_graph,
current_source: Some(Node::Impl(start_from_impl)),
})
}
}
impl<'a> HashStable<StableHashingContext<'a>> for Children {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let Children { ref nonblanket_impls, ref blanket_impls } = *self;
ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, nonblanket_impls);
}
}
|
{
let is_specializable = item.defaultness.is_default()
|| tcx.impl_defaultness(node.def_id()).is_default();
if !is_specializable {
finalizing_node = Some(node);
}
}
|
conditional_block
|
specialization_graph.rs
|
use crate::ich::{self, StableHashingContext};
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::fold::TypeFoldable;
use crate::ty::{self, TyCtxt};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorReported;
use rustc_hir::def_id::{DefId, DefIdMap};
use rustc_span::symbol::Ident;
/// A per-trait graph of impls in specialization order. At the moment, this
/// graph forms a tree rooted with the trait itself, with all other nodes
/// representing impls, and parent-child relationships representing
/// specializations.
///
/// The graph provides two key services:
///
/// - Construction. This implicitly checks for overlapping impls (i.e., impls
/// that overlap but where neither specializes the other -- an artifact of the
/// simple "chain" rule.
///
/// - Parent extraction. In particular, the graph can give you the *immediate*
/// parents of a given specializing impl, which is needed for extracting
/// default items amongst other things. In the simple "chain" rule, every impl
/// has at most one parent.
#[derive(TyEncodable, TyDecodable, HashStable, Debug)]
pub struct Graph {
/// All impls have a parent; the "root" impls have as their parent the `def_id`
/// of the trait.
pub parent: DefIdMap<DefId>,
/// The "root" impls are found by looking up the trait's def_id.
pub children: DefIdMap<Children>,
/// Whether an error was emitted while constructing the graph.
pub has_errored: bool,
}
impl Graph {
pub fn new() -> Graph {
Graph { parent: Default::default(), children: Default::default(), has_errored: false }
}
/// The parent of a given impl, which is the `DefId` of the trait when the
/// impl is a "specialization root".
pub fn parent(&self, child: DefId) -> DefId {
*self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {:?}", child))
}
}
/// Children of a given impl, grouped into blanket/non-blanket varieties as is
/// done in `TraitDef`.
#[derive(Default, TyEncodable, TyDecodable, Debug)]
pub struct Children {
// Impls of a trait (or specializations of a given impl). To allow for
// quicker lookup, the impls are indexed by a simplified version of their
// `Self` type: impls with a simplifiable `Self` are stored in
// `nonblanket_impls` keyed by it, while all other impls are stored in
// `blanket_impls`.
//
// A similar division is used within `TraitDef`, but the lists there collect
// together *all* the impls for a trait, and are populated prior to building
// the specialization graph.
/// Impls of the trait.
pub nonblanket_impls: FxHashMap<SimplifiedType, Vec<DefId>>,
/// Blanket impls associated with the trait.
pub blanket_impls: Vec<DefId>,
}
/// A node in the specialization graph is either an impl or a trait
/// definition; either can serve as a source of item definitions.
/// There is always exactly one trait definition node: the root.
#[derive(Debug, Copy, Clone)]
pub enum Node {
Impl(DefId),
Trait(DefId),
}
impl<'tcx> Node {
pub fn is_from_trait(&self) -> bool {
matches!(self, Node::Trait(..))
}
/// Iterate over the items defined directly by the given (impl or trait) node.
pub fn items(&self, tcx: TyCtxt<'tcx>) -> impl 'tcx + Iterator<Item = &'tcx ty::AssocItem> {
tcx.associated_items(self.def_id()).in_definition_order()
}
/// Finds an associated item defined in this node.
///
/// If this returns `None`, the item can potentially still be found in
/// parents of this node.
pub fn
|
(
&self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
trait_def_id: DefId,
) -> Option<ty::AssocItem> {
tcx.associated_items(self.def_id())
.filter_by_name_unhygienic(trait_item_name.name)
.find(move |impl_item| {
trait_item_kind == impl_item.kind
&& tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id)
})
.copied()
}
pub fn def_id(&self) -> DefId {
match *self {
Node::Impl(did) => did,
Node::Trait(did) => did,
}
}
}
#[derive(Copy, Clone)]
pub struct Ancestors<'tcx> {
trait_def_id: DefId,
specialization_graph: &'tcx Graph,
current_source: Option<Node>,
}
impl Iterator for Ancestors<'_> {
type Item = Node;
fn next(&mut self) -> Option<Node> {
let cur = self.current_source.take();
if let Some(Node::Impl(cur_impl)) = cur {
let parent = self.specialization_graph.parent(cur_impl);
self.current_source = if parent == self.trait_def_id {
Some(Node::Trait(parent))
} else {
Some(Node::Impl(parent))
};
}
cur
}
}
/// Information about the most specialized definition of an associated item.
pub struct LeafDef {
/// The associated item described by this `LeafDef`.
pub item: ty::AssocItem,
/// The node in the specialization graph containing the definition of `item`.
pub defining_node: Node,
/// The "top-most" (ie. least specialized) specialization graph node that finalized the
/// definition of `item`.
///
/// Example:
///
/// ```
/// trait Tr {
/// fn assoc(&self);
/// }
///
/// impl<T> Tr for T {
/// default fn assoc(&self) {}
/// }
///
/// impl Tr for u8 {}
/// ```
///
/// If we start the leaf definition search at `impl Tr for u8`, that impl will be the
/// `finalizing_node`, while `defining_node` will be the generic impl.
///
/// If the leaf definition search is started at the generic impl, `finalizing_node` will be
/// `None`, since the most specialized impl we found still allows overriding the method
/// (doesn't finalize it).
pub finalizing_node: Option<Node>,
}
impl LeafDef {
/// Returns whether this definition is known to not be further specializable.
pub fn is_final(&self) -> bool {
self.finalizing_node.is_some()
}
}
impl<'tcx> Ancestors<'tcx> {
/// Finds the bottom-most (ie. most specialized) definition of an associated
/// item.
pub fn leaf_def(
mut self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
) -> Option<LeafDef> {
let trait_def_id = self.trait_def_id;
let mut finalizing_node = None;
self.find_map(|node| {
if let Some(item) = node.item(tcx, trait_item_name, trait_item_kind, trait_def_id) {
if finalizing_node.is_none() {
let is_specializable = item.defaultness.is_default()
|| tcx.impl_defaultness(node.def_id()).is_default();
if!is_specializable {
finalizing_node = Some(node);
}
}
Some(LeafDef { item, defining_node: node, finalizing_node })
} else {
// Item not mentioned. This "finalizes" any defaulted item provided by an ancestor.
finalizing_node = Some(node);
None
}
})
}
}
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
///
/// Returns `Err` if an error was reported while building the specialization
/// graph.
pub fn ancestors(
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
start_from_impl: DefId,
) -> Result<Ancestors<'tcx>, ErrorReported> {
let specialization_graph = tcx.specialization_graph_of(trait_def_id);
if specialization_graph.has_errored || tcx.type_of(start_from_impl).references_error() {
Err(ErrorReported)
} else {
Ok(Ancestors {
trait_def_id,
specialization_graph,
current_source: Some(Node::Impl(start_from_impl)),
})
}
}
impl<'a> HashStable<StableHashingContext<'a>> for Children {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let Children { ref nonblanket_impls, ref blanket_impls } = *self;
ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, nonblanket_impls);
}
}
|
item
|
identifier_name
|
specialization_graph.rs
|
use crate::ich::{self, StableHashingContext};
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::fold::TypeFoldable;
use crate::ty::{self, TyCtxt};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorReported;
use rustc_hir::def_id::{DefId, DefIdMap};
use rustc_span::symbol::Ident;
/// A per-trait graph of impls in specialization order. At the moment, this
/// graph forms a tree rooted with the trait itself, with all other nodes
/// representing impls, and parent-child relationships representing
/// specializations.
///
/// The graph provides two key services:
///
/// - Construction. This implicitly checks for overlapping impls (i.e., impls
/// that overlap but where neither specializes the other -- an artifact of the
/// simple "chain" rule.
///
/// - Parent extraction. In particular, the graph can give you the *immediate*
/// parents of a given specializing impl, which is needed for extracting
/// default items amongst other things. In the simple "chain" rule, every impl
/// has at most one parent.
#[derive(TyEncodable, TyDecodable, HashStable, Debug)]
pub struct Graph {
/// All impls have a parent; the "root" impls have as their parent the `def_id`
/// of the trait.
pub parent: DefIdMap<DefId>,
/// The "root" impls are found by looking up the trait's def_id.
pub children: DefIdMap<Children>,
/// Whether an error was emitted while constructing the graph.
pub has_errored: bool,
}
impl Graph {
pub fn new() -> Graph {
Graph { parent: Default::default(), children: Default::default(), has_errored: false }
}
/// The parent of a given impl, which is the `DefId` of the trait when the
/// impl is a "specialization root".
pub fn parent(&self, child: DefId) -> DefId {
*self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {:?}", child))
}
}
/// Children of a given impl, grouped into blanket/non-blanket varieties as is
/// done in `TraitDef`.
#[derive(Default, TyEncodable, TyDecodable, Debug)]
pub struct Children {
// Impls of a trait (or specializations of a given impl). To allow for
// quicker lookup, the impls are indexed by a simplified version of their
// `Self` type: impls with a simplifiable `Self` are stored in
// `nonblanket_impls` keyed by it, while all other impls are stored in
// `blanket_impls`.
//
// A similar division is used within `TraitDef`, but the lists there collect
// together *all* the impls for a trait, and are populated prior to building
// the specialization graph.
/// Impls of the trait.
pub nonblanket_impls: FxHashMap<SimplifiedType, Vec<DefId>>,
/// Blanket impls associated with the trait.
pub blanket_impls: Vec<DefId>,
}
/// A node in the specialization graph is either an impl or a trait
/// definition; either can serve as a source of item definitions.
/// There is always exactly one trait definition node: the root.
#[derive(Debug, Copy, Clone)]
pub enum Node {
Impl(DefId),
Trait(DefId),
}
impl<'tcx> Node {
pub fn is_from_trait(&self) -> bool {
matches!(self, Node::Trait(..))
}
/// Iterate over the items defined directly by the given (impl or trait) node.
pub fn items(&self, tcx: TyCtxt<'tcx>) -> impl 'tcx + Iterator<Item = &'tcx ty::AssocItem> {
tcx.associated_items(self.def_id()).in_definition_order()
}
/// Finds an associated item defined in this node.
///
/// If this returns `None`, the item can potentially still be found in
/// parents of this node.
pub fn item(
&self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
trait_def_id: DefId,
) -> Option<ty::AssocItem> {
tcx.associated_items(self.def_id())
.filter_by_name_unhygienic(trait_item_name.name)
.find(move |impl_item| {
trait_item_kind == impl_item.kind
&& tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id)
})
.copied()
}
pub fn def_id(&self) -> DefId {
match *self {
Node::Impl(did) => did,
Node::Trait(did) => did,
}
}
}
#[derive(Copy, Clone)]
pub struct Ancestors<'tcx> {
trait_def_id: DefId,
specialization_graph: &'tcx Graph,
current_source: Option<Node>,
}
impl Iterator for Ancestors<'_> {
type Item = Node;
fn next(&mut self) -> Option<Node> {
let cur = self.current_source.take();
if let Some(Node::Impl(cur_impl)) = cur {
let parent = self.specialization_graph.parent(cur_impl);
self.current_source = if parent == self.trait_def_id {
Some(Node::Trait(parent))
} else {
Some(Node::Impl(parent))
};
}
cur
}
}
/// Information about the most specialized definition of an associated item.
pub struct LeafDef {
/// The associated item described by this `LeafDef`.
pub item: ty::AssocItem,
/// The node in the specialization graph containing the definition of `item`.
pub defining_node: Node,
/// The "top-most" (ie. least specialized) specialization graph node that finalized the
/// definition of `item`.
///
/// Example:
///
/// ```
/// trait Tr {
/// fn assoc(&self);
/// }
///
/// impl<T> Tr for T {
/// default fn assoc(&self) {}
/// }
///
/// impl Tr for u8 {}
/// ```
///
/// If we start the leaf definition search at `impl Tr for u8`, that impl will be the
/// `finalizing_node`, while `defining_node` will be the generic impl.
///
/// If the leaf definition search is started at the generic impl, `finalizing_node` will be
|
impl LeafDef {
/// Returns whether this definition is known to not be further specializable.
pub fn is_final(&self) -> bool {
self.finalizing_node.is_some()
}
}
impl<'tcx> Ancestors<'tcx> {
/// Finds the bottom-most (ie. most specialized) definition of an associated
/// item.
pub fn leaf_def(
mut self,
tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
) -> Option<LeafDef> {
let trait_def_id = self.trait_def_id;
let mut finalizing_node = None;
self.find_map(|node| {
if let Some(item) = node.item(tcx, trait_item_name, trait_item_kind, trait_def_id) {
if finalizing_node.is_none() {
let is_specializable = item.defaultness.is_default()
|| tcx.impl_defaultness(node.def_id()).is_default();
if!is_specializable {
finalizing_node = Some(node);
}
}
Some(LeafDef { item, defining_node: node, finalizing_node })
} else {
// Item not mentioned. This "finalizes" any defaulted item provided by an ancestor.
finalizing_node = Some(node);
None
}
})
}
}
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
///
/// Returns `Err` if an error was reported while building the specialization
/// graph.
pub fn ancestors(
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
start_from_impl: DefId,
) -> Result<Ancestors<'tcx>, ErrorReported> {
let specialization_graph = tcx.specialization_graph_of(trait_def_id);
if specialization_graph.has_errored || tcx.type_of(start_from_impl).references_error() {
Err(ErrorReported)
} else {
Ok(Ancestors {
trait_def_id,
specialization_graph,
current_source: Some(Node::Impl(start_from_impl)),
})
}
}
impl<'a> HashStable<StableHashingContext<'a>> for Children {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let Children { ref nonblanket_impls, ref blanket_impls } = *self;
ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, nonblanket_impls);
}
}
|
/// `None`, since the most specialized impl we found still allows overriding the method
/// (doesn't finalize it).
pub finalizing_node: Option<Node>,
}
|
random_line_split
|
main.rs
|
//! This expands upon the implementation defined on [Rosetta Code][element definition] and consists
//! of the relevant lines from the `LinkedList` implementation in the Rust standard library.
//!
//! [element definition]: http://rosettacode.org/wiki/Doubly-linked_list/Element_definition
#![allow(dead_code)]
use std::mem;
use std::ptr;
|
pub struct LinkedList<T> {
length: usize,
list_head: Link<T>,
list_tail: Rawlink<Node<T>>,
}
type Link<T> = Option<Box<Node<T>>>;
struct Rawlink<T> {
p: *mut T,
}
struct Node<T> {
next: Link<T>,
prev: Rawlink<Node<T>>,
value: T,
}
impl<T> Node<T> {
fn new(v: T) -> Node<T> {
Node {
value: v,
next: None,
prev: Rawlink::none(),
}
}
}
impl<T> Rawlink<T> {
fn none() -> Self {
Rawlink { p: ptr::null_mut() }
}
fn some(n: &mut T) -> Rawlink<T> {
Rawlink { p: n }
}
}
impl<'a, T> From<&'a mut Link<T>> for Rawlink<Node<T>> {
fn from(node: &'a mut Link<T>) -> Self {
match node.as_mut() {
None => Rawlink::none(),
Some(ptr) => Rawlink::some(ptr),
}
}
}
fn link_no_prev<T>(mut next: Box<Node<T>>) -> Link<T> {
next.prev = Rawlink::none();
Some(next)
}
impl<T> LinkedList<T> {
pub fn new() -> LinkedList<T> {
LinkedList {
length: 0,
list_head: None,
list_tail: Rawlink { p: ptr::null_mut() },
}
}
#[inline]
fn push_front_node(&mut self, mut new_head: Box<Node<T>>) {
match self.list_head {
None => {
self.list_head = link_no_prev(new_head);
self.list_tail = Rawlink::from(&mut self.list_head);
}
Some(ref mut head) => {
new_head.prev = Rawlink::none();
head.prev = Rawlink::some(&mut *new_head);
mem::swap(head, &mut new_head);
head.next = Some(new_head);
}
}
self.length += 1;
}
pub fn push_front(&mut self, elt: T) {
self.push_front_node(Box::new(Node::new(elt)));
}
}
impl<T> Default for LinkedList<T> {
fn default() -> Self {
Self::new()
}
}
fn main() {
use std::collections;
let mut list1 = collections::LinkedList::new();
list1.push_front(8);
let mut list2 = LinkedList::new();
list2.push_front(8);
}
|
random_line_split
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.