file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes: identifier_name, identifier_body, conditional_block, random_line_split)
---|---|---|---|---|
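Each row is a single fill-in-the-middle (FIM) sample: a source file split into a `prefix`, the masked `middle` span, and the trailing `suffix`, with `fim_type` recording what kind of span was masked. A minimal sketch of how a row reassembles, assuming the usual FIM convention that the three pieces concatenate back into the original file (the `FimRow` struct and `reconstruct` helper below are illustrative, not dataset API):

```rust
// Illustrative row layout mirroring the columns above (hypothetical names).
struct FimRow<'a> {
    file_name: &'a str,
    prefix: &'a str,
    middle: &'a str,
    suffix: &'a str,
    fim_type: &'a str, // identifier_name, identifier_body, conditional_block, or random_line_split
}

// The ground-truth file is simply prefix + middle + suffix.
fn reconstruct(row: &FimRow) -> String {
    format!("{}{}{}", row.prefix, row.middle, row.suffix)
}

fn main() {
    // Mirrors the player.rs identifier_name row below: the masked span is the method name `x`.
    let row = FimRow {
        file_name: "player.rs",
        prefix: "pub fn ",
        middle: "x",
        suffix: "(&self, lag: f64) -> i32 { /* ... */ }",
        fim_type: "identifier_name",
    };
    assert!(reconstruct(&row).starts_with("pub fn x(&self, lag: f64)"));
    println!("{}: {} sample reconstructed", row.file_name, row.fim_type);
}
```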
media_queries.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! [Media queries][mq].
//!
//! [mq]: https://drafts.csswg.org/mediaqueries/
use Atom;
use context::QuirksMode;
use cssparser::{Delimiter, Parser, Token, ParserInput};
use parser::ParserContext;
use selectors::parser::SelectorParseError;
use serialize_comma_separated_list;
use std::fmt;
use str::string_as_ascii_lowercase;
use style_traits::{ToCss, ParseError, StyleParseError};
use values::CustomIdent;
#[cfg(feature = "servo")]
pub use servo::media_queries::{Device, Expression};
#[cfg(feature = "gecko")]
pub use gecko::media_queries::{Device, Expression};
/// A type that encapsulates a media query list.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaList {
/// The list of media queries.
pub media_queries: Vec<MediaQuery>,
}
impl ToCss for MediaList {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write
{
serialize_comma_separated_list(dest, &self.media_queries)
}
}
impl MediaList {
/// Create an empty MediaList.
pub fn empty() -> Self {
MediaList { media_queries: vec![] }
}
}
/// https://drafts.csswg.org/mediaqueries/#mq-prefix
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Copy, Debug, Eq, PartialEq, ToCss)]
pub enum Qualifier {
/// Hide a media query from legacy UAs:
/// https://drafts.csswg.org/mediaqueries/#mq-only
Only,
/// Negate a media query:
/// https://drafts.csswg.org/mediaqueries/#mq-not
Not,
}
/// A [media query][mq].
///
/// [mq]: https://drafts.csswg.org/mediaqueries/
#[derive(PartialEq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaQuery {
/// The qualifier for this query.
pub qualifier: Option<Qualifier>,
/// The media type for this query, which can be known, unknown, or "all".
pub media_type: MediaQueryType,
/// The set of expressions that this media query contains.
pub expressions: Vec<Expression>,
}
impl MediaQuery {
/// Return a media query that never matches, used when we fail to parse
/// a given media query.
fn never_matching() -> Self {
Self::new(Some(Qualifier::Not), MediaQueryType::All, vec![])
}
/// Trivially constructs a new media query.
pub fn new(qualifier: Option<Qualifier>,
media_type: MediaQueryType,
expressions: Vec<Expression>) -> MediaQuery {
MediaQuery {
qualifier: qualifier,
media_type: media_type,
expressions: expressions,
}
}
}
impl ToCss for MediaQuery {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
if let Some(qual) = self.qualifier {
qual.to_css(dest)?;
write!(dest, " ")?;
}
match self.media_type {
MediaQueryType::All => {
// We need to print "all" if there's a qualifier, or there's
// just an empty list of expressions.
//
// Otherwise, we'd serialize media queries like "(min-width:
// 40px)" in "all (min-width: 40px)", which is unexpected.
if self.qualifier.is_some() || self.expressions.is_empty() {
write!(dest, "all")?;
}
},
MediaQueryType::Concrete(MediaType(ref desc)) => desc.to_css(dest)?,
}
if self.expressions.is_empty() {
return Ok(());
}
if self.media_type != MediaQueryType::All || self.qualifier.is_some() {
write!(dest, " and ")?;
}
self.expressions[0].to_css(dest)?;
for expr in self.expressions.iter().skip(1) {
write!(dest, " and ")?;
expr.to_css(dest)?;
}
Ok(())
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum MediaQueryType {
/// A media type that matches every device.
All,
/// A specific media type.
Concrete(MediaType),
}
impl MediaQueryType {
fn parse(ident: &str) -> Result<Self, ()> {
match_ignore_ascii_case! { ident,
"all" => return Ok(MediaQueryType::All),
_ => (),
};
// If parseable, accept this type as a concrete type.
MediaType::parse(ident).map(MediaQueryType::Concrete)
}
fn matches(&self, other: MediaType) -> bool {
match *self {
MediaQueryType::All => true,
MediaQueryType::Concrete(ref known_type) => *known_type == other,
}
}
}
/// https://drafts.csswg.org/mediaqueries/#media-types
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaType(pub CustomIdent);
impl MediaType {
/// The `screen` media type.
pub fn
|
() -> Self {
MediaType(CustomIdent(atom!("screen")))
}
/// The `print` media type.
pub fn print() -> Self {
MediaType(CustomIdent(atom!("print")))
}
fn parse(name: &str) -> Result<Self, ()> {
// From https://drafts.csswg.org/mediaqueries/#mq-syntax:
//
// The <media-type> production does not include the keywords not, or, and, and only.
//
// Here we also perform the to-ascii-lowercase part of the serialization
// algorithm: https://drafts.csswg.org/cssom/#serializing-media-queries
match_ignore_ascii_case! { name,
"not" | "or" | "and" | "only" => Err(()),
_ => Ok(MediaType(CustomIdent(Atom::from(string_as_ascii_lowercase(name))))),
}
}
}
impl MediaQuery {
/// Parse a media query given css input.
///
/// Returns an error if any of the expressions is unknown.
pub fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<MediaQuery, ParseError<'i>> {
let mut expressions = vec![];
let qualifier = if input.try(|input| input.expect_ident_matching("only")).is_ok() {
Some(Qualifier::Only)
} else if input.try(|input| input.expect_ident_matching("not")).is_ok() {
Some(Qualifier::Not)
} else {
None
};
let media_type = match input.try(|i| i.expect_ident_cloned()) {
Ok(ident) => {
let result: Result<_, ParseError> = MediaQueryType::parse(&*ident)
.map_err(|()| SelectorParseError::UnexpectedIdent(ident.clone()).into());
result?
}
Err(_) => {
// Media type is only optional if qualifier is not specified.
if qualifier.is_some() {
return Err(StyleParseError::UnspecifiedError.into())
}
// Without a media type, require at least one expression.
expressions.push(Expression::parse(context, input)?);
MediaQueryType::All
}
};
// Parse any subsequent expressions
loop {
if input.try(|input| input.expect_ident_matching("and")).is_err() {
return Ok(MediaQuery::new(qualifier, media_type, expressions))
}
expressions.push(Expression::parse(context, input)?)
}
}
}
/// Parse a media query list from CSS.
///
/// Always returns a media query list. If any invalid media query is found, the
/// media query list is only filled with the equivalent of "not all", see:
///
/// https://drafts.csswg.org/mediaqueries/#error-handling
pub fn parse_media_query_list(context: &ParserContext, input: &mut Parser) -> MediaList {
if input.is_exhausted() {
return MediaList::empty()
}
let mut media_queries = vec![];
loop {
match input.parse_until_before(Delimiter::Comma, |i| MediaQuery::parse(context, i)) {
Ok(mq) => {
media_queries.push(mq);
},
Err(..) => {
media_queries.push(MediaQuery::never_matching());
},
}
match input.next() {
Ok(&Token::Comma) => {},
Ok(_) => unreachable!(),
Err(_) => break,
}
}
MediaList {
media_queries: media_queries,
}
}
impl MediaList {
/// Evaluate a whole `MediaList` against `Device`.
pub fn evaluate(&self, device: &Device, quirks_mode: QuirksMode) -> bool {
// Check if it is an empty media query list or any queries match (OR condition)
// https://drafts.csswg.org/mediaqueries-4/#mq-list
self.media_queries.is_empty() || self.media_queries.iter().any(|mq| {
let media_match = mq.media_type.matches(device.media_type());
// Check if all conditions match (AND condition)
let query_match =
media_match &&
mq.expressions.iter()
.all(|expression| expression.matches(&device, quirks_mode));
// Apply the logical NOT qualifier to the result
match mq.qualifier {
Some(Qualifier::Not) => !query_match,
_ => query_match,
}
})
}
/// Whether this `MediaList` contains no media queries.
pub fn is_empty(&self) -> bool {
self.media_queries.is_empty()
}
/// Append a new media query item to the media list.
/// https://drafts.csswg.org/cssom/#dom-medialist-appendmedium
///
/// Returns true if added, false if the medium string failed to parse.
pub fn append_medium(&mut self, context: &ParserContext, new_medium: &str) -> bool {
let mut input = ParserInput::new(new_medium);
let mut parser = Parser::new(&mut input);
let new_query = match MediaQuery::parse(&context, &mut parser) {
Ok(query) => query,
Err(_) => { return false; }
};
// This algorithm doesn't actually match the current spec,
// but it matches the behavior of Gecko and Edge.
// See https://github.com/w3c/csswg-drafts/issues/697
self.media_queries.retain(|query| query != &new_query);
self.media_queries.push(new_query);
true
}
/// Delete a media query from the media list.
/// https://drafts.csswg.org/cssom/#dom-medialist-deletemedium
///
/// Returns true if found and deleted, false otherwise.
pub fn delete_medium(&mut self, context: &ParserContext, old_medium: &str) -> bool {
let mut input = ParserInput::new(old_medium);
let mut parser = Parser::new(&mut input);
let old_query = match MediaQuery::parse(context, &mut parser) {
Ok(query) => query,
Err(_) => { return false; }
};
let old_len = self.media_queries.len();
self.media_queries.retain(|query| query != &old_query);
old_len != self.media_queries.len()
}
}
|
screen
|
identifier_name
|
media_queries.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! [Media queries][mq].
//!
//! [mq]: https://drafts.csswg.org/mediaqueries/
use Atom;
use context::QuirksMode;
use cssparser::{Delimiter, Parser, Token, ParserInput};
use parser::ParserContext;
use selectors::parser::SelectorParseError;
use serialize_comma_separated_list;
use std::fmt;
use str::string_as_ascii_lowercase;
use style_traits::{ToCss, ParseError, StyleParseError};
use values::CustomIdent;
#[cfg(feature = "servo")]
pub use servo::media_queries::{Device, Expression};
#[cfg(feature = "gecko")]
pub use gecko::media_queries::{Device, Expression};
/// A type that encapsulates a media query list.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaList {
/// The list of media queries.
pub media_queries: Vec<MediaQuery>,
}
impl ToCss for MediaList {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write
{
serialize_comma_separated_list(dest, &self.media_queries)
}
}
impl MediaList {
/// Create an empty MediaList.
pub fn empty() -> Self {
MediaList { media_queries: vec![] }
}
}
/// https://drafts.csswg.org/mediaqueries/#mq-prefix
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Copy, Debug, Eq, PartialEq, ToCss)]
pub enum Qualifier {
/// Hide a media query from legacy UAs:
/// https://drafts.csswg.org/mediaqueries/#mq-only
Only,
/// Negate a media query:
/// https://drafts.csswg.org/mediaqueries/#mq-not
Not,
}
/// A [media query][mq].
///
/// [mq]: https://drafts.csswg.org/mediaqueries/
#[derive(PartialEq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaQuery {
/// The qualifier for this query.
pub qualifier: Option<Qualifier>,
/// The media type for this query, which can be known, unknown, or "all".
pub media_type: MediaQueryType,
/// The set of expressions that this media query contains.
pub expressions: Vec<Expression>,
}
impl MediaQuery {
/// Return a media query that never matches, used when we fail to parse
/// a given media query.
fn never_matching() -> Self {
Self::new(Some(Qualifier::Not), MediaQueryType::All, vec![])
}
/// Trivially constructs a new media query.
pub fn new(qualifier: Option<Qualifier>,
media_type: MediaQueryType,
expressions: Vec<Expression>) -> MediaQuery {
MediaQuery {
qualifier: qualifier,
media_type: media_type,
expressions: expressions,
}
}
}
impl ToCss for MediaQuery {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
if let Some(qual) = self.qualifier {
qual.to_css(dest)?;
write!(dest, " ")?;
}
match self.media_type {
MediaQueryType::All => {
// We need to print "all" if there's a qualifier, or there's
// just an empty list of expressions.
//
// Otherwise, we'd serialize media queries like "(min-width:
// 40px)" in "all (min-width: 40px)", which is unexpected.
if self.qualifier.is_some() || self.expressions.is_empty() {
write!(dest, "all")?;
}
},
MediaQueryType::Concrete(MediaType(ref desc)) => desc.to_css(dest)?,
}
if self.expressions.is_empty() {
return Ok(());
}
if self.media_type != MediaQueryType::All || self.qualifier.is_some() {
write!(dest, " and ")?;
}
self.expressions[0].to_css(dest)?;
for expr in self.expressions.iter().skip(1) {
write!(dest, " and ")?;
expr.to_css(dest)?;
}
Ok(())
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum MediaQueryType {
/// A media type that matches every device.
All,
/// A specific media type.
Concrete(MediaType),
}
impl MediaQueryType {
fn parse(ident: &str) -> Result<Self, ()>
|
fn matches(&self, other: MediaType) -> bool {
match *self {
MediaQueryType::All => true,
MediaQueryType::Concrete(ref known_type) => *known_type == other,
}
}
}
/// https://drafts.csswg.org/mediaqueries/#media-types
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaType(pub CustomIdent);
impl MediaType {
/// The `screen` media type.
pub fn screen() -> Self {
MediaType(CustomIdent(atom!("screen")))
}
/// The `print` media type.
pub fn print() -> Self {
MediaType(CustomIdent(atom!("print")))
}
fn parse(name: &str) -> Result<Self, ()> {
// From https://drafts.csswg.org/mediaqueries/#mq-syntax:
//
// The <media-type> production does not include the keywords not, or, and, and only.
//
// Here we also perform the to-ascii-lowercase part of the serialization
// algorithm: https://drafts.csswg.org/cssom/#serializing-media-queries
match_ignore_ascii_case! { name,
"not" | "or" | "and" | "only" => Err(()),
_ => Ok(MediaType(CustomIdent(Atom::from(string_as_ascii_lowercase(name))))),
}
}
}
impl MediaQuery {
/// Parse a media query given css input.
///
/// Returns an error if any of the expressions is unknown.
pub fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<MediaQuery, ParseError<'i>> {
let mut expressions = vec![];
let qualifier = if input.try(|input| input.expect_ident_matching("only")).is_ok() {
Some(Qualifier::Only)
} else if input.try(|input| input.expect_ident_matching("not")).is_ok() {
Some(Qualifier::Not)
} else {
None
};
let media_type = match input.try(|i| i.expect_ident_cloned()) {
Ok(ident) => {
let result: Result<_, ParseError> = MediaQueryType::parse(&*ident)
.map_err(|()| SelectorParseError::UnexpectedIdent(ident.clone()).into());
result?
}
Err(_) => {
// Media type is only optional if qualifier is not specified.
if qualifier.is_some() {
return Err(StyleParseError::UnspecifiedError.into())
}
// Without a media type, require at least one expression.
expressions.push(Expression::parse(context, input)?);
MediaQueryType::All
}
};
// Parse any subsequent expressions
loop {
if input.try(|input| input.expect_ident_matching("and")).is_err() {
return Ok(MediaQuery::new(qualifier, media_type, expressions))
}
expressions.push(Expression::parse(context, input)?)
}
}
}
/// Parse a media query list from CSS.
///
/// Always returns a media query list. If any invalid media query is found, the
/// media query list is only filled with the equivalent of "not all", see:
///
/// https://drafts.csswg.org/mediaqueries/#error-handling
pub fn parse_media_query_list(context: &ParserContext, input: &mut Parser) -> MediaList {
if input.is_exhausted() {
return MediaList::empty()
}
let mut media_queries = vec![];
loop {
match input.parse_until_before(Delimiter::Comma, |i| MediaQuery::parse(context, i)) {
Ok(mq) => {
media_queries.push(mq);
},
Err(..) => {
media_queries.push(MediaQuery::never_matching());
},
}
match input.next() {
Ok(&Token::Comma) => {},
Ok(_) => unreachable!(),
Err(_) => break,
}
}
MediaList {
media_queries: media_queries,
}
}
impl MediaList {
/// Evaluate a whole `MediaList` against `Device`.
pub fn evaluate(&self, device: &Device, quirks_mode: QuirksMode) -> bool {
// Check if it is an empty media query list or any queries match (OR condition)
// https://drafts.csswg.org/mediaqueries-4/#mq-list
self.media_queries.is_empty() || self.media_queries.iter().any(|mq| {
let media_match = mq.media_type.matches(device.media_type());
// Check if all conditions match (AND condition)
let query_match =
media_match &&
mq.expressions.iter()
.all(|expression| expression.matches(&device, quirks_mode));
// Apply the logical NOT qualifier to the result
match mq.qualifier {
Some(Qualifier::Not) => !query_match,
_ => query_match,
}
})
}
/// Whether this `MediaList` contains no media queries.
pub fn is_empty(&self) -> bool {
self.media_queries.is_empty()
}
/// Append a new media query item to the media list.
/// https://drafts.csswg.org/cssom/#dom-medialist-appendmedium
///
/// Returns true if added, false if the medium string failed to parse.
pub fn append_medium(&mut self, context: &ParserContext, new_medium: &str) -> bool {
let mut input = ParserInput::new(new_medium);
let mut parser = Parser::new(&mut input);
let new_query = match MediaQuery::parse(&context, &mut parser) {
Ok(query) => query,
Err(_) => { return false; }
};
// This algorithm doesn't actually match the current spec,
// but it matches the behavior of Gecko and Edge.
// See https://github.com/w3c/csswg-drafts/issues/697
self.media_queries.retain(|query| query != &new_query);
self.media_queries.push(new_query);
true
}
/// Delete a media query from the media list.
/// https://drafts.csswg.org/cssom/#dom-medialist-deletemedium
///
/// Returns true if found and deleted, false otherwise.
pub fn delete_medium(&mut self, context: &ParserContext, old_medium: &str) -> bool {
let mut input = ParserInput::new(old_medium);
let mut parser = Parser::new(&mut input);
let old_query = match MediaQuery::parse(context, &mut parser) {
Ok(query) => query,
Err(_) => { return false; }
};
let old_len = self.media_queries.len();
self.media_queries.retain(|query| query != &old_query);
old_len != self.media_queries.len()
}
}
|
{
match_ignore_ascii_case! { ident,
"all" => return Ok(MediaQueryType::All),
_ => (),
};
// If parseable, accept this type as a concrete type.
MediaType::parse(ident).map(MediaQueryType::Concrete)
}
|
identifier_body
|
media_queries.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! [Media queries][mq].
//!
//! [mq]: https://drafts.csswg.org/mediaqueries/
use Atom;
use context::QuirksMode;
use cssparser::{Delimiter, Parser, Token, ParserInput};
use parser::ParserContext;
use selectors::parser::SelectorParseError;
use serialize_comma_separated_list;
use std::fmt;
use str::string_as_ascii_lowercase;
use style_traits::{ToCss, ParseError, StyleParseError};
use values::CustomIdent;
#[cfg(feature = "servo")]
pub use servo::media_queries::{Device, Expression};
#[cfg(feature = "gecko")]
pub use gecko::media_queries::{Device, Expression};
/// A type that encapsulates a media query list.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaList {
/// The list of media queries.
pub media_queries: Vec<MediaQuery>,
}
impl ToCss for MediaList {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write
{
serialize_comma_separated_list(dest, &self.media_queries)
}
}
impl MediaList {
/// Create an empty MediaList.
pub fn empty() -> Self {
MediaList { media_queries: vec![] }
}
}
/// https://drafts.csswg.org/mediaqueries/#mq-prefix
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Copy, Debug, Eq, PartialEq, ToCss)]
pub enum Qualifier {
/// Hide a media query from legacy UAs:
/// https://drafts.csswg.org/mediaqueries/#mq-only
Only,
/// Negate a media query:
/// https://drafts.csswg.org/mediaqueries/#mq-not
Not,
}
/// A [media query][mq].
///
/// [mq]: https://drafts.csswg.org/mediaqueries/
#[derive(PartialEq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaQuery {
/// The qualifier for this query.
pub qualifier: Option<Qualifier>,
/// The media type for this query, which can be known, unknown, or "all".
pub media_type: MediaQueryType,
/// The set of expressions that this media query contains.
pub expressions: Vec<Expression>,
}
impl MediaQuery {
/// Return a media query that never matches, used when we fail to parse
/// a given media query.
fn never_matching() -> Self {
Self::new(Some(Qualifier::Not), MediaQueryType::All, vec![])
}
/// Trivially constructs a new media query.
pub fn new(qualifier: Option<Qualifier>,
media_type: MediaQueryType,
expressions: Vec<Expression>) -> MediaQuery {
MediaQuery {
qualifier: qualifier,
media_type: media_type,
expressions: expressions,
}
}
}
impl ToCss for MediaQuery {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
if let Some(qual) = self.qualifier {
qual.to_css(dest)?;
write!(dest, " ")?;
}
match self.media_type {
MediaQueryType::All => {
// We need to print "all" if there's a qualifier, or there's
// just an empty list of expressions.
//
// Otherwise, we'd serialize media queries like "(min-width:
// 40px)" in "all (min-width: 40px)", which is unexpected.
if self.qualifier.is_some() || self.expressions.is_empty() {
write!(dest, "all")?;
}
},
MediaQueryType::Concrete(MediaType(ref desc)) => desc.to_css(dest)?,
}
if self.expressions.is_empty() {
return Ok(());
}
if self.media_type != MediaQueryType::All || self.qualifier.is_some() {
write!(dest, " and ")?;
}
self.expressions[0].to_css(dest)?;
for expr in self.expressions.iter().skip(1) {
write!(dest, " and ")?;
expr.to_css(dest)?;
}
Ok(())
}
}
/// http://dev.w3.org/csswg/mediaqueries-3/#media0
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum MediaQueryType {
/// A media type that matches every device.
All,
/// A specific media type.
Concrete(MediaType),
}
impl MediaQueryType {
fn parse(ident: &str) -> Result<Self, ()> {
match_ignore_ascii_case! { ident,
"all" => return Ok(MediaQueryType::All),
_ => (),
};
// If parseable, accept this type as a concrete type.
MediaType::parse(ident).map(MediaQueryType::Concrete)
}
fn matches(&self, other: MediaType) -> bool {
match *self {
MediaQueryType::All => true,
MediaQueryType::Concrete(ref known_type) => *known_type == other,
}
}
}
/// https://drafts.csswg.org/mediaqueries/#media-types
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct MediaType(pub CustomIdent);
impl MediaType {
/// The `screen` media type.
pub fn screen() -> Self {
MediaType(CustomIdent(atom!("screen")))
}
/// The `print` media type.
pub fn print() -> Self {
MediaType(CustomIdent(atom!("print")))
}
fn parse(name: &str) -> Result<Self, ()> {
// From https://drafts.csswg.org/mediaqueries/#mq-syntax:
//
// The <media-type> production does not include the keywords not, or, and, and only.
//
// Here we also perform the to-ascii-lowercase part of the serialization
// algorithm: https://drafts.csswg.org/cssom/#serializing-media-queries
match_ignore_ascii_case! { name,
"not" | "or" | "and" | "only" => Err(()),
_ => Ok(MediaType(CustomIdent(Atom::from(string_as_ascii_lowercase(name))))),
}
}
}
impl MediaQuery {
/// Parse a media query given css input.
///
/// Returns an error if any of the expressions is unknown.
pub fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<MediaQuery, ParseError<'i>> {
let mut expressions = vec![];
let qualifier = if input.try(|input| input.expect_ident_matching("only")).is_ok() {
Some(Qualifier::Only)
} else if input.try(|input| input.expect_ident_matching("not")).is_ok()
|
else {
None
};
let media_type = match input.try(|i| i.expect_ident_cloned()) {
Ok(ident) => {
let result: Result<_, ParseError> = MediaQueryType::parse(&*ident)
.map_err(|()| SelectorParseError::UnexpectedIdent(ident.clone()).into());
result?
}
Err(_) => {
// Media type is only optional if qualifier is not specified.
if qualifier.is_some() {
return Err(StyleParseError::UnspecifiedError.into())
}
// Without a media type, require at least one expression.
expressions.push(Expression::parse(context, input)?);
MediaQueryType::All
}
};
// Parse any subsequent expressions
loop {
if input.try(|input| input.expect_ident_matching("and")).is_err() {
return Ok(MediaQuery::new(qualifier, media_type, expressions))
}
expressions.push(Expression::parse(context, input)?)
}
}
}
/// Parse a media query list from CSS.
///
/// Always returns a media query list. If any invalid media query is found, the
/// media query list is only filled with the equivalent of "not all", see:
///
/// https://drafts.csswg.org/mediaqueries/#error-handling
pub fn parse_media_query_list(context: &ParserContext, input: &mut Parser) -> MediaList {
if input.is_exhausted() {
return MediaList::empty()
}
let mut media_queries = vec![];
loop {
match input.parse_until_before(Delimiter::Comma, |i| MediaQuery::parse(context, i)) {
Ok(mq) => {
media_queries.push(mq);
},
Err(..) => {
media_queries.push(MediaQuery::never_matching());
},
}
match input.next() {
Ok(&Token::Comma) => {},
Ok(_) => unreachable!(),
Err(_) => break,
}
}
MediaList {
media_queries: media_queries,
}
}
impl MediaList {
/// Evaluate a whole `MediaList` against `Device`.
pub fn evaluate(&self, device: &Device, quirks_mode: QuirksMode) -> bool {
// Check if it is an empty media query list or any queries match (OR condition)
// https://drafts.csswg.org/mediaqueries-4/#mq-list
self.media_queries.is_empty() || self.media_queries.iter().any(|mq| {
let media_match = mq.media_type.matches(device.media_type());
// Check if all conditions match (AND condition)
let query_match =
media_match &&
mq.expressions.iter()
.all(|expression| expression.matches(&device, quirks_mode));
// Apply the logical NOT qualifier to the result
match mq.qualifier {
Some(Qualifier::Not) => !query_match,
_ => query_match,
}
})
}
/// Whether this `MediaList` contains no media queries.
pub fn is_empty(&self) -> bool {
self.media_queries.is_empty()
}
/// Append a new media query item to the media list.
/// https://drafts.csswg.org/cssom/#dom-medialist-appendmedium
///
/// Returns true if added, false if the medium string failed to parse.
pub fn append_medium(&mut self, context: &ParserContext, new_medium: &str) -> bool {
let mut input = ParserInput::new(new_medium);
let mut parser = Parser::new(&mut input);
let new_query = match MediaQuery::parse(&context, &mut parser) {
Ok(query) => query,
Err(_) => { return false; }
};
// This algorithm doesn't actually match the current spec,
// but it matches the behavior of Gecko and Edge.
// See https://github.com/w3c/csswg-drafts/issues/697
self.media_queries.retain(|query| query != &new_query);
self.media_queries.push(new_query);
true
}
/// Delete a media query from the media list.
/// https://drafts.csswg.org/cssom/#dom-medialist-deletemedium
///
/// Returns true if found and deleted, false otherwise.
pub fn delete_medium(&mut self, context: &ParserContext, old_medium: &str) -> bool {
let mut input = ParserInput::new(old_medium);
let mut parser = Parser::new(&mut input);
let old_query = match MediaQuery::parse(context, &mut parser) {
Ok(query) => query,
Err(_) => { return false; }
};
let old_len = self.media_queries.len();
self.media_queries.retain(|query| query != &old_query);
old_len != self.media_queries.len()
}
}
|
{
Some(Qualifier::Not)
}
|
conditional_block
|
error_reporting.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Types used to report parsing errors.
#![deny(missing_docs)]
use cssparser::{Parser, SourcePosition};
use log;
use servo_url::ServoUrl;
/// A generic trait for an error reporter.
pub trait ParseErrorReporter : Sync + Send {
/// Called when the style engine detects an error.
///
/// Takes the current input being parsed, the source position it was
/// reported from, and a message.
fn report_error(&self,
input: &mut Parser,
position: SourcePosition,
message: &str,
url: &ServoUrl);
}
/// An error reporter that reports the errors to the `info` log channel.
///
/// TODO(emilio): The name of this reporter is a lie, and should be renamed!
pub struct StdoutErrorReporter;
impl ParseErrorReporter for StdoutErrorReporter {
fn report_error(&self,
input: &mut Parser,
position: SourcePosition,
message: &str,
url: &ServoUrl) {
if log_enabled!(log::LogLevel::Info)
|
}
}
|
{
let location = input.source_location(position);
info!("Url:\t{}\n{}:{} {}", url.as_str(), location.line, location.column, message)
}
|
conditional_block
|
error_reporting.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Types used to report parsing errors.
#![deny(missing_docs)]
use cssparser::{Parser, SourcePosition};
use log;
use servo_url::ServoUrl;
/// A generic trait for an error reporter.
pub trait ParseErrorReporter : Sync + Send {
/// Called when the style engine detects an error.
///
/// Takes the current input being parsed, the source position it was
/// reported from, and a message.
fn report_error(&self,
input: &mut Parser,
position: SourcePosition,
message: &str,
url: &ServoUrl);
}
/// An error reporter that reports the errors to the `info` log channel.
///
/// TODO(emilio): The name of this reporter is a lie, and should be renamed!
pub struct
|
;
impl ParseErrorReporter for StdoutErrorReporter {
fn report_error(&self,
input: &mut Parser,
position: SourcePosition,
message: &str,
url: &ServoUrl) {
if log_enabled!(log::LogLevel::Info) {
let location = input.source_location(position);
info!("Url:\t{}\n{}:{} {}", url.as_str(), location.line, location.column, message)
}
}
}
|
StdoutErrorReporter
|
identifier_name
|
error_reporting.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Types used to report parsing errors.
#![deny(missing_docs)]
use cssparser::{Parser, SourcePosition};
use log;
use servo_url::ServoUrl;
/// A generic trait for an error reporter.
pub trait ParseErrorReporter : Sync + Send {
/// Called when the style engine detects an error.
///
/// Takes the current input being parsed, the source position it was
/// reported from, and a message.
fn report_error(&self,
input: &mut Parser,
position: SourcePosition,
message: &str,
url: &ServoUrl);
}
/// An error reporter that reports the errors to the `info` log channel.
///
/// TODO(emilio): The name of this reporter is a lie, and should be renamed!
pub struct StdoutErrorReporter;
impl ParseErrorReporter for StdoutErrorReporter {
fn report_error(&self,
input: &mut Parser,
position: SourcePosition,
message: &str,
url: &ServoUrl) {
if log_enabled!(log::LogLevel::Info) {
let location = input.source_location(position);
info!("Url:\t{}\n{}:{} {}", url.as_str(), location.line, location.column, message)
}
|
}
}
|
random_line_split
|
|
p068.rs
|
const N:usize = 5;
fn set_v(ans: &mut[usize],idx: &mut usize, vis: &mut[bool], value: usize) {
ans[*idx] = value;
vis[value] = true;
*idx+=1;
}
fn
|
(idx: &mut usize, vis: &mut[bool], value: usize) {
*idx -= 1;
vis[value] = false;
}
fn dfs(mut ans: &mut[usize],mut idx: &mut usize, mut vis: &mut[bool]) {
if *idx == 2*N {
for i in 1..N {
if ans[0]+ans[1]+ans[3] != ans[i*2]+ans[i*2+1]+ans[(i*2+3)%(2*N)] {
return ;
}
}
let mut mini = 0;
for i in 0..N {
if ans[i*2] < ans[mini*2] {
mini = i;
}
}
for ii in 0..N {
let i = (ii+mini)%N;
print!("{}{}{}",ans[i*2],ans[i*2+1],ans[(i*2+3)%(2*N)]);
}
println!("");
return
}
for v in (1 as usize..(2*N+1)).rev() {
// println!("v={}",v);
if vis[v] {
continue;
}
if *idx >= 5 && *idx % 2 ==1 {
if ans[0]+ans[1]+ans[3] != ans[*idx-3]+ans[*idx-2]+v {
continue;
}
}
set_v(&mut ans,&mut idx,&mut vis, v);
dfs(&mut ans,&mut idx,&mut vis);
unset_v(&mut idx,&mut vis, v);
}
}
fn main() {
let mut arr =[0 as usize;2*N];
let mut idx = 0;
let mut vis =[false;2*N+1];
set_v(&mut arr,&mut idx,&mut vis, 10);
dfs(&mut arr,&mut idx,&mut vis);
}
|
unset_v
|
identifier_name
|
p068.rs
|
const N:usize = 5;
fn set_v(ans: &mut[usize],idx: &mut usize, vis: &mut[bool], value: usize) {
ans[*idx] = value;
vis[value] = true;
*idx+=1;
}
fn unset_v(idx: &mut usize, vis: &mut[bool], value: usize) {
*idx -= 1;
vis[value] = false;
}
fn dfs(mut ans: &mut[usize],mut idx: &mut usize, mut vis: &mut[bool])
|
for v in (1 as usize..(2*N+1)).rev() {
// println!("v={}",v);
if vis[v] {
continue;
}
if *idx >= 5 && *idx % 2 ==1 {
if ans[0]+ans[1]+ans[3] != ans[*idx-3]+ans[*idx-2]+v {
continue;
}
}
set_v(&mut ans,&mut idx,&mut vis, v);
dfs(&mut ans,&mut idx,&mut vis);
unset_v(&mut idx,&mut vis, v);
}
}
fn main() {
let mut arr =[0 as usize;2*N];
let mut idx = 0;
let mut vis =[false;2*N+1];
set_v(&mut arr,&mut idx,&mut vis, 10);
dfs(&mut arr,&mut idx,&mut vis);
}
|
{
if *idx == 2*N {
for i in 1..N {
if ans[0]+ans[1]+ans[3] != ans[i*2]+ans[i*2+1]+ans[(i*2+3)%(2*N)] {
return ;
}
}
let mut mini = 0;
for i in 0..N {
if ans[i*2] < ans[mini*2] {
mini = i;
}
}
for ii in 0..N {
let i = (ii+mini)%N;
print!("{}{}{}",ans[i*2],ans[i*2+1],ans[(i*2+3)%(2*N)]);
}
println!("");
return
}
|
identifier_body
|
p068.rs
|
const N:usize = 5;
fn set_v(ans: &mut[usize],idx: &mut usize, vis: &mut[bool], value: usize) {
|
fn unset_v(idx: &mut usize, vis: &mut[bool], value: usize) {
*idx -= 1;
vis[value] = false;
}
fn dfs(mut ans: &mut[usize],mut idx: &mut usize, mut vis: &mut[bool]) {
if *idx == 2*N {
for i in 1..N {
if ans[0]+ans[1]+ans[3] != ans[i*2]+ans[i*2+1]+ans[(i*2+3)%(2*N)] {
return ;
}
}
let mut mini = 0;
for i in 0..N {
if ans[i*2] < ans[mini*2] {
mini = i;
}
}
for ii in 0..N {
let i = (ii+mini)%N;
print!("{}{}{}",ans[i*2],ans[i*2+1],ans[(i*2+3)%(2*N)]);
}
println!("");
return
}
for v in (1 as usize..(2*N+1)).rev() {
// println!("v={}",v);
if vis[v] {
continue;
}
if *idx >= 5 && *idx % 2 ==1 {
if ans[0]+ans[1]+ans[3] != ans[*idx-3]+ans[*idx-2]+v {
continue;
}
}
set_v(&mut ans,&mut idx,&mut vis, v);
dfs(&mut ans,&mut idx,&mut vis);
unset_v(&mut idx,&mut vis, v);
}
}
fn main() {
let mut arr =[0 as usize;2*N];
let mut idx = 0;
let mut vis =[false;2*N+1];
set_v(&mut arr,&mut idx,&mut vis, 10);
dfs(&mut arr,&mut idx,&mut vis);
}
|
ans[*idx] = value;
vis[value] = true;
*idx+=1;
}
|
random_line_split
|
p068.rs
|
const N:usize = 5;
fn set_v(ans: &mut[usize],idx: &mut usize, vis: &mut[bool], value: usize) {
ans[*idx] = value;
vis[value] = true;
*idx+=1;
}
fn unset_v(idx: &mut usize, vis: &mut[bool], value: usize) {
*idx -= 1;
vis[value] = false;
}
fn dfs(mut ans: &mut[usize],mut idx: &mut usize, mut vis: &mut[bool]) {
if *idx == 2*N {
for i in 1..N {
if ans[0]+ans[1]+ans[3] != ans[i*2]+ans[i*2+1]+ans[(i*2+3)%(2*N)]
|
}
let mut mini = 0;
for i in 0..N {
if ans[i*2] < ans[mini*2] {
mini = i;
}
}
for ii in 0..N {
let i = (ii+mini)%N;
print!("{}{}{}",ans[i*2],ans[i*2+1],ans[(i*2+3)%(2*N)]);
}
println!("");
return
}
for v in (1 as usize..(2*N+1)).rev() {
// println!("v={}",v);
if vis[v] {
continue;
}
if *idx >= 5 && *idx % 2 ==1 {
if ans[0]+ans[1]+ans[3] != ans[*idx-3]+ans[*idx-2]+v {
continue;
}
}
set_v(&mut ans,&mut idx,&mut vis, v);
dfs(&mut ans,&mut idx,&mut vis);
unset_v(&mut idx,&mut vis, v);
}
}
fn main() {
let mut arr =[0 as usize;2*N];
let mut idx = 0;
let mut vis =[false;2*N+1];
set_v(&mut arr,&mut idx,&mut vis, 10);
dfs(&mut arr,&mut idx,&mut vis);
}
|
{
return ;
}
|
conditional_block
|
room_id.rs
|
//! Matrix room identifiers.
use super::{matrix_uri::UriAction, EventId, MatrixToUri, MatrixUri, ServerName};
/// A Matrix [room ID].
///
/// A `RoomId` is generated randomly or converted from a string slice, and can be converted back
/// into a string as needed.
///
/// ```
/// # use std::convert::TryFrom;
/// # use ruma_common::RoomId;
/// assert_eq!(<&RoomId>::try_from("!n8f893n9:example.com").unwrap(), "!n8f893n9:example.com");
/// ```
///
/// [room ID]: https://spec.matrix.org/v1.2/appendices/#room-ids-and-event-ids
#[repr(transparent)]
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RoomId(str);
opaque_identifier_validated!(RoomId, ruma_identifiers_validation::room_id::validate);
impl RoomId {
/// Attempts to generate a `RoomId` for the given origin server with a localpart consisting of
/// 18 random ASCII characters.
///
/// Fails if the given homeserver cannot be parsed as a valid host.
#[cfg(feature = "rand")]
pub fn new(server_name: &ServerName) -> Box<Self> {
Self::from_owned(format!("!{}:{}", super::generate_localpart(18), server_name).into())
}
/// Returns the room's unique ID.
pub fn localpart(&self) -> &str {
&self.as_str()[1..self.colon_idx()]
}
/// Returns the server name of the room ID.
pub fn server_name(&self) -> &ServerName {
ServerName::from_borrowed(&self.as_str()[self.colon_idx() + 1..])
}
/// Create a `matrix.to` URI for this room ID.
///
/// # Example
///
/// ```
/// use ruma_common::{room_id, server_name};
///
/// assert_eq!(
/// room_id!("!somewhere:example.org")
/// .matrix_to_uri([&*server_name!("example.org"), &*server_name!("alt.example.org")])
/// .to_string(),
/// "https://matrix.to/#/%21somewhere%3Aexample.org?via=example.org&via=alt.example.org"
/// );
/// ```
pub fn matrix_to_uri<'a>(&self, via: impl IntoIterator<Item = &'a ServerName>) -> MatrixToUri {
MatrixToUri::new(self.into(), via.into_iter().collect())
}
/// Create a `matrix.to` URI for an event scoped under this room ID.
pub fn matrix_to_event_uri(&self, ev_id: &EventId) -> MatrixToUri {
MatrixToUri::new((self, ev_id).into(), Vec::new())
}
/// Create a `matrix:` URI for this room ID.
///
/// If `join` is `true`, a click on the URI should join the room.
///
/// # Example
///
/// ```
/// use ruma_common::{room_id, server_name};
///
/// assert_eq!(
/// room_id!("!somewhere:example.org")
/// .matrix_uri([&*server_name!("example.org"), &*server_name!("alt.example.org")], true)
/// .to_string(),
/// "matrix:roomid/somewhere:example.org?via=example.org&via=alt.example.org&action=join"
/// );
/// ```
pub fn matrix_uri<'a>(
&self,
via: impl IntoIterator<Item = &'a ServerName>,
join: bool,
) -> MatrixUri {
MatrixUri::new(
self.into(),
via.into_iter().collect(),
Some(UriAction::Join).filter(|_| join),
)
}
/// Create a `matrix:` URI for an event scoped under this room ID.
pub fn matrix_event_uri<'a>(
&self,
ev_id: &EventId,
via: impl IntoIterator<Item = &'a ServerName>,
) -> MatrixUri {
MatrixUri::new((self, ev_id).into(), via.into_iter().collect(), None)
}
fn colon_idx(&self) -> usize {
self.as_str().find(':').unwrap()
}
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use super::RoomId;
use crate::Error;
#[test]
fn valid_room_id() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com"
);
}
#[test]
fn empty_localpart() {
assert_eq!(
<&RoomId>::try_from("!:example.com").expect("Failed to create RoomId.").as_ref(),
"!:example.com"
);
}
#[cfg(feature = "rand")]
#[test]
fn generate_random_valid_room_id() {
use crate::server_name;
let room_id = RoomId::new(server_name!("example.com"));
let id_str = room_id.as_str();
assert!(id_str.starts_with('!'));
assert_eq!(id_str.len(), 31);
}
#[test]
fn serialize_valid_room_id() {
assert_eq!(
serde_json::to_string(
<&RoomId>::try_from("!29fhd83h92h0:example.com").expect("Failed to create RoomId.")
)
.expect("Failed to convert RoomId to JSON."),
r#""!29fhd83h92h0:example.com""#
);
}
#[test]
fn deserialize_valid_room_id()
|
#[test]
fn valid_room_id_with_explicit_standard_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:443")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com:443"
);
}
#[test]
fn valid_room_id_with_non_standard_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:5000")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com:5000"
);
}
#[test]
fn missing_room_id_sigil() {
assert_eq!(
<&RoomId>::try_from("carl:example.com").unwrap_err(),
Error::MissingLeadingSigil
);
}
#[test]
fn missing_room_id_delimiter() {
assert_eq!(<&RoomId>::try_from("!29fhd83h92h0").unwrap_err(), Error::MissingDelimiter);
}
#[test]
fn invalid_room_id_host() {
assert_eq!(<&RoomId>::try_from("!29fhd83h92h0:/").unwrap_err(), Error::InvalidServerName);
}
#[test]
fn invalid_room_id_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:notaport").unwrap_err(),
Error::InvalidServerName
);
}
}
|
{
assert_eq!(
serde_json::from_str::<Box<RoomId>>(r#""!29fhd83h92h0:example.com""#)
.expect("Failed to convert JSON to RoomId"),
<&RoomId>::try_from("!29fhd83h92h0:example.com").expect("Failed to create RoomId.")
);
}
|
identifier_body
|
room_id.rs
|
//! Matrix room identifiers.
use super::{matrix_uri::UriAction, EventId, MatrixToUri, MatrixUri, ServerName};
/// A Matrix [room ID].
///
/// A `RoomId` is generated randomly or converted from a string slice, and can be converted back
/// into a string as needed.
///
/// ```
/// # use std::convert::TryFrom;
/// # use ruma_common::RoomId;
/// assert_eq!(<&RoomId>::try_from("!n8f893n9:example.com").unwrap(), "!n8f893n9:example.com");
/// ```
///
/// [room ID]: https://spec.matrix.org/v1.2/appendices/#room-ids-and-event-ids
#[repr(transparent)]
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RoomId(str);
opaque_identifier_validated!(RoomId, ruma_identifiers_validation::room_id::validate);
impl RoomId {
/// Attempts to generate a `RoomId` for the given origin server with a localpart consisting of
/// 18 random ASCII characters.
///
/// Fails if the given homeserver cannot be parsed as a valid host.
#[cfg(feature = "rand")]
pub fn new(server_name: &ServerName) -> Box<Self> {
Self::from_owned(format!("!{}:{}", super::generate_localpart(18), server_name).into())
}
/// Returns the room's unique ID.
pub fn localpart(&self) -> &str {
&self.as_str()[1..self.colon_idx()]
}
/// Returns the server name of the room ID.
pub fn server_name(&self) -> &ServerName {
ServerName::from_borrowed(&self.as_str()[self.colon_idx() + 1..])
}
/// Create a `matrix.to` URI for this room ID.
///
/// # Example
///
/// ```
/// use ruma_common::{room_id, server_name};
///
/// assert_eq!(
/// room_id!("!somewhere:example.org")
/// .matrix_to_uri([&*server_name!("example.org"), &*server_name!("alt.example.org")])
/// .to_string(),
/// "https://matrix.to/#/%21somewhere%3Aexample.org?via=example.org&via=alt.example.org"
/// );
/// ```
pub fn matrix_to_uri<'a>(&self, via: impl IntoIterator<Item = &'a ServerName>) -> MatrixToUri {
MatrixToUri::new(self.into(), via.into_iter().collect())
}
/// Create a `matrix.to` URI for an event scoped under this room ID.
pub fn matrix_to_event_uri(&self, ev_id: &EventId) -> MatrixToUri {
MatrixToUri::new((self, ev_id).into(), Vec::new())
}
/// Create a `matrix:` URI for this room ID.
///
/// If `join` is `true`, a click on the URI should join the room.
///
/// # Example
///
/// ```
/// use ruma_common::{room_id, server_name};
///
/// assert_eq!(
/// room_id!("!somewhere:example.org")
/// .matrix_uri([&*server_name!("example.org"), &*server_name!("alt.example.org")], true)
/// .to_string(),
/// "matrix:roomid/somewhere:example.org?via=example.org&via=alt.example.org&action=join"
/// );
/// ```
pub fn matrix_uri<'a>(
&self,
via: impl IntoIterator<Item = &'a ServerName>,
join: bool,
) -> MatrixUri {
MatrixUri::new(
self.into(),
via.into_iter().collect(),
Some(UriAction::Join).filter(|_| join),
)
}
/// Create a `matrix:` URI for an event scoped under this room ID.
pub fn matrix_event_uri<'a>(
&self,
ev_id: &EventId,
via: impl IntoIterator<Item = &'a ServerName>,
) -> MatrixUri {
MatrixUri::new((self, ev_id).into(), via.into_iter().collect(), None)
}
fn colon_idx(&self) -> usize {
self.as_str().find(':').unwrap()
}
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use super::RoomId;
use crate::Error;
#[test]
fn valid_room_id() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com"
);
}
#[test]
fn empty_localpart() {
assert_eq!(
<&RoomId>::try_from("!:example.com").expect("Failed to create RoomId.").as_ref(),
"!:example.com"
);
}
#[cfg(feature = "rand")]
#[test]
fn generate_random_valid_room_id() {
use crate::server_name;
let room_id = RoomId::new(server_name!("example.com"));
let id_str = room_id.as_str();
assert!(id_str.starts_with('!'));
assert_eq!(id_str.len(), 31);
}
#[test]
fn serialize_valid_room_id() {
assert_eq!(
serde_json::to_string(
<&RoomId>::try_from("!29fhd83h92h0:example.com").expect("Failed to create RoomId.")
)
.expect("Failed to convert RoomId to JSON."),
r#""!29fhd83h92h0:example.com""#
);
}
#[test]
fn deserialize_valid_room_id() {
assert_eq!(
serde_json::from_str::<Box<RoomId>>(r#""!29fhd83h92h0:example.com""#)
.expect("Failed to convert JSON to RoomId"),
<&RoomId>::try_from("!29fhd83h92h0:example.com").expect("Failed to create RoomId.")
);
}
#[test]
fn valid_room_id_with_explicit_standard_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:443")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com:443"
);
}
#[test]
fn valid_room_id_with_non_standard_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:5000")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com:5000"
);
}
#[test]
fn missing_room_id_sigil() {
assert_eq!(
<&RoomId>::try_from("carl:example.com").unwrap_err(),
Error::MissingLeadingSigil
);
}
#[test]
fn missing_room_id_delimiter() {
assert_eq!(<&RoomId>::try_from("!29fhd83h92h0").unwrap_err(), Error::MissingDelimiter);
}
#[test]
fn
|
() {
assert_eq!(<&RoomId>::try_from("!29fhd83h92h0:/").unwrap_err(), Error::InvalidServerName);
}
#[test]
fn invalid_room_id_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:notaport").unwrap_err(),
Error::InvalidServerName
);
}
}
|
invalid_room_id_host
|
identifier_name
|
room_id.rs
|
//! Matrix room identifiers.
use super::{matrix_uri::UriAction, EventId, MatrixToUri, MatrixUri, ServerName};
/// A Matrix [room ID].
///
/// A `RoomId` is generated randomly or converted from a string slice, and can be converted back
/// into a string as needed.
///
/// ```
/// # use std::convert::TryFrom;
/// # use ruma_common::RoomId;
/// assert_eq!(<&RoomId>::try_from("!n8f893n9:example.com").unwrap(), "!n8f893n9:example.com");
/// ```
///
/// [room ID]: https://spec.matrix.org/v1.2/appendices/#room-ids-and-event-ids
#[repr(transparent)]
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RoomId(str);
opaque_identifier_validated!(RoomId, ruma_identifiers_validation::room_id::validate);
impl RoomId {
/// Attempts to generate a `RoomId` for the given origin server with a localpart consisting of
/// 18 random ASCII characters.
///
/// Fails if the given homeserver cannot be parsed as a valid host.
#[cfg(feature = "rand")]
pub fn new(server_name: &ServerName) -> Box<Self> {
Self::from_owned(format!("!{}:{}", super::generate_localpart(18), server_name).into())
}
/// Returns the room's unique ID.
pub fn localpart(&self) -> &str {
&self.as_str()[1..self.colon_idx()]
}
/// Returns the server name of the room ID.
pub fn server_name(&self) -> &ServerName {
ServerName::from_borrowed(&self.as_str()[self.colon_idx() + 1..])
}
/// Create a `matrix.to` URI for this room ID.
///
/// # Example
///
/// ```
/// use ruma_common::{room_id, server_name};
///
/// assert_eq!(
/// room_id!("!somewhere:example.org")
/// .matrix_to_uri([&*server_name!("example.org"), &*server_name!("alt.example.org")])
/// .to_string(),
/// "https://matrix.to/#/%21somewhere%3Aexample.org?via=example.org&via=alt.example.org"
/// );
/// ```
pub fn matrix_to_uri<'a>(&self, via: impl IntoIterator<Item = &'a ServerName>) -> MatrixToUri {
MatrixToUri::new(self.into(), via.into_iter().collect())
}
/// Create a `matrix.to` URI for an event scoped under this room ID.
pub fn matrix_to_event_uri(&self, ev_id: &EventId) -> MatrixToUri {
MatrixToUri::new((self, ev_id).into(), Vec::new())
}
/// Create a `matrix:` URI for this room ID.
///
/// If `join` is `true`, a click on the URI should join the room.
///
/// # Example
///
/// ```
/// use ruma_common::{room_id, server_name};
///
/// assert_eq!(
/// room_id!("!somewhere:example.org")
/// .matrix_uri([&*server_name!("example.org"), &*server_name!("alt.example.org")], true)
/// .to_string(),
/// "matrix:roomid/somewhere:example.org?via=example.org&via=alt.example.org&action=join"
/// );
/// ```
pub fn matrix_uri<'a>(
&self,
via: impl IntoIterator<Item = &'a ServerName>,
join: bool,
) -> MatrixUri {
MatrixUri::new(
self.into(),
via.into_iter().collect(),
Some(UriAction::Join).filter(|_| join),
)
}
/// Create a `matrix:` URI for an event scoped under this room ID.
pub fn matrix_event_uri<'a>(
&self,
ev_id: &EventId,
via: impl IntoIterator<Item = &'a ServerName>,
) -> MatrixUri {
MatrixUri::new((self, ev_id).into(), via.into_iter().collect(), None)
}
fn colon_idx(&self) -> usize {
self.as_str().find(':').unwrap()
}
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use super::RoomId;
use crate::Error;
#[test]
fn valid_room_id() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com"
);
}
#[test]
fn empty_localpart() {
assert_eq!(
<&RoomId>::try_from("!:example.com").expect("Failed to create RoomId.").as_ref(),
"!:example.com"
);
}
#[cfg(feature = "rand")]
#[test]
fn generate_random_valid_room_id() {
use crate::server_name;
let room_id = RoomId::new(server_name!("example.com"));
let id_str = room_id.as_str();
assert!(id_str.starts_with('!'));
assert_eq!(id_str.len(), 31);
}
#[test]
fn serialize_valid_room_id() {
assert_eq!(
serde_json::to_string(
<&RoomId>::try_from("!29fhd83h92h0:example.com").expect("Failed to create RoomId.")
)
.expect("Failed to convert RoomId to JSON."),
r#""!29fhd83h92h0:example.com""#
);
}
#[test]
fn deserialize_valid_room_id() {
assert_eq!(
serde_json::from_str::<Box<RoomId>>(r#""!29fhd83h92h0:example.com""#)
.expect("Failed to convert JSON to RoomId"),
<&RoomId>::try_from("!29fhd83h92h0:example.com").expect("Failed to create RoomId.")
);
}
#[test]
fn valid_room_id_with_explicit_standard_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:443")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com:443"
);
}
|
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:5000")
.expect("Failed to create RoomId.")
.as_ref(),
"!29fhd83h92h0:example.com:5000"
);
}
#[test]
fn missing_room_id_sigil() {
assert_eq!(
<&RoomId>::try_from("carl:example.com").unwrap_err(),
Error::MissingLeadingSigil
);
}
#[test]
fn missing_room_id_delimiter() {
assert_eq!(<&RoomId>::try_from("!29fhd83h92h0").unwrap_err(), Error::MissingDelimiter);
}
#[test]
fn invalid_room_id_host() {
assert_eq!(<&RoomId>::try_from("!29fhd83h92h0:/").unwrap_err(), Error::InvalidServerName);
}
#[test]
fn invalid_room_id_port() {
assert_eq!(
<&RoomId>::try_from("!29fhd83h92h0:example.com:notaport").unwrap_err(),
Error::InvalidServerName
);
}
}
|
#[test]
fn valid_room_id_with_non_standard_port() {
|
random_line_split
|
player.rs
|
pub const MAX_VELOCITY: f64 = 1.5;
pub struct Player {
_x: f64,
_y: f64,
pub width: u32,
pub height: u32,
x_velocity: f64,
y_velocity: f64,
}
impl Player {
pub fn new() -> Player {
Player {
|
_x: 0.,
_y: 0.,
width: 32,
height: 32,
x_velocity: 0.,
y_velocity: 0.,
}
}
pub fn x(&self, lag: f64) -> i32 {
(self._x + self.x_velocity*lag) as i32
}
pub fn y(&self, lag: f64) -> i32 {
// println!("{}", lag);
(self._y + self.y_velocity*lag) as i32
}
pub fn change_velocity(&mut self, x_velocity: f64, y_velocity: f64) {
self.x_velocity = self.x_velocity + x_velocity;
self.y_velocity = self.y_velocity + y_velocity;
}
pub fn update(&mut self) {
self._x = self._x + self.x_velocity;
self._y = self._y + self.y_velocity;
}
}
|
random_line_split
|
|
player.rs
|
pub const MAX_VELOCITY: f64 = 1.5;
pub struct Player {
_x: f64,
_y: f64,
pub width: u32,
pub height: u32,
x_velocity: f64,
y_velocity: f64,
}
impl Player {
pub fn new() -> Player {
Player {
_x: 0.,
_y: 0.,
width: 32,
height: 32,
x_velocity: 0.,
y_velocity: 0.,
}
}
pub fn
|
(&self, lag: f64) -> i32 {
(self._x + self.x_velocity*lag) as i32
}
pub fn y(&self, lag: f64) -> i32 {
// println!("{}", lag);
(self._y + self.y_velocity*lag) as i32
}
pub fn change_velocity(&mut self, x_velocity: f64, y_velocity: f64) {
self.x_velocity = self.x_velocity + x_velocity;
self.y_velocity = self.y_velocity + y_velocity;
}
pub fn update(&mut self) {
self._x = self._x + self.x_velocity;
self._y = self._y + self.y_velocity;
}
}
|
x
|
identifier_name
|
player.rs
|
pub const MAX_VELOCITY: f64 = 1.5;
pub struct Player {
_x: f64,
_y: f64,
pub width: u32,
pub height: u32,
x_velocity: f64,
y_velocity: f64,
}
impl Player {
pub fn new() -> Player {
Player {
_x: 0.,
_y: 0.,
width: 32,
height: 32,
x_velocity: 0.,
y_velocity: 0.,
}
}
pub fn x(&self, lag: f64) -> i32
|
pub fn y(&self, lag: f64) -> i32 {
// println!("{}", lag);
(self._y + self.y_velocity*lag) as i32
}
pub fn change_velocity(&mut self, x_velocity: f64, y_velocity: f64) {
self.x_velocity = self.x_velocity + x_velocity;
self.y_velocity = self.y_velocity + y_velocity;
}
pub fn update(&mut self) {
self._x = self._x + self.x_velocity;
self._y = self._y + self.y_velocity;
}
}
|
{
(self._x + self.x_velocity*lag) as i32
}
|
identifier_body
|
string.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Format string literals.
use unicode_segmentation::UnicodeSegmentation;
use regex::Regex;
use Indent;
use config::Config;
use MIN_STRING;
pub struct StringFormat<'a> {
pub opener: &'a str,
pub closer: &'a str,
pub line_start: &'a str,
pub line_end: &'a str,
pub width: usize,
pub offset: Indent,
pub trim_end: bool,
pub config: &'a Config,
}
// FIXME: simplify this!
pub fn rewrite_string<'a>(orig: &str, fmt: &StringFormat<'a>) -> Option<String> {
// Strip line breaks.
let re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][:space:]*").unwrap();
let stripped_str = re.replace_all(orig, "$1");
let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>();
let indent = fmt.offset.to_string(fmt.config);
let punctuation = ":,;.";
// `cur_start` is the position in `orig` of the start of the current line.
let mut cur_start = 0;
let mut result = String::with_capacity(stripped_str.len()
.checked_next_power_of_two()
|
// succeed.
let max_chars = try_opt!(fmt.width.checked_sub(fmt.opener.len() + ender_length + 1)) + 1;
// Snip a line at a time from `orig` until it is used up. Push the snippet
// onto result.
'outer: loop {
// `cur_end` will be where we break the line, as an offset into `orig`.
// Initialised to the maximum it could be (which may be beyond `orig`).
let mut cur_end = cur_start + max_chars;
// We can fit the rest of the string on this line, so we're done.
if cur_end >= graphemes.len() {
let line = &graphemes[cur_start..].join("");
result.push_str(line);
break 'outer;
}
// Push cur_end left until we reach whitespace (or the line is too small).
while !graphemes[cur_end - 1].trim().is_empty() {
cur_end -= 1;
if cur_end < cur_start + MIN_STRING {
// We couldn't find whitespace before the string got too small.
// So start again at the max length and look for punctuation.
cur_end = cur_start + max_chars;
while !punctuation.contains(graphemes[cur_end - 1]) {
cur_end -= 1;
// If we can't break at whitespace or punctuation, grow the string instead.
if cur_end < cur_start + MIN_STRING {
cur_end = cur_start + max_chars;
while !(punctuation.contains(graphemes[cur_end - 1]) ||
graphemes[cur_end - 1].trim().is_empty()) {
if cur_end >= graphemes.len() {
let line = &graphemes[cur_start..].join("");
result.push_str(line);
break 'outer;
}
cur_end += 1;
}
break;
}
}
break;
}
}
// Make sure there is no whitespace to the right of the break.
while cur_end < stripped_str.len() && graphemes[cur_end].trim().is_empty() {
cur_end += 1;
}
// Make the current line and add it on to result.
let raw_line = graphemes[cur_start..cur_end].join("");
let line = if fmt.trim_end {
raw_line.trim()
} else {
raw_line.as_str()
};
result.push_str(line);
result.push_str(fmt.line_end);
result.push('\n');
result.push_str(&indent);
result.push_str(fmt.line_start);
// The next line starts where the current line ends.
cur_start = cur_end;
}
result.push_str(fmt.closer);
Some(result)
}
#[cfg(test)]
mod test {
use super::{StringFormat, rewrite_string};
#[test]
fn issue343() {
let config = Default::default();
let fmt = StringFormat {
opener: "\"",
closer: "\"",
line_start: " ",
line_end: "\\",
width: 2,
offset: ::Indent::empty(),
trim_end: false,
config: &config,
};
rewrite_string("eq_", &fmt);
}
}
|
.unwrap_or(usize::max_value()));
result.push_str(fmt.opener);
let ender_length = fmt.line_end.len();
// If we cannot put at least a single character per line, the rewrite won't
|
random_line_split
|
string.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Format string literals.
use unicode_segmentation::UnicodeSegmentation;
use regex::Regex;
use Indent;
use config::Config;
use MIN_STRING;
pub struct
|
<'a> {
pub opener: &'a str,
pub closer: &'a str,
pub line_start: &'a str,
pub line_end: &'a str,
pub width: usize,
pub offset: Indent,
pub trim_end: bool,
pub config: &'a Config,
}
// FIXME: simplify this!
pub fn rewrite_string<'a>(orig: &str, fmt: &StringFormat<'a>) -> Option<String> {
// Strip line breaks.
let re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][:space:]*").unwrap();
let stripped_str = re.replace_all(orig, "$1");
let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>();
let indent = fmt.offset.to_string(fmt.config);
let punctuation = ":,;.";
// `cur_start` is the position in `orig` of the start of the current line.
let mut cur_start = 0;
let mut result = String::with_capacity(stripped_str.len()
.checked_next_power_of_two()
.unwrap_or(usize::max_value()));
result.push_str(fmt.opener);
let ender_length = fmt.line_end.len();
// If we cannot put at least a single character per line, the rewrite won't
// succeed.
let max_chars = try_opt!(fmt.width.checked_sub(fmt.opener.len() + ender_length + 1)) + 1;
// Snip a line at a time from `orig` until it is used up. Push the snippet
// onto result.
'outer: loop {
// `cur_end` will be where we break the line, as an offset into `orig`.
// Initialised to the maximum it could be (which may be beyond `orig`).
let mut cur_end = cur_start + max_chars;
// We can fit the rest of the string on this line, so we're done.
if cur_end >= graphemes.len() {
let line = &graphemes[cur_start..].join("");
result.push_str(line);
break 'outer;
}
// Push cur_end left until we reach whitespace (or the line is too small).
while !graphemes[cur_end - 1].trim().is_empty() {
cur_end -= 1;
if cur_end < cur_start + MIN_STRING {
// We couldn't find whitespace before the string got too small.
// So start again at the max length and look for punctuation.
cur_end = cur_start + max_chars;
while !punctuation.contains(graphemes[cur_end - 1]) {
cur_end -= 1;
// If we can't break at whitespace or punctuation, grow the string instead.
if cur_end < cur_start + MIN_STRING {
cur_end = cur_start + max_chars;
while !(punctuation.contains(graphemes[cur_end - 1]) ||
graphemes[cur_end - 1].trim().is_empty()) {
if cur_end >= graphemes.len() {
let line = &graphemes[cur_start..].join("");
result.push_str(line);
break 'outer;
}
cur_end += 1;
}
break;
}
}
break;
}
}
// Make sure there is no whitespace to the right of the break.
while cur_end < stripped_str.len() && graphemes[cur_end].trim().is_empty() {
cur_end += 1;
}
// Make the current line and add it on to result.
let raw_line = graphemes[cur_start..cur_end].join("");
let line = if fmt.trim_end {
raw_line.trim()
} else {
raw_line.as_str()
};
result.push_str(line);
result.push_str(fmt.line_end);
result.push('\n');
result.push_str(&indent);
result.push_str(fmt.line_start);
// The next line starts where the current line ends.
cur_start = cur_end;
}
result.push_str(fmt.closer);
Some(result)
}
#[cfg(test)]
mod test {
use super::{StringFormat, rewrite_string};
#[test]
fn issue343() {
let config = Default::default();
let fmt = StringFormat {
opener: "\"",
closer: "\"",
line_start: " ",
line_end: "\\",
width: 2,
offset: ::Indent::empty(),
trim_end: false,
config: &config,
};
rewrite_string("eq_", &fmt);
}
}
|
StringFormat
|
identifier_name
|
string.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Format string literals.
use unicode_segmentation::UnicodeSegmentation;
use regex::Regex;
use Indent;
use config::Config;
use MIN_STRING;
pub struct StringFormat<'a> {
pub opener: &'a str,
pub closer: &'a str,
pub line_start: &'a str,
pub line_end: &'a str,
pub width: usize,
pub offset: Indent,
pub trim_end: bool,
pub config: &'a Config,
}
// FIXME: simplify this!
pub fn rewrite_string<'a>(orig: &str, fmt: &StringFormat<'a>) -> Option<String> {
// Strip line breaks.
let re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][:space:]*").unwrap();
let stripped_str = re.replace_all(orig, "$1");
let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>();
let indent = fmt.offset.to_string(fmt.config);
let punctuation = ":,;.";
// `cur_start` is the position in `orig` of the start of the current line.
let mut cur_start = 0;
let mut result = String::with_capacity(stripped_str.len()
.checked_next_power_of_two()
.unwrap_or(usize::max_value()));
result.push_str(fmt.opener);
let ender_length = fmt.line_end.len();
// If we cannot put at least a single character per line, the rewrite won't
// succeed.
let max_chars = try_opt!(fmt.width.checked_sub(fmt.opener.len() + ender_length + 1)) + 1;
// Snip a line at a time from `orig` until it is used up. Push the snippet
// onto result.
'outer: loop {
// `cur_end` will be where we break the line, as an offset into `orig`.
// Initialised to the maximum it could be (which may be beyond `orig`).
let mut cur_end = cur_start + max_chars;
// We can fit the rest of the string on this line, so we're done.
if cur_end >= graphemes.len() {
let line = &graphemes[cur_start..].join("");
result.push_str(line);
break 'outer;
}
// Push cur_end left until we reach whitespace (or the line is too small).
while !graphemes[cur_end - 1].trim().is_empty() {
cur_end -= 1;
if cur_end < cur_start + MIN_STRING {
// We couldn't find whitespace before the string got too small.
// So start again at the max length and look for punctuation.
cur_end = cur_start + max_chars;
while !punctuation.contains(graphemes[cur_end - 1]) {
cur_end -= 1;
// If we can't break at whitespace or punctuation, grow the string instead.
if cur_end < cur_start + MIN_STRING {
cur_end = cur_start + max_chars;
while !(punctuation.contains(graphemes[cur_end - 1]) ||
graphemes[cur_end - 1].trim().is_empty()) {
if cur_end >= graphemes.len()
|
cur_end += 1;
}
break;
}
}
break;
}
}
// Make sure there is no whitespace to the right of the break.
while cur_end < stripped_str.len() && graphemes[cur_end].trim().is_empty() {
cur_end += 1;
}
// Make the current line and add it on to result.
let raw_line = graphemes[cur_start..cur_end].join("");
let line = if fmt.trim_end {
raw_line.trim()
} else {
raw_line.as_str()
};
result.push_str(line);
result.push_str(fmt.line_end);
result.push('\n');
result.push_str(&indent);
result.push_str(fmt.line_start);
// The next line starts where the current line ends.
cur_start = cur_end;
}
result.push_str(fmt.closer);
Some(result)
}
#[cfg(test)]
mod test {
use super::{StringFormat, rewrite_string};
#[test]
fn issue343() {
let config = Default::default();
let fmt = StringFormat {
opener: "\"",
closer: "\"",
line_start: " ",
line_end: "\\",
width: 2,
offset: ::Indent::empty(),
trim_end: false,
config: &config,
};
rewrite_string("eq_", &fmt);
}
}
|
{
let line = &graphemes[cur_start..].join("");
result.push_str(line);
break 'outer;
}
|
conditional_block
|
lib.rs
|
#![cfg_attr(feature = "unstable", feature(const_fn, drop_types_in_const))]
#![cfg_attr(feature = "serde_derive", feature(proc_macro))]
#![cfg_attr(feature = "nightly-testing", feature(plugin))]
#![cfg_attr(feature = "nightly-testing", plugin(clippy))]
#![cfg_attr(not(feature = "unstable"), deny(warnings))]
extern crate inflector;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate serde;
extern crate serde_json;
#[cfg(feature = "serde_derive")]
#[macro_use]
extern crate serde_derive;
#[cfg(not(feature = "serde_derive"))]
extern crate serde_codegen;
use std::fs::File;
use std::io::{Write, BufReader, BufWriter};
use std::path::Path;
use botocore::Service as BotocoreService;
use generator::generate_source;
mod botocore;
mod generator;
mod serialization;
mod util;
const BOTOCORE_DIR: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/botocore/botocore/data/");
pub struct
|
{
name: String,
protocol_date: String,
}
impl Service {
pub fn new<S>(name: S, protocol_date: S) -> Self
where S: Into<String>
{
Service {
name: name.into(),
protocol_date: protocol_date.into(),
}
}
}
pub fn generate(service: Service, output_path: &Path) {
let botocore_destination_path = output_path.join(format!("{}_botocore.rs", service.name));
let serde_destination_path = output_path.join(format!("{}.rs", service.name));
let botocore_service_data_path = Path::new(BOTOCORE_DIR)
.join(format!("{}/{}/service-2.json", service.name, service.protocol_date));
botocore_generate(botocore_service_data_path.as_path(),
botocore_destination_path.as_path());
serde_generate(botocore_destination_path.as_path(),
serde_destination_path.as_path());
}
fn botocore_generate(input_path: &Path, output_path: &Path) {
let input_file = File::open(input_path).expect(&format!(
"{:?} not found",
input_path,
));
let service_data_as_reader = BufReader::new(input_file);
let service: BotocoreService = serde_json::from_reader(service_data_as_reader).expect(&format!(
"Could not convert JSON in {:?} to Service",
input_path,
));
let source_code = generate_source(&service);
let output_file = File::create(output_path).expect(&format!(
"Couldn't open file for writing: {:?}",
output_path,
));
let mut output_bufwriter = BufWriter::new(output_file);
output_bufwriter.write_all(source_code.as_bytes()).expect(&format!(
"Failed to write generated source code to {:?}",
output_path,
));
}
#[cfg(not(feature = "serde_derive"))]
fn serde_generate(source: &Path, destination: &Path) {
::serde_codegen::expand(&source, &destination).unwrap();
}
#[cfg(feature = "serde_derive")]
fn serde_generate(source: &Path, destination: &Path) {
::std::fs::copy(source, destination).expect(&format!(
"Failed to copy {:?} to {:?}",
source,
destination,
));
}
|
Service
|
identifier_name
|
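The generator above reads BOTOCORE_DIR/{name}/{protocol_date}/service-2.json and writes the generated source beside output_path. A hypothetical call site, purely illustrative (the service name and protocol date must match a real directory under botocore/botocore/data/):

fn main() {
    // Hypothetical values; "s3" / "2006-03-01" stand in for whatever service is being generated.
    let output_dir = std::path::Path::new("src/generated");
    generate(Service::new("s3", "2006-03-01"), output_dir);
}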
lib.rs
|
#![cfg_attr(feature = "unstable", feature(const_fn, drop_types_in_const))]
#![cfg_attr(feature = "serde_derive", feature(proc_macro))]
#![cfg_attr(feature = "nightly-testing", feature(plugin))]
#![cfg_attr(feature = "nightly-testing", plugin(clippy))]
#![cfg_attr(not(feature = "unstable"), deny(warnings))]
extern crate inflector;
#[macro_use]
|
extern crate regex;
extern crate serde;
extern crate serde_json;
#[cfg(feature = "serde_derive")]
#[macro_use]
extern crate serde_derive;
#[cfg(not(feature = "serde_derive"))]
extern crate serde_codegen;
use std::fs::File;
use std::io::{Write, BufReader, BufWriter};
use std::path::Path;
use botocore::Service as BotocoreService;
use generator::generate_source;
mod botocore;
mod generator;
mod serialization;
mod util;
const BOTOCORE_DIR: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/botocore/botocore/data/");
pub struct Service {
name: String,
protocol_date: String,
}
impl Service {
pub fn new<S>(name: S, protocol_date: S) -> Self
where S: Into<String>
{
Service {
name: name.into(),
protocol_date: protocol_date.into(),
}
}
}
pub fn generate(service: Service, output_path: &Path) {
let botocore_destination_path = output_path.join(format!("{}_botocore.rs", service.name));
let serde_destination_path = output_path.join(format!("{}.rs", service.name));
let botocore_service_data_path = Path::new(BOTOCORE_DIR)
.join(format!("{}/{}/service-2.json", service.name, service.protocol_date));
botocore_generate(botocore_service_data_path.as_path(),
botocore_destination_path.as_path());
serde_generate(botocore_destination_path.as_path(),
serde_destination_path.as_path());
}
fn botocore_generate(input_path: &Path, output_path: &Path) {
let input_file = File::open(input_path).expect(&format!(
"{:?} not found",
input_path,
));
let service_data_as_reader = BufReader::new(input_file);
let service: BotocoreService = serde_json::from_reader(service_data_as_reader).expect(&format!(
"Could not convert JSON in {:?} to Service",
input_path,
));
let source_code = generate_source(&service);
let output_file = File::create(output_path).expect(&format!(
"Couldn't open file for writing: {:?}",
output_path,
));
let mut output_bufwriter = BufWriter::new(output_file);
output_bufwriter.write_all(source_code.as_bytes()).expect(&format!(
"Failed to write generated source code to {:?}",
output_path,
));
}
#[cfg(not(feature = "serde_derive"))]
fn serde_generate(source: &Path, destination: &Path) {
::serde_codegen::expand(&source, &destination).unwrap();
}
#[cfg(feature = "serde_derive")]
fn serde_generate(source: &Path, destination: &Path) {
::std::fs::copy(source, destination).expect(&format!(
"Failed to copy {:?} to {:?}",
source,
destination,
));
}
|
extern crate lazy_static;
|
random_line_split
|
orphan-widgets-attribute.rs
|
/*
* Copyright (c) 2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
prelude::BoxExt,
prelude::ButtonExt,
prelude::GestureDragExt,
prelude::LabelExt,
prelude::OrientableExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::Widget;
use relm_derive::{Msg, widget};
use self::Msg::*;
pub struct Model {
counter: i32,
}
#[derive(Msg)]
pub enum Msg {
Click(f64, f64),
Decrement,
End,
Increment,
Move(f64, f64),
Quit,
}
#[widget]
impl Widget for Win {
fn model() -> Model {
Model {
counter: 0,
}
}
fn update(&mut self, event: Msg) {
match event {
Click(x, y) => println!("Clicked on {}, {}", x, y),
Decrement => self.model.counter -= 1,
End => println!("End"),
Increment => self.model.counter += 1,
Move(x, y) => println!("Moved to {}, {}", x, y),
Quit => gtk::main_quit(),
}
}
view! {
|
gtk::Button {
clicked => Increment,
label: "+",
},
gtk::Label {
text: &self.model.counter.to_string(),
},
#[name="radio1"]
gtk::RadioButton {
label: "First",
},
#[name="drawing_area"]
gtk::DrawingArea {
child: {
expand: true,
}
},
gtk::RadioButton({ group: self.radio1 }) {
label: "Second",
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
gtk::GestureDrag(&self.drawing_area) {
drag_begin(_, x, y) => Click(x, y),
drag_update(_, x, y) => Move(x, y),
drag_end(_, _, _) => End,
}
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
|
gtk::Window {
gtk::Box {
orientation: Vertical,
#[name="inc_button"]
|
random_line_split
|
orphan-widgets-attribute.rs
|
/*
* Copyright (c) 2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
prelude::BoxExt,
prelude::ButtonExt,
prelude::GestureDragExt,
prelude::LabelExt,
prelude::OrientableExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::Widget;
use relm_derive::{Msg, widget};
use self::Msg::*;
pub struct Model {
counter: i32,
}
#[derive(Msg)]
pub enum Msg {
Click(f64, f64),
Decrement,
End,
Increment,
Move(f64, f64),
Quit,
}
#[widget]
impl Widget for Win {
fn model() -> Model
|
fn update(&mut self, event: Msg) {
match event {
Click(x, y) => println!("Clicked on {}, {}", x, y),
Decrement => self.model.counter -= 1,
End => println!("End"),
Increment => self.model.counter += 1,
Move(x, y) => println!("Moved to {}, {}", x, y),
Quit => gtk::main_quit(),
}
}
view! {
gtk::Window {
gtk::Box {
orientation: Vertical,
#[name="inc_button"]
gtk::Button {
clicked => Increment,
label: "+",
},
gtk::Label {
text: &self.model.counter.to_string(),
},
#[name="radio1"]
gtk::RadioButton {
label: "First",
},
#[name="drawing_area"]
gtk::DrawingArea {
child: {
expand: true,
}
},
gtk::RadioButton({ group: self.radio1 }) {
label: "Second",
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
gtk::GestureDrag(&self.drawing_area) {
drag_begin(_, x, y) => Click(x, y),
drag_update(_, x, y) => Move(x, y),
drag_end(_, _, _) => End,
}
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
|
{
Model {
counter: 0,
}
}
|
identifier_body
|
orphan-widgets-attribute.rs
|
/*
* Copyright (c) 2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
prelude::BoxExt,
prelude::ButtonExt,
prelude::GestureDragExt,
prelude::LabelExt,
prelude::OrientableExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::Widget;
use relm_derive::{Msg, widget};
use self::Msg::*;
pub struct Model {
counter: i32,
}
#[derive(Msg)]
pub enum Msg {
Click(f64, f64),
Decrement,
End,
Increment,
Move(f64, f64),
Quit,
}
#[widget]
impl Widget for Win {
fn model() -> Model {
Model {
counter: 0,
}
}
fn update(&mut self, event: Msg) {
match event {
Click(x, y) => println!("Clicked on {}, {}", x, y),
Decrement => self.model.counter -= 1,
End => println!("End"),
Increment => self.model.counter += 1,
Move(x, y) => println!("Moved to {}, {}", x, y),
Quit => gtk::main_quit(),
}
}
view! {
gtk::Window {
gtk::Box {
orientation: Vertical,
#[name="inc_button"]
gtk::Button {
clicked => Increment,
label: "+",
},
gtk::Label {
text: &self.model.counter.to_string(),
},
#[name="radio1"]
gtk::RadioButton {
label: "First",
},
#[name="drawing_area"]
gtk::DrawingArea {
child: {
expand: true,
}
},
gtk::RadioButton({ group: self.radio1 }) {
label: "Second",
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
gtk::GestureDrag(&self.drawing_area) {
drag_begin(_, x, y) => Click(x, y),
drag_update(_, x, y) => Move(x, y),
drag_end(_, _, _) => End,
}
}
}
fn
|
() {
Win::run(()).expect("Win::run failed");
}
|
main
|
identifier_name
|
issue-9951.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
// pretty-expanded FIXME #23616
#![allow(unused_variables)]
trait Bar {
fn noop(&self);
}
impl Bar for u8 {
fn noop(&self) {}
}
fn main() {
let (a, b) = (&5u8 as &Bar, &9u8 as &Bar);
let (c, d): (&Bar, &Bar) = (a, b);
let (a, b) = (Box::new(5u8) as Box<Bar>, Box::new(9u8) as Box<Bar>);
let (c, d): (&Bar, &Bar) = (&*a, &*b);
let (c, d): (&Bar, &Bar) = (&5, &9);
}
|
random_line_split
|
|
issue-9951.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
#![allow(unused_variables)]
trait Bar {
fn noop(&self);
}
impl Bar for u8 {
fn noop(&self) {}
}
fn
|
() {
let (a, b) = (&5u8 as &Bar, &9u8 as &Bar);
let (c, d): (&Bar, &Bar) = (a, b);
let (a, b) = (Box::new(5u8) as Box<Bar>, Box::new(9u8) as Box<Bar>);
let (c, d): (&Bar, &Bar) = (&*a, &*b);
let (c, d): (&Bar, &Bar) = (&5, &9);
}
|
main
|
identifier_name
|
issue-9951.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
#![allow(unused_variables)]
trait Bar {
fn noop(&self);
}
impl Bar for u8 {
fn noop(&self)
|
}
fn main() {
let (a, b) = (&5u8 as &Bar, &9u8 as &Bar);
let (c, d): (&Bar, &Bar) = (a, b);
let (a, b) = (Box::new(5u8) as Box<Bar>, Box::new(9u8) as Box<Bar>);
let (c, d): (&Bar, &Bar) = (&*a, &*b);
let (c, d): (&Bar, &Bar) = (&5, &9);
}
|
{}
|
identifier_body
|
backup.rs
|
use std;
use std::io::{self, Write, Read};
use std::fs;
// pub use decompress::decompress;
pub use compress::compress;
extern crate byteorder; //needed for lz4
extern crate rustc_serialize; //needed for lz4
extern crate docopt;
extern crate seahash; //to hash the blocks
extern crate rusqlite; //to save to backup archive
// The seed identifies a compatible program version. If the file seed matches this, then it is safe to
// work on.
static SEED: &'static str = "elaiw0kahc3ohxe5ke3I3";
struct Block {
serial: String,
hash: String,
duplicate: String, /* TRUE/FALSE. Compressor writes no data, decompressor initiates a search for the matching hash */
data_blob: Vec<u8>,
}
impl Block {
fn
|
(block_size: usize) -> Block {
Block {
serial: String::from(""),
hash: String::from(""),
data_blob: Vec::with_capacity(block_size),
duplicate: String::from("FALSE"),
}
}
}
pub fn backup(block_size: usize,
compression_type: &String,
destination_path: &std::path::Path,
silent_option: bool) {
let mut collection_of_other_suitable_backup_files: Vec<std::path::PathBuf> = Vec::new();
let mut vector_of_hashes: Vec<String> = Vec::new();
let mut components = destination_path.components();
components.next_back(); //get rid of the filename and get the directory it's in
let working_directory_path = destination_path.parent()
.expect("Error encountered during backup, aborting. Error Code:400");
let directory_contents = if working_directory_path == std::path::Path::new("") {
fs::read_dir(std::env::current_dir().unwrap())
} else {
fs::read_dir(working_directory_path)
};
for file in directory_contents.unwrap() {
let filepath = file.unwrap().path();
match rusqlite::Connection::open(&filepath) {//try opening each file in the directory as an sqlite file
Ok(other_file_connection) => {
match other_file_connection.query_row("SELECT seed,block_size FROM file_info;", //Connection successful, but is the file in the correct format? Attempt to get the seed and block size
&[],
|row|{ (row.get::<_, String>(0), row.get::<_, String>(1))}) {
Ok(file_info_result) => { //Found the other backup compatible sqlite database here. Verify its parameters meet the current program version and block size settings
if (SEED == file_info_result.0) && (block_size.to_string() == file_info_result.1) {
collection_of_other_suitable_backup_files.push(filepath);
}
},
Err(_) => {},
};
}
Err(_) => {}
};
}
// Load hashes from the collection of other suitable files into an array. For BACKUP we don't care
// which file they are actually in; we just need to know they exist so we can skip them when
// they are encountered in the stream.
if !silent_option {
println!("Found {} compatible files in the backup destination directory.",
collection_of_other_suitable_backup_files.len());
}
for filepath in collection_of_other_suitable_backup_files {
if !silent_option {
println!("Obtaining block hashes from {}", filepath.to_string_lossy());
}
let hash_extract_connection = rusqlite::Connection::open(&filepath)
.expect("Error encountered during backup, aborting. Error Code:401");//just unwrapping because we've just accessed it ok above.
let mut query = hash_extract_connection.prepare("SELECT hash, duplicate FROM blocks_table")
.expect("Error encountered during backup, aborting. Error Code:402");
let mut hashes_rows = query.query(&[])
.expect("Error encountered during backup, aborting. Error Code:403");
while let Some(Ok(hash)) = hashes_rows.next() {
if hash.get::<_, String>(1) == "FALSE" {
// not a duplicate reference, i.e. real data in this block
vector_of_hashes.push(hash.get::<_, String>(0));
}
}
}
let mut block_counter: u64 = 1; //the count will not be off-by-one, i.e. first block is "1"
let mut duplicate_blocks_found: u32 = 0; //just for the final user update
let sqlite_connection: rusqlite::Connection;
match rusqlite::Connection::open(destination_path) {
Ok(x) => sqlite_connection = x,
Err(err) => {
println!("Error: {}", err);
std::process::exit(1);
}
}
init_sqlite_data_file(&sqlite_connection, &block_size, &compression_type);
if !silent_option {
println!("Processing {}-byte blocks:", block_size);
}
let mut block_vector: Vec<u8> = Vec::with_capacity(block_size + 1);
let mut current_block: Block = Block::new(block_size); //zero out a new block struct for storing data
for byte in io::stdin().bytes() {
match byte {
Ok(read_byte) => {
if block_vector.len() < block_size - 1 {
// keep filling the vector
block_vector.push(read_byte);
} else if block_vector.len() == block_size - 1 {
// we have a full or final partial block, lets deal with it
block_vector.push(read_byte); //need to commit the byte of the current iteration to take it to block_size
// Things we want to do to completed full(+partial) blocks happen here.
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
// new data here, we need to push it for storage
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4" {
current_block.data_blob = compress(&block_vector);//compress data into here
} else {
current_block.data_blob = block_vector.clone();//data into here
}
}
commit_block_to_sqlite(&sqlite_connection, &current_block);
if !silent_option {
print!("Blocks: {}, Duplicates: {}. Read: {} MiB, Dedup saving: {:.2} MiB",
block_counter,
duplicate_blocks_found,
block_counter as usize * block_size / 1048576,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\r");
}
io::stdout()
.flush()
.expect("Error encountered during backup, aborting. Error Code:404");
block_counter += 1;
block_vector.clear(); //reset the vector to new for next iteration
current_block = Block::new(block_size); //reset the block to new for next iteration
}
}
Err(err) => {
println!("Error reading from stdin: {}", err);
}
}
}
// EOF has arrived. Let's deal with what's left in the block
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4" {
current_block.data_blob = compress(&block_vector);//compress data into here
} else {
current_block.data_blob = block_vector.clone();//data into here
}
}
commit_block_to_sqlite(&sqlite_connection, &current_block);
if !silent_option {
print_backup_status_update(block_counter, duplicate_blocks_found, block_size);
}
io::stdout().flush().expect("Error encountered during backup, aborting. Error Code:405");
}
// TODO optimise sqlite operations into prepared statements
fn init_sqlite_data_file(connection: &rusqlite::Connection,
block_size: &usize,
compression_type: &String) {
connection.execute("
CREATE TABLE blocks_table (serial BLOB, hash BLOB, data BLOB, duplicate BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:406");
connection.execute("CREATE TABLE file_info (type BLOB, version BLOB, seed BLOB, block_size BLOB, compression_type BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:407");
connection.execute("INSERT INTO file_info (type, version, seed, block_size, compression_type) \
VALUES (\"dedup\",\"0.1.0\",?1,?2,?3);",
&[&SEED, &block_size.to_string(), &*compression_type])
.expect("Error encountered during backup, aborting. Error Code:408");
}
fn commit_block_to_sqlite(connection: &rusqlite::Connection, block: &Block) {
connection.execute("INSERT INTO blocks_table (serial, hash, data, duplicate) VALUES (?1,?2,?3,?4)",
&[&block.serial, &block.hash, &block.data_blob, &block.duplicate])
.expect("Error encountered during backup, aborting. Error Code:409");
}
fn print_backup_status_update(block_counter: u64, duplicate_blocks_found: u32, block_size: usize) {
print!("Blocks processed: {}, Duplicates found: {}, Maximum theoretical dedup saving: {:.2} \
MiB",
block_counter,
duplicate_blocks_found,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\n");
}
|
new
|
identifier_name
|
backup.rs
|
use std;
use std::io::{self, Write, Read};
use std::fs;
// pub use decompress::decompress;
pub use compress::compress;
extern crate byteorder; //needed for lz4
extern crate rustc_serialize; //needed for lz4
extern crate docopt;
extern crate seahash; //to hash the blocks
extern crate rusqlite; //to save to backup archive
// The seed identifies a compatible program version. If the file seed matches this, then it is safe to
// work on.
static SEED: &'static str = "elaiw0kahc3ohxe5ke3I3";
struct Block {
serial: String,
hash: String,
duplicate: String, /* TRUE/FALSE. Compressor writes no data, decompressor initiates a search for the matching hash */
data_blob: Vec<u8>,
}
impl Block {
fn new(block_size: usize) -> Block {
Block {
serial: String::from(""),
hash: String::from(""),
data_blob: Vec::with_capacity(block_size),
duplicate: String::from("FALSE"),
}
}
}
pub fn backup(block_size: usize,
compression_type: &String,
destination_path: &std::path::Path,
silent_option: bool) {
let mut collection_of_other_suitable_backup_files: Vec<std::path::PathBuf> = Vec::new();
let mut vector_of_hashes: Vec<String> = Vec::new();
let mut components = destination_path.components();
components.next_back(); //get rid of the filename and get the directory it's in
let working_directory_path = destination_path.parent()
.expect("Error encountered during backup, aborting. Error Code:400");
let directory_contents = if working_directory_path == std::path::Path::new("") {
fs::read_dir(std::env::current_dir().unwrap())
} else {
fs::read_dir(working_directory_path)
};
for file in directory_contents.unwrap() {
let filepath = file.unwrap().path();
match rusqlite::Connection::open(&filepath) {//try opening each file in the directory as an sqlite file
Ok(other_file_connection) => {
match other_file_connection.query_row("SELECT seed,block_size FROM file_info;", //Connection successful, but is the file in the correct format? Attempt to get the seed and block size
&[],
|row|{ (row.get::<_, String>(0), row.get::<_, String>(1))}) {
Ok(file_info_result) => { //Found the other backup compatible sqlite database here. Verify its parameters meet the current program version and block size settings
if (SEED == file_info_result.0) && (block_size.to_string() == file_info_result.1) {
collection_of_other_suitable_backup_files.push(filepath);
}
},
Err(_) => {},
};
}
Err(_) => {}
};
}
// Load hashes from the collection of other suitable files into an array. For BACKUP we don't care
// which file they are actually in; we just need to know they exist so we can skip them when
// they are encountered in the stream.
if !silent_option {
println!("Found {} compatible files in the backup destination directory.",
collection_of_other_suitable_backup_files.len());
}
for filepath in collection_of_other_suitable_backup_files {
if !silent_option {
println!("Obtaining block hashes from {}", filepath.to_string_lossy());
}
let hash_extract_connection = rusqlite::Connection::open(&filepath)
.expect("Error encountered during backup, aborting. Error Code:401");//just unwrapping because we've just accessed it ok above.
let mut query = hash_extract_connection.prepare("SELECT hash, duplicate FROM blocks_table")
.expect("Error encountered during backup, aborting. Error Code:402");
let mut hashes_rows = query.query(&[])
.expect("Error encountered during backup, aborting. Error Code:403");
while let Some(Ok(hash)) = hashes_rows.next() {
if hash.get::<_, String>(1) == "FALSE" {
// not a duplicate reference, i.e. real data in this block
vector_of_hashes.push(hash.get::<_, String>(0));
}
}
}
let mut block_counter: u64 = 1; //the count will not be off-by-one, i.e. first block is "1"
let mut duplicate_blocks_found: u32 = 0; //just for the final user update
let sqlite_connection: rusqlite::Connection;
match rusqlite::Connection::open(destination_path) {
Ok(x) => sqlite_connection = x,
Err(err) => {
println!("Error: {}", err);
std::process::exit(1);
}
}
init_sqlite_data_file(&sqlite_connection, &block_size, &compression_type);
if !silent_option {
println!("Processing {}-byte blocks:", block_size);
}
let mut block_vector: Vec<u8> = Vec::with_capacity(block_size + 1);
let mut current_block: Block = Block::new(block_size); //zero out a new block struct for storing data
for byte in io::stdin().bytes() {
match byte {
Ok(read_byte) => {
if block_vector.len() < block_size - 1 {
// keep filling the vector
block_vector.push(read_byte);
} else if block_vector.len() == block_size - 1 {
// we have a full or final partial block, lets deal with it
block_vector.push(read_byte); //need to commit the byte of the current iteration to take it to block_size
// Things we want to do to completed full(+partial) blocks happen here.
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
// new data here, we need to push it for storage
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4" {
current_block.data_blob = compress(&block_vector);//compress data into here
} else {
current_block.data_blob = block_vector.clone();//data into here
}
}
|
commit_block_to_sqlite(&sqlite_connection, &current_block);
if !silent_option {
print!("Blocks: {}, Duplicates: {}. Read: {} MiB, Dedup saving: {:.2} MiB",
block_counter,
duplicate_blocks_found,
block_counter as usize * block_size / 1048576,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\r");
}
io::stdout()
.flush()
.expect("Error encountered during backup, aborting. Error Code:404");
block_counter += 1;
block_vector.clear(); //reset the vector to new for next iteration
current_block = Block::new(block_size); //reset the block to new for next iteration
}
}
Err(err) => {
println!("Error reading from stdin: {}", err);
}
}
}
// EOF has arrived. Let's deal with what's left in the block
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4" {
current_block.data_blob = compress(&block_vector);//compress data into here
} else {
current_block.data_blob = block_vector.clone();//data into here
}
}
commit_block_to_sqlite(&sqlite_connection, &current_block);
if !silent_option {
print_backup_status_update(block_counter, duplicate_blocks_found, block_size);
}
io::stdout().flush().expect("Error encountered during backup, aborting. Error Code:405");
}
// TODO optimise sqlite operations into prepared statements
fn init_sqlite_data_file(connection: &rusqlite::Connection,
block_size: &usize,
compression_type: &String) {
connection.execute("
CREATE TABLE blocks_table (serial BLOB, hash BLOB, data BLOB, duplicate BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:406");
connection.execute("CREATE TABLE file_info (type BLOB, version BLOB, seed BLOB, block_size BLOB, compression_type BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:407");
connection.execute("INSERT INTO file_info (type, version, seed, block_size, compression_type) \
VALUES (\"dedup\",\"0.1.0\",?1,?2,?3);",
&[&SEED, &block_size.to_string(), &*compression_type])
.expect("Error encountered during backup, aborting. Error Code:408");
}
fn commit_block_to_sqlite(connection: &rusqlite::Connection, block: &Block) {
connection.execute("INSERT INTO blocks_table (serial, hash, data, duplicate) VALUES (?1,?2,?3,?4)",
&[&block.serial, &block.hash, &block.data_blob, &block.duplicate])
.expect("Error encountered during backup, aborting. Error Code:409");
}
fn print_backup_status_update(block_counter: u64, duplicate_blocks_found: u32, block_size: usize) {
print!("Blocks processed: {}, Duplicates found: {}, Maximum theoretical dedup saving: {:.2} \
MiB",
block_counter,
duplicate_blocks_found,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\n");
}
|
random_line_split
|
|
backup.rs
|
use std;
use std::io::{self, Write, Read};
use std::fs;
// pub use decompress::decompress;
pub use compress::compress;
extern crate byteorder; //needed for lz4
extern crate rustc_serialize; //needed for lz4
extern crate docopt;
extern crate seahash; //to hash the blocks
extern crate rusqlite; //to save to backup archive
// The seed identifies a compatible program version. If the file seed matches this, then it is safe to
// work on.
static SEED: &'static str = "elaiw0kahc3ohxe5ke3I3";
struct Block {
serial: String,
hash: String,
duplicate: String, /* TRUE/FALSE. Compressor writes no data, decompressor initiates a search for the matching hash */
data_blob: Vec<u8>,
}
impl Block {
fn new(block_size: usize) -> Block {
Block {
serial: String::from(""),
hash: String::from(""),
data_blob: Vec::with_capacity(block_size),
duplicate: String::from("FALSE"),
}
}
}
pub fn backup(block_size: usize,
compression_type: &String,
destination_path: &std::path::Path,
silent_option: bool) {
let mut collection_of_other_suitable_backup_files: Vec<std::path::PathBuf> = Vec::new();
let mut vector_of_hashes: Vec<String> = Vec::new();
let mut components = destination_path.components();
components.next_back(); //get rid of the filename and get the directory it's in
let working_directory_path = destination_path.parent()
.expect("Error encountered during backup, aborting. Error Code:400");
let directory_contents = if working_directory_path == std::path::Path::new("") {
fs::read_dir(std::env::current_dir().unwrap())
} else {
fs::read_dir(working_directory_path)
};
for file in directory_contents.unwrap() {
let filepath = file.unwrap().path();
match rusqlite::Connection::open(&filepath) {//try opening each file in the directory as an sqlite file
Ok(other_file_connection) => {
match other_file_connection.query_row("SELECT seed,block_size FROM file_info;", //Connection successful, but is the file in the correct format? Attempt to get the seed and block size
&[],
|row|{ (row.get::<_, String>(0), row.get::<_, String>(1))}) {
Ok(file_info_result) => { //Found the other backup compatible sqlite database here. Verify its parameters meet the current program version and block size settings
if (SEED == file_info_result.0) && (block_size.to_string() == file_info_result.1) {
collection_of_other_suitable_backup_files.push(filepath);
}
},
Err(_) => {},
};
}
Err(_) => {}
};
}
// Load hashes from the collection of other suitable files into an array. For BACKUP we don't care
// which file they are actually in; we just need to know they exist so we can skip them when
// they are encountered in the stream.
if !silent_option {
println!("Found {} compatible files in the backup destination directory.",
collection_of_other_suitable_backup_files.len());
}
for filepath in collection_of_other_suitable_backup_files {
if !silent_option {
println!("Obtaining block hashes from {}", filepath.to_string_lossy());
}
let hash_extract_connection = rusqlite::Connection::open(&filepath)
.expect("Error encountered during backup, aborting. Error Code:401");//just unwrapping because we've just accessed it ok above.
let mut query = hash_extract_connection.prepare("SELECT hash, duplicate FROM blocks_table")
.expect("Error encountered during backup, aborting. Error Code:402");
let mut hashes_rows = query.query(&[])
.expect("Error encountered during backup, aborting. Error Code:403");
while let Some(Ok(hash)) = hashes_rows.next() {
if hash.get::<_, String>(1) == "FALSE" {
// not a duplicate reference, i.e. real data in this block
vector_of_hashes.push(hash.get::<_, String>(0));
}
}
}
let mut block_counter: u64 = 1; //the count will not be off-by-one, i.e. first block is "1"
let mut duplicate_blocks_found: u32 = 0; //just for the final user update
let sqlite_connection: rusqlite::Connection;
match rusqlite::Connection::open(destination_path) {
Ok(x) => sqlite_connection = x,
Err(err) => {
println!("Error: {}", err);
std::process::exit(1);
}
}
init_sqlite_data_file(&sqlite_connection, &block_size, &compression_type);
if !silent_option {
println!("Processing {}-byte blocks:", block_size);
}
let mut block_vector: Vec<u8> = Vec::with_capacity(block_size + 1);
let mut current_block: Block = Block::new(block_size); //zero out a new block struct for storing data
for byte in io::stdin().bytes() {
match byte {
Ok(read_byte) => {
if block_vector.len() < block_size - 1 {
// keep filling the vector
block_vector.push(read_byte);
} else if block_vector.len() == block_size - 1 {
// we have a full or final partial block, lets deal with it
block_vector.push(read_byte); //need to commit the byte of the current iteration to take it to block_size
// Things we want to do to completed full(+partial) blocks happen here.
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
// new data here, we need to push it for storage
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4" {
current_block.data_blob = compress(&block_vector);//compress data into here
} else {
current_block.data_blob = block_vector.clone();//data into here
}
}
commit_block_to_sqlite(&sqlite_connection, &current_block);
if !silent_option {
print!("Blocks: {}, Duplicates: {}. Read: {} MiB, Dedup saving: {:.2} MiB",
block_counter,
duplicate_blocks_found,
block_counter as usize * block_size / 1048576,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\r");
}
io::stdout()
.flush()
.expect("Error encountered during backup, aborting. Error Code:404");
block_counter += 1;
block_vector.clear(); //reset the vector to new for next iteration
current_block = Block::new(block_size); //reset the block to new for next iteration
}
}
Err(err) => {
println!("Error reading from stdin: {}", err);
}
}
}
// EOF has arrived. Let's deal with what's left in the block
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4" {
current_block.data_blob = compress(&block_vector);//compress data into here
} else {
current_block.data_blob = block_vector.clone();//data into here
}
}
commit_block_to_sqlite(&sqlite_connection, &current_block);
if !silent_option {
print_backup_status_update(block_counter, duplicate_blocks_found, block_size);
}
io::stdout().flush().expect("Error encountered during backup, aborting. Error Code:405");
}
// TODO optimise sqlite operations into prepared statements
fn init_sqlite_data_file(connection: &rusqlite::Connection,
block_size: &usize,
compression_type: &String) {
connection.execute("
CREATE TABLE blocks_table (serial BLOB, hash BLOB, data BLOB, duplicate BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:406");
connection.execute("CREATE TABLE file_info (type BLOB, version BLOB, seed BLOB, block_size BLOB, compression_type BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:407");
connection.execute("INSERT INTO file_info (type, version, seed, block_size, compression_type) \
VALUES (\"dedup\",\"0.1.0\",?1,?2,?3);",
&[&SEED, &block_size.to_string(), &*compression_type])
.expect("Error encountered during backup, aborting. Error Code:408");
}
fn commit_block_to_sqlite(connection: &rusqlite::Connection, block: &Block)
|
fn print_backup_status_update(block_counter: u64, duplicate_blocks_found: u32, block_size: usize) {
print!("Blocks processed: {}, Duplicates found: {}, Maximum theoretical dedup saving: {:.2} \
MiB",
block_counter,
duplicate_blocks_found,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\n");
}
|
{
connection.execute("INSERT INTO blocks_table (serial, hash, data, duplicate) VALUES (?1,?2,?3,?4)",
&[&block.serial, &block.hash, &block.data_blob, &block.duplicate])
.expect("Error encountered during backup, aborting. Error Code:409");
}
|
identifier_body
|
backup.rs
|
use std;
use std::io::{self, Write, Read};
use std::fs;
// pub use decompress::decompress;
pub use compress::compress;
extern crate byteorder; //needed for lz4
extern crate rustc_serialize; //needed for lz4
extern crate docopt;
extern crate seahash; //to hash the blocks
extern crate rusqlite; //to save to backup archive
// The seed identifies a compatible program version. If the file seed matches this, then it is safe to
// work on.
static SEED: &'static str = "elaiw0kahc3ohxe5ke3I3";
struct Block {
serial: String,
hash: String,
duplicate: String, /* TRUE/FALSE. Compressor writes no data, decompressor initiates a search for the matching hash */
data_blob: Vec<u8>,
}
impl Block {
fn new(block_size: usize) -> Block {
Block {
serial: String::from(""),
hash: String::from(""),
data_blob: Vec::with_capacity(block_size),
duplicate: String::from("FALSE"),
}
}
}
pub fn backup(block_size: usize,
compression_type: &String,
destination_path: &std::path::Path,
silent_option: bool) {
let mut collection_of_other_suitable_backup_files: Vec<std::path::PathBuf> = Vec::new();
let mut vector_of_hashes: Vec<String> = Vec::new();
let mut components = destination_path.components();
components.next_back(); //get rid of the filename and get the directory it's in
let working_directory_path = destination_path.parent()
.expect("Error encountered during backup, aborting. Error Code:400");
let directory_contents = if working_directory_path == std::path::Path::new("") {
fs::read_dir(std::env::current_dir().unwrap())
} else {
fs::read_dir(working_directory_path)
};
for file in directory_contents.unwrap() {
let filepath = file.unwrap().path();
match rusqlite::Connection::open(&filepath) {//try opening each file in the directory as an sqlite file
Ok(other_file_connection) => {
match other_file_connection.query_row("SELECT seed,block_size FROM file_info;", //Connection successful, but is the file in the correct format? Attempt to get the seed and block size
&[],
|row|{ (row.get::<_, String>(0), row.get::<_, String>(1))}) {
Ok(file_info_result) => { //Found the other backup compatible sqlite database here. Verify its parameters meet the current program version and block size settings
if (SEED == file_info_result.0) && (block_size.to_string() == file_info_result.1) {
collection_of_other_suitable_backup_files.push(filepath);
}
},
Err(_) => {},
};
}
Err(_) => {}
};
}
// Load hashes from the collection of other suitable files into an array. For BACKUP we don't care
// which file they are actually in; we just need to know they exist so we can skip them when
// they are encountered in the stream.
if !silent_option {
println!("Found {} compatible files in the backup destination directory.",
collection_of_other_suitable_backup_files.len());
}
for filepath in collection_of_other_suitable_backup_files {
if !silent_option {
println!("Obtaining block hashes from {}", filepath.to_string_lossy());
}
let hash_extract_connection = rusqlite::Connection::open(&filepath)
.expect("Error encountered during backup, aborting. Error Code:401");//just unwrapping because we've just accessed it ok above.
let mut query = hash_extract_connection.prepare("SELECT hash, duplicate FROM blocks_table")
.expect("Error encountered during backup, aborting. Error Code:402");
let mut hashes_rows = query.query(&[])
.expect("Error encountered during backup, aborting. Error Code:403");
while let Some(Ok(hash)) = hashes_rows.next() {
if hash.get::<_, String>(1) == "FALSE" {
// not a duplicate reference, i.e. real data in this block
vector_of_hashes.push(hash.get::<_, String>(0));
}
}
}
let mut block_counter: u64 = 1; //the count will not be off-by-one, i.e. first block is "1"
let mut duplicate_blocks_found: u32 = 0; //just for the final user update
let sqlite_connection: rusqlite::Connection;
match rusqlite::Connection::open(destination_path) {
Ok(x) => sqlite_connection = x,
Err(err) => {
println!("Error: {}", err);
std::process::exit(1);
}
}
init_sqlite_data_file(&sqlite_connection, &block_size, &compression_type);
if !silent_option {
println!("Processing {}-byte blocks:", block_size);
}
let mut block_vector: Vec<u8> = Vec::with_capacity(block_size + 1);
let mut current_block: Block = Block::new(block_size); //zero out a new block struct for storing data
for byte in io::stdin().bytes() {
match byte {
Ok(read_byte) => {
if block_vector.len() < block_size - 1 {
// keep filling the vector
block_vector.push(read_byte);
} else if block_vector.len() == block_size - 1 {
// we have a full or final partial block, lets deal with it
block_vector.push(read_byte); //need to commit the byte of the current iteration to take it to block_size
// Things we want to do to completed full(+partial) blocks happen here.
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
// new data here, we need to push it for storage
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4"
|
else {
current_block.data_blob = block_vector.clone();//data into here
}
}
commit_block_to_sqlite(&sqlite_connection, &current_block);
if !silent_option {
print!("Blocks: {}, Duplicates: {}. Read: {} MiB, Dedup saving: {:.2} MiB",
block_counter,
duplicate_blocks_found,
block_counter as usize * block_size / 1048576,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\r");
}
io::stdout()
.flush()
.expect("Error encountered during backup, aborting. Error Code:404");
block_counter += 1;
block_vector.clear(); //reset the vector to new for next iteration
current_block = Block::new(block_size); //reset the block to new for next iteration
}
}
Err(err) => {
println!("Error reading from stdin: {}", err);
}
}
}
// EOF has arrived. Let's deal with what's left in the block
current_block.serial = block_counter.to_string();
let current_hash: String = seahash::hash(&block_vector).to_string();
current_block.hash = current_hash.clone();
// if current block hash matches a hash from array of previous hashes, skip processing data
if vector_of_hashes.contains(&current_hash) {
// this block is identical to some existing one in this or other compatible backup file
current_block.duplicate = String::from("TRUE");
duplicate_blocks_found += 1;
} else {
vector_of_hashes.push(current_hash.clone());
current_block.duplicate = String::from("FALSE");
if compression_type == "LZ4" {
current_block.data_blob = compress(&block_vector);//compress data into here
} else {
current_block.data_blob = block_vector.clone();//data into here
}
}
commit_block_to_sqlite(&sqlite_connection, ¤t_block);
if!silent_option {
print_backup_status_update(block_counter, duplicate_blocks_found, block_size);
}
io::stdout().flush().expect("Error encountered during backup, aborting. Error Code:405");
}
// TODO optimise sqlite operations into prepared statements (a hedged sketch follows commit_block_to_sqlite below)
fn init_sqlite_data_file(connection: &rusqlite::Connection,
block_size: &usize,
compression_type: &String) {
connection.execute("
CREATE TABLE blocks_table (serial BLOB, hash BLOB, data BLOB, duplicate BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:406");
connection.execute("CREATE TABLE file_info (type BLOB, version BLOB, seed BLOB, block_size BLOB, compression_type BLOB);",
&[])
.expect("Error encountered during backup, aborting. Error Code:407");
connection.execute("INSERT INTO file_info (type, version, seed, block_size, compression_type) \
VALUES (\"dedup\",\"0.1.0\",?1,?2,?3);",
&[&SEED, &block_size.to_string(), &*compression_type])
.expect("Error encountered during backup, aborting. Error Code:408");
}
fn commit_block_to_sqlite(connection: &rusqlite::Connection, block: &Block) {
connection.execute("INSERT INTO blocks_table (serial, hash, data, duplicate) VALUES (?1,?2,?3,?4)",
&[&block.serial, &block.hash, &block.data_blob, &block.duplicate])
.expect("Error encountered during backup, aborting. Error Code:409");
}
fn print_backup_status_update(block_counter: u64, duplicate_blocks_found: u32, block_size: usize) {
print!("Blocks processed: {}, Duplicates found: {}, Maximum theoretical dedup saving: {:.2} \
MiB",
block_counter,
duplicate_blocks_found,
(duplicate_blocks_found * block_size as u32 / 1048576) as f32);
print!("\n");
}
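// Hypothetical aside, not part of the original program: `vector_of_hashes`
// above is scanned linearly by `contains`, so every block pays an O(n)
// lookup. The same membership test is O(1) on average with a std HashSet.
// A self-contained sketch under that assumption (`is_new_hash` is an
// invented name):
fn is_new_hash(seen: &mut std::collections::HashSet<String>, hash: &str) -> bool {
    // insert() returns true only when the hash was not already present,
    // so a false result means "duplicate block".
    seen.insert(hash.to_string())
}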
|
{
current_block.data_blob = compress(&block_vector);//compress data into here
}
|
conditional_block
|
24.rs
|
use std::fs::File;
use std::io::Read;
fn
|
() -> std::io::Result<String> {
let mut file = File::open("24.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
fn list_subsets(numbers: &Vec<usize>, sum: usize, start_index: usize) -> Vec<Vec<usize>> {
if sum == 0 {
return vec![vec![]];
} else if start_index >= numbers.len() {
return vec![];
}
numbers
.iter()
.enumerate()
.skip(start_index)
.filter(|&(_, &x)| x <= sum)
.flat_map(|(i, &x)| {
list_subsets(numbers, sum - x, i + 1)
.into_iter()
.map(move |mut subset| {
subset.push(x);
subset
})
})
.collect()
}
fn main() {
let input = get_input().unwrap();
let numbers = input.lines().filter_map(|line| match line.parse::<usize>() {
Ok(x) => Some(x),
Err(_) => None
}).collect::<Vec<_>>();
let bucket_size = numbers.iter().sum::<usize>() / 3;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 1: {}", qe);
let bucket_size = numbers.iter().sum::<usize>() / 4;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 2: {}", qe);
}
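// Illustrative test, not part of the original solution, showing what
// list_subsets enumerates: for [1, 2, 3, 4, 5] and target 5 it yields the
// three subsets {1, 4}, {2, 3} and {5}, each summing to the target.
#[cfg(test)]
mod list_subsets_tests {
    use super::list_subsets;

    #[test]
    fn finds_all_subsets_with_target_sum() {
        let numbers = vec![1, 2, 3, 4, 5];
        let subsets = list_subsets(&numbers, 5, 0);
        assert_eq!(subsets.len(), 3);
        assert!(subsets.iter().all(|s| s.iter().sum::<usize>() == 5));
    }
}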
|
get_input
|
identifier_name
|
24.rs
|
use std::fs::File;
use std::io::Read;
fn get_input() -> std::io::Result<String> {
let mut file = File::open("24.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
fn list_subsets(numbers: &Vec<usize>, sum: usize, start_index: usize) -> Vec<Vec<usize>>
|
.collect()
}
fn main() {
let input = get_input().unwrap();
let numbers = input.lines().filter_map(|line| match line.parse::<usize>() {
Ok(x) => Some(x),
Err(_) => None
}).collect::<Vec<_>>();
let bucket_size = numbers.iter().sum::<usize>() / 3;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 1: {}", qe);
let bucket_size = numbers.iter().sum::<usize>() / 4;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 2: {}", qe);
}
|
{
if sum == 0 {
return vec![vec![]];
} else if start_index >= numbers.len() {
return vec![];
}
numbers
.iter()
.enumerate()
.skip(start_index)
.filter(|&(_, &x)| x <= sum)
.flat_map(|(i, &x)| {
list_subsets(numbers, sum - x, i + 1)
.into_iter()
.map(move |mut subset| {
subset.push(x);
subset
})
})
|
identifier_body
|
24.rs
|
use std::fs::File;
use std::io::Read;
fn get_input() -> std::io::Result<String> {
let mut file = File::open("24.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
fn list_subsets(numbers: &Vec<usize>, sum: usize, start_index: usize) -> Vec<Vec<usize>> {
if sum == 0 {
return vec![vec![]];
} else if start_index >= numbers.len() {
return vec![];
}
numbers
.iter()
.enumerate()
.skip(start_index)
.filter(|&(_, &x)| x <= sum)
.flat_map(|(i, &x)| {
list_subsets(numbers, sum - x, i + 1)
.into_iter()
|
})
.collect()
}
fn main() {
let input = get_input().unwrap();
let numbers = input.lines().filter_map(|line| match line.parse::<usize>() {
Ok(x) => Some(x),
Err(_) => None
}).collect::<Vec<_>>();
let bucket_size = numbers.iter().sum::<usize>() / 3;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 1: {}", qe);
let bucket_size = numbers.iter().sum::<usize>() / 4;
let buckets = list_subsets(&numbers, bucket_size, 0);
let min_size = buckets.iter().map(|bucket| bucket.len()).min().unwrap();
let qe = buckets.iter()
.filter(|bucket| bucket.len() == min_size)
.map(|bucket| bucket.into_iter().product::<usize>())
.min().unwrap();
println!("Part 2: {}", qe);
}
|
.map(move |mut subset| {
subset.push(x);
subset
})
|
random_line_split
|
problem17.rs
|
///
/// Computes how many letters there would be if we write out all the numbers
/// from 1 to 1000 in British English.
///
fn main() {
let zero_to_ten = [0, 3, 3, 5, 4, 4, 3, 5, 5, 4, 3];
let eleven_to_nineteen = [0, 6, 6, 8, 8, 7, 7, 9, 8, 8];
let twenty_to_ninety = [0, 0, 6, 6, 5, 5, 5, 7, 6, 6];
let hundred = 7;
let thousand = 8;
|
let mut x: uint;
let mut remainder: uint;
let mut wholes: uint;
let mut sum: uint = zero_to_ten[1] + thousand;
for i in range(1, 1000) {
x = i;
if x > 99 {
wholes = x / 100u;
if x % 100 != 0 {
sum += zero_to_ten[wholes] + hundred + and
} else {
sum += zero_to_ten[wholes] + hundred;
continue;
}
x -= wholes * 100;
}
if x >= 10 && x < 20 {
remainder = x % 10;
match remainder {
1...9 => sum += eleven_to_nineteen[remainder],
0 => sum += zero_to_ten[10],
_ => panic!("Something went VERY wrong."),
}
continue;
}
wholes = x / 10u;
sum += twenty_to_ninety[wholes];
x -= wholes * 10;
sum += zero_to_ten[x];
}
println!("The number of letters is {}", sum);
}
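// Worked example of the lookup tables above (illustrative, not from the
// original source): 342 = "three hundred and forty-two" counts
// zero_to_ten[3] (5, "three") + hundred (7) + and (3)
// + twenty_to_ninety[4] (5, "forty") + zero_to_ten[2] (3, "two")
// = 23 letters, matching the Project Euler 17 statement that 342
// contains 23 letters.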
|
let and = 3;
|
random_line_split
|
problem17.rs
|
///
/// Computes how many letters there would be if we write out all the numbers
/// from 1 to 1000 in British English.
///
fn main()
|
} else {
sum += zero_to_ten[wholes] + hundred;
continue;
}
x -= wholes * 100;
}
if x >= 10 && x < 20 {
remainder = x % 10;
match remainder {
1...9 => sum += eleven_to_nineteen[remainder],
0 => sum += zero_to_ten[10],
_ => panic!("Something went VERY wrong."),
}
continue;
}
wholes = x / 10u;
sum += twenty_to_ninety[wholes];
x -= wholes * 10;
sum += zero_to_ten[x];
}
println!("The number of letters is {}", sum);
}
|
{
let zero_to_ten = [0, 3, 3, 5, 4, 4, 3, 5, 5, 4, 3];
let eleven_to_nineteen = [0, 6, 6, 8, 8, 7, 7, 9, 8, 8];
let twenty_to_ninety = [0, 0, 6, 6, 5, 5, 5, 7, 6, 6];
let hundred = 7;
let thousand = 8;
let and = 3;
let mut x: uint;
let mut remainder: uint;
let mut wholes: uint;
let mut sum: uint = zero_to_ten[1] + thousand;
for i in range(1, 1000) {
x = i;
if x > 99 {
wholes = x / 100u;
if x % 100 != 0 {
sum += zero_to_ten[wholes] + hundred + and
|
identifier_body
|
problem17.rs
|
///
/// Computes how many letters there would be if we write out all the numbers
/// from 1 to 1000 in British English.
///
fn
|
() {
let zero_to_ten = [0, 3, 3, 5, 4, 4, 3, 5, 5, 4, 3];
let eleven_to_nineteen = [0, 6, 6, 8, 8, 7, 7, 9, 8, 8];
let twenty_to_ninety = [0, 0, 6, 6, 5, 5, 5, 7, 6, 6];
let hundred = 7;
let thousand = 8;
let and = 3;
let mut x: uint;
let mut remainder: uint;
let mut wholes: uint;
let mut sum: uint = zero_to_ten[1] + thousand;
for i in range(1, 1000) {
x = i;
if x > 99 {
wholes = x / 100u;
if x % 100 != 0 {
sum += zero_to_ten[wholes] + hundred + and
} else {
sum += zero_to_ten[wholes] + hundred;
continue;
}
x -= wholes * 100;
}
if x >= 10 && x < 20 {
remainder = x % 10;
match remainder {
1...9 => sum += eleven_to_nineteen[remainder],
0 => sum += zero_to_ten[10],
_ => panic!("Something went VERY wrong."),
}
continue;
}
wholes = x / 10u;
sum += twenty_to_ninety[wholes];
x -= wholes * 10;
sum += zero_to_ten[x];
}
println!("The number of letters is {}", sum);
}
|
main
|
identifier_name
|
stdio.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Non-blocking access to stdin, stdout, and stderr.
//!
//! This module provides bindings to the local event loop's TTY interface, using it
//! to offer synchronous but non-blocking versions of stdio. These handles can be
//! inspected for information about terminal dimensions or for related information
//! about the stream or terminal to which it is attached.
//!
//! # Example
//!
//! ```rust
//! # #![allow(unused_must_use)]
//! use std::io;
//!
//! let mut out = io::stdout();
//! out.write(b"Hello, world!");
//! ```
use self::StdSource::*;
use boxed::Box;
use cell::RefCell;
use clone::Clone;
use failure::LOCAL_STDERR;
use fmt;
use io::{Reader, Writer, IoResult, IoError, OtherIoError, Buffer,
standard_error, EndOfFile, LineBufferedWriter, BufferedReader};
use marker::{Sync, Send};
use libc;
use mem;
use option::Option;
use option::Option::{Some, None};
use ops::{Deref, DerefMut, FnOnce};
use ptr;
use result::Result::{Ok, Err};
use rt;
use slice::SliceExt;
use str::StrExt;
use string::String;
use sys::{fs, tty};
use sync::{Arc, Mutex, MutexGuard, Once, ONCE_INIT};
use uint;
use vec::Vec;
// And so begins the tale of acquiring a uv handle to a stdio stream on all
// platforms in all situations. Our story begins by splitting the world into two
// categories, windows and unix. Then one day the creators of unix said let
// there be redirection! And henceforth there was redirection away from the
// console for standard I/O streams.
//
// After this day, the world split into four factions:
//
// 1. Unix with stdout on a terminal.
// 2. Unix with stdout redirected.
// 3. Windows with stdout on a terminal.
// 4. Windows with stdout redirected.
//
// Many years passed, and then one day the nation of libuv decided to unify this
// world. After months of toiling, uv created three ideas: TTY, Pipe, File.
// These three ideas propagated throughout the lands and the four great factions
// decided to settle among them.
//
// The groups of 1, 2, and 3 all worked very hard towards the idea of TTY. Upon
// doing so, they even enhanced themselves further than their Pipe/File
// brethren, becoming the dominant powers.
//
// The group of 4, however, decided to work independently. They abandoned the
// common TTY belief throughout, and even abandoned the fledgling Pipe belief.
// The members of the 4th faction decided to only align themselves with File.
//
// tl;dr: TTY works on everything but when windows stdout is redirected, in that
// case pipe also doesn't work, but magically file does!
enum StdSource {
TTY(tty::TTY),
File(fs::FileDesc),
}
fn src<T, F>(fd: libc::c_int, _readable: bool, f: F) -> T where
F: FnOnce(StdSource) -> T,
{
match tty::TTY::new(fd) {
Ok(tty) => f(TTY(tty)),
Err(_) => f(File(fs::FileDesc::new(fd, false))),
}
}
thread_local! {
static LOCAL_STDOUT: RefCell<Option<Box<Writer + Send>>> = {
RefCell::new(None)
}
}
struct RaceBox(BufferedReader<StdReader>);
unsafe impl Send for RaceBox {}
unsafe impl Sync for RaceBox {}
/// A synchronized wrapper around a buffered reader from stdin
#[derive(Clone)]
pub struct StdinReader {
inner: Arc<Mutex<RaceBox>>,
}
unsafe impl Send for StdinReader {}
unsafe impl Sync for StdinReader {}
/// A guard for exclusive access to `StdinReader`'s internal `BufferedReader`.
pub struct StdinReaderGuard<'a> {
inner: MutexGuard<'a, RaceBox>,
}
impl<'a> Deref for StdinReaderGuard<'a> {
type Target = BufferedReader<StdReader>;
fn deref(&self) -> &BufferedReader<StdReader> {
&self.inner.0
}
}
impl<'a> DerefMut for StdinReaderGuard<'a> {
fn deref_mut(&mut self) -> &mut BufferedReader<StdReader> {
&mut self.inner.0
}
}
impl StdinReader {
/// Locks the `StdinReader`, granting the calling thread exclusive access
/// to the underlying `BufferedReader`.
///
/// This provides access to methods like `chars` and `lines`.
///
/// # Examples
///
/// ```rust
/// use std::io;
///
/// for line in io::stdin().lock().lines() {
/// println!("{}", line.unwrap());
/// }
/// ```
pub fn lock<'a>(&'a mut self) -> StdinReaderGuard<'a> {
StdinReaderGuard {
inner: self.inner.lock().unwrap()
}
}
/// Like `Buffer::read_line`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_line(&mut self) -> IoResult<String> {
self.inner.lock().unwrap().0.read_line()
}
/// Like `Buffer::read_until`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_until(&mut self, byte: u8) -> IoResult<Vec<u8>> {
self.inner.lock().unwrap().0.read_until(byte)
}
/// Like `Buffer::read_char`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_char(&mut self) -> IoResult<char> {
self.inner.lock().unwrap().0.read_char()
}
}
impl Reader for StdinReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
self.inner.lock().unwrap().0.read(buf)
}
// We have to manually delegate all of these because the default impls call
// read more than once and we don't want those calls to interleave (or
// incur the costs of repeated locking).
fn read_at_least(&mut self, min: uint, buf: &mut [u8]) -> IoResult<uint> {
self.inner.lock().unwrap().0.read_at_least(min, buf)
}
fn push_at_least(&mut self, min: uint, len: uint, buf: &mut Vec<u8>) -> IoResult<uint> {
self.inner.lock().unwrap().0.push_at_least(min, len, buf)
}
fn read_to_end(&mut self) -> IoResult<Vec<u8>> {
self.inner.lock().unwrap().0.read_to_end()
}
fn read_le_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
self.inner.lock().unwrap().0.read_le_uint_n(nbytes)
}
fn read_be_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
self.inner.lock().unwrap().0.read_be_uint_n(nbytes)
}
}
/// Creates a new handle to the stdin of the current process.
///
/// The returned handle is a wrapper around a global `BufferedReader` shared
/// by all threads. If buffered access is not desired, the `stdin_raw` function
/// is provided to provide unbuffered access to stdin.
///
/// See `stdout()` for more notes about this function.
pub fn stdin() -> StdinReader {
// We're following the same strategy as kimundi's lazy_static library
static mut STDIN: *const StdinReader = 0 as *const StdinReader;
static ONCE: Once = ONCE_INIT;
unsafe {
ONCE.call_once(|| {
// The default buffer capacity is 64k, but apparently windows doesn't like
// 64k reads on stdin. See #13304 for details, but the idea is that on
// windows we use a slightly smaller buffer that's been seen to be
// acceptable.
let stdin = if cfg!(windows) {
BufferedReader::with_capacity(8 * 1024, stdin_raw())
} else {
BufferedReader::new(stdin_raw())
};
let stdin = StdinReader {
inner: Arc::new(Mutex::new(RaceBox(stdin)))
};
STDIN = mem::transmute(box stdin);
// Make sure to free it at exit
rt::at_exit(|| {
mem::transmute::<_, Box<StdinReader>>(STDIN);
STDIN = ptr::null();
});
});
(*STDIN).clone()
}
}
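// Hypothetical modern restatement, not part of this pre-1.0 module: the
// unsafe `static mut` + `Once` dance above is what current std spells as
// OnceLock. A sketch of the same "initialize once, then hand out clones"
// pattern (SHARED_STDIN and shared_stdin are invented names):
//
//     use std::sync::OnceLock;
//     static SHARED_STDIN: OnceLock<StdinReader> = OnceLock::new();
//     fn shared_stdin() -> StdinReader {
//         SHARED_STDIN
//             .get_or_init(|| StdinReader {
//                 inner: Arc::new(Mutex::new(RaceBox(BufferedReader::new(stdin_raw())))),
//             })
//             .clone()
//     }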
/// Creates a new non-blocking handle to the stdin of the current process.
///
/// Unlike `stdin()`, the returned reader is *not* a buffered reader.
///
/// See `stdout()` for more notes about this function.
pub fn stdin_raw() -> StdReader {
src(libc::STDIN_FILENO, true, |src| StdReader { inner: src })
}
/// Creates a line-buffered handle to the stdout of the current process.
///
/// Note that this is a fairly expensive operation in that at least one memory
/// allocation is performed. Additionally, this must be called from a runtime
/// task context because the stream returned will be a non-blocking object using
/// the local scheduler to perform the I/O.
///
/// Care should be taken when creating multiple handles to an output stream for
/// a single process. While usage is still safe, the output may be surprising if
/// no synchronization is performed to ensure a sane output.
pub fn stdout() -> LineBufferedWriter<StdWriter> {
LineBufferedWriter::new(stdout_raw())
}
/// Creates an unbuffered handle to the stdout of the current process
///
/// See notes in `stdout()` for more information.
pub fn stdout_raw() -> StdWriter {
src(libc::STDOUT_FILENO, false, |src| StdWriter { inner: src })
}
/// Creates a line-buffered handle to the stderr of the current process.
///
/// See `stdout()` for notes about this function.
pub fn stderr() -> LineBufferedWriter<StdWriter> {
LineBufferedWriter::new(stderr_raw())
}
/// Creates an unbuffered handle to the stderr of the current process
///
/// See notes in `stdout()` for more information.
pub fn stderr_raw() -> StdWriter {
src(libc::STDERR_FILENO, false, |src| StdWriter { inner: src })
}
/// Resets the task-local stdout handle to the specified writer
///
/// This will replace the current task's stdout handle, returning the old
/// handle. All future calls to `print` and friends will emit their output to
/// this specified handle.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stdout stream.
pub fn set_stdout(stdout: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
let mut new = Some(stdout);
LOCAL_STDOUT.with(|slot| {
mem::replace(&mut *slot.borrow_mut(), new.take())
}).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
/// Resets the task-local stderr handle to the specified writer
///
/// This will replace the current task's stderr handle, returning the old
/// handle. Currently, the stderr handle is used for printing panic messages
/// during task panic.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stderr stream.
pub fn set_stderr(stderr: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
let mut new = Some(stderr);
LOCAL_STDERR.with(|slot| {
mem::replace(&mut *slot.borrow_mut(), new.take())
}).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
// Helper to access the local task's stdout handle
//
// Note that this is not a safe function to expose because you can create an
// aliased pointer very easily:
//
// with_task_stdout(|io1| {
// with_task_stdout(|io2| {
// // io1 aliases io2
// })
// })
fn
|
<F>(f: F) where F: FnOnce(&mut Writer) -> IoResult<()> {
let mut my_stdout = LOCAL_STDOUT.with(|slot| {
slot.borrow_mut().take()
}).unwrap_or_else(|| {
box stdout() as Box<Writer + Send>
});
let result = f(&mut *my_stdout);
let mut var = Some(my_stdout);
LOCAL_STDOUT.with(|slot| {
*slot.borrow_mut() = var.take();
});
match result {
Ok(()) => {}
Err(e) => panic!("failed printing to stdout: {:?}", e),
}
}
/// Flushes the local task's stdout handle.
///
/// By default, this stream is a line-buffering stream, so flushing may be
/// necessary to ensure that all output is printed to the screen (if there are
/// no newlines printed).
///
/// Note that logging macros do not use this stream. Using the logging macros
/// will emit output to stderr, and while they are line buffered the log
/// messages are always terminated in a newline (no need to flush).
pub fn flush() {
with_task_stdout(|io| io.flush())
}
/// Prints a string to the stdout of the current process. No newline is emitted
/// after the string is printed.
pub fn print(s: &str) {
with_task_stdout(|io| io.write(s.as_bytes()))
}
/// Prints a string to the stdout of the current process. A literal
/// `\n` character is printed to the console after the string.
pub fn println(s: &str) {
with_task_stdout(|io| {
io.write(s.as_bytes()).and_then(|()| io.write(&[b'\n']))
})
}
/// Similar to `print`, but takes a `fmt::Arguments` structure to be compatible
/// with the `format_args!` macro.
pub fn print_args(fmt: fmt::Arguments) {
with_task_stdout(|io| write!(io, "{}", fmt))
}
/// Similar to `println`, but takes a `fmt::Arguments` structure to be
/// compatible with the `format_args!` macro.
pub fn println_args(fmt: fmt::Arguments) {
with_task_stdout(|io| writeln!(io, "{}", fmt))
}
/// Representation of a reader of a standard input stream
pub struct StdReader {
inner: StdSource
}
impl StdReader {
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Reader for StdReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let ret = match self.inner {
TTY(ref mut tty) => {
// Flush the task-local stdout so that weird issues, like a print!'d
// prompt not being shown until after the user hits enter, are avoided.
flush();
tty.read(buf).map(|i| i as uint)
},
File(ref mut file) => file.read(buf).map(|i| i as uint),
};
match ret {
// When reading a piped stdin, libuv will return 0-length reads when
// stdin reaches EOF. For pretty much all other streams it will
// return an actual EOF error, but apparently for stdin it's a
// little different. Hence, here we convert a 0 length read to an
// end-of-file indicator so the caller knows to stop reading.
Ok(0) => { Err(standard_error(EndOfFile)) }
ret @ Ok(..) | ret @ Err(..) => ret,
}
}
}
/// Representation of a writer to a standard output stream
pub struct StdWriter {
inner: StdSource
}
unsafe impl Send for StdWriter {}
unsafe impl Sync for StdWriter {}
impl StdWriter {
/// Gets the size of this output window, if possible. This is typically used
/// when the writer is attached to something like a terminal, this is used
/// to fetch the dimensions of the terminal.
///
/// If successful, returns `Ok((width, height))`.
///
/// # Error
///
/// This function will return an error if the output stream is not actually
/// connected to a TTY instance, or if querying the TTY instance fails.
pub fn winsize(&mut self) -> IoResult<(int, int)> {
match self.inner {
TTY(ref mut tty) => {
tty.get_winsize()
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Controls whether this output stream is a "raw stream" or simply a normal
/// stream.
///
/// # Error
///
/// This function will return an error if the output stream is not actually
/// connected to a TTY instance, or if querying the TTY instance fails.
pub fn set_raw(&mut self, raw: bool) -> IoResult<()> {
match self.inner {
TTY(ref mut tty) => {
tty.set_raw(raw)
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Writer for StdWriter {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
// As with stdin on windows, stdout often can't handle writes of large
// sizes. For an example, see #14940. For this reason, chunk the output
// buffer on windows, but on unix we can just write the whole buffer all
// at once.
//
// For some other references, it appears that this problem has been
// encountered by others [1] [2]. We choose the number 8KB just because
// libuv does the same.
//
// [1]: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232
// [2]: http://www.mail-archive.com/[email protected]/msg00661.html
let max_size = if cfg!(windows) {8192} else {uint::MAX};
for chunk in buf.chunks(max_size) {
try!(match self.inner {
TTY(ref mut tty) => tty.write(chunk),
File(ref mut file) => file.write(chunk),
})
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use super::*;
use sync::mpsc::channel;
use thread::Thread;
#[test]
fn smoke() {
// Just make sure we can acquire handles
stdin();
stdout();
stderr();
}
#[test]
fn capture_stdout() {
use io::{ChanReader, ChanWriter};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
let _t = Thread::spawn(move|| {
set_stdout(box w);
println!("hello!");
});
assert_eq!(r.read_to_string().unwrap(), "hello!\n");
}
#[test]
fn capture_stderr() {
use io::{ChanReader, ChanWriter, Reader};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
let _t = Thread::spawn(move || -> () {
set_stderr(box w);
panic!("my special message");
});
let s = r.read_to_string().unwrap();
assert!(s.contains("my special message"));
}
}
|
with_task_stdout
|
identifier_name
|
stdio.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Non-blocking access to stdin, stdout, and stderr.
//!
//! This module provides bindings to the local event loop's TTY interface, using it
//! to offer synchronous but non-blocking versions of stdio. These handles can be
//! inspected for information about terminal dimensions or for related information
//! about the stream or terminal to which it is attached.
//!
//! # Example
//!
//! ```rust
//! # #![allow(unused_must_use)]
//! use std::io;
//!
//! let mut out = io::stdout();
//! out.write(b"Hello, world!");
//! ```
use self::StdSource::*;
use boxed::Box;
use cell::RefCell;
use clone::Clone;
use failure::LOCAL_STDERR;
use fmt;
use io::{Reader, Writer, IoResult, IoError, OtherIoError, Buffer,
standard_error, EndOfFile, LineBufferedWriter, BufferedReader};
use marker::{Sync, Send};
use libc;
use mem;
use option::Option;
use option::Option::{Some, None};
use ops::{Deref, DerefMut, FnOnce};
use ptr;
use result::Result::{Ok, Err};
use rt;
use slice::SliceExt;
use str::StrExt;
use string::String;
use sys::{fs, tty};
use sync::{Arc, Mutex, MutexGuard, Once, ONCE_INIT};
use uint;
use vec::Vec;
// And so begins the tale of acquiring a uv handle to a stdio stream on all
// platforms in all situations. Our story begins by splitting the world into two
// categories, windows and unix. Then one day the creators of unix said let
// there be redirection! And henceforth there was redirection away from the
// console for standard I/O streams.
//
// After this day, the world split into four factions:
//
// 1. Unix with stdout on a terminal.
// 2. Unix with stdout redirected.
// 3. Windows with stdout on a terminal.
// 4. Windows with stdout redirected.
//
// Many years passed, and then one day the nation of libuv decided to unify this
// world. After months of toiling, uv created three ideas: TTY, Pipe, File.
// These three ideas propagated throughout the lands and the four great factions
// decided to settle among them.
//
// The groups of 1, 2, and 3 all worked very hard towards the idea of TTY. Upon
// doing so, they even enhanced themselves further than their Pipe/File
// brethren, becoming the dominant powers.
//
// The group of 4, however, decided to work independently. They abandoned the
// common TTY belief throughout, and even abandoned the fledgling Pipe belief.
// The members of the 4th faction decided to only align themselves with File.
//
// tl;dr: TTY works on everything but when windows stdout is redirected, in that
// case pipe also doesn't work, but magically file does!
enum StdSource {
TTY(tty::TTY),
File(fs::FileDesc),
}
fn src<T, F>(fd: libc::c_int, _readable: bool, f: F) -> T where
F: FnOnce(StdSource) -> T,
{
match tty::TTY::new(fd) {
Ok(tty) => f(TTY(tty)),
Err(_) => f(File(fs::FileDesc::new(fd, false))),
}
}
thread_local! {
static LOCAL_STDOUT: RefCell<Option<Box<Writer + Send>>> = {
RefCell::new(None)
}
}
struct RaceBox(BufferedReader<StdReader>);
unsafe impl Send for RaceBox {}
unsafe impl Sync for RaceBox {}
/// A synchronized wrapper around a buffered reader from stdin
#[derive(Clone)]
pub struct StdinReader {
inner: Arc<Mutex<RaceBox>>,
}
unsafe impl Send for StdinReader {}
unsafe impl Sync for StdinReader {}
/// A guard for exclusive access to `StdinReader`'s internal `BufferedReader`.
pub struct StdinReaderGuard<'a> {
inner: MutexGuard<'a, RaceBox>,
}
impl<'a> Deref for StdinReaderGuard<'a> {
type Target = BufferedReader<StdReader>;
fn deref(&self) -> &BufferedReader<StdReader> {
&self.inner.0
}
}
impl<'a> DerefMut for StdinReaderGuard<'a> {
fn deref_mut(&mut self) -> &mut BufferedReader<StdReader> {
&mut self.inner.0
}
}
impl StdinReader {
/// Locks the `StdinReader`, granting the calling thread exclusive access
/// to the underlying `BufferedReader`.
///
/// This provides access to methods like `chars` and `lines`.
///
/// # Examples
///
/// ```rust
/// use std::io;
///
/// for line in io::stdin().lock().lines() {
/// println!("{}", line.unwrap());
/// }
/// ```
pub fn lock<'a>(&'a mut self) -> StdinReaderGuard<'a> {
StdinReaderGuard {
inner: self.inner.lock().unwrap()
}
}
/// Like `Buffer::read_line`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_line(&mut self) -> IoResult<String> {
self.inner.lock().unwrap().0.read_line()
}
/// Like `Buffer::read_until`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_until(&mut self, byte: u8) -> IoResult<Vec<u8>> {
self.inner.lock().unwrap().0.read_until(byte)
}
/// Like `Buffer::read_char`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_char(&mut self) -> IoResult<char> {
self.inner.lock().unwrap().0.read_char()
}
}
impl Reader for StdinReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
self.inner.lock().unwrap().0.read(buf)
}
// We have to manually delegate all of these because the default impls call
// read more than once and we don't want those calls to interleave (or
// incur the costs of repeated locking).
fn read_at_least(&mut self, min: uint, buf: &mut [u8]) -> IoResult<uint> {
self.inner.lock().unwrap().0.read_at_least(min, buf)
}
fn push_at_least(&mut self, min: uint, len: uint, buf: &mut Vec<u8>) -> IoResult<uint> {
self.inner.lock().unwrap().0.push_at_least(min, len, buf)
}
fn read_to_end(&mut self) -> IoResult<Vec<u8>> {
self.inner.lock().unwrap().0.read_to_end()
}
fn read_le_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
self.inner.lock().unwrap().0.read_le_uint_n(nbytes)
}
fn read_be_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
self.inner.lock().unwrap().0.read_be_uint_n(nbytes)
}
}
/// Creates a new handle to the stdin of the current process.
///
/// The returned handle is a wrapper around a global `BufferedReader` shared
/// by all threads. If buffered access is not desired, the `stdin_raw` function
/// is provided to provide unbuffered access to stdin.
///
/// See `stdout()` for more notes about this function.
pub fn stdin() -> StdinReader {
// We're following the same strategy as kimundi's lazy_static library
static mut STDIN: *const StdinReader = 0 as *const StdinReader;
static ONCE: Once = ONCE_INIT;
unsafe {
ONCE.call_once(|| {
// The default buffer capacity is 64k, but apparently windows doesn't like
// 64k reads on stdin. See #13304 for details, but the idea is that on
// windows we use a slightly smaller buffer that's been seen to be
// acceptable.
let stdin = if cfg!(windows) {
BufferedReader::with_capacity(8 * 1024, stdin_raw())
} else {
BufferedReader::new(stdin_raw())
};
let stdin = StdinReader {
inner: Arc::new(Mutex::new(RaceBox(stdin)))
};
STDIN = mem::transmute(box stdin);
// Make sure to free it at exit
rt::at_exit(|| {
mem::transmute::<_, Box<StdinReader>>(STDIN);
STDIN = ptr::null();
});
});
(*STDIN).clone()
}
}
/// Creates a new non-blocking handle to the stdin of the current process.
///
/// Unlike `stdin()`, the returned reader is *not* a buffered reader.
///
/// See `stdout()` for more notes about this function.
pub fn stdin_raw() -> StdReader {
src(libc::STDIN_FILENO, true, |src| StdReader { inner: src })
}
/// Creates a line-buffered handle to the stdout of the current process.
///
/// Note that this is a fairly expensive operation in that at least one memory
/// allocation is performed. Additionally, this must be called from a runtime
/// task context because the stream returned will be a non-blocking object using
/// the local scheduler to perform the I/O.
///
/// Care should be taken when creating multiple handles to an output stream for
/// a single process. While usage is still safe, the output may be surprising if
/// no synchronization is performed to ensure a sane output.
pub fn stdout() -> LineBufferedWriter<StdWriter> {
LineBufferedWriter::new(stdout_raw())
}
/// Creates an unbuffered handle to the stdout of the current process
///
/// See notes in `stdout()` for more information.
pub fn stdout_raw() -> StdWriter {
src(libc::STDOUT_FILENO, false, |src| StdWriter { inner: src })
}
/// Creates a line-buffered handle to the stderr of the current process.
///
/// See `stdout()` for notes about this function.
pub fn stderr() -> LineBufferedWriter<StdWriter>
|
/// Creates an unbuffered handle to the stderr of the current process
///
/// See notes in `stdout()` for more information.
pub fn stderr_raw() -> StdWriter {
src(libc::STDERR_FILENO, false, |src| StdWriter { inner: src })
}
/// Resets the task-local stdout handle to the specified writer
///
/// This will replace the current task's stdout handle, returning the old
/// handle. All future calls to `print` and friends will emit their output to
/// this specified handle.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stdout stream.
pub fn set_stdout(stdout: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
let mut new = Some(stdout);
LOCAL_STDOUT.with(|slot| {
mem::replace(&mut *slot.borrow_mut(), new.take())
}).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
/// Resets the task-local stderr handle to the specified writer
///
/// This will replace the current task's stderr handle, returning the old
/// handle. Currently, the stderr handle is used for printing panic messages
/// during task panic.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stderr stream.
pub fn set_stderr(stderr: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
let mut new = Some(stderr);
LOCAL_STDERR.with(|slot| {
mem::replace(&mut *slot.borrow_mut(), new.take())
}).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
// Helper to access the local task's stdout handle
//
// Note that this is not a safe function to expose because you can create an
// aliased pointer very easily:
//
// with_task_stdout(|io1| {
// with_task_stdout(|io2| {
// // io1 aliases io2
// })
// })
fn with_task_stdout<F>(f: F) where F: FnOnce(&mut Writer) -> IoResult<()> {
let mut my_stdout = LOCAL_STDOUT.with(|slot| {
slot.borrow_mut().take()
}).unwrap_or_else(|| {
box stdout() as Box<Writer + Send>
});
let result = f(&mut *my_stdout);
let mut var = Some(my_stdout);
LOCAL_STDOUT.with(|slot| {
*slot.borrow_mut() = var.take();
});
match result {
Ok(()) => {}
Err(e) => panic!("failed printing to stdout: {:?}", e),
}
}
/// Flushes the local task's stdout handle.
///
/// By default, this stream is a line-buffering stream, so flushing may be
/// necessary to ensure that all output is printed to the screen (if there are
/// no newlines printed).
///
/// Note that logging macros do not use this stream. Using the logging macros
/// will emit output to stderr, and while they are line buffered the log
/// messages are always terminated in a newline (no need to flush).
pub fn flush() {
with_task_stdout(|io| io.flush())
}
/// Prints a string to the stdout of the current process. No newline is emitted
/// after the string is printed.
pub fn print(s: &str) {
with_task_stdout(|io| io.write(s.as_bytes()))
}
/// Prints a string to the stdout of the current process. A literal
/// `\n` character is printed to the console after the string.
pub fn println(s: &str) {
with_task_stdout(|io| {
io.write(s.as_bytes()).and_then(|()| io.write(&[b'\n']))
})
}
/// Similar to `print`, but takes a `fmt::Arguments` structure to be compatible
/// with the `format_args!` macro.
pub fn print_args(fmt: fmt::Arguments) {
with_task_stdout(|io| write!(io, "{}", fmt))
}
/// Similar to `println`, but takes a `fmt::Arguments` structure to be
/// compatible with the `format_args!` macro.
pub fn println_args(fmt: fmt::Arguments) {
with_task_stdout(|io| writeln!(io, "{}", fmt))
}
/// Representation of a reader of a standard input stream
pub struct StdReader {
inner: StdSource
}
impl StdReader {
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Reader for StdReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let ret = match self.inner {
TTY(ref mut tty) => {
// Flush the task-local stdout so that weird issues, like a print!'d
// prompt not being shown until after the user hits enter, are avoided.
flush();
tty.read(buf).map(|i| i as uint)
},
File(ref mut file) => file.read(buf).map(|i| i as uint),
};
match ret {
// When reading a piped stdin, libuv will return 0-length reads when
// stdin reaches EOF. For pretty much all other streams it will
// return an actual EOF error, but apparently for stdin it's a
// little different. Hence, here we convert a 0 length read to an
// end-of-file indicator so the caller knows to stop reading.
Ok(0) => { Err(standard_error(EndOfFile)) }
ret @ Ok(..) | ret @ Err(..) => ret,
}
}
}
/// Representation of a writer to a standard output stream
pub struct StdWriter {
inner: StdSource
}
unsafe impl Send for StdWriter {}
unsafe impl Sync for StdWriter {}
impl StdWriter {
/// Gets the size of this output window, if possible. This is typically used
/// when the writer is attached to something like a terminal, this is used
/// to fetch the dimensions of the terminal.
///
/// If successful, returns `Ok((width, height))`.
///
/// # Error
///
/// This function will return an error if the output stream is not actually
/// connected to a TTY instance, or if querying the TTY instance fails.
pub fn winsize(&mut self) -> IoResult<(int, int)> {
match self.inner {
TTY(ref mut tty) => {
tty.get_winsize()
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Controls whether this output stream is a "raw stream" or simply a normal
/// stream.
///
/// # Error
///
/// This function will return an error if the output stream is not actually
/// connected to a TTY instance, or if querying the TTY instance fails.
pub fn set_raw(&mut self, raw: bool) -> IoResult<()> {
match self.inner {
TTY(ref mut tty) => {
tty.set_raw(raw)
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Writer for StdWriter {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
// As with stdin on windows, stdout often can't handle writes of large
// sizes. For an example, see #14940. For this reason, chunk the output
// buffer on windows, but on unix we can just write the whole buffer all
// at once.
//
// For some other references, it appears that this problem has been
// encountered by others [1] [2]. We choose the number 8KB just because
// libuv does the same.
//
// [1]: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232
// [2]: http://www.mail-archive.com/[email protected]/msg00661.html
let max_size = if cfg!(windows) {8192} else {uint::MAX};
for chunk in buf.chunks(max_size) {
try!(match self.inner {
TTY(ref mut tty) => tty.write(chunk),
File(ref mut file) => file.write(chunk),
})
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use super::*;
use sync::mpsc::channel;
use thread::Thread;
#[test]
fn smoke() {
// Just make sure we can acquire handles
stdin();
stdout();
stderr();
}
#[test]
fn capture_stdout() {
use io::{ChanReader, ChanWriter};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
let _t = Thread::spawn(move|| {
set_stdout(box w);
println!("hello!");
});
assert_eq!(r.read_to_string().unwrap(), "hello!\n");
}
#[test]
fn capture_stderr() {
use io::{ChanReader, ChanWriter, Reader};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
let _t = Thread::spawn(move || -> () {
set_stderr(box w);
panic!("my special message");
});
let s = r.read_to_string().unwrap();
assert!(s.contains("my special message"));
}
}
|
{
LineBufferedWriter::new(stderr_raw())
}
|
identifier_body
|
stdio.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Non-blocking access to stdin, stdout, and stderr.
//!
//! This module provides bindings to the local event loop's TTY interface, using it
//! to offer synchronous but non-blocking versions of stdio. These handles can be
//! inspected for information about terminal dimensions or for related information
//! about the stream or terminal to which it is attached.
//!
//! # Example
//!
//! ```rust
//! # #![allow(unused_must_use)]
//! use std::io;
//!
//! let mut out = io::stdout();
//! out.write(b"Hello, world!");
//! ```
use self::StdSource::*;
use boxed::Box;
use cell::RefCell;
use clone::Clone;
use failure::LOCAL_STDERR;
use fmt;
use io::{Reader, Writer, IoResult, IoError, OtherIoError, Buffer,
standard_error, EndOfFile, LineBufferedWriter, BufferedReader};
use marker::{Sync, Send};
use libc;
use mem;
use option::Option;
use option::Option::{Some, None};
use ops::{Deref, DerefMut, FnOnce};
use ptr;
use result::Result::{Ok, Err};
use rt;
use slice::SliceExt;
use str::StrExt;
use string::String;
use sys::{fs, tty};
use sync::{Arc, Mutex, MutexGuard, Once, ONCE_INIT};
use uint;
use vec::Vec;
// And so begins the tale of acquiring a uv handle to a stdio stream on all
// platforms in all situations. Our story begins by splitting the world into two
// categories, windows and unix. Then one day the creators of unix said let
// there be redirection! And henceforth there was redirection away from the
// console for standard I/O streams.
//
// After this day, the world split into four factions:
//
// 1. Unix with stdout on a terminal.
// 2. Unix with stdout redirected.
// 3. Windows with stdout on a terminal.
// 4. Windows with stdout redirected.
//
// Many years passed, and then one day the nation of libuv decided to unify this
// world. After months of toiling, uv created three ideas: TTY, Pipe, File.
// These three ideas propagated throughout the lands and the four great factions
// decided to settle among them.
//
// The groups of 1, 2, and 3 all worked very hard towards the idea of TTY. Upon
// doing so, they even enhanced themselves further than their Pipe/File
// brethren, becoming the dominant powers.
//
// The group of 4, however, decided to work independently. They abandoned the
// common TTY belief throughout, and even abandoned the fledgling Pipe belief.
// The members of the 4th faction decided to only align themselves with File.
//
// tl;dr: TTY works on everything but when windows stdout is redirected, in that
// case pipe also doesn't work, but magically file does!
enum StdSource {
TTY(tty::TTY),
File(fs::FileDesc),
}
fn src<T, F>(fd: libc::c_int, _readable: bool, f: F) -> T where
F: FnOnce(StdSource) -> T,
{
match tty::TTY::new(fd) {
Ok(tty) => f(TTY(tty)),
Err(_) => f(File(fs::FileDesc::new(fd, false))),
}
}
thread_local! {
static LOCAL_STDOUT: RefCell<Option<Box<Writer + Send>>> = {
RefCell::new(None)
}
}
struct RaceBox(BufferedReader<StdReader>);
unsafe impl Send for RaceBox {}
unsafe impl Sync for RaceBox {}
/// A synchronized wrapper around a buffered reader from stdin
#[derive(Clone)]
pub struct StdinReader {
inner: Arc<Mutex<RaceBox>>,
}
unsafe impl Send for StdinReader {}
unsafe impl Sync for StdinReader {}
/// A guard for exclusive access to `StdinReader`'s internal `BufferedReader`.
pub struct StdinReaderGuard<'a> {
inner: MutexGuard<'a, RaceBox>,
}
impl<'a> Deref for StdinReaderGuard<'a> {
type Target = BufferedReader<StdReader>;
fn deref(&self) -> &BufferedReader<StdReader> {
&self.inner.0
}
}
impl<'a> DerefMut for StdinReaderGuard<'a> {
fn deref_mut(&mut self) -> &mut BufferedReader<StdReader> {
&mut self.inner.0
}
}
impl StdinReader {
/// Locks the `StdinReader`, granting the calling thread exclusive access
/// to the underlying `BufferedReader`.
///
/// This provides access to methods like `chars` and `lines`.
///
/// # Examples
///
/// ```rust
/// use std::io;
///
/// for line in io::stdin().lock().lines() {
/// println!("{}", line.unwrap());
/// }
/// ```
pub fn lock<'a>(&'a mut self) -> StdinReaderGuard<'a> {
StdinReaderGuard {
inner: self.inner.lock().unwrap()
}
}
/// Like `Buffer::read_line`.
///
|
pub fn read_line(&mut self) -> IoResult<String> {
self.inner.lock().unwrap().0.read_line()
}
/// Like `Buffer::read_until`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_until(&mut self, byte: u8) -> IoResult<Vec<u8>> {
self.inner.lock().unwrap().0.read_until(byte)
}
/// Like `Buffer::read_char`.
///
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
pub fn read_char(&mut self) -> IoResult<char> {
self.inner.lock().unwrap().0.read_char()
}
}
impl Reader for StdinReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
self.inner.lock().unwrap().0.read(buf)
}
// We have to manually delegate all of these because the default impls call
// read more than once and we don't want those calls to interleave (or
// incur the costs of repeated locking).
fn read_at_least(&mut self, min: uint, buf: &mut [u8]) -> IoResult<uint> {
self.inner.lock().unwrap().0.read_at_least(min, buf)
}
fn push_at_least(&mut self, min: uint, len: uint, buf: &mut Vec<u8>) -> IoResult<uint> {
self.inner.lock().unwrap().0.push_at_least(min, len, buf)
}
fn read_to_end(&mut self) -> IoResult<Vec<u8>> {
self.inner.lock().unwrap().0.read_to_end()
}
fn read_le_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
self.inner.lock().unwrap().0.read_le_uint_n(nbytes)
}
fn read_be_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
self.inner.lock().unwrap().0.read_be_uint_n(nbytes)
}
}
/// Creates a new handle to the stdin of the current process.
///
/// The returned handle is a wrapper around a global `BufferedReader` shared
/// by all threads. If buffered access is not desired, the `stdin_raw` function
/// is provided to provide unbuffered access to stdin.
///
/// See `stdout()` for more notes about this function.
pub fn stdin() -> StdinReader {
// We're following the same strategy as kimundi's lazy_static library
static mut STDIN: *const StdinReader = 0 as *const StdinReader;
static ONCE: Once = ONCE_INIT;
unsafe {
ONCE.call_once(|| {
// The default buffer capacity is 64k, but apparently windows doesn't like
// 64k reads on stdin. See #13304 for details, but the idea is that on
// windows we use a slightly smaller buffer that's been seen to be
// acceptable.
let stdin = if cfg!(windows) {
BufferedReader::with_capacity(8 * 1024, stdin_raw())
} else {
BufferedReader::new(stdin_raw())
};
let stdin = StdinReader {
inner: Arc::new(Mutex::new(RaceBox(stdin)))
};
STDIN = mem::transmute(box stdin);
// Make sure to free it at exit
rt::at_exit(|| {
mem::transmute::<_, Box<StdinReader>>(STDIN);
STDIN = ptr::null();
});
});
(*STDIN).clone()
}
}
/// Creates a new non-blocking handle to the stdin of the current process.
///
/// Unlike `stdin()`, the returned reader is *not* a buffered reader.
///
/// See `stdout()` for more notes about this function.
pub fn stdin_raw() -> StdReader {
src(libc::STDIN_FILENO, true, |src| StdReader { inner: src })
}
/// Creates a line-buffered handle to the stdout of the current process.
///
/// Note that this is a fairly expensive operation in that at least one memory
/// allocation is performed. Additionally, this must be called from a runtime
/// task context because the stream returned will be a non-blocking object using
/// the local scheduler to perform the I/O.
///
/// Care should be taken when creating multiple handles to an output stream for
/// a single process. While usage is still safe, the output may be surprising if
/// no synchronization is performed to ensure a sane output.
pub fn stdout() -> LineBufferedWriter<StdWriter> {
LineBufferedWriter::new(stdout_raw())
}
/// Creates an unbuffered handle to the stdout of the current process
///
/// See notes in `stdout()` for more information.
pub fn stdout_raw() -> StdWriter {
src(libc::STDOUT_FILENO, false, |src| StdWriter { inner: src })
}
/// Creates a line-buffered handle to the stderr of the current process.
///
/// See `stdout()` for notes about this function.
pub fn stderr() -> LineBufferedWriter<StdWriter> {
LineBufferedWriter::new(stderr_raw())
}
/// Creates an unbuffered handle to the stderr of the current process
///
/// See notes in `stdout()` for more information.
pub fn stderr_raw() -> StdWriter {
src(libc::STDERR_FILENO, false, |src| StdWriter { inner: src })
}
/// Resets the task-local stdout handle to the specified writer
///
/// This will replace the current task's stdout handle, returning the old
/// handle. All future calls to `print` and friends will emit their output to
/// this specified handle.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stdout stream.
pub fn set_stdout(stdout: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
let mut new = Some(stdout);
LOCAL_STDOUT.with(|slot| {
mem::replace(&mut *slot.borrow_mut(), new.take())
}).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
/// Resets the task-local stderr handle to the specified writer
///
/// This will replace the current task's stderr handle, returning the old
/// handle. Currently, the stderr handle is used for printing panic messages
/// during task panic.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stderr stream.
pub fn set_stderr(stderr: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
let mut new = Some(stderr);
LOCAL_STDERR.with(|slot| {
mem::replace(&mut *slot.borrow_mut(), new.take())
}).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
// Helper to access the local task's stdout handle
//
// Note that this is not a safe function to expose because you can create an
// aliased pointer very easily:
//
// with_task_stdout(|io1| {
// with_task_stdout(|io2| {
// // io1 aliases io2
// })
// })
fn with_task_stdout<F>(f: F) where F: FnOnce(&mut Writer) -> IoResult<()> {
let mut my_stdout = LOCAL_STDOUT.with(|slot| {
slot.borrow_mut().take()
}).unwrap_or_else(|| {
box stdout() as Box<Writer + Send>
});
let result = f(&mut *my_stdout);
let mut var = Some(my_stdout);
LOCAL_STDOUT.with(|slot| {
*slot.borrow_mut() = var.take();
});
match result {
Ok(()) => {}
Err(e) => panic!("failed printing to stdout: {:?}", e),
}
}
/// Flushes the local task's stdout handle.
///
/// By default, this stream is a line-buffering stream, so flushing may be
/// necessary to ensure that all output is printed to the screen (if there are
/// no newlines printed).
///
/// Note that logging macros do not use this stream. Using the logging macros
/// will emit output to stderr, and while they are line buffered the log
/// messages are always terminated in a newline (no need to flush).
pub fn flush() {
with_task_stdout(|io| io.flush())
}
/// Prints a string to the stdout of the current process. No newline is emitted
/// after the string is printed.
pub fn print(s: &str) {
with_task_stdout(|io| io.write(s.as_bytes()))
}
/// Prints a string to the stdout of the current process. A literal
/// `\n` character is printed to the console after the string.
pub fn println(s: &str) {
with_task_stdout(|io| {
io.write(s.as_bytes()).and_then(|()| io.write(&[b'\n']))
})
}
/// Similar to `print`, but takes a `fmt::Arguments` structure to be compatible
/// with the `format_args!` macro.
pub fn print_args(fmt: fmt::Arguments) {
with_task_stdout(|io| write!(io, "{}", fmt))
}
/// Similar to `println`, but takes a `fmt::Arguments` structure to be
/// compatible with the `format_args!` macro.
pub fn println_args(fmt: fmt::Arguments) {
with_task_stdout(|io| writeln!(io, "{}", fmt))
}
/// Representation of a reader of a standard input stream
pub struct StdReader {
inner: StdSource
}
impl StdReader {
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Reader for StdReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let ret = match self.inner {
TTY(ref mut tty) => {
// Flush the task-local stdout so that weird issues, like a print!'d
// prompt not being shown until after the user hits enter, are avoided.
flush();
tty.read(buf).map(|i| i as uint)
},
File(ref mut file) => file.read(buf).map(|i| i as uint),
};
match ret {
// When reading a piped stdin, libuv will return 0-length reads when
// stdin reaches EOF. For pretty much all other streams it will
// return an actual EOF error, but apparently for stdin it's a
// little different. Hence, here we convert a 0 length read to an
// end-of-file indicator so the caller knows to stop reading.
Ok(0) => { Err(standard_error(EndOfFile)) }
ret @ Ok(..) | ret @ Err(..) => ret,
}
}
}
/// Representation of a writer to a standard output stream
pub struct StdWriter {
inner: StdSource
}
unsafe impl Send for StdWriter {}
unsafe impl Sync for StdWriter {}
impl StdWriter {
/// Gets the size of this output window, if possible. This is typically used
/// when the writer is attached to something like a terminal, this is used
/// to fetch the dimensions of the terminal.
///
/// If successful, returns `Ok((width, height))`.
///
/// # Error
///
/// This function will return an error if the output stream is not actually
/// connected to a TTY instance, or if querying the TTY instance fails.
pub fn winsize(&mut self) -> IoResult<(int, int)> {
match self.inner {
TTY(ref mut tty) => {
tty.get_winsize()
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Controls whether this output stream is a "raw stream" or simply a normal
/// stream.
///
/// # Error
///
    /// This function will return an error if the output stream is not actually
    /// connected to a TTY instance, or if reconfiguring the TTY instance fails.
pub fn set_raw(&mut self, raw: bool) -> IoResult<()> {
match self.inner {
TTY(ref mut tty) => {
tty.set_raw(raw)
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Writer for StdWriter {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
// As with stdin on windows, stdout often can't handle writes of large
// sizes. For an example, see #14940. For this reason, chunk the output
// buffer on windows, but on unix we can just write the whole buffer all
// at once.
//
// For some other references, it appears that this problem has been
// encountered by others [1] [2]. We choose the number 8KB just because
// libuv does the same.
//
// [1]: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232
// [2]: http://www.mail-archive.com/[email protected]/msg00661.html
        let max_size = if cfg!(windows) { 8192 } else { uint::MAX };
for chunk in buf.chunks(max_size) {
try!(match self.inner {
TTY(ref mut tty) => tty.write(chunk),
File(ref mut file) => file.write(chunk),
})
}
Ok(())
}
}
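// Added sketch of the chunking above: with a small `max_size`, `chunks` yields
// successive sub-slices, each of which would be handed to the underlying
// tty/file writer.
fn example_chunks() {
    let data = [1u8, 2, 3, 4, 5];
    for chunk in data.chunks(2) {
        // iterations see [1, 2], [3, 4], then [5]
        let _ = chunk;
    }
}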
#[cfg(test)]
mod tests {
use prelude::v1::*;
use super::*;
use sync::mpsc::channel;
use thread::Thread;
#[test]
fn smoke() {
// Just make sure we can acquire handles
stdin();
stdout();
stderr();
}
#[test]
fn capture_stdout() {
use io::{ChanReader, ChanWriter};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
        let _t = Thread::spawn(move || {
set_stdout(box w);
println!("hello!");
});
assert_eq!(r.read_to_string().unwrap(), "hello!\n");
}
#[test]
fn capture_stderr() {
use io::{ChanReader, ChanWriter, Reader};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
let _t = Thread::spawn(move || -> () {
set_stderr(box w);
panic!("my special message");
});
let s = r.read_to_string().unwrap();
assert!(s.contains("my special message"));
}
}
|
/// The read is performed atomically - concurrent read calls in other
/// threads will not interleave with this one.
|
random_line_split
|
util.rs
|
//! Misc. helper functions and utilities used in multiple parts of the application.
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::str::FromStr;
use itertools::Itertools;
use serde_json;
use super::{CompositionTree, CompositionTreeNode, CompositionTreeNodeDefinition, MasterConf};
use color_schemes::ColorFunction;
use conf::{map_setting_to_type, NoiseModuleConf, SettingType};
use ir::{IrNode, IrSetting};
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum
|
{
X,
Y,
Z,
}
impl FromStr for Dim {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"X" | "x" => Ok(Dim::X),
"Y" | "y" => Ok(Dim::Y),
"Z" | "z" => Ok(Dim::Z),
_ => Err(format!("Can't convert supplied string to `Dim`: {}", s)),
}
}
}
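// Added sketch: with the `FromStr` impl above, a dimension parses directly.
fn example_parse_dim() -> Result<Dim, String> {
    "y".parse::<Dim>() // accepts "X"/"x", "Y"/"y", and "Z"/"z"
}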
/// Attempts to locate a child node among the children of a node and convert it into an internal definition.
pub fn build_child<T>(children: &[IrNode], child_type: &str) -> Result<T, T::Error>
where
T: TryFrom<IrNode>,
T::Error: From<String>,
{
children
.iter()
.find(|child| child._type == child_type)
.map_or(
Err(format!("No child of type `\"{}\" found for node!", child_type).into()),
|child| child.clone().try_into(),
)
}
/// Searches through a slice of `IrSetting`s provided to a node and attempts to find the setting with the supplied name.
pub fn find_setting_by_name(name: &str, settings: &[IrSetting]) -> Result<String, String> {
Ok(settings
.iter()
.find(|&&IrSetting { ref key,.. }| key == name)
.ok_or(String::from(
"No `moduleType` setting provided to node of type `noiseModule`!",
))?.value
.clone())
}
/// Attempts to find the setting with the supplied key in the settings slice and parse its value into a `T`.
pub fn convert_setting<T>(key: &str, settings: &[IrSetting]) -> Result<T, String>
where
T: FromStr,
{
    let raw_val = find_setting_by_name(key, settings)?;
raw_val
.parse()
.map_err(|_| format!("Unable to convert value from string: {}", raw_val))
}
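// Added sketch (hypothetical call site): `convert_setting` combines the lookup
// with `FromStr`, so a numeric setting parses in one step.
fn example_frequency(settings: &[IrSetting]) -> Result<f32, String> {
    convert_setting::<f32>("frequency", settings)
}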
fn build_noise_module_conf(
setting_type: SettingType,
settings: &[IrSetting],
) -> Result<NoiseModuleConf, String> {
Ok(match setting_type {
SettingType::MultiFractal => NoiseModuleConf::MultiFractal {
frequency: convert_setting("frequency", settings)?,
octaves: convert_setting("octaves", settings)?,
lacunarity: convert_setting("lacunarity", settings)?,
persistence: convert_setting("persistence", settings)?,
},
SettingType::Seedable => NoiseModuleConf::Seedable {
seed: convert_setting("seed", settings)?,
},
SettingType::Worley => NoiseModuleConf::Worley {
displacement: convert_setting("displacement", settings)?,
range_function: convert_setting("rangeFunction", settings)?,
range_function_enabled: convert_setting("enableRange", settings)?,
worley_frequency: convert_setting("worleyFrequency", settings)?,
},
SettingType::Constant => NoiseModuleConf::Constant {
constant: convert_setting("constant", settings)?,
},
SettingType::RidgedMulti => NoiseModuleConf::RidgedMulti {
attenuation: convert_setting("attenuation", settings)?,
},
SettingType::MasterConf => NoiseModuleConf::MasterConf {
speed: convert_setting("speed", settings)?,
zoom: convert_setting("zoom", settings)?,
},
})
}
/// Converts the array of settings provided to a noise module into an array of `NoiseModuleConf`s that can be used to
/// configure the noise module.
pub fn build_noise_module_settings(
settings: Vec<IrSetting>,
) -> Result<Vec<NoiseModuleConf>, String> {
// collection to hold partially matched settings as we iterate through the list.
let mut matched_settings: HashMap<SettingType, Vec<IrSetting>> = HashMap::new();
// loop through the settings and group together those that are of the same type
for setting in settings {
let setting_type: SettingType = match map_setting_to_type(&setting.key) {
Err(Some(err)) => return Err(err),
Err(None) => {
continue;
}
Ok(setting_type) => setting_type,
};
// create a new entry if no entry exists or add to existing list if one does
matched_settings
.entry(setting_type)
            .or_insert_with(|| Vec::with_capacity(1))
.push(setting);
}
// map the `HashMap`'s values into `NoiseModuleConf`s
let setting_count = matched_settings.len();
matched_settings
.into_iter()
.map(|(setting_type, settings)| build_noise_module_conf(setting_type, &settings))
.fold_results(Vec::with_capacity(setting_count), |mut acc, item| {
acc.push(item);
acc
})
}
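// Added sketch of the grouping step above in miniature: the `entry` API either
// creates a bucket for a new key or appends to an existing one.
fn example_group(items: Vec<(String, u32)>) -> HashMap<String, Vec<u32>> {
    let mut groups: HashMap<String, Vec<u32>> = HashMap::new();
    for (key, val) in items {
        groups.entry(key).or_insert_with(Vec::new).push(val);
    }
    groups
}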
/// Given a definition string, produces an entirely new composition tree from scratch.
pub fn build_tree_from_def(def: &str) -> Result<(ColorFunction, CompositionTree), String> {
// attempt to parse the provided IR definition into an `IrNode`
let ir_root_node_def: IrNode = serde_json::from_str::<IrNode>(def)
.map_err(|_| "Error while parsing the provided definition string!".to_string())?;
// find the global conf node in the IR tree and build it into a `MasterConf`.
    // also pull off the color scheme string and build it into a `ColorFunction`.
let (global_conf, color_fn): (MasterConf, ColorFunction) = {
let ir_global_conf = ir_root_node_def
.children
.iter()
.find(|node| node._type.as_str() == "globalConf")
.ok_or(String::from(
"Supplied definition string doesn't contain a `globalConf` node!",
))?;
let global_conf = ir_global_conf.clone().try_into().map_err(|err| {
format!(
"Unable to convert IR global conf into `GlobalConf`: {}",
err
)
})?;
let color_fn_string = find_setting_by_name("colorFunction", &ir_global_conf.settings)
.map_err(|_| {
String::from("No `colorFunction` setting included in provided `globalConf` node!")
})?;
let color_fn = ColorFunction::from_str(&color_fn_string)?;
(global_conf, color_fn)
};
// and then convert that into a `CompositionTreeNodeDefinition`
let root_node_def: CompositionTreeNodeDefinition = ir_root_node_def.try_into()?;
// build the definition into a proper `CompositionTreeNode`.
let root_node: CompositionTreeNode = root_node_def.into();
// create the full `CompositionTree` from the root node and the global configuration
Ok((
color_fn,
CompositionTree {
root_node,
global_conf,
},
))
}
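// Added sketch (hypothetical definition string): a root node without a
// `globalConf` child cannot produce a tree, so the call returns an error.
fn example_missing_global_conf() {
    let def = r#"{"_type": "root", "settings": [], "children": []}"#;
    assert!(build_tree_from_def(def).is_err());
}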
|
Dim
|
identifier_name
|
util.rs
|
//! Misc. helper functions and utilities used in multiple parts of the application.
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::str::FromStr;
use itertools::Itertools;
use serde_json;
use super::{CompositionTree, CompositionTreeNode, CompositionTreeNodeDefinition, MasterConf};
use color_schemes::ColorFunction;
use conf::{map_setting_to_type, NoiseModuleConf, SettingType};
use ir::{IrNode, IrSetting};
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum Dim {
X,
Y,
Z,
}
impl FromStr for Dim {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"X" | "x" => Ok(Dim::X),
"Y" | "y" => Ok(Dim::Y),
"Z" | "z" => Ok(Dim::Z),
_ => Err(format!("Can't convert supplied string to `Dim`: {}", s)),
}
}
}
/// Attempts to locate a child node among the children of a node and convert it into an internal definition.
pub fn build_child<T>(children: &[IrNode], child_type: &str) -> Result<T, T::Error>
where
T: TryFrom<IrNode>,
T::Error: From<String>,
{
children
.iter()
.find(|child| child._type == child_type)
.map_or(
Err(format!("No child of type `\"{}\" found for node!", child_type).into()),
|child| child.clone().try_into(),
)
}
|
pub fn find_setting_by_name(name: &str, settings: &[IrSetting]) -> Result<String, String> {
Ok(settings
.iter()
.find(|&&IrSetting { ref key,.. }| key == name)
.ok_or(String::from(
"No `moduleType` setting provided to node of type `noiseModule`!",
))?.value
.clone())
}
/// Attempts to find the setting with the supplied key in the settings slice and parse its value into a `T`.
pub fn convert_setting<T>(key: &str, settings: &[IrSetting]) -> Result<T, String>
where
T: FromStr,
{
    let raw_val = find_setting_by_name(key, settings)?;
raw_val
.parse()
.map_err(|_| format!("Unable to convert value from string: {}", raw_val))
}
fn build_noise_module_conf(
setting_type: SettingType,
settings: &[IrSetting],
) -> Result<NoiseModuleConf, String> {
Ok(match setting_type {
SettingType::MultiFractal => NoiseModuleConf::MultiFractal {
frequency: convert_setting("frequency", settings)?,
octaves: convert_setting("octaves", settings)?,
lacunarity: convert_setting("lacunarity", settings)?,
persistence: convert_setting("persistence", settings)?,
},
SettingType::Seedable => NoiseModuleConf::Seedable {
seed: convert_setting("seed", settings)?,
},
SettingType::Worley => NoiseModuleConf::Worley {
displacement: convert_setting("displacement", settings)?,
range_function: convert_setting("rangeFunction", settings)?,
range_function_enabled: convert_setting("enableRange", settings)?,
worley_frequency: convert_setting("worleyFrequency", settings)?,
},
SettingType::Constant => NoiseModuleConf::Constant {
constant: convert_setting("constant", settings)?,
},
SettingType::RidgedMulti => NoiseModuleConf::RidgedMulti {
attenuation: convert_setting("attenuation", settings)?,
},
SettingType::MasterConf => NoiseModuleConf::MasterConf {
speed: convert_setting("speed", settings)?,
zoom: convert_setting("zoom", settings)?,
},
})
}
/// Converts the array of settings provided to a noise module into an array of `NoiseModuleConf`s that can be used to
/// configure the noise module.
pub fn build_noise_module_settings(
settings: Vec<IrSetting>,
) -> Result<Vec<NoiseModuleConf>, String> {
// collection to hold partially matched settings as we iterate through the list.
let mut matched_settings: HashMap<SettingType, Vec<IrSetting>> = HashMap::new();
// loop through the settings and group together those that are of the same type
for setting in settings {
let setting_type: SettingType = match map_setting_to_type(&setting.key) {
Err(Some(err)) => return Err(err),
Err(None) => {
continue;
}
Ok(setting_type) => setting_type,
};
// create a new entry if no entry exists or add to existing list if one does
matched_settings
.entry(setting_type)
            .or_insert_with(|| Vec::with_capacity(1))
.push(setting);
}
// map the `HashMap`'s values into `NoiseModuleConf`s
let setting_count = matched_settings.len();
matched_settings
.into_iter()
.map(|(setting_type, settings)| build_noise_module_conf(setting_type, &settings))
.fold_results(Vec::with_capacity(setting_count), |mut acc, item| {
acc.push(item);
acc
})
}
/// Given a definition string, produces an entirely new composition tree from scratch.
pub fn build_tree_from_def(def: &str) -> Result<(ColorFunction, CompositionTree), String> {
// attempt to parse the provided IR definition into an `IrNode`
let ir_root_node_def: IrNode = serde_json::from_str::<IrNode>(def)
.map_err(|_| "Error while parsing the provided definition string!".to_string())?;
// find the global conf node in the IR tree and build it into a `MasterConf`.
    // also pull off the color scheme string and build it into a `ColorFunction`.
let (global_conf, color_fn): (MasterConf, ColorFunction) = {
let ir_global_conf = ir_root_node_def
.children
.iter()
.find(|node| node._type.as_str() == "globalConf")
.ok_or(String::from(
"Supplied definition string doesn't contain a `globalConf` node!",
))?;
let global_conf = ir_global_conf.clone().try_into().map_err(|err| {
format!(
"Unable to convert IR global conf into `GlobalConf`: {}",
err
)
})?;
let color_fn_string = find_setting_by_name("colorFunction", &ir_global_conf.settings)
.map_err(|_| {
String::from("No `colorFunction` setting included in provided `globalConf` node!")
})?;
let color_fn = ColorFunction::from_str(&color_fn_string)?;
(global_conf, color_fn)
};
// and then convert that into a `CompositionTreeNodeDefinition`
let root_node_def: CompositionTreeNodeDefinition = ir_root_node_def.try_into()?;
// build the definition into a proper `CompositionTreeNode`.
let root_node: CompositionTreeNode = root_node_def.into();
// create the full `CompositionTree` from the root node and the global configuration
Ok((
color_fn,
CompositionTree {
root_node,
global_conf,
},
))
}
|
/// Searches through a slice of `IrSetting`s provided to a node and attempts to find the setting with the supplied name.
|
random_line_split
|
main.rs
|
extern mod extra;
use std::io::buffered::BufferedStream;
use std::io::net::addrinfo::get_host_addresses;
use std::io::net::ip::{IpAddr, SocketAddr};
use std::io::net::tcp::TcpStream;
use std::os::args;
use extra::json;
use extra::json::{Json, ToJson};
use extra::treemap::TreeMap;
trait Protocol {
fn msg_type(&self) -> ~str;
fn json_data(&self) -> Json;
}
struct
|
{
msgType: ~str,
data: Json
}
impl Protocol for Msg {
fn msg_type(&self) -> ~str { self.msgType.clone() }
fn json_data(&self) -> Json { self.data.clone() }
}
struct JoinMsg {
name: ~str,
key: ~str
}
impl Protocol for JoinMsg {
fn msg_type(&self) -> ~str { ~"join" }
fn json_data(&self) -> Json {
let mut m = TreeMap::new();
m.insert(~"name", self.name.to_json());
m.insert(~"key", self.key.to_json());
return json::Object(~m);
}
}
struct ThrottleMsg {
value: f64
}
impl Protocol for ThrottleMsg {
fn msg_type(&self) -> ~str { ~"throttle" }
fn json_data(&self) -> Json { json::Number(self.value) }
}
fn write_msg<T: Protocol>(msg: &T, stream: &mut BufferedStream<TcpStream>) {
let mut json = TreeMap::new();
json.insert(~"msgType", msg.msg_type().to_json());
json.insert(~"data", msg.json_data());
write_json(&json::Object(~json), stream);
}
fn write_json(json: &Json, stream: &mut BufferedStream<TcpStream>) {
json.to_writer(stream);
stream.write_char('\n');
stream.flush();
}
fn parse_msg(json: &~json::Object) -> Option<Msg> {
match json.find(&~"msgType") {
Some(&json::String(ref msgType)) => {
let data = json.find(&~"data").unwrap_or(&json::Null);
Some(Msg {
msgType: msgType.clone(),
data: data.clone()
})
}
_ => None
}
}
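// Added sketch: frames on the wire are single-line JSON objects such as
// {"msgType":"ping","data":null}; after `json::from_str`, `parse_msg` lifts
// the object into a `Msg`.
fn example_parse() {
    match json::from_str("{\"msgType\":\"ping\",\"data\":null}") {
        Ok(json::Object(ref v)) => {
            let _msg = parse_msg(v); // Some(Msg { msgType: ~"ping", data: Null })
        }
        _ => ()
    }
}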
fn handle_msg(msg: ~Msg, stream: &mut BufferedStream<TcpStream>) {
match msg.msgType {
~"carPositions" =>
write_msg(&ThrottleMsg {
value: 0.5
}, stream),
_ => {
match msg.msgType {
~"join" => println("Joined"),
~"gameInit" => println("Race init"),
~"raceEnd" => println("Race end"),
~"raceStart" => println("Race start"),
_ => println!("Got {:s}", msg.msgType)
}
write_msg(&Msg {
msgType: ~"ping",
data: json::Null
}, stream);
}
}
}
fn start(config: Config) {
let Config { server, name, key } = config;
println!("Attempting to connect to {:s}", server.to_str());
let mut stream = BufferedStream::new(
TcpStream::connect(server).expect("Failed to connect"));
println("Connected");
write_msg(&JoinMsg {
name: name,
key: key
}, &mut stream);
loop {
match stream.read_line() {
None => break,
Some(line) => match json::from_str(line) {
Ok(json::Object(ref v)) => {
match parse_msg(v) {
None => println("Invalid JSON data"),
Some(msg) => handle_msg(~msg, &mut stream)
}
},
Ok(_) => println("Invalid JSON data: expected an object"),
Err(msg) => println(msg.to_str())
}
}
}
println("Disconnected")
}
struct Config {
server: SocketAddr,
name: ~str,
key: ~str
}
fn resolve_first_ip(host: &str) -> Option<IpAddr> {
match get_host_addresses(host) {
Some([ip,..]) => Some(ip),
_ => None
}
}
fn read_config() -> Option<Config> {
let args = args();
match args {
[_, host, port_str, name, key] => {
let ip = resolve_first_ip(host).expect("Could not resolve host");
let port = from_str::<u16>(port_str).expect("Invalid port number");
return Some(Config {
server: SocketAddr { ip: ip, port: port },
name: name,
key: key
});
},
_ => None
}
}
fn main() {
match read_config() {
None => println("Usage:./run <host> <port> <botname> <botkey>"),
Some(config) => start(config)
}
}
|
Msg
|
identifier_name
|
main.rs
|
extern mod extra;
use std::io::buffered::BufferedStream;
use std::io::net::addrinfo::get_host_addresses;
use std::io::net::ip::{IpAddr, SocketAddr};
use std::io::net::tcp::TcpStream;
use std::os::args;
use extra::json;
use extra::json::{Json, ToJson};
use extra::treemap::TreeMap;
trait Protocol {
fn msg_type(&self) -> ~str;
fn json_data(&self) -> Json;
}
struct Msg {
msgType: ~str,
data: Json
}
impl Protocol for Msg {
fn msg_type(&self) -> ~str { self.msgType.clone() }
fn json_data(&self) -> Json { self.data.clone() }
}
struct JoinMsg {
name: ~str,
key: ~str
}
impl Protocol for JoinMsg {
fn msg_type(&self) -> ~str { ~"join" }
fn json_data(&self) -> Json {
let mut m = TreeMap::new();
m.insert(~"name", self.name.to_json());
m.insert(~"key", self.key.to_json());
return json::Object(~m);
}
}
struct ThrottleMsg {
value: f64
}
impl Protocol for ThrottleMsg {
fn msg_type(&self) -> ~str { ~"throttle" }
fn json_data(&self) -> Json { json::Number(self.value) }
}
fn write_msg<T: Protocol>(msg: &T, stream: &mut BufferedStream<TcpStream>) {
let mut json = TreeMap::new();
json.insert(~"msgType", msg.msg_type().to_json());
json.insert(~"data", msg.json_data());
write_json(&json::Object(~json), stream);
}
fn write_json(json: &Json, stream: &mut BufferedStream<TcpStream>) {
json.to_writer(stream);
stream.write_char('\n');
stream.flush();
}
fn parse_msg(json: &~json::Object) -> Option<Msg> {
match json.find(&~"msgType") {
Some(&json::String(ref msgType)) => {
let data = json.find(&~"data").unwrap_or(&json::Null);
Some(Msg {
msgType: msgType.clone(),
data: data.clone()
})
}
_ => None
}
}
fn handle_msg(msg: ~Msg, stream: &mut BufferedStream<TcpStream>) {
match msg.msgType {
~"carPositions" =>
write_msg(&ThrottleMsg {
value: 0.5
}, stream),
_ => {
match msg.msgType {
~"join" => println("Joined"),
~"gameInit" => println("Race init"),
~"raceEnd" => println("Race end"),
~"raceStart" => println("Race start"),
_ => println!("Got {:s}", msg.msgType)
}
write_msg(&Msg {
msgType: ~"ping",
data: json::Null
}, stream);
}
}
}
fn start(config: Config) {
let Config { server, name, key } = config;
println!("Attempting to connect to {:s}", server.to_str());
let mut stream = BufferedStream::new(
TcpStream::connect(server).expect("Failed to connect"));
println("Connected");
write_msg(&JoinMsg {
name: name,
key: key
}, &mut stream);
loop {
match stream.read_line() {
None => break,
Some(line) => match json::from_str(line) {
Ok(json::Object(ref v)) => {
match parse_msg(v) {
None => println("Invalid JSON data"),
Some(msg) => handle_msg(~msg, &mut stream)
}
},
Ok(_) => println("Invalid JSON data: expected an object"),
Err(msg) => println(msg.to_str())
}
}
}
println("Disconnected")
}
struct Config {
server: SocketAddr,
name: ~str,
key: ~str
}
fn resolve_first_ip(host: &str) -> Option<IpAddr> {
match get_host_addresses(host) {
Some([ip,..]) => Some(ip),
_ => None
}
}
fn read_config() -> Option<Config> {
let args = args();
match args {
[_, host, port_str, name, key] => {
let ip = resolve_first_ip(host).expect("Could not resolve host");
let port = from_str::<u16>(port_str).expect("Invalid port number");
return Some(Config {
server: SocketAddr { ip: ip, port: port },
name: name,
key: key
});
},
_ => None
|
fn main() {
match read_config() {
None => println("Usage:./run <host> <port> <botname> <botkey>"),
Some(config) => start(config)
}
}
|
}
}
|
random_line_split
|
main.rs
|
extern mod extra;
use std::io::buffered::BufferedStream;
use std::io::net::addrinfo::get_host_addresses;
use std::io::net::ip::{IpAddr, SocketAddr};
use std::io::net::tcp::TcpStream;
use std::os::args;
use extra::json;
use extra::json::{Json, ToJson};
use extra::treemap::TreeMap;
trait Protocol {
fn msg_type(&self) -> ~str;
fn json_data(&self) -> Json;
}
struct Msg {
msgType: ~str,
data: Json
}
impl Protocol for Msg {
fn msg_type(&self) -> ~str { self.msgType.clone() }
fn json_data(&self) -> Json { self.data.clone() }
}
struct JoinMsg {
name: ~str,
key: ~str
}
impl Protocol for JoinMsg {
fn msg_type(&self) -> ~str { ~"join" }
fn json_data(&self) -> Json {
let mut m = TreeMap::new();
m.insert(~"name", self.name.to_json());
m.insert(~"key", self.key.to_json());
return json::Object(~m);
}
}
struct ThrottleMsg {
value: f64
}
impl Protocol for ThrottleMsg {
fn msg_type(&self) -> ~str { ~"throttle" }
fn json_data(&self) -> Json { json::Number(self.value) }
}
fn write_msg<T: Protocol>(msg: &T, stream: &mut BufferedStream<TcpStream>) {
let mut json = TreeMap::new();
json.insert(~"msgType", msg.msg_type().to_json());
json.insert(~"data", msg.json_data());
write_json(&json::Object(~json), stream);
}
fn write_json(json: &Json, stream: &mut BufferedStream<TcpStream>) {
json.to_writer(stream);
stream.write_char('\n');
stream.flush();
}
fn parse_msg(json: &~json::Object) -> Option<Msg> {
match json.find(&~"msgType") {
Some(&json::String(ref msgType)) =>
|
_ => None
}
}
fn handle_msg(msg: ~Msg, stream: &mut BufferedStream<TcpStream>) {
match msg.msgType {
~"carPositions" =>
write_msg(&ThrottleMsg {
value: 0.5
}, stream),
_ => {
match msg.msgType {
~"join" => println("Joined"),
~"gameInit" => println("Race init"),
~"raceEnd" => println("Race end"),
~"raceStart" => println("Race start"),
_ => println!("Got {:s}", msg.msgType)
}
write_msg(&Msg {
msgType: ~"ping",
data: json::Null
}, stream);
}
}
}
fn start(config: Config) {
let Config { server, name, key } = config;
println!("Attempting to connect to {:s}", server.to_str());
let mut stream = BufferedStream::new(
TcpStream::connect(server).expect("Failed to connect"));
println("Connected");
write_msg(&JoinMsg {
name: name,
key: key
}, &mut stream);
loop {
match stream.read_line() {
None => break,
Some(line) => match json::from_str(line) {
Ok(json::Object(ref v)) => {
match parse_msg(v) {
None => println("Invalid JSON data"),
Some(msg) => handle_msg(~msg, &mut stream)
}
},
Ok(_) => println("Invalid JSON data: expected an object"),
Err(msg) => println(msg.to_str())
}
}
}
println("Disconnected")
}
struct Config {
server: SocketAddr,
name: ~str,
key: ~str
}
fn resolve_first_ip(host: &str) -> Option<IpAddr> {
match get_host_addresses(host) {
Some([ip,..]) => Some(ip),
_ => None
}
}
fn read_config() -> Option<Config> {
let args = args();
match args {
[_, host, port_str, name, key] => {
let ip = resolve_first_ip(host).expect("Could not resolve host");
let port = from_str::<u16>(port_str).expect("Invalid port number");
return Some(Config {
server: SocketAddr { ip: ip, port: port },
name: name,
key: key
});
},
_ => None
}
}
fn main() {
match read_config() {
None => println("Usage:./run <host> <port> <botname> <botkey>"),
Some(config) => start(config)
}
}
|
{
let data = json.find(&~"data").unwrap_or(&json::Null);
Some(Msg {
msgType: msgType.clone(),
data: data.clone()
})
}
|
conditional_block
|
main.rs
|
extern mod extra;
use std::io::buffered::BufferedStream;
use std::io::net::addrinfo::get_host_addresses;
use std::io::net::ip::{IpAddr, SocketAddr};
use std::io::net::tcp::TcpStream;
use std::os::args;
use extra::json;
use extra::json::{Json, ToJson};
use extra::treemap::TreeMap;
trait Protocol {
fn msg_type(&self) -> ~str;
fn json_data(&self) -> Json;
}
struct Msg {
msgType: ~str,
data: Json
}
impl Protocol for Msg {
fn msg_type(&self) -> ~str { self.msgType.clone() }
fn json_data(&self) -> Json { self.data.clone() }
}
struct JoinMsg {
name: ~str,
key: ~str
}
impl Protocol for JoinMsg {
fn msg_type(&self) -> ~str { ~"join" }
fn json_data(&self) -> Json {
let mut m = TreeMap::new();
m.insert(~"name", self.name.to_json());
m.insert(~"key", self.key.to_json());
return json::Object(~m);
}
}
struct ThrottleMsg {
value: f64
}
impl Protocol for ThrottleMsg {
fn msg_type(&self) -> ~str
|
fn json_data(&self) -> Json { json::Number(self.value) }
}
fn write_msg<T: Protocol>(msg: &T, stream: &mut BufferedStream<TcpStream>) {
let mut json = TreeMap::new();
json.insert(~"msgType", msg.msg_type().to_json());
json.insert(~"data", msg.json_data());
write_json(&json::Object(~json), stream);
}
fn write_json(json: &Json, stream: &mut BufferedStream<TcpStream>) {
json.to_writer(stream);
stream.write_char('\n');
stream.flush();
}
fn parse_msg(json: &~json::Object) -> Option<Msg> {
match json.find(&~"msgType") {
Some(&json::String(ref msgType)) => {
let data = json.find(&~"data").unwrap_or(&json::Null);
Some(Msg {
msgType: msgType.clone(),
data: data.clone()
})
}
_ => None
}
}
fn handle_msg(msg: ~Msg, stream: &mut BufferedStream<TcpStream>) {
match msg.msgType {
~"carPositions" =>
write_msg(&ThrottleMsg {
value: 0.5
}, stream),
_ => {
match msg.msgType {
~"join" => println("Joined"),
~"gameInit" => println("Race init"),
~"raceEnd" => println("Race end"),
~"raceStart" => println("Race start"),
_ => println!("Got {:s}", msg.msgType)
}
write_msg(&Msg {
msgType: ~"ping",
data: json::Null
}, stream);
}
}
}
fn start(config: Config) {
let Config { server, name, key } = config;
println!("Attempting to connect to {:s}", server.to_str());
let mut stream = BufferedStream::new(
TcpStream::connect(server).expect("Failed to connect"));
println("Connected");
write_msg(&JoinMsg {
name: name,
key: key
}, &mut stream);
loop {
match stream.read_line() {
None => break,
Some(line) => match json::from_str(line) {
Ok(json::Object(ref v)) => {
match parse_msg(v) {
None => println("Invalid JSON data"),
Some(msg) => handle_msg(~msg, &mut stream)
}
},
Ok(_) => println("Invalid JSON data: expected an object"),
Err(msg) => println(msg.to_str())
}
}
}
println("Disconnected")
}
struct Config {
server: SocketAddr,
name: ~str,
key: ~str
}
fn resolve_first_ip(host: &str) -> Option<IpAddr> {
match get_host_addresses(host) {
Some([ip,..]) => Some(ip),
_ => None
}
}
fn read_config() -> Option<Config> {
let args = args();
match args {
[_, host, port_str, name, key] => {
let ip = resolve_first_ip(host).expect("Could not resolve host");
let port = from_str::<u16>(port_str).expect("Invalid port number");
return Some(Config {
server: SocketAddr { ip: ip, port: port },
name: name,
key: key
});
},
_ => None
}
}
fn main() {
match read_config() {
None => println("Usage:./run <host> <port> <botname> <botkey>"),
Some(config) => start(config)
}
}
|
{ ~"throttle" }
|
identifier_body
|
viewer.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
interfaces::{LeftScreen, RightScreen},
tui::{
text_builder::TextBuilder,
tui_interface::{TUIInterface, TUIOutput},
},
};
use tui::{
style::{Color, Style},
text::Spans,
};
#[derive(Debug, Clone)]
pub struct Viewer<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>> {
bytecode_text: Vec<String>,
source_viewer: SourceViewer,
bytecode_viewer: BytecodeViewer,
}
impl<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>>
Viewer<BytecodeViewer, SourceViewer>
{
pub fn new(source_viewer: SourceViewer, bytecode_viewer: BytecodeViewer) -> Self
|
}
impl<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>> TUIInterface
for Viewer<BytecodeViewer, SourceViewer>
{
const LEFT_TITLE: &'static str = "Bytecode";
const RIGHT_TITLE: &'static str = "Source Code";
fn on_redraw(&mut self, line_number: u16, column_number: u16) -> TUIOutput {
// Highlight style
let style: Style = Style::default().bg(Color::Red);
let report = match self
.bytecode_viewer
.get_source_index_for_line(line_number as usize, column_number as usize)
{
None => {
let mut builder = TextBuilder::new();
builder.add(self.source_viewer.backing_string(), Style::default());
builder.finish()
}
Some(info) => {
let source_context = self.source_viewer.source_for_code_location(info).unwrap();
let mut builder = TextBuilder::new();
builder.add(source_context.left, Style::default());
builder.add(source_context.highlight, style);
builder.add(source_context.remainder, Style::default());
builder.finish()
}
};
TUIOutput {
left_screen: self
.bytecode_text
.iter()
.map(|x| Spans::from(x.clone()))
.collect(),
right_screen: report,
}
}
fn bound_line(&self, line_number: u16) -> u16 {
std::cmp::min(
line_number,
self.bytecode_text.len().checked_sub(1).unwrap() as u16,
)
}
fn bound_column(&self, line_number: u16, column_number: u16) -> u16 {
std::cmp::min(
column_number,
self.bytecode_text[line_number as usize].len() as u16,
)
}
}
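// Added sketch of the clamping used by `bound_line` above: any cursor line
// past the end of the bytecode text is pinned to the last valid index.
fn example_clamp(requested: u16, line_count: usize) -> u16 {
    std::cmp::min(requested, line_count.checked_sub(1).unwrap() as u16)
}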
|
{
Self {
bytecode_text: bytecode_viewer
.backing_string()
.split('\n')
.map(|x| x.to_string())
.collect(),
source_viewer,
bytecode_viewer,
}
}
|
identifier_body
|
viewer.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
interfaces::{LeftScreen, RightScreen},
tui::{
text_builder::TextBuilder,
tui_interface::{TUIInterface, TUIOutput},
},
};
use tui::{
style::{Color, Style},
text::Spans,
};
#[derive(Debug, Clone)]
pub struct Viewer<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>> {
bytecode_text: Vec<String>,
source_viewer: SourceViewer,
bytecode_viewer: BytecodeViewer,
}
impl<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>>
Viewer<BytecodeViewer, SourceViewer>
{
pub fn new(source_viewer: SourceViewer, bytecode_viewer: BytecodeViewer) -> Self {
Self {
bytecode_text: bytecode_viewer
.backing_string()
.split('\n')
.map(|x| x.to_string())
.collect(),
source_viewer,
bytecode_viewer,
}
}
}
impl<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>> TUIInterface
for Viewer<BytecodeViewer, SourceViewer>
{
const LEFT_TITLE: &'static str = "Bytecode";
const RIGHT_TITLE: &'static str = "Source Code";
fn on_redraw(&mut self, line_number: u16, column_number: u16) -> TUIOutput {
// Highlight style
let style: Style = Style::default().bg(Color::Red);
let report = match self
.bytecode_viewer
|
{
None => {
let mut builder = TextBuilder::new();
builder.add(self.source_viewer.backing_string(), Style::default());
builder.finish()
}
Some(info) => {
let source_context = self.source_viewer.source_for_code_location(info).unwrap();
let mut builder = TextBuilder::new();
builder.add(source_context.left, Style::default());
builder.add(source_context.highlight, style);
builder.add(source_context.remainder, Style::default());
builder.finish()
}
};
TUIOutput {
left_screen: self
.bytecode_text
.iter()
.map(|x| Spans::from(x.clone()))
.collect(),
right_screen: report,
}
}
fn bound_line(&self, line_number: u16) -> u16 {
std::cmp::min(
line_number,
self.bytecode_text.len().checked_sub(1).unwrap() as u16,
)
}
fn bound_column(&self, line_number: u16, column_number: u16) -> u16 {
std::cmp::min(
column_number,
self.bytecode_text[line_number as usize].len() as u16,
)
}
}
|
.get_source_index_for_line(line_number as usize, column_number as usize)
|
random_line_split
|
viewer.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
interfaces::{LeftScreen, RightScreen},
tui::{
text_builder::TextBuilder,
tui_interface::{TUIInterface, TUIOutput},
},
};
use tui::{
style::{Color, Style},
text::Spans,
};
#[derive(Debug, Clone)]
pub struct Viewer<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>> {
bytecode_text: Vec<String>,
source_viewer: SourceViewer,
bytecode_viewer: BytecodeViewer,
}
impl<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>>
Viewer<BytecodeViewer, SourceViewer>
{
pub fn new(source_viewer: SourceViewer, bytecode_viewer: BytecodeViewer) -> Self {
Self {
bytecode_text: bytecode_viewer
.backing_string()
.split('\n')
.map(|x| x.to_string())
.collect(),
source_viewer,
bytecode_viewer,
}
}
}
impl<BytecodeViewer: LeftScreen, SourceViewer: RightScreen<BytecodeViewer>> TUIInterface
for Viewer<BytecodeViewer, SourceViewer>
{
const LEFT_TITLE: &'static str = "Bytecode";
const RIGHT_TITLE: &'static str = "Source Code";
fn on_redraw(&mut self, line_number: u16, column_number: u16) -> TUIOutput {
// Highlight style
let style: Style = Style::default().bg(Color::Red);
let report = match self
.bytecode_viewer
.get_source_index_for_line(line_number as usize, column_number as usize)
{
None => {
let mut builder = TextBuilder::new();
builder.add(self.source_viewer.backing_string(), Style::default());
builder.finish()
}
Some(info) => {
let source_context = self.source_viewer.source_for_code_location(info).unwrap();
let mut builder = TextBuilder::new();
builder.add(source_context.left, Style::default());
builder.add(source_context.highlight, style);
builder.add(source_context.remainder, Style::default());
builder.finish()
}
};
TUIOutput {
left_screen: self
.bytecode_text
.iter()
.map(|x| Spans::from(x.clone()))
.collect(),
right_screen: report,
}
}
fn
|
(&self, line_number: u16) -> u16 {
std::cmp::min(
line_number,
self.bytecode_text.len().checked_sub(1).unwrap() as u16,
)
}
fn bound_column(&self, line_number: u16, column_number: u16) -> u16 {
std::cmp::min(
column_number,
self.bytecode_text[line_number as usize].len() as u16,
)
}
}
|
bound_line
|
identifier_name
|
lib.rs
|
//! Rustic bindings to [libnotify](https://developer.gnome.org/libnotify/)
//!
//! ```rust
//! extern crate libnotify;
//!
//! fn main() {
//! // Init libnotify
//! libnotify::init("myapp").unwrap();
//! // Create a new notification (doesn't show it yet)
//! let n = libnotify::Notification::new("Summary",
//! Some("Optional Body"),
//! None);
//! // Show the notification
//! n.show().unwrap();
//! // Update the existent notification
//! n.update("I am another notification", None, None).unwrap();
//! // Show the updated notification
//! n.show().unwrap();
//! // We are done, deinit
//! libnotify::uninit();
//! }
//!
//! ```
#![warn(missing_docs)]
extern crate gdk_pixbuf;
#[macro_use]
extern crate glib;
extern crate glib_sys as glib_ffi;
extern crate gobject_sys as gobject_ffi;
extern crate libnotify_sys as ffi;
pub use enums::*;
pub use functions::*;
pub use notification::*;
macro_rules! assert_initialized_libnotify {
() => {
use functions::*;
        if !is_initted() {
panic!("Notify system not initialized, invalid call of function");
}
}
}
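// Added sketch: API functions in this crate can guard themselves with the
// macro above, panicking if `libnotify::init` was never called.
fn example_guarded() {
    assert_initialized_libnotify!();
    // ...safe to touch libnotify state from here on...
}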
|
mod enums;
mod functions;
mod notification;
|
random_line_split
|
|
eq.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{MetaItem, item, Expr};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
pub fn expand_deriving_eq(cx: @ExtCtxt,
span: Span,
mitem: @MetaItem,
in_items: ~[@item]) -> ~[@item] {
    // structures are equal if all fields are equal, and not equal if
    // any fields are not equal or if the enum variants are different
fn cs_eq(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
cs_and(|cx, span, _, _| cx.expr_bool(span, false),
cx, span, substr)
}
fn cs_ne(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
cs_or(|cx, span, _, _| cx.expr_bool(span, true),
cx, span, substr)
}
macro_rules! md (
($name:expr, $f:ident) => {
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: ~[borrowed_self()],
ret_ty: Literal(Path::new(~["bool"])),
inline: true,
const_nonmatching: true,
combine_substructure: $f
}
|
}
);
let trait_def = TraitDef {
cx: cx, span: span,
path: Path::new(~["std", "cmp", "Eq"]),
additional_bounds: ~[],
generics: LifetimeBounds::empty(),
methods: ~[
md!("eq", cs_eq),
md!("ne", cs_ne)
]
};
trait_def.expand(mitem, in_items)
}
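// Added sketch (2013-era syntax, matching this file): user code opts in with
// the deriving attribute, and this glue expands field-by-field comparisons:
//
//     #[deriving(Eq)]
//     struct Point { x: int, y: int }
//
// yielding `eq`/`ne` methods built from the cs_eq/cs_ne combinators above.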
|
random_line_split
|
|
lib.rs
|
use binary_podman::{BinaryTrait, PodmanBinary};
use buildpack::{
eyre::Report,
libcnb::{
build::{BuildContext, BuildResult, BuildResultBuilder},
detect::{DetectContext, DetectResult, DetectResultBuilder},
generic::{GenericMetadata, GenericPlatform},
Buildpack, Error, Result,
},
tag_for_path, BuildpackTrait,
};
pub struct DockerfileBuildpack;
impl Buildpack for DockerfileBuildpack {
type Platform = GenericPlatform;
type Metadata = GenericMetadata;
type Error = Report;
fn detect(&self, _context: DetectContext<Self>) -> Result<DetectResult, Self::Error> {
if Self::any_exist(&["Dockerfile", "Containerfile"]) {
DetectResultBuilder::pass().build()
} else {
DetectResultBuilder::fail().build()
}
}
fn build(&self, context: BuildContext<Self>) -> Result<BuildResult, Self::Error> {
let tag = tag_for_path(&context.app_dir);
PodmanBinary {}
.ensure_version_sync(">=1")
.map_err(Error::BuildpackError)?
.run_sync(&["build", "--tag", &tag, "."])
.map_err(Error::BuildpackError)?;
BuildResultBuilder::new().build()
}
}
impl BuildpackTrait for DockerfileBuildpack {
fn
|
() -> &'static str {
include_str!("../buildpack.toml")
}
}
|
toml
|
identifier_name
|
lib.rs
|
use binary_podman::{BinaryTrait, PodmanBinary};
use buildpack::{
eyre::Report,
libcnb::{
build::{BuildContext, BuildResult, BuildResultBuilder},
detect::{DetectContext, DetectResult, DetectResultBuilder},
generic::{GenericMetadata, GenericPlatform},
Buildpack, Error, Result,
},
tag_for_path, BuildpackTrait,
|
type Platform = GenericPlatform;
type Metadata = GenericMetadata;
type Error = Report;
fn detect(&self, _context: DetectContext<Self>) -> Result<DetectResult, Self::Error> {
if Self::any_exist(&["Dockerfile", "Containerfile"]) {
DetectResultBuilder::pass().build()
} else {
DetectResultBuilder::fail().build()
}
}
fn build(&self, context: BuildContext<Self>) -> Result<BuildResult, Self::Error> {
let tag = tag_for_path(&context.app_dir);
PodmanBinary {}
.ensure_version_sync(">=1")
.map_err(Error::BuildpackError)?
.run_sync(&["build", "--tag", &tag, "."])
.map_err(Error::BuildpackError)?;
BuildResultBuilder::new().build()
}
}
impl BuildpackTrait for DockerfileBuildpack {
fn toml() -> &'static str {
include_str!("../buildpack.toml")
}
}
|
};
pub struct DockerfileBuildpack;
impl Buildpack for DockerfileBuildpack {
|
random_line_split
|
lib.rs
|
use binary_podman::{BinaryTrait, PodmanBinary};
use buildpack::{
eyre::Report,
libcnb::{
build::{BuildContext, BuildResult, BuildResultBuilder},
detect::{DetectContext, DetectResult, DetectResultBuilder},
generic::{GenericMetadata, GenericPlatform},
Buildpack, Error, Result,
},
tag_for_path, BuildpackTrait,
};
pub struct DockerfileBuildpack;
impl Buildpack for DockerfileBuildpack {
type Platform = GenericPlatform;
type Metadata = GenericMetadata;
type Error = Report;
fn detect(&self, _context: DetectContext<Self>) -> Result<DetectResult, Self::Error> {
if Self::any_exist(&["Dockerfile", "Containerfile"])
|
else {
DetectResultBuilder::fail().build()
}
}
fn build(&self, context: BuildContext<Self>) -> Result<BuildResult, Self::Error> {
let tag = tag_for_path(&context.app_dir);
PodmanBinary {}
.ensure_version_sync(">=1")
.map_err(Error::BuildpackError)?
.run_sync(&["build", "--tag", &tag, "."])
.map_err(Error::BuildpackError)?;
BuildResultBuilder::new().build()
}
}
impl BuildpackTrait for DockerfileBuildpack {
fn toml() -> &'static str {
include_str!("../buildpack.toml")
}
}
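// Added sketch (hypothetical helper mirroring `any_exist`): detection is just
// a file-existence probe over candidate names in the app directory.
fn example_any_exist(names: &[&str]) -> bool {
    names.iter().any(|n| std::path::Path::new(n).exists())
}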
|
{
DetectResultBuilder::pass().build()
}
|
conditional_block
|
lib.rs
|
use binary_podman::{BinaryTrait, PodmanBinary};
use buildpack::{
eyre::Report,
libcnb::{
build::{BuildContext, BuildResult, BuildResultBuilder},
detect::{DetectContext, DetectResult, DetectResultBuilder},
generic::{GenericMetadata, GenericPlatform},
Buildpack, Error, Result,
},
tag_for_path, BuildpackTrait,
};
pub struct DockerfileBuildpack;
impl Buildpack for DockerfileBuildpack {
type Platform = GenericPlatform;
type Metadata = GenericMetadata;
type Error = Report;
fn detect(&self, _context: DetectContext<Self>) -> Result<DetectResult, Self::Error> {
if Self::any_exist(&["Dockerfile", "Containerfile"]) {
DetectResultBuilder::pass().build()
} else {
DetectResultBuilder::fail().build()
}
}
fn build(&self, context: BuildContext<Self>) -> Result<BuildResult, Self::Error>
|
}
impl BuildpackTrait for DockerfileBuildpack {
fn toml() -> &'static str {
include_str!("../buildpack.toml")
}
}
|
{
let tag = tag_for_path(&context.app_dir);
PodmanBinary {}
.ensure_version_sync(">=1")
.map_err(Error::BuildpackError)?
.run_sync(&["build", "--tag", &tag, "."])
.map_err(Error::BuildpackError)?;
BuildResultBuilder::new().build()
}
|
identifier_body
|
standard.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Std lib global reexports.
pub use std::io;
pub use std::fs;
pub use std::str;
pub use std::fmt;
pub use std::cmp;
pub use std::ptr;
pub use std::mem;
pub use std::ops;
pub use std::slice;
pub use std::result;
pub use std::option;
pub use std::path::Path;
pub use std::str::{FromStr};
pub use std::io::{Read,Write};
pub use std::hash::{Hash, Hasher};
pub use std::error::Error as StdError;
|
pub use rustc_serialize::json::Json;
pub use rustc_serialize::base64::FromBase64;
pub use rustc_serialize::hex::{FromHex, FromHexError};
pub use heapsize::HeapSizeOf;
pub use itertools::Itertools;
pub use parking_lot::{Condvar, Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard};
|
pub use std::ops::*;
pub use std::cmp::*;
pub use std::sync::Arc;
pub use std::collections::*;
|
random_line_split
|
processing.rs
|
use chainstate::stacks::index::{
marf::MARF, storage::TrieFileStorage, Error as MARFError, MarfTrieId,
};
use core::INITIAL_MINING_BONUS_WINDOW;
use util::db::Error as DBError;
use crate::types::chainstate::{BurnchainHeaderHash, MARFValue, PoxId, SortitionId};
use crate::types::proof::TrieHash;
impl<'a> SortitionHandleTx<'a> {
/// Run a blockstack operation's "check()" method and return the result.
fn check_transaction(
&mut self,
burnchain: &Burnchain,
blockstack_op: &BlockstackOperationType,
reward_info: Option<&RewardSetInfo>,
) -> Result<(), BurnchainError>
|
BlockstackOperationType::UserBurnSupport(ref op) => {
op.check(burnchain, self).map_err(|e| {
warn!(
"REJECTED({}) user burn support {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::StackStx(ref op) => op.check().map_err(|e| {
warn!(
"REJECTED({}) stack stx op {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
}),
BlockstackOperationType::TransferStx(ref op) => op.check().map_err(|e| {
warn!(
"REJECTED({}) transfer stx op {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
}),
BlockstackOperationType::PreStx(_) => {
// no check() required for PreStx
Ok(())
}
}
}
/// Process all block's checked transactions
/// * make the burn distribution
/// * insert the ones that went into the burn distribution
/// * snapshot the block and run the sortition
/// * return the snapshot (and sortition results)
fn process_checked_block_ops(
&mut self,
burnchain: &Burnchain,
parent_snapshot: &BlockSnapshot,
block_header: &BurnchainBlockHeader,
this_block_ops: &Vec<BlockstackOperationType>,
missed_commits: &Vec<MissedBlockCommit>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
let this_block_height = block_header.block_height;
let this_block_hash = block_header.block_hash.clone();
// make the burn distribution, and in doing so, identify the user burns that we'll keep
let state_transition = BurnchainStateTransition::from_block_ops(self, burnchain, parent_snapshot, this_block_ops, missed_commits, burnchain.pox_constants.sunset_end)
.map_err(|e| {
error!("TRANSACTION ABORTED when converting {} blockstack operations in block {} ({}) to a burn distribution: {:?}", this_block_ops.len(), this_block_height, &this_block_hash, e);
e
})?;
let total_burn = state_transition
.accepted_ops
.iter()
.fold(Some(0u64), |acc, op| {
if let Some(acc) = acc {
let bf = match op {
BlockstackOperationType::LeaderBlockCommit(ref op) => op.burn_fee,
BlockstackOperationType::UserBurnSupport(ref op) => op.burn_fee,
_ => 0,
};
acc.checked_add(bf)
} else {
None
}
});
let txids = state_transition
.accepted_ops
.iter()
.map(|ref op| op.txid())
.collect();
let mut next_pox = parent_pox;
if let Some(ref next_pox_info) = next_pox_info {
if next_pox_info.is_reward_info_known() {
debug!(
"Begin reward-cycle sortition with present anchor block={:?}",
&next_pox_info.selected_anchor_block()
);
next_pox.extend_with_present_block();
} else {
info!(
"Begin reward-cycle sortition with absent anchor block={:?}",
&next_pox_info.selected_anchor_block()
);
next_pox.extend_with_not_present_block();
}
};
let next_sortition_id = SortitionId::new(&this_block_hash, &next_pox);
// do the cryptographic sortition and pick the next winning block.
let mut snapshot = BlockSnapshot::make_snapshot(
self,
burnchain,
&next_sortition_id,
&next_pox,
parent_snapshot,
block_header,
&state_transition.burn_dist,
&txids,
total_burn,
initial_mining_bonus_ustx,
)
.map_err(|e| {
error!(
"TRANSACTION ABORTED when taking snapshot at block {} ({}): {:?}",
this_block_height, &this_block_hash, e
);
BurnchainError::DBError(e)
})?;
// was this snapshot the first with mining?
// compute the initial block rewards.
let initialize_bonus = if snapshot.sortition && parent_snapshot.total_burn == 0 {
let blocks_without_winners =
snapshot.block_height - burnchain.initial_reward_start_block;
let mut total_reward = 0;
for burn_block_height in burnchain.initial_reward_start_block..snapshot.block_height {
total_reward += StacksChainState::get_coinbase_reward(
burn_block_height,
self.context.first_block_height,
);
}
let per_block = total_reward / INITIAL_MINING_BONUS_WINDOW as u128;
info!("First sortition winner chosen";
"blocks_without_winners" => blocks_without_winners,
"initial_mining_per_block_reward" => per_block,
"initial_mining_bonus_block_window" => INITIAL_MINING_BONUS_WINDOW);
assert_eq!(snapshot.accumulated_coinbase_ustx, 0,
"First block should not have receive additional coinbase before initial reward calculation");
snapshot.accumulated_coinbase_ustx = per_block;
Some(InitialMiningBonus {
total_reward,
per_block,
})
} else {
None
};
// store the snapshot
let index_root = self.append_chain_tip_snapshot(
parent_snapshot,
&snapshot,
&state_transition.accepted_ops,
missed_commits,
next_pox_info,
reward_info,
initialize_bonus,
)?;
snapshot.index_root = index_root;
debug!("OPS-HASH({}): {}", this_block_height, &snapshot.ops_hash);
debug!(
"INDEX-ROOT({}): {}",
this_block_height, &snapshot.index_root
);
debug!(
"SORTITION-HASH({}): {}",
this_block_height, &snapshot.sortition_hash
);
debug!(
"CONSENSUS({}): {}",
this_block_height, &snapshot.consensus_hash
);
Ok((snapshot, state_transition))
}
/// Check and then commit all blockstack operations to our chainstate.
/// * pull out all the transactions that are blockstack ops
/// * select the ones that are _valid_
/// * do a cryptographic sortition to select the next Stacks block
/// * commit all valid transactions
/// * commit the results of the sortition
/// Returns the BlockSnapshot created from this block.
pub fn process_block_ops(
&mut self,
burnchain: &Burnchain,
parent_snapshot: &BlockSnapshot,
block_header: &BurnchainBlockHeader,
mut blockstack_txs: Vec<BlockstackOperationType>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_set_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
debug!(
"BEGIN({}) block ({},{}) with sortition_id: {}",
block_header.block_height,
block_header.block_hash,
block_header.parent_block_hash,
&self.context.chain_tip
);
debug!(
"Append {} operation(s) from block {} {}",
blockstack_txs.len(),
block_header.block_height,
&block_header.block_hash
);
blockstack_txs.sort_by(|ref a, ref b| a.vtxindex().partial_cmp(&b.vtxindex()).unwrap());
// check each transaction, and filter out only the ones that are valid
debug!(
"Check Blockstack transactions from sortition_id: {}",
&self.context.chain_tip
);
let mut missed_block_commits = vec![];
// classify and check each transaction
blockstack_txs.retain(|blockstack_op| {
match self.check_transaction(burnchain, blockstack_op, reward_set_info) {
Ok(_) => true,
Err(BurnchainError::OpError(OpError::MissedBlockCommit(missed_op))) => {
missed_block_commits.push(missed_op);
false
}
Err(_) => false,
}
});
// block-wide check: no duplicate keys registered
let block_ops = Burnchain::filter_block_VRF_dups(blockstack_txs);
assert!(Burnchain::ops_are_sorted(&block_ops));
// process them
let res = self
.process_checked_block_ops(
burnchain,
parent_snapshot,
block_header,
&block_ops,
&missed_block_commits,
next_pox_info,
parent_pox,
reward_set_info,
initial_mining_bonus_ustx,
)
.map_err(|e| {
error!(
"TRANSACTION ABORTED when snapshotting block {} ({}): {:?}",
block_header.block_height, &block_header.block_hash, e
);
e
})?;
Ok(res)
}
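    // Added sketch of the filtering pattern used above: `retain` keeps valid
    // items while side-collecting one specific failure case (illustrative
    // types only, not part of the original impl).
    fn example_retain() {
        let mut ops = vec![1i64, -2, 3, -4];
        let mut rejected = Vec::new();
        ops.retain(|&op| if op >= 0 { true } else { rejected.push(op); false });
        assert_eq!(ops, vec![1, 3]);
        assert_eq!(rejected, vec![-2, -4]);
    }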
/// Given the extracted txs, and a block header, go process them into the next
/// snapshot. Unlike process_block_ops, this method applies safety checks against the given
/// list of blockstack transactions.
pub fn process_block_txs(
&mut self,
parent_snapshot: &BlockSnapshot,
this_block_header: &BurnchainBlockHeader,
burnchain: &Burnchain,
blockstack_txs: Vec<BlockstackOperationType>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_set_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
assert_eq!(
parent_snapshot.block_height + 1,
this_block_header.block_height
);
assert_eq!(
parent_snapshot.burn_header_hash,
this_block_header.parent_block_hash
);
let new_snapshot = self.process_block_ops(
burnchain,
&parent_snapshot,
&this_block_header,
blockstack_txs,
next_pox_info,
parent_pox,
reward_set_info,
initial_mining_bonus_ustx,
)?;
Ok(new_snapshot)
}
}
#[cfg(test)]
mod tests {
use burnchains::bitcoin::{address::BitcoinAddress, BitcoinNetworkType};
use burnchains::*;
use chainstate::burn::db::sortdb::{tests::test_append_snapshot, SortitionDB};
use chainstate::burn::operations::{
leader_block_commit::BURN_BLOCK_MINED_AT_MODULUS, LeaderBlockCommitOp, LeaderKeyRegisterOp,
};
use chainstate::burn::*;
use chainstate::stacks::StacksPublicKey;
use core::MICROSTACKS_PER_STACKS;
use util::{hash::hex_bytes, vrf::VRFPublicKey};
use crate::types::chainstate::{BlockHeaderHash, StacksAddress, VRFSeed};
use super::*;
#[test]
fn test_initial_block_reward() {
let first_burn_hash = BurnchainHeaderHash([0; 32]);
let leader_key = LeaderKeyRegisterOp {
consensus_hash: ConsensusHash([0x22; 20]),
public_key: VRFPublicKey::from_hex(
"a366b51292bef4edd64063d9145c617fec373bceb0758e98cd72becd84d54c7a",
)
.unwrap(),
memo: vec![01, 02, 03, 04, 05],
address: StacksAddress::from_bitcoin_address(
&BitcoinAddress::from_scriptpubkey(
BitcoinNetworkType::Testnet,
&hex_bytes("76a9140be3e286a15ea85882761618e366586b5574100d88ac").unwrap(),
)
.unwrap(),
),
txid: Txid::from_bytes_be(
&hex_bytes("1bfa831b5fc56c858198acb8e77e5863c1e9d8ac26d49ddb914e24d8d4083562")
.unwrap(),
)
.unwrap(),
vtxindex: 400,
block_height: 101,
burn_header_hash: BurnchainHeaderHash([0x01; 32]),
};
let block_commit = LeaderBlockCommitOp {
sunset_burn: 0,
block_header_hash: BlockHeaderHash([0x22; 32]),
new_seed: VRFSeed::from_hex(
"3333333333333333333333333333333333333333333333333333333333333333",
)
.unwrap(),
parent_block_ptr: 0,
parent_vtxindex: 0,
key_block_ptr: 101,
key_vtxindex: 400,
memo: vec![0x80],
commit_outs: vec![],
burn_fee: 12345,
input: (Txid([0; 32]), 0),
apparent_sender: BurnchainSigner {
public_keys: vec![StacksPublicKey::from_hex(
"02d8015134d9db8178ac93acbc43170a2f20febba5087a5b0437058765ad5133d0",
)
.unwrap()],
num_sigs: 1,
hash_mode: AddressHashMode::SerializeP2PKH,
},
txid: Txid::from_bytes_be(
&hex_bytes("3c07a0a93360bc85047bbaadd49e30c8af770f73a37e10fec400174d2e5f27cf")
.unwrap(),
)
.unwrap(),
vtxindex: 400,
block_height: 102,
burn_parent_modulus: (101 % BURN_BLOCK_MINED_AT_MODULUS) as u8,
burn_header_hash: BurnchainHeaderHash([0x03; 32]),
};
let mut burnchain = Burnchain::default_unittest(100, &first_burn_hash);
burnchain.initial_reward_start_block = 90;
let mut db = SortitionDB::connect_test(100, &first_burn_hash).unwrap();
let snapshot = test_append_snapshot(
&mut db,
BurnchainHeaderHash([0x01; 32]),
&vec![BlockstackOperationType::LeaderKeyRegister(leader_key)],
);
let next_block_header = BurnchainBlockHeader {
block_height: 102,
block_hash: BurnchainHeaderHash([0x03; 32]),
parent_block_hash: BurnchainHeaderHash([0x01; 32]),
num_txs: 1,
timestamp: 10,
|
{
match blockstack_op {
BlockstackOperationType::LeaderKeyRegister(ref op) => {
op.check(burnchain, self).map_err(|e| {
warn!(
"REJECTED({}) leader key register {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::LeaderBlockCommit(ref op) => {
op.check(burnchain, self, reward_info).map_err(|e| {
warn!(
"REJECTED({}) leader block commit {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
|
identifier_body
|
processing.rs
|
use chainstate::stacks::index::{
marf::MARF, storage::TrieFileStorage, Error as MARFError, MarfTrieId,
};
use core::INITIAL_MINING_BONUS_WINDOW;
use util::db::Error as DBError;
use crate::types::chainstate::{BurnchainHeaderHash, MARFValue, PoxId, SortitionId};
use crate::types::proof::TrieHash;
impl<'a> SortitionHandleTx<'a> {
/// Run a blockstack operation's "check()" method and return the result.
fn check_transaction(
&mut self,
burnchain: &Burnchain,
blockstack_op: &BlockstackOperationType,
reward_info: Option<&RewardSetInfo>,
) -> Result<(), BurnchainError> {
match blockstack_op {
BlockstackOperationType::LeaderKeyRegister(ref op) => {
op.check(burnchain, self).map_err(|e| {
warn!(
"REJECTED({}) leader key register {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::LeaderBlockCommit(ref op) => {
op.check(burnchain, self, reward_info).map_err(|e| {
warn!(
"REJECTED({}) leader block commit {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::UserBurnSupport(ref op) => {
op.check(burnchain, self).map_err(|e| {
warn!(
"REJECTED({}) user burn support {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::StackStx(ref op) => op.check().map_err(|e| {
warn!(
"REJECTED({}) stack stx op {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
}),
BlockstackOperationType::TransferStx(ref op) => op.check().map_err(|e| {
warn!(
"REJECTED({}) transfer stx op {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
}),
BlockstackOperationType::PreStx(_) => {
// no check() required for PreStx
Ok(())
}
}
}
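    // Every rejection above is logged with a REJECTED line and wrapped in
    // BurnchainError::OpError; a MissedBlockCommit error in particular is not
    // fatal -- process_block_ops below catches it and records the missed
    // commit instead of silently dropping the op.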
    /// Process all of a block's checked transactions
/// * make the burn distribution
/// * insert the ones that went into the burn distribution
/// * snapshot the block and run the sortition
/// * return the snapshot (and sortition results)
fn process_checked_block_ops(
&mut self,
burnchain: &Burnchain,
parent_snapshot: &BlockSnapshot,
block_header: &BurnchainBlockHeader,
this_block_ops: &Vec<BlockstackOperationType>,
missed_commits: &Vec<MissedBlockCommit>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
let this_block_height = block_header.block_height;
let this_block_hash = block_header.block_hash.clone();
// make the burn distribution, and in doing so, identify the user burns that we'll keep
        let state_transition = BurnchainStateTransition::from_block_ops(
            self,
            burnchain,
            parent_snapshot,
            this_block_ops,
            missed_commits,
            burnchain.pox_constants.sunset_end,
        )
        .map_err(|e| {
            error!(
                "TRANSACTION ABORTED when converting {} blockstack operations in block {} ({}) to a burn distribution: {:?}",
                this_block_ops.len(),
                this_block_height,
                &this_block_hash,
                e
            );
            e
        })?;
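        // Sum the burn fees of the accepted ops with checked_add so that a
        // (theoretical) u64 overflow yields None instead of wrapping; only
        // leader block commits and user burn supports carry a burn_fee, so
        // every other op type contributes zero.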
let total_burn = state_transition
.accepted_ops
.iter()
.fold(Some(0u64), |acc, op| {
if let Some(acc) = acc {
let bf = match op {
BlockstackOperationType::LeaderBlockCommit(ref op) => op.burn_fee,
BlockstackOperationType::UserBurnSupport(ref op) => op.burn_fee,
_ => 0,
};
acc.checked_add(bf)
} else {
None
}
});
let txids = state_transition
.accepted_ops
.iter()
.map(|ref op| op.txid())
.collect();
let mut next_pox = parent_pox;
if let Some(ref next_pox_info) = next_pox_info {
if next_pox_info.is_reward_info_known() {
debug!(
"Begin reward-cycle sortition with present anchor block={:?}",
&next_pox_info.selected_anchor_block()
);
next_pox.extend_with_present_block();
} else {
info!(
"Begin reward-cycle sortition with absent anchor block={:?}",
&next_pox_info.selected_anchor_block()
);
next_pox.extend_with_not_present_block();
}
};
let next_sortition_id = SortitionId::new(&this_block_hash, &next_pox);
// do the cryptographic sortition and pick the next winning block.
let mut snapshot = BlockSnapshot::make_snapshot(
self,
burnchain,
&next_sortition_id,
&next_pox,
parent_snapshot,
block_header,
&state_transition.burn_dist,
&txids,
total_burn,
initial_mining_bonus_ustx,
)
.map_err(|e| {
error!(
"TRANSACTION ABORTED when taking snapshot at block {} ({}): {:?}",
this_block_height, &this_block_hash, e
);
BurnchainError::DBError(e)
})?;
        // Was this snapshot the first to host a winning sortition? If the
        // parent chain has seen zero total burn so far, back-pay the
        // coinbases of the winnerless blocks since initial_reward_start_block,
        // spread evenly over the next INITIAL_MINING_BONUS_WINDOW blocks.
let initialize_bonus = if snapshot.sortition && parent_snapshot.total_burn == 0 {
let blocks_without_winners =
snapshot.block_height - burnchain.initial_reward_start_block;
let mut total_reward = 0;
for burn_block_height in burnchain.initial_reward_start_block..snapshot.block_height {
total_reward += StacksChainState::get_coinbase_reward(
burn_block_height,
self.context.first_block_height,
);
}
let per_block = total_reward / INITIAL_MINING_BONUS_WINDOW as u128;
info!("First sortition winner chosen";
"blocks_without_winners" => blocks_without_winners,
"initial_mining_per_block_reward" => per_block,
"initial_mining_bonus_block_window" => INITIAL_MINING_BONUS_WINDOW);
assert_eq!(snapshot.accumulated_coinbase_ustx, 0,
"First block should not have receive additional coinbase before initial reward calculation");
snapshot.accumulated_coinbase_ustx = per_block;
Some(InitialMiningBonus {
total_reward,
per_block,
})
} else {
None
};
// store the snapshot
let index_root = self.append_chain_tip_snapshot(
parent_snapshot,
&snapshot,
&state_transition.accepted_ops,
missed_commits,
next_pox_info,
reward_info,
initialize_bonus,
)?;
snapshot.index_root = index_root;
debug!("OPS-HASH({}): {}", this_block_height, &snapshot.ops_hash);
debug!(
"INDEX-ROOT({}): {}",
this_block_height, &snapshot.index_root
);
debug!(
"SORTITION-HASH({}): {}",
this_block_height, &snapshot.sortition_hash
);
debug!(
"CONSENSUS({}): {}",
this_block_height, &snapshot.consensus_hash
);
Ok((snapshot, state_transition))
}
/// Check and then commit all blockstack operations to our chainstate.
/// * pull out all the transactions that are blockstack ops
/// * select the ones that are _valid_
/// * do a cryptographic sortition to select the next Stacks block
/// * commit all valid transactions
/// * commit the results of the sortition
/// Returns the BlockSnapshot created from this block.
pub fn process_block_ops(
&mut self,
burnchain: &Burnchain,
parent_snapshot: &BlockSnapshot,
block_header: &BurnchainBlockHeader,
mut blockstack_txs: Vec<BlockstackOperationType>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_set_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
debug!(
"BEGIN({}) block ({},{}) with sortition_id: {}",
block_header.block_height,
block_header.block_hash,
block_header.parent_block_hash,
&self.context.chain_tip
);
debug!(
"Append {} operation(s) from block {} {}",
blockstack_txs.len(),
block_header.block_height,
&block_header.block_hash
);
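        // Order ops by their position within the block; vtxindex values are
        // plain integers, so the partial_cmp().unwrap() below cannot panic.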
blockstack_txs.sort_by(|ref a, ref b| a.vtxindex().partial_cmp(&b.vtxindex()).unwrap());
// check each transaction, and filter out only the ones that are valid
debug!(
"Check Blockstack transactions from sortition_id: {}",
&self.context.chain_tip
);
let mut missed_block_commits = vec![];
// classify and check each transaction
blockstack_txs.retain(|blockstack_op| {
match self.check_transaction(burnchain, blockstack_op, reward_set_info) {
Ok(_) => true,
Err(BurnchainError::OpError(OpError::MissedBlockCommit(missed_op))) => {
missed_block_commits.push(missed_op);
false
}
Err(_) => false,
}
});
// block-wide check: no duplicate keys registered
let block_ops = Burnchain::filter_block_VRF_dups(blockstack_txs);
assert!(Burnchain::ops_are_sorted(&block_ops));
// process them
let res = self
.process_checked_block_ops(
burnchain,
parent_snapshot,
block_header,
&block_ops,
&missed_block_commits,
next_pox_info,
parent_pox,
reward_set_info,
initial_mining_bonus_ustx,
)
.map_err(|e| {
error!(
"TRANSACTION ABORTED when snapshotting block {} ({}): {:?}",
block_header.block_height, &block_header.block_hash, e
);
e
})?;
Ok(res)
}
/// Given the extracted txs, and a block header, go process them into the next
/// snapshot. Unlike process_block_ops, this method applies safety checks against the given
/// list of blockstack transactions.
pub fn process_block_txs(
&mut self,
parent_snapshot: &BlockSnapshot,
this_block_header: &BurnchainBlockHeader,
burnchain: &Burnchain,
blockstack_txs: Vec<BlockstackOperationType>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_set_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
assert_eq!(
parent_snapshot.block_height + 1,
this_block_header.block_height
);
assert_eq!(
parent_snapshot.burn_header_hash,
this_block_header.parent_block_hash
);
let new_snapshot = self.process_block_ops(
burnchain,
&parent_snapshot,
&this_block_header,
blockstack_txs,
next_pox_info,
parent_pox,
reward_set_info,
initial_mining_bonus_ustx,
)?;
Ok(new_snapshot)
}
}
#[cfg(test)]
mod tests {
use burnchains::bitcoin::{address::BitcoinAddress, BitcoinNetworkType};
|
use chainstate::burn::*;
use chainstate::stacks::StacksPublicKey;
use core::MICROSTACKS_PER_STACKS;
use util::{hash::hex_bytes, vrf::VRFPublicKey};
use crate::types::chainstate::{BlockHeaderHash, StacksAddress, VRFSeed};
use super::*;
#[test]
fn test_initial_block_reward() {
let first_burn_hash = BurnchainHeaderHash([0; 32]);
let leader_key = LeaderKeyRegisterOp {
consensus_hash: ConsensusHash([0x22; 20]),
public_key: VRFPublicKey::from_hex(
"a366b51292bef4edd64063d9145c617fec373bceb0758e98cd72becd84d54c7a",
)
.unwrap(),
memo: vec![01, 02, 03, 04, 05],
address: StacksAddress::from_bitcoin_address(
&BitcoinAddress::from_scriptpubkey(
BitcoinNetworkType::Testnet,
&hex_bytes("76a9140be3e286a15ea85882761618e366586b5574100d88ac").unwrap(),
)
.unwrap(),
),
txid: Txid::from_bytes_be(
&hex_bytes("1bfa831b5fc56c858198acb8e77e5863c1e9d8ac26d49ddb914e24d8d4083562")
.unwrap(),
)
.unwrap(),
vtxindex: 400,
block_height: 101,
burn_header_hash: BurnchainHeaderHash([0x01; 32]),
};
let block_commit = LeaderBlockCommitOp {
sunset_burn: 0,
block_header_hash: BlockHeaderHash([0x22; 32]),
new_seed: VRFSeed::from_hex(
"3333333333333333333333333333333333333333333333333333333333333333",
)
.unwrap(),
parent_block_ptr: 0,
parent_vtxindex: 0,
key_block_ptr: 101,
key_vtxindex: 400,
memo: vec![0x80],
commit_outs: vec![],
burn_fee: 12345,
input: (Txid([0; 32]), 0),
apparent_sender: BurnchainSigner {
public_keys: vec![StacksPublicKey::from_hex(
"02d8015134d9db8178ac93acbc43170a2f20febba5087a5b0437058765ad5133d0",
)
.unwrap()],
num_sigs: 1,
hash_mode: AddressHashMode::SerializeP2PKH,
},
txid: Txid::from_bytes_be(
&hex_bytes("3c07a0a93360bc85047bbaadd49e30c8af770f73a37e10fec400174d2e5f27cf")
.unwrap(),
)
.unwrap(),
vtxindex: 400,
block_height: 102,
burn_parent_modulus: (101 % BURN_BLOCK_MINED_AT_MODULUS) as u8,
burn_header_hash: BurnchainHeaderHash([0x03; 32]),
};
let mut burnchain = Burnchain::default_unittest(100, &first_burn_hash);
burnchain.initial_reward_start_block = 90;
let mut db = SortitionDB::connect_test(100, &first_burn_hash).unwrap();
let snapshot = test_append_snapshot(
&mut db,
BurnchainHeaderHash([0x01; 32]),
&vec![BlockstackOperationType::LeaderKeyRegister(leader_key)],
);
let next_block_header = BurnchainBlockHeader {
block_height: 102,
block_hash: BurnchainHeaderHash([0x03; 32]),
parent_block_hash: BurnchainHeaderHash([0x01; 32]),
num_txs: 1,
timestamp: 10,
|
use burnchains::*;
use chainstate::burn::db::sortdb::{tests::test_append_snapshot, SortitionDB};
use chainstate::burn::operations::{
leader_block_commit::BURN_BLOCK_MINED_AT_MODULUS, LeaderBlockCommitOp, LeaderKeyRegisterOp,
};
|
random_line_split
|
processing.rs
|
chainstate::stacks::index::{
marf::MARF, storage::TrieFileStorage, Error as MARFError, MarfTrieId,
};
use core::INITIAL_MINING_BONUS_WINDOW;
use util::db::Error as DBError;
use crate::types::chainstate::{BurnchainHeaderHash, MARFValue, PoxId, SortitionId};
use crate::types::proof::TrieHash;
impl<'a> SortitionHandleTx<'a> {
/// Run a blockstack operation's "check()" method and return the result.
fn check_transaction(
&mut self,
burnchain: &Burnchain,
blockstack_op: &BlockstackOperationType,
reward_info: Option<&RewardSetInfo>,
) -> Result<(), BurnchainError> {
match blockstack_op {
BlockstackOperationType::LeaderKeyRegister(ref op) => {
op.check(burnchain, self).map_err(|e| {
warn!(
"REJECTED({}) leader key register {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::LeaderBlockCommit(ref op) => {
op.check(burnchain, self, reward_info).map_err(|e| {
warn!(
"REJECTED({}) leader block commit {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::UserBurnSupport(ref op) => {
op.check(burnchain, self).map_err(|e| {
warn!(
"REJECTED({}) user burn support {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
})
}
BlockstackOperationType::StackStx(ref op) => op.check().map_err(|e| {
warn!(
"REJECTED({}) stack stx op {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
}),
BlockstackOperationType::TransferStx(ref op) => op.check().map_err(|e| {
warn!(
"REJECTED({}) transfer stx op {} at {},{}: {:?}",
op.block_height, &op.txid, op.block_height, op.vtxindex, &e
);
BurnchainError::OpError(e)
}),
BlockstackOperationType::PreStx(_) => {
// no check() required for PreStx
Ok(())
}
}
}
    /// Process all of a block's checked transactions
/// * make the burn distribution
/// * insert the ones that went into the burn distribution
/// * snapshot the block and run the sortition
/// * return the snapshot (and sortition results)
fn process_checked_block_ops(
&mut self,
burnchain: &Burnchain,
parent_snapshot: &BlockSnapshot,
block_header: &BurnchainBlockHeader,
this_block_ops: &Vec<BlockstackOperationType>,
missed_commits: &Vec<MissedBlockCommit>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
let this_block_height = block_header.block_height;
let this_block_hash = block_header.block_hash.clone();
// make the burn distribution, and in doing so, identify the user burns that we'll keep
        let state_transition = BurnchainStateTransition::from_block_ops(
            self,
            burnchain,
            parent_snapshot,
            this_block_ops,
            missed_commits,
            burnchain.pox_constants.sunset_end,
        )
        .map_err(|e| {
            error!(
                "TRANSACTION ABORTED when converting {} blockstack operations in block {} ({}) to a burn distribution: {:?}",
                this_block_ops.len(),
                this_block_height,
                &this_block_hash,
                e
            );
            e
        })?;
let total_burn = state_transition
.accepted_ops
.iter()
.fold(Some(0u64), |acc, op| {
if let Some(acc) = acc {
let bf = match op {
BlockstackOperationType::LeaderBlockCommit(ref op) => op.burn_fee,
BlockstackOperationType::UserBurnSupport(ref op) => op.burn_fee,
_ => 0,
};
acc.checked_add(bf)
} else {
None
}
});
let txids = state_transition
.accepted_ops
.iter()
.map(|ref op| op.txid())
.collect();
let mut next_pox = parent_pox;
if let Some(ref next_pox_info) = next_pox_info {
if next_pox_info.is_reward_info_known() {
debug!(
"Begin reward-cycle sortition with present anchor block={:?}",
&next_pox_info.selected_anchor_block()
);
next_pox.extend_with_present_block();
} else {
info!(
"Begin reward-cycle sortition with absent anchor block={:?}",
&next_pox_info.selected_anchor_block()
);
next_pox.extend_with_not_present_block();
}
};
let next_sortition_id = SortitionId::new(&this_block_hash, &next_pox);
// do the cryptographic sortition and pick the next winning block.
let mut snapshot = BlockSnapshot::make_snapshot(
self,
burnchain,
&next_sortition_id,
&next_pox,
parent_snapshot,
block_header,
&state_transition.burn_dist,
&txids,
total_burn,
initial_mining_bonus_ustx,
)
.map_err(|e| {
error!(
"TRANSACTION ABORTED when taking snapshot at block {} ({}): {:?}",
this_block_height, &this_block_hash, e
);
BurnchainError::DBError(e)
})?;
        // Was this snapshot the first to host a winning sortition? If the
        // parent chain has seen zero total burn so far, back-pay the
        // coinbases of the winnerless blocks since initial_reward_start_block,
        // spread evenly over the next INITIAL_MINING_BONUS_WINDOW blocks.
let initialize_bonus = if snapshot.sortition && parent_snapshot.total_burn == 0 {
let blocks_without_winners =
snapshot.block_height - burnchain.initial_reward_start_block;
let mut total_reward = 0;
for burn_block_height in burnchain.initial_reward_start_block..snapshot.block_height {
total_reward += StacksChainState::get_coinbase_reward(
burn_block_height,
self.context.first_block_height,
);
}
let per_block = total_reward / INITIAL_MINING_BONUS_WINDOW as u128;
info!("First sortition winner chosen";
"blocks_without_winners" => blocks_without_winners,
"initial_mining_per_block_reward" => per_block,
"initial_mining_bonus_block_window" => INITIAL_MINING_BONUS_WINDOW);
assert_eq!(snapshot.accumulated_coinbase_ustx, 0,
"First block should not have receive additional coinbase before initial reward calculation");
snapshot.accumulated_coinbase_ustx = per_block;
Some(InitialMiningBonus {
total_reward,
per_block,
})
} else {
None
};
// store the snapshot
let index_root = self.append_chain_tip_snapshot(
parent_snapshot,
&snapshot,
&state_transition.accepted_ops,
missed_commits,
next_pox_info,
reward_info,
initialize_bonus,
)?;
snapshot.index_root = index_root;
debug!("OPS-HASH({}): {}", this_block_height, &snapshot.ops_hash);
debug!(
"INDEX-ROOT({}): {}",
this_block_height, &snapshot.index_root
);
debug!(
"SORTITION-HASH({}): {}",
this_block_height, &snapshot.sortition_hash
);
debug!(
"CONSENSUS({}): {}",
this_block_height, &snapshot.consensus_hash
);
Ok((snapshot, state_transition))
}
/// Check and then commit all blockstack operations to our chainstate.
/// * pull out all the transactions that are blockstack ops
/// * select the ones that are _valid_
/// * do a cryptographic sortition to select the next Stacks block
/// * commit all valid transactions
/// * commit the results of the sortition
/// Returns the BlockSnapshot created from this block.
pub fn process_block_ops(
&mut self,
burnchain: &Burnchain,
parent_snapshot: &BlockSnapshot,
block_header: &BurnchainBlockHeader,
mut blockstack_txs: Vec<BlockstackOperationType>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_set_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
debug!(
"BEGIN({}) block ({},{}) with sortition_id: {}",
block_header.block_height,
block_header.block_hash,
block_header.parent_block_hash,
&self.context.chain_tip
);
debug!(
"Append {} operation(s) from block {} {}",
blockstack_txs.len(),
block_header.block_height,
&block_header.block_hash
);
blockstack_txs.sort_by(|ref a, ref b| a.vtxindex().partial_cmp(&b.vtxindex()).unwrap());
// check each transaction, and filter out only the ones that are valid
debug!(
"Check Blockstack transactions from sortition_id: {}",
&self.context.chain_tip
);
let mut missed_block_commits = vec![];
// classify and check each transaction
blockstack_txs.retain(|blockstack_op| {
match self.check_transaction(burnchain, blockstack_op, reward_set_info) {
Ok(_) => true,
Err(BurnchainError::OpError(OpError::MissedBlockCommit(missed_op))) => {
missed_block_commits.push(missed_op);
false
}
Err(_) => false,
}
});
// block-wide check: no duplicate keys registered
let block_ops = Burnchain::filter_block_VRF_dups(blockstack_txs);
assert!(Burnchain::ops_are_sorted(&block_ops));
// process them
let res = self
.process_checked_block_ops(
burnchain,
parent_snapshot,
block_header,
&block_ops,
&missed_block_commits,
next_pox_info,
parent_pox,
reward_set_info,
initial_mining_bonus_ustx,
)
.map_err(|e| {
error!(
"TRANSACTION ABORTED when snapshotting block {} ({}): {:?}",
block_header.block_height, &block_header.block_hash, e
);
e
})?;
Ok(res)
}
/// Given the extracted txs, and a block header, go process them into the next
/// snapshot. Unlike process_block_ops, this method applies safety checks against the given
/// list of blockstack transactions.
pub fn process_block_txs(
&mut self,
parent_snapshot: &BlockSnapshot,
this_block_header: &BurnchainBlockHeader,
burnchain: &Burnchain,
blockstack_txs: Vec<BlockstackOperationType>,
next_pox_info: Option<RewardCycleInfo>,
parent_pox: PoxId,
reward_set_info: Option<&RewardSetInfo>,
initial_mining_bonus_ustx: u128,
) -> Result<(BlockSnapshot, BurnchainStateTransition), BurnchainError> {
assert_eq!(
parent_snapshot.block_height + 1,
this_block_header.block_height
);
assert_eq!(
parent_snapshot.burn_header_hash,
this_block_header.parent_block_hash
);
let new_snapshot = self.process_block_ops(
burnchain,
&parent_snapshot,
&this_block_header,
blockstack_txs,
next_pox_info,
parent_pox,
reward_set_info,
initial_mining_bonus_ustx,
)?;
Ok(new_snapshot)
}
}
#[cfg(test)]
mod tests {
use burnchains::bitcoin::{address::BitcoinAddress, BitcoinNetworkType};
use burnchains::*;
use chainstate::burn::db::sortdb::{tests::test_append_snapshot, SortitionDB};
use chainstate::burn::operations::{
leader_block_commit::BURN_BLOCK_MINED_AT_MODULUS, LeaderBlockCommitOp, LeaderKeyRegisterOp,
};
use chainstate::burn::*;
use chainstate::stacks::StacksPublicKey;
use core::MICROSTACKS_PER_STACKS;
use util::{hash::hex_bytes, vrf::VRFPublicKey};
use crate::types::chainstate::{BlockHeaderHash, StacksAddress, VRFSeed};
use super::*;
#[test]
fn
|
() {
let first_burn_hash = BurnchainHeaderHash([0; 32]);
let leader_key = LeaderKeyRegisterOp {
consensus_hash: ConsensusHash([0x22; 20]),
public_key: VRFPublicKey::from_hex(
"a366b51292bef4edd64063d9145c617fec373bceb0758e98cd72becd84d54c7a",
)
.unwrap(),
memo: vec![01, 02, 03, 04, 05],
address: StacksAddress::from_bitcoin_address(
&BitcoinAddress::from_scriptpubkey(
BitcoinNetworkType::Testnet,
&hex_bytes("76a9140be3e286a15ea85882761618e366586b5574100d88ac").unwrap(),
)
.unwrap(),
),
txid: Txid::from_bytes_be(
&hex_bytes("1bfa831b5fc56c858198acb8e77e5863c1e9d8ac26d49ddb914e24d8d4083562")
.unwrap(),
)
.unwrap(),
vtxindex: 400,
block_height: 101,
burn_header_hash: BurnchainHeaderHash([0x01; 32]),
};
let block_commit = LeaderBlockCommitOp {
sunset_burn: 0,
block_header_hash: BlockHeaderHash([0x22; 32]),
new_seed: VRFSeed::from_hex(
"3333333333333333333333333333333333333333333333333333333333333333",
)
.unwrap(),
parent_block_ptr: 0,
parent_vtxindex: 0,
key_block_ptr: 101,
key_vtxindex: 400,
memo: vec![0x80],
commit_outs: vec![],
burn_fee: 12345,
input: (Txid([0; 32]), 0),
apparent_sender: BurnchainSigner {
public_keys: vec![StacksPublicKey::from_hex(
"02d8015134d9db8178ac93acbc43170a2f20febba5087a5b0437058765ad5133d0",
)
.unwrap()],
num_sigs: 1,
hash_mode: AddressHashMode::SerializeP2PKH,
},
txid: Txid::from_bytes_be(
&hex_bytes("3c07a0a93360bc85047bbaadd49e30c8af770f73a37e10fec400174d2e5f27cf")
.unwrap(),
)
.unwrap(),
vtxindex: 400,
block_height: 102,
burn_parent_modulus: (101 % BURN_BLOCK_MINED_AT_MODULUS) as u8,
burn_header_hash: BurnchainHeaderHash([0x03; 32]),
};
let mut burnchain = Burnchain::default_unittest(100, &first_burn_hash);
burnchain.initial_reward_start_block = 90;
let mut db = SortitionDB::connect_test(100, &first_burn_hash).unwrap();
let snapshot = test_append_snapshot(
&mut db,
BurnchainHeaderHash([0x01; 32]),
&vec![BlockstackOperationType::LeaderKeyRegister(leader_key)],
);
let next_block_header = BurnchainBlockHeader {
block_height: 102,
block_hash: BurnchainHeaderHash([0x03; 32]),
parent_block_hash: BurnchainHeaderHash([0x01; 32]),
num_txs: 1,
timestamp: 10,
|
test_initial_block_reward
|
identifier_name
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Servo, the mighty web browser engine from the future.
//!
//! This is a very simple library that wires all of Servo's components
//! together as type `Browser`, along with a generic client
//! implementing the `WindowMethods` trait, to create a working web
//! browser.
//!
//! The `Browser` type is responsible for configuring a
//! `Constellation`, which does the heavy lifting of coordinating all
//! of Servo's internal subsystems, including the `ScriptThread` and the
//! `LayoutThread`, as well as maintaining the navigation context.
//!
//! The `Browser` is fed events from a generic type that implements the
//! `WindowMethods` trait.
extern crate env_logger;
#[cfg(not(target_os = "windows"))]
extern crate gaol;
#[macro_use]
extern crate gleam;
extern crate log;
pub extern crate canvas;
pub extern crate canvas_traits;
pub extern crate compositing;
pub extern crate constellation;
pub extern crate debugger;
pub extern crate devtools;
pub extern crate devtools_traits;
pub extern crate euclid;
pub extern crate gfx;
pub extern crate ipc_channel;
pub extern crate layout_thread;
pub extern crate msg;
pub extern crate net;
pub extern crate net_traits;
pub extern crate profile;
pub extern crate profile_traits;
pub extern crate script;
pub extern crate script_traits;
pub extern crate script_layout_interface;
pub extern crate style;
pub extern crate url;
pub extern crate util;
#[cfg(feature = "webdriver")]
extern crate webdriver_server;
extern crate webrender;
extern crate webrender_traits;
#[cfg(feature = "webdriver")]
fn
|
(port: u16, constellation: Sender<ConstellationMsg>) {
webdriver_server::start_server(port, constellation);
}
#[cfg(not(feature = "webdriver"))]
fn webdriver(_port: u16, _constellation: Sender<ConstellationMsg>) { }
use compositing::{CompositorProxy, IOCompositor};
use compositing::compositor_thread::InitialCompositorState;
use compositing::windowing::WindowEvent;
use compositing::windowing::WindowMethods;
use constellation::{Constellation, InitialConstellationState, UnprivilegedPipelineContent};
use constellation::{FromCompositorLogger, FromScriptLogger};
#[cfg(not(target_os = "windows"))]
use constellation::content_process_sandbox_profile;
use env_logger::Logger as EnvLogger;
#[cfg(not(target_os = "windows"))]
use gaol::sandbox::{ChildSandbox, ChildSandboxMethods};
use gfx::font_cache_thread::FontCacheThread;
use ipc_channel::ipc::{self, IpcSender};
use log::{Log, LogMetadata, LogRecord};
use net::bluetooth_thread::BluetoothThreadFactory;
use net::image_cache_thread::new_image_cache_thread;
use net::resource_thread::new_resource_threads;
use net_traits::IpcSend;
use net_traits::bluetooth_thread::BluetoothMethodMsg;
use profile::mem as profile_mem;
use profile::time as profile_time;
use profile_traits::mem;
use profile_traits::time;
use script_traits::{ConstellationMsg, SWManagerSenders, ScriptMsg};
use std::cmp::max;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use util::opts;
use util::prefs::PREFS;
use util::resource_files::resources_dir_path;
pub use gleam::gl;
/// The in-process interface to Servo.
///
/// It does everything necessary to render the web, primarily
/// orchestrating the interaction between JavaScript, CSS layout,
/// rendering, and the client window.
///
/// Clients create a `Browser` for a given reference-counted type
/// implementing `WindowMethods`, which is the bridge to whatever
/// application Servo is embedded in. Clients then create an event
/// loop to pump messages between the embedding application and
/// various browser components.
pub struct Browser<Window: WindowMethods + 'static> {
compositor: IOCompositor<Window>,
constellation_chan: Sender<ConstellationMsg>,
}
impl<Window> Browser<Window> where Window: WindowMethods + 'static {
pub fn new(window: Rc<Window>) -> Browser<Window> {
// Global configuration options, parsed from the command line.
let opts = opts::get();
// Get both endpoints of a special channel for communication between
// the client window and the compositor. This channel is unique because
// messages to client may need to pump a platform-specific event loop
// to deliver the message.
let (compositor_proxy, compositor_receiver) =
window.create_compositor_channel();
let supports_clipboard = window.supports_clipboard();
let time_profiler_chan = profile_time::Profiler::create(&opts.time_profiling,
opts.time_profiler_trace_path.clone());
let mem_profiler_chan = profile_mem::Profiler::create(opts.mem_profiler_period);
if let Some(port) = opts.debugger_port {
debugger::start_server(port)
}
let devtools_chan = opts.devtools_port.map(|port| {
devtools::start_server(port)
});
let (webrender, webrender_api_sender) = if opts::get().use_webrender {
if let Ok(mut resource_path) = resources_dir_path() {
resource_path.push("shaders");
// TODO(gw): Duplicates device_pixels_per_screen_px from compositor. Tidy up!
let scale_factor = window.scale_factor().get();
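                // Pick the device pixel ratio: honor an explicit
                // device_pixels_per_px option if set, force 1.0 when
                // rendering to an output file, and otherwise fall back to
                // the window's reported scale factor.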
let device_pixel_ratio = match opts.device_pixels_per_px {
Some(device_pixels_per_px) => device_pixels_per_px,
None => match opts.output_file {
Some(_) => 1.0,
None => scale_factor,
}
};
let (webrender, webrender_sender) =
webrender::Renderer::new(webrender::RendererOptions {
device_pixel_ratio: device_pixel_ratio,
resource_path: resource_path,
enable_aa: opts.enable_text_antialiasing,
enable_msaa: opts.use_msaa,
enable_profiler: opts.webrender_stats,
debug: opts.webrender_debug,
enable_recording: false,
});
(Some(webrender), Some(webrender_sender))
} else {
(None, None)
}
} else {
(None, None)
};
// Create the constellation, which maintains the engine
// pipelines, including the script and layout threads, as well
// as the navigation context.
let (constellation_chan, sw_senders) = create_constellation(opts.clone(),
compositor_proxy.clone_compositor_proxy(),
time_profiler_chan.clone(),
mem_profiler_chan.clone(),
devtools_chan,
supports_clipboard,
webrender_api_sender.clone());
// Send the constellation's swmanager sender to service worker manager thread
script::init(sw_senders);
if cfg!(feature = "webdriver") {
if let Some(port) = opts.webdriver_port {
webdriver(port, constellation_chan.clone());
}
}
// The compositor coordinates with the client window to create the final
// rendered page and display it somewhere.
let compositor = IOCompositor::create(window, InitialCompositorState {
sender: compositor_proxy,
receiver: compositor_receiver,
constellation_chan: constellation_chan.clone(),
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
webrender: webrender,
webrender_api_sender: webrender_api_sender,
});
Browser {
compositor: compositor,
constellation_chan: constellation_chan,
}
}
pub fn handle_events(&mut self, events: Vec<WindowEvent>) -> bool {
self.compositor.handle_events(events)
}
pub fn repaint_synchronously(&mut self) {
self.compositor.repaint_synchronously()
}
pub fn pinch_zoom_level(&self) -> f32 {
self.compositor.pinch_zoom_level()
}
pub fn request_title_for_main_frame(&self) {
self.compositor.title_for_main_frame()
}
pub fn setup_logging(&self) {
let constellation_chan = self.constellation_chan.clone();
log::set_logger(|max_log_level| {
let env_logger = EnvLogger::new();
let con_logger = FromCompositorLogger::new(constellation_chan);
let filter = max(env_logger.filter(), con_logger.filter());
let logger = BothLogger(env_logger, con_logger);
max_log_level.set(filter);
Box::new(logger)
}).expect("Failed to set logger.")
}
}
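// Helper that spins up the services the constellation depends on (the
// bluetooth, resource, image-cache, and font-cache threads), starts the
// Constellation itself, and hands back its channel together with the
// service-worker manager senders.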
fn create_constellation(opts: opts::Opts,
compositor_proxy: Box<CompositorProxy + Send>,
time_profiler_chan: time::ProfilerChan,
mem_profiler_chan: mem::ProfilerChan,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
supports_clipboard: bool,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> (Sender<ConstellationMsg>, SWManagerSenders) {
let bluetooth_thread: IpcSender<BluetoothMethodMsg> = BluetoothThreadFactory::new();
let (public_resource_threads, private_resource_threads) =
new_resource_threads(opts.user_agent.clone(),
devtools_chan.clone(),
time_profiler_chan.clone(),
opts.config_dir.map(Into::into));
let image_cache_thread = new_image_cache_thread(public_resource_threads.sender(),
webrender_api_sender.as_ref().map(|wr| wr.create_api()));
let font_cache_thread = FontCacheThread::new(public_resource_threads.sender(),
webrender_api_sender.as_ref().map(|wr| wr.create_api()));
let resource_sender = public_resource_threads.sender();
let initial_state = InitialConstellationState {
compositor_proxy: compositor_proxy,
devtools_chan: devtools_chan,
bluetooth_thread: bluetooth_thread,
image_cache_thread: image_cache_thread,
font_cache_thread: font_cache_thread,
public_resource_threads: public_resource_threads,
private_resource_threads: private_resource_threads,
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
supports_clipboard: supports_clipboard,
webrender_api_sender: webrender_api_sender,
};
let (constellation_chan, from_swmanager_sender) =
Constellation::<script_layout_interface::message::Msg,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>::start(initial_state);
if let Some(url) = opts.url {
constellation_chan.send(ConstellationMsg::InitLoadUrl(url)).unwrap();
};
// channels to communicate with Service Worker Manager
let sw_senders = SWManagerSenders {
swmanager_sender: from_swmanager_sender,
resource_sender: resource_sender
};
(constellation_chan, sw_senders)
}
// A logger that logs to two downstream loggers.
// This should probably be in the log crate.
struct BothLogger<Log1, Log2>(Log1, Log2);
impl<Log1, Log2> Log for BothLogger<Log1, Log2> where Log1: Log, Log2: Log {
fn enabled(&self, metadata: &LogMetadata) -> bool {
self.0.enabled(metadata) || self.1.enabled(metadata)
}
fn log(&self, record: &LogRecord) {
self.0.log(record);
self.1.log(record);
}
}
pub fn set_logger(constellation_chan: IpcSender<ScriptMsg>) {
log::set_logger(|max_log_level| {
let env_logger = EnvLogger::new();
let con_logger = FromScriptLogger::new(constellation_chan);
let filter = max(env_logger.filter(), con_logger.filter());
let logger = BothLogger(env_logger, con_logger);
max_log_level.set(filter);
Box::new(logger)
}).expect("Failed to set logger.")
}
/// Content process entry point.
pub fn run_content_process(token: String) {
let (unprivileged_content_sender, unprivileged_content_receiver) =
ipc::channel::<UnprivilegedPipelineContent>().unwrap();
let connection_bootstrap: IpcSender<IpcSender<UnprivilegedPipelineContent>> =
IpcSender::connect(token).unwrap();
connection_bootstrap.send(unprivileged_content_sender).unwrap();
let unprivileged_content = unprivileged_content_receiver.recv().unwrap();
opts::set_defaults(unprivileged_content.opts());
PREFS.extend(unprivileged_content.prefs());
set_logger(unprivileged_content.constellation_chan());
// Enter the sandbox if necessary.
if opts::get().sandbox {
create_sandbox();
}
// send the required channels to the service worker manager
let sw_senders = unprivileged_content.swmanager_senders();
script::init(sw_senders);
unprivileged_content.start_all::<script_layout_interface::message::Msg,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>(true);
}
// This is a workaround for https://github.com/rust-lang/rust/pull/30175 until
// https://github.com/lfairy/rust-errno/pull/5 lands, and should be removed once
// we update Servo with the rust-errno crate.
#[cfg(target_os = "android")]
#[no_mangle]
pub unsafe extern fn __errno_location() -> *mut i32 {
extern { fn __errno() -> *mut i32; }
__errno()
}
#[cfg(not(target_os = "windows"))]
fn create_sandbox() {
ChildSandbox::new(content_process_sandbox_profile()).activate()
.expect("Failed to activate sandbox!");
}
#[cfg(target_os = "windows")]
fn create_sandbox() {
panic!("Sandboxing is not supported on Windows.");
}
|
webdriver
|
identifier_name
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Servo, the mighty web browser engine from the future.
//!
//! This is a very simple library that wires all of Servo's components
//! together as type `Browser`, along with a generic client
//! implementing the `WindowMethods` trait, to create a working web
//! browser.
//!
//! The `Browser` type is responsible for configuring a
//! `Constellation`, which does the heavy lifting of coordinating all
//! of Servo's internal subsystems, including the `ScriptThread` and the
//! `LayoutThread`, as well as maintaining the navigation context.
//!
//! The `Browser` is fed events from a generic type that implements the
//! `WindowMethods` trait.
extern crate env_logger;
#[cfg(not(target_os = "windows"))]
extern crate gaol;
#[macro_use]
extern crate gleam;
extern crate log;
pub extern crate canvas;
pub extern crate canvas_traits;
pub extern crate compositing;
pub extern crate constellation;
pub extern crate debugger;
pub extern crate devtools;
pub extern crate devtools_traits;
pub extern crate euclid;
pub extern crate gfx;
pub extern crate ipc_channel;
pub extern crate layout_thread;
pub extern crate msg;
pub extern crate net;
pub extern crate net_traits;
pub extern crate profile;
pub extern crate profile_traits;
pub extern crate script;
pub extern crate script_traits;
pub extern crate script_layout_interface;
pub extern crate style;
pub extern crate url;
pub extern crate util;
#[cfg(feature = "webdriver")]
extern crate webdriver_server;
extern crate webrender;
extern crate webrender_traits;
#[cfg(feature = "webdriver")]
fn webdriver(port: u16, constellation: Sender<ConstellationMsg>) {
webdriver_server::start_server(port, constellation);
}
#[cfg(not(feature = "webdriver"))]
fn webdriver(_port: u16, _constellation: Sender<ConstellationMsg>) { }
use compositing::{CompositorProxy, IOCompositor};
use compositing::compositor_thread::InitialCompositorState;
use compositing::windowing::WindowEvent;
use compositing::windowing::WindowMethods;
use constellation::{Constellation, InitialConstellationState, UnprivilegedPipelineContent};
use constellation::{FromCompositorLogger, FromScriptLogger};
#[cfg(not(target_os = "windows"))]
use constellation::content_process_sandbox_profile;
use env_logger::Logger as EnvLogger;
#[cfg(not(target_os = "windows"))]
use gaol::sandbox::{ChildSandbox, ChildSandboxMethods};
use gfx::font_cache_thread::FontCacheThread;
use ipc_channel::ipc::{self, IpcSender};
use log::{Log, LogMetadata, LogRecord};
use net::bluetooth_thread::BluetoothThreadFactory;
use net::image_cache_thread::new_image_cache_thread;
use net::resource_thread::new_resource_threads;
use net_traits::IpcSend;
use net_traits::bluetooth_thread::BluetoothMethodMsg;
use profile::mem as profile_mem;
use profile::time as profile_time;
use profile_traits::mem;
use profile_traits::time;
use script_traits::{ConstellationMsg, SWManagerSenders, ScriptMsg};
use std::cmp::max;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use util::opts;
use util::prefs::PREFS;
use util::resource_files::resources_dir_path;
pub use gleam::gl;
/// The in-process interface to Servo.
///
/// It does everything necessary to render the web, primarily
/// orchestrating the interaction between JavaScript, CSS layout,
/// rendering, and the client window.
///
/// Clients create a `Browser` for a given reference-counted type
/// implementing `WindowMethods`, which is the bridge to whatever
/// application Servo is embedded in. Clients then create an event
/// loop to pump messages between the embedding application and
/// various browser components.
pub struct Browser<Window: WindowMethods + 'static> {
compositor: IOCompositor<Window>,
constellation_chan: Sender<ConstellationMsg>,
}
impl<Window> Browser<Window> where Window: WindowMethods + 'static {
pub fn new(window: Rc<Window>) -> Browser<Window> {
// Global configuration options, parsed from the command line.
let opts = opts::get();
// Get both endpoints of a special channel for communication between
// the client window and the compositor. This channel is unique because
// messages to client may need to pump a platform-specific event loop
// to deliver the message.
let (compositor_proxy, compositor_receiver) =
window.create_compositor_channel();
let supports_clipboard = window.supports_clipboard();
let time_profiler_chan = profile_time::Profiler::create(&opts.time_profiling,
opts.time_profiler_trace_path.clone());
let mem_profiler_chan = profile_mem::Profiler::create(opts.mem_profiler_period);
if let Some(port) = opts.debugger_port {
debugger::start_server(port)
}
let devtools_chan = opts.devtools_port.map(|port| {
devtools::start_server(port)
});
let (webrender, webrender_api_sender) = if opts::get().use_webrender {
if let Ok(mut resource_path) = resources_dir_path() {
resource_path.push("shaders");
// TODO(gw): Duplicates device_pixels_per_screen_px from compositor. Tidy up!
let scale_factor = window.scale_factor().get();
let device_pixel_ratio = match opts.device_pixels_per_px {
Some(device_pixels_per_px) => device_pixels_per_px,
None => match opts.output_file {
Some(_) => 1.0,
None => scale_factor,
}
};
let (webrender, webrender_sender) =
webrender::Renderer::new(webrender::RendererOptions {
device_pixel_ratio: device_pixel_ratio,
resource_path: resource_path,
enable_aa: opts.enable_text_antialiasing,
enable_msaa: opts.use_msaa,
enable_profiler: opts.webrender_stats,
debug: opts.webrender_debug,
enable_recording: false,
});
(Some(webrender), Some(webrender_sender))
} else {
(None, None)
}
} else {
(None, None)
};
// Create the constellation, which maintains the engine
// pipelines, including the script and layout threads, as well
// as the navigation context.
let (constellation_chan, sw_senders) = create_constellation(opts.clone(),
compositor_proxy.clone_compositor_proxy(),
time_profiler_chan.clone(),
mem_profiler_chan.clone(),
devtools_chan,
supports_clipboard,
webrender_api_sender.clone());
// Send the constellation's swmanager sender to service worker manager thread
script::init(sw_senders);
if cfg!(feature = "webdriver") {
if let Some(port) = opts.webdriver_port {
webdriver(port, constellation_chan.clone());
}
}
// The compositor coordinates with the client window to create the final
// rendered page and display it somewhere.
let compositor = IOCompositor::create(window, InitialCompositorState {
sender: compositor_proxy,
receiver: compositor_receiver,
constellation_chan: constellation_chan.clone(),
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
webrender: webrender,
webrender_api_sender: webrender_api_sender,
});
Browser {
compositor: compositor,
constellation_chan: constellation_chan,
}
}
pub fn handle_events(&mut self, events: Vec<WindowEvent>) -> bool {
self.compositor.handle_events(events)
}
pub fn repaint_synchronously(&mut self) {
self.compositor.repaint_synchronously()
}
pub fn pinch_zoom_level(&self) -> f32
|
pub fn request_title_for_main_frame(&self) {
self.compositor.title_for_main_frame()
}
pub fn setup_logging(&self) {
let constellation_chan = self.constellation_chan.clone();
log::set_logger(|max_log_level| {
let env_logger = EnvLogger::new();
let con_logger = FromCompositorLogger::new(constellation_chan);
let filter = max(env_logger.filter(), con_logger.filter());
let logger = BothLogger(env_logger, con_logger);
max_log_level.set(filter);
Box::new(logger)
}).expect("Failed to set logger.")
}
}
fn create_constellation(opts: opts::Opts,
compositor_proxy: Box<CompositorProxy + Send>,
time_profiler_chan: time::ProfilerChan,
mem_profiler_chan: mem::ProfilerChan,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
supports_clipboard: bool,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> (Sender<ConstellationMsg>, SWManagerSenders) {
let bluetooth_thread: IpcSender<BluetoothMethodMsg> = BluetoothThreadFactory::new();
let (public_resource_threads, private_resource_threads) =
new_resource_threads(opts.user_agent.clone(),
devtools_chan.clone(),
time_profiler_chan.clone(),
opts.config_dir.map(Into::into));
let image_cache_thread = new_image_cache_thread(public_resource_threads.sender(),
webrender_api_sender.as_ref().map(|wr| wr.create_api()));
let font_cache_thread = FontCacheThread::new(public_resource_threads.sender(),
webrender_api_sender.as_ref().map(|wr| wr.create_api()));
let resource_sender = public_resource_threads.sender();
let initial_state = InitialConstellationState {
compositor_proxy: compositor_proxy,
devtools_chan: devtools_chan,
bluetooth_thread: bluetooth_thread,
image_cache_thread: image_cache_thread,
font_cache_thread: font_cache_thread,
public_resource_threads: public_resource_threads,
private_resource_threads: private_resource_threads,
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
supports_clipboard: supports_clipboard,
webrender_api_sender: webrender_api_sender,
};
let (constellation_chan, from_swmanager_sender) =
Constellation::<script_layout_interface::message::Msg,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>::start(initial_state);
if let Some(url) = opts.url {
constellation_chan.send(ConstellationMsg::InitLoadUrl(url)).unwrap();
};
// channels to communicate with Service Worker Manager
let sw_senders = SWManagerSenders {
swmanager_sender: from_swmanager_sender,
resource_sender: resource_sender
};
(constellation_chan, sw_senders)
}
// A logger that logs to two downstream loggers.
// This should probably be in the log crate.
struct BothLogger<Log1, Log2>(Log1, Log2);
impl<Log1, Log2> Log for BothLogger<Log1, Log2> where Log1: Log, Log2: Log {
fn enabled(&self, metadata: &LogMetadata) -> bool {
self.0.enabled(metadata) || self.1.enabled(metadata)
}
fn log(&self, record: &LogRecord) {
self.0.log(record);
self.1.log(record);
}
}
pub fn set_logger(constellation_chan: IpcSender<ScriptMsg>) {
log::set_logger(|max_log_level| {
let env_logger = EnvLogger::new();
let con_logger = FromScriptLogger::new(constellation_chan);
let filter = max(env_logger.filter(), con_logger.filter());
let logger = BothLogger(env_logger, con_logger);
max_log_level.set(filter);
Box::new(logger)
}).expect("Failed to set logger.")
}
/// Content process entry point.
pub fn run_content_process(token: String) {
let (unprivileged_content_sender, unprivileged_content_receiver) =
ipc::channel::<UnprivilegedPipelineContent>().unwrap();
let connection_bootstrap: IpcSender<IpcSender<UnprivilegedPipelineContent>> =
IpcSender::connect(token).unwrap();
connection_bootstrap.send(unprivileged_content_sender).unwrap();
let unprivileged_content = unprivileged_content_receiver.recv().unwrap();
opts::set_defaults(unprivileged_content.opts());
PREFS.extend(unprivileged_content.prefs());
set_logger(unprivileged_content.constellation_chan());
// Enter the sandbox if necessary.
if opts::get().sandbox {
create_sandbox();
}
// send the required channels to the service worker manager
let sw_senders = unprivileged_content.swmanager_senders();
script::init(sw_senders);
unprivileged_content.start_all::<script_layout_interface::message::Msg,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>(true);
}
// This is a workaround for https://github.com/rust-lang/rust/pull/30175 until
// https://github.com/lfairy/rust-errno/pull/5 lands, and should be removed once
// we update Servo with the rust-errno crate.
#[cfg(target_os = "android")]
#[no_mangle]
pub unsafe extern fn __errno_location() -> *mut i32 {
extern { fn __errno() -> *mut i32; }
__errno()
}
#[cfg(not(target_os = "windows"))]
fn create_sandbox() {
ChildSandbox::new(content_process_sandbox_profile()).activate()
.expect("Failed to activate sandbox!");
}
#[cfg(target_os = "windows")]
fn create_sandbox() {
panic!("Sandboxing is not supported on Windows.");
}
|
{
self.compositor.pinch_zoom_level()
}
|
identifier_body
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Servo, the mighty web browser engine from the future.
//!
//! This is a very simple library that wires all of Servo's components
//! together as type `Browser`, along with a generic client
//! implementing the `WindowMethods` trait, to create a working web
//! browser.
//!
//! The `Browser` type is responsible for configuring a
//! `Constellation`, which does the heavy lifting of coordinating all
//! of Servo's internal subsystems, including the `ScriptThread` and the
//! `LayoutThread`, as well as maintaining the navigation context.
//!
//! The `Browser` is fed events from a generic type that implements the
//! `WindowMethods` trait.
extern crate env_logger;
#[cfg(not(target_os = "windows"))]
extern crate gaol;
#[macro_use]
extern crate gleam;
extern crate log;
pub extern crate canvas;
pub extern crate canvas_traits;
pub extern crate compositing;
pub extern crate constellation;
pub extern crate debugger;
pub extern crate devtools;
pub extern crate devtools_traits;
pub extern crate euclid;
pub extern crate gfx;
pub extern crate ipc_channel;
pub extern crate layout_thread;
pub extern crate msg;
pub extern crate net;
pub extern crate net_traits;
pub extern crate profile;
pub extern crate profile_traits;
pub extern crate script;
pub extern crate script_traits;
pub extern crate script_layout_interface;
pub extern crate style;
pub extern crate url;
pub extern crate util;
#[cfg(feature = "webdriver")]
extern crate webdriver_server;
extern crate webrender;
extern crate webrender_traits;
#[cfg(feature = "webdriver")]
fn webdriver(port: u16, constellation: Sender<ConstellationMsg>) {
webdriver_server::start_server(port, constellation);
}
#[cfg(not(feature = "webdriver"))]
fn webdriver(_port: u16, _constellation: Sender<ConstellationMsg>) { }
use compositing::{CompositorProxy, IOCompositor};
use compositing::compositor_thread::InitialCompositorState;
use compositing::windowing::WindowEvent;
use compositing::windowing::WindowMethods;
use constellation::{Constellation, InitialConstellationState, UnprivilegedPipelineContent};
use constellation::{FromCompositorLogger, FromScriptLogger};
|
use gaol::sandbox::{ChildSandbox, ChildSandboxMethods};
use gfx::font_cache_thread::FontCacheThread;
use ipc_channel::ipc::{self, IpcSender};
use log::{Log, LogMetadata, LogRecord};
use net::bluetooth_thread::BluetoothThreadFactory;
use net::image_cache_thread::new_image_cache_thread;
use net::resource_thread::new_resource_threads;
use net_traits::IpcSend;
use net_traits::bluetooth_thread::BluetoothMethodMsg;
use profile::mem as profile_mem;
use profile::time as profile_time;
use profile_traits::mem;
use profile_traits::time;
use script_traits::{ConstellationMsg, SWManagerSenders, ScriptMsg};
use std::cmp::max;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use util::opts;
use util::prefs::PREFS;
use util::resource_files::resources_dir_path;
pub use gleam::gl;
/// The in-process interface to Servo.
///
/// It does everything necessary to render the web, primarily
/// orchestrating the interaction between JavaScript, CSS layout,
/// rendering, and the client window.
///
/// Clients create a `Browser` for a given reference-counted type
/// implementing `WindowMethods`, which is the bridge to whatever
/// application Servo is embedded in. Clients then create an event
/// loop to pump messages between the embedding application and
/// various browser components.
pub struct Browser<Window: WindowMethods + 'static> {
compositor: IOCompositor<Window>,
constellation_chan: Sender<ConstellationMsg>,
}
impl<Window> Browser<Window> where Window: WindowMethods + 'static {
pub fn new(window: Rc<Window>) -> Browser<Window> {
// Global configuration options, parsed from the command line.
let opts = opts::get();
// Get both endpoints of a special channel for communication between
// the client window and the compositor. This channel is unique because
// messages to client may need to pump a platform-specific event loop
// to deliver the message.
let (compositor_proxy, compositor_receiver) =
window.create_compositor_channel();
let supports_clipboard = window.supports_clipboard();
let time_profiler_chan = profile_time::Profiler::create(&opts.time_profiling,
opts.time_profiler_trace_path.clone());
let mem_profiler_chan = profile_mem::Profiler::create(opts.mem_profiler_period);
if let Some(port) = opts.debugger_port {
debugger::start_server(port)
}
let devtools_chan = opts.devtools_port.map(|port| {
devtools::start_server(port)
});
let (webrender, webrender_api_sender) = if opts::get().use_webrender {
if let Ok(mut resource_path) = resources_dir_path() {
resource_path.push("shaders");
// TODO(gw): Duplicates device_pixels_per_screen_px from compositor. Tidy up!
let scale_factor = window.scale_factor().get();
let device_pixel_ratio = match opts.device_pixels_per_px {
Some(device_pixels_per_px) => device_pixels_per_px,
None => match opts.output_file {
Some(_) => 1.0,
None => scale_factor,
}
};
let (webrender, webrender_sender) =
webrender::Renderer::new(webrender::RendererOptions {
device_pixel_ratio: device_pixel_ratio,
resource_path: resource_path,
enable_aa: opts.enable_text_antialiasing,
enable_msaa: opts.use_msaa,
enable_profiler: opts.webrender_stats,
debug: opts.webrender_debug,
enable_recording: false,
});
(Some(webrender), Some(webrender_sender))
} else {
(None, None)
}
} else {
(None, None)
};
// Create the constellation, which maintains the engine
// pipelines, including the script and layout threads, as well
// as the navigation context.
let (constellation_chan, sw_senders) = create_constellation(opts.clone(),
compositor_proxy.clone_compositor_proxy(),
time_profiler_chan.clone(),
mem_profiler_chan.clone(),
devtools_chan,
supports_clipboard,
webrender_api_sender.clone());
// Send the constellation's swmanager sender to service worker manager thread
script::init(sw_senders);
if cfg!(feature = "webdriver") {
if let Some(port) = opts.webdriver_port {
webdriver(port, constellation_chan.clone());
}
}
// The compositor coordinates with the client window to create the final
// rendered page and display it somewhere.
let compositor = IOCompositor::create(window, InitialCompositorState {
sender: compositor_proxy,
receiver: compositor_receiver,
constellation_chan: constellation_chan.clone(),
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
webrender: webrender,
webrender_api_sender: webrender_api_sender,
});
Browser {
compositor: compositor,
constellation_chan: constellation_chan,
}
}
pub fn handle_events(&mut self, events: Vec<WindowEvent>) -> bool {
self.compositor.handle_events(events)
}
pub fn repaint_synchronously(&mut self) {
self.compositor.repaint_synchronously()
}
pub fn pinch_zoom_level(&self) -> f32 {
self.compositor.pinch_zoom_level()
}
pub fn request_title_for_main_frame(&self) {
self.compositor.title_for_main_frame()
}
pub fn setup_logging(&self) {
let constellation_chan = self.constellation_chan.clone();
log::set_logger(|max_log_level| {
let env_logger = EnvLogger::new();
let con_logger = FromCompositorLogger::new(constellation_chan);
let filter = max(env_logger.filter(), con_logger.filter());
let logger = BothLogger(env_logger, con_logger);
max_log_level.set(filter);
Box::new(logger)
}).expect("Failed to set logger.")
}
}
fn create_constellation(opts: opts::Opts,
compositor_proxy: Box<CompositorProxy + Send>,
time_profiler_chan: time::ProfilerChan,
mem_profiler_chan: mem::ProfilerChan,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
supports_clipboard: bool,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> (Sender<ConstellationMsg>, SWManagerSenders) {
let bluetooth_thread: IpcSender<BluetoothMethodMsg> = BluetoothThreadFactory::new();
let (public_resource_threads, private_resource_threads) =
new_resource_threads(opts.user_agent.clone(),
devtools_chan.clone(),
time_profiler_chan.clone(),
opts.config_dir.map(Into::into));
let image_cache_thread = new_image_cache_thread(public_resource_threads.sender(),
webrender_api_sender.as_ref().map(|wr| wr.create_api()));
let font_cache_thread = FontCacheThread::new(public_resource_threads.sender(),
webrender_api_sender.as_ref().map(|wr| wr.create_api()));
let resource_sender = public_resource_threads.sender();
let initial_state = InitialConstellationState {
compositor_proxy: compositor_proxy,
devtools_chan: devtools_chan,
bluetooth_thread: bluetooth_thread,
image_cache_thread: image_cache_thread,
font_cache_thread: font_cache_thread,
public_resource_threads: public_resource_threads,
private_resource_threads: private_resource_threads,
time_profiler_chan: time_profiler_chan,
mem_profiler_chan: mem_profiler_chan,
supports_clipboard: supports_clipboard,
webrender_api_sender: webrender_api_sender,
};
let (constellation_chan, from_swmanager_sender) =
Constellation::<script_layout_interface::message::Msg,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>::start(initial_state);
if let Some(url) = opts.url {
constellation_chan.send(ConstellationMsg::InitLoadUrl(url)).unwrap();
};
// channels to communicate with Service Worker Manager
let sw_senders = SWManagerSenders {
swmanager_sender: from_swmanager_sender,
resource_sender: resource_sender
};
(constellation_chan, sw_senders)
}
// A logger that logs to two downstream loggers.
// This should probably be in the log crate.
struct BothLogger<Log1, Log2>(Log1, Log2);
impl<Log1, Log2> Log for BothLogger<Log1, Log2> where Log1: Log, Log2: Log {
fn enabled(&self, metadata: &LogMetadata) -> bool {
self.0.enabled(metadata) || self.1.enabled(metadata)
}
fn log(&self, record: &LogRecord) {
self.0.log(record);
self.1.log(record);
}
}
pub fn set_logger(constellation_chan: IpcSender<ScriptMsg>) {
log::set_logger(|max_log_level| {
let env_logger = EnvLogger::new();
let con_logger = FromScriptLogger::new(constellation_chan);
let filter = max(env_logger.filter(), con_logger.filter());
let logger = BothLogger(env_logger, con_logger);
max_log_level.set(filter);
Box::new(logger)
}).expect("Failed to set logger.")
}
/// Content process entry point.
pub fn run_content_process(token: String) {
let (unprivileged_content_sender, unprivileged_content_receiver) =
ipc::channel::<UnprivilegedPipelineContent>().unwrap();
let connection_bootstrap: IpcSender<IpcSender<UnprivilegedPipelineContent>> =
IpcSender::connect(token).unwrap();
connection_bootstrap.send(unprivileged_content_sender).unwrap();
let unprivileged_content = unprivileged_content_receiver.recv().unwrap();
opts::set_defaults(unprivileged_content.opts());
PREFS.extend(unprivileged_content.prefs());
set_logger(unprivileged_content.constellation_chan());
// Enter the sandbox if necessary.
if opts::get().sandbox {
create_sandbox();
}
// send the required channels to the service worker manager
let sw_senders = unprivileged_content.swmanager_senders();
script::init(sw_senders);
unprivileged_content.start_all::<script_layout_interface::message::Msg,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>(true);
}
// This is a workaround for https://github.com/rust-lang/rust/pull/30175 until
// https://github.com/lfairy/rust-errno/pull/5 lands, and should be removed once
// we update Servo with the rust-errno crate.
#[cfg(target_os = "android")]
#[no_mangle]
pub unsafe extern fn __errno_location() -> *mut i32 {
extern { fn __errno() -> *mut i32; }
__errno()
}
#[cfg(not(target_os = "windows"))]
fn create_sandbox() {
ChildSandbox::new(content_process_sandbox_profile()).activate()
.expect("Failed to activate sandbox!");
}
#[cfg(target_os = "windows")]
fn create_sandbox() {
panic!("Sandboxing is not supported on Windows.");
}
|
#[cfg(not(target_os = "windows"))]
use constellation::content_process_sandbox_profile;
use env_logger::Logger as EnvLogger;
#[cfg(not(target_os = "windows"))]
|
random_line_split
|
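// --- Illustrative sketch (not part of the dataset rows; added for clarity) ---
// The Servo row above defines BothLogger, which fans every record out to
// two downstream loggers. This is a minimal standalone rendering of that
// fan-out pattern; the `Log` trait here is a simplified stand-in for the
// real `log` crate API, and `ConsoleLog`/`NullLog` are hypothetical.
trait Log {
    fn enabled(&self) -> bool;
    fn log(&self, message: &str);
}
struct ConsoleLog;
impl Log for ConsoleLog {
    fn enabled(&self) -> bool { true }
    fn log(&self, message: &str) { println!("{}", message); }
}
struct NullLog;
impl Log for NullLog {
    fn enabled(&self) -> bool { false }
    fn log(&self, _message: &str) {}
}
// Same shape as BothLogger: a record is enabled if either side wants it,
// and `log` simply forwards to both.
struct Both<A, B>(A, B);
impl<A: Log, B: Log> Log for Both<A, B> {
    fn enabled(&self) -> bool { self.0.enabled() || self.1.enabled() }
    fn log(&self, message: &str) {
        self.0.log(message);
        self.1.log(message);
    }
}
fn _demo_both_logger() {
    let logger = Both(ConsoleLog, NullLog);
    if logger.enabled() {
        logger.log("hello from both loggers");
    }
}
// --- end sketch ---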
types.rs
|
use crate::error::*;
use std::fmt;
use std::num::ParseFloatError;
use std::str::FromStr;
use std::string::ToString;
#[derive(Debug, PartialEq, Clone)]
pub struct Atom<T> {
pub element: String,
pub x: T,
pub y: T,
pub z: T,
}
impl<T> Atom<T> {
pub fn new(element: &str, x: T, y: T, z: T) -> Self
|
}
impl<T> FromStr for Atom<T>
where
T: FromStr<Err = ParseFloatError>,
{
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
let splitted: Vec<&str> = s.split_whitespace().collect();
        if splitted.len() != 4 {
return Err(Error::IllegalState(String::from("")));
}
Ok(Atom::new(
splitted[0],
splitted[1].parse()?,
splitted[2].parse()?,
splitted[3].parse()?,
))
}
}
impl<T: fmt::Display> fmt::Display for Atom<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {} {} {}", self.element, self.x, self.y, self.z)
}
}
pub struct Snapshot<T> {
pub comment: String,
pub atoms: Vec<Atom<T>>,
}
impl<T> Snapshot<T> {
pub fn size(&self) -> usize {
self.atoms.len()
}
}
impl<T: fmt::Display> fmt::Display for Snapshot<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "{}", self.size())?;
write!(f, "{}", self.comment)?;
for atom in &self.atoms {
writeln!(f, "")?;
write!(f, "{}", atom)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_atom() {
let success = "C 10.0 11.0 12.0".parse();
assert!(success.is_ok());
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), success.unwrap());
let failure: Result<Atom<f64>> = "C 1.0 2.0 a".parse();
assert!(failure.is_err());
}
#[test]
fn test_atom_to_string() {
let atom = Atom::new("C", 11.2, 8.5, 14.8);
assert_eq!("C 11.2 8.5 14.8", atom.to_string());
}
#[test]
fn test_snapshot() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(3, snapshot.size());
assert_eq!("This is a comment", snapshot.comment);
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), snapshot.atoms[0]);
assert_eq!(Atom::new("O", 8.4, 12.8, 5.0), snapshot.atoms[1]);
assert_eq!(Atom::new("H", 23.0, 9.0, 11.8), snapshot.atoms[2]);
}
#[test]
fn test_format_snapshot() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(
format!("{}", snapshot),
"3\n\
This is a comment\n\
C 10 11 12\n\
O 8.4 12.8 5\n\
H 23 9 11.8"
);
}
}
|
{
Atom {
element: element.to_string(),
x: x,
y: y,
z: z,
}
}
|
identifier_body
|
types.rs
|
use crate::error::*;
use std::fmt;
use std::num::ParseFloatError;
use std::str::FromStr;
use std::string::ToString;
#[derive(Debug, PartialEq, Clone)]
pub struct Atom<T> {
pub element: String,
pub x: T,
pub y: T,
pub z: T,
}
impl<T> Atom<T> {
pub fn new(element: &str, x: T, y: T, z: T) -> Self {
Atom {
element: element.to_string(),
x: x,
y: y,
z: z,
}
}
}
impl<T> FromStr for Atom<T>
where
T: FromStr<Err = ParseFloatError>,
{
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
let splitted: Vec<&str> = s.split_whitespace().collect();
        if splitted.len() != 4
|
Ok(Atom::new(
splitted[0],
splitted[1].parse()?,
splitted[2].parse()?,
splitted[3].parse()?,
))
}
}
impl<T: fmt::Display> fmt::Display for Atom<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {} {} {}", self.element, self.x, self.y, self.z)
}
}
pub struct Snapshot<T> {
pub comment: String,
pub atoms: Vec<Atom<T>>,
}
impl<T> Snapshot<T> {
pub fn size(&self) -> usize {
self.atoms.len()
}
}
impl<T: fmt::Display> fmt::Display for Snapshot<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "{}", self.size())?;
write!(f, "{}", self.comment)?;
for atom in &self.atoms {
writeln!(f, "")?;
write!(f, "{}", atom)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_atom() {
let success = "C 10.0 11.0 12.0".parse();
assert!(success.is_ok());
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), success.unwrap());
let failure: Result<Atom<f64>> = "C 1.0 2.0 a".parse();
assert!(failure.is_err());
}
#[test]
fn test_atom_to_string() {
let atom = Atom::new("C", 11.2, 8.5, 14.8);
assert_eq!("C 11.2 8.5 14.8", atom.to_string());
}
#[test]
fn test_snapshot() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(3, snapshot.size());
assert_eq!("This is a comment", snapshot.comment);
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), snapshot.atoms[0]);
assert_eq!(Atom::new("O", 8.4, 12.8, 5.0), snapshot.atoms[1]);
assert_eq!(Atom::new("H", 23.0, 9.0, 11.8), snapshot.atoms[2]);
}
#[test]
fn test_format_snapshot() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(
format!("{}", snapshot),
"3\n\
This is a comment\n\
C 10 11 12\n\
O 8.4 12.8 5\n\
H 23 9 11.8"
);
}
}
|
{
return Err(Error::IllegalState(String::from("")));
}
|
conditional_block
|
types.rs
|
use crate::error::*;
use std::fmt;
use std::num::ParseFloatError;
use std::str::FromStr;
use std::string::ToString;
#[derive(Debug, PartialEq, Clone)]
pub struct Atom<T> {
pub element: String,
pub x: T,
pub y: T,
pub z: T,
}
impl<T> Atom<T> {
pub fn new(element: &str, x: T, y: T, z: T) -> Self {
Atom {
element: element.to_string(),
x: x,
y: y,
z: z,
}
}
}
impl<T> FromStr for Atom<T>
where
T: FromStr<Err = ParseFloatError>,
{
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
let splitted: Vec<&str> = s.split_whitespace().collect();
        if splitted.len() != 4 {
return Err(Error::IllegalState(String::from("")));
}
Ok(Atom::new(
splitted[0],
splitted[1].parse()?,
splitted[2].parse()?,
splitted[3].parse()?,
))
}
}
impl<T: fmt::Display> fmt::Display for Atom<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {} {} {}", self.element, self.x, self.y, self.z)
}
}
pub struct Snapshot<T> {
pub comment: String,
pub atoms: Vec<Atom<T>>,
}
impl<T> Snapshot<T> {
pub fn size(&self) -> usize {
self.atoms.len()
}
}
impl<T: fmt::Display> fmt::Display for Snapshot<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "{}", self.size())?;
write!(f, "{}", self.comment)?;
for atom in &self.atoms {
writeln!(f, "")?;
write!(f, "{}", atom)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_atom() {
let success = "C 10.0 11.0 12.0".parse();
assert!(success.is_ok());
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), success.unwrap());
let failure: Result<Atom<f64>> = "C 1.0 2.0 a".parse();
assert!(failure.is_err());
}
#[test]
fn test_atom_to_string() {
let atom = Atom::new("C", 11.2, 8.5, 14.8);
assert_eq!("C 11.2 8.5 14.8", atom.to_string());
}
#[test]
fn test_snapshot() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(3, snapshot.size());
assert_eq!("This is a comment", snapshot.comment);
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), snapshot.atoms[0]);
assert_eq!(Atom::new("O", 8.4, 12.8, 5.0), snapshot.atoms[1]);
assert_eq!(Atom::new("H", 23.0, 9.0, 11.8), snapshot.atoms[2]);
}
#[test]
fn test_format_snapshot() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(
format!("{}", snapshot),
"3\n\
This is a comment\n\
|
}
}
|
C 10 11 12\n\
O 8.4 12.8 5\n\
H 23 9 11.8"
);
|
random_line_split
|
types.rs
|
use crate::error::*;
use std::fmt;
use std::num::ParseFloatError;
use std::str::FromStr;
use std::string::ToString;
#[derive(Debug, PartialEq, Clone)]
pub struct Atom<T> {
pub element: String,
pub x: T,
pub y: T,
pub z: T,
}
impl<T> Atom<T> {
pub fn new(element: &str, x: T, y: T, z: T) -> Self {
Atom {
element: element.to_string(),
x: x,
y: y,
z: z,
}
}
}
impl<T> FromStr for Atom<T>
where
T: FromStr<Err = ParseFloatError>,
{
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
let splitted: Vec<&str> = s.split_whitespace().collect();
        if splitted.len() != 4 {
return Err(Error::IllegalState(String::from("")));
}
Ok(Atom::new(
splitted[0],
splitted[1].parse()?,
splitted[2].parse()?,
splitted[3].parse()?,
))
}
}
impl<T: fmt::Display> fmt::Display for Atom<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {} {} {}", self.element, self.x, self.y, self.z)
}
}
pub struct Snapshot<T> {
pub comment: String,
pub atoms: Vec<Atom<T>>,
}
impl<T> Snapshot<T> {
pub fn size(&self) -> usize {
self.atoms.len()
}
}
impl<T: fmt::Display> fmt::Display for Snapshot<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "{}", self.size())?;
write!(f, "{}", self.comment)?;
for atom in &self.atoms {
writeln!(f, "")?;
write!(f, "{}", atom)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_atom() {
let success = "C 10.0 11.0 12.0".parse();
assert!(success.is_ok());
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), success.unwrap());
let failure: Result<Atom<f64>> = "C 1.0 2.0 a".parse();
assert!(failure.is_err());
}
#[test]
fn test_atom_to_string() {
let atom = Atom::new("C", 11.2, 8.5, 14.8);
assert_eq!("C 11.2 8.5 14.8", atom.to_string());
}
#[test]
fn
|
() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(3, snapshot.size());
assert_eq!("This is a comment", snapshot.comment);
assert_eq!(Atom::new("C", 10.0, 11.0, 12.0), snapshot.atoms[0]);
assert_eq!(Atom::new("O", 8.4, 12.8, 5.0), snapshot.atoms[1]);
assert_eq!(Atom::new("H", 23.0, 9.0, 11.8), snapshot.atoms[2]);
}
#[test]
fn test_format_snapshot() {
let snapshot = Snapshot {
comment: "This is a comment".to_string(),
atoms: vec![
Atom::new("C", 10.0, 11.0, 12.0),
Atom::new("O", 8.4, 12.8, 5.0),
Atom::new("H", 23.0, 9.0, 11.8),
],
};
assert_eq!(
format!("{}", snapshot),
"3\n\
This is a comment\n\
C 10 11 12\n\
O 8.4 12.8 5\n\
H 23 9 11.8"
);
}
}
|
test_snapshot
|
identifier_name
|
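// --- Illustrative sketch (not part of the dataset rows; added for clarity) ---
// The Atom rows above implement Display/FromStr for single atoms and
// Display for Snapshot (count line, comment line, then one atom per line),
// but no Snapshot parser. A hand-rolled counterpart to that Display output,
// using plain tuples, f64, and String errors instead of the crate's Error:
fn parse_snapshot_sketch(s: &str) -> Result<(String, Vec<(String, f64, f64, f64)>), String> {
    let mut lines = s.lines();
    // First line is the atom count, second line is the free-form comment.
    let count: usize = lines.next().ok_or("missing count line")?
        .trim().parse().map_err(|e| format!("bad count: {}", e))?;
    let comment = lines.next().ok_or("missing comment line")?.to_string();
    let mut atoms = Vec::with_capacity(count);
    for line in lines.take(count) {
        let f: Vec<&str> = line.split_whitespace().collect();
        if f.len() != 4 {
            return Err(format!("expected 4 fields, got {}", f.len()));
        }
        let p = |s: &str| s.parse::<f64>().map_err(|e| e.to_string());
        atoms.push((f[0].to_string(), p(f[1])?, p(f[2])?, p(f[3])?));
    }
    Ok((comment, atoms))
}
// --- end sketch ---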
cargo_clean.rs
|
use std::default::Default;
use std::fs;
use std::io::prelude::*;
use std::path::Path;
use core::{Package, PackageSet, Profiles};
use core::source::{Source, SourceMap};
use core::registry::PackageRegistry;
use util::{CargoResult, human, ChainError, Config};
use ops::{self, Layout, Context, BuildConfig, Kind, Unit};
pub struct
|
<'a> {
pub spec: &'a [String],
pub target: Option<&'a str>,
pub config: &'a Config,
pub release: bool,
}
/// Cleans the project from build artifacts.
pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
let root = try!(Package::for_path(manifest_path, opts.config));
let target_dir = opts.config.target_dir(&root);
// If we have a spec, then we need to delete some packages, otherwise, just
// remove the whole target directory and be done with it!
if opts.spec.len() == 0 {
return rm_rf(&target_dir);
}
// Load the lockfile (if one's available)
let lockfile = root.root().join("Cargo.lock");
let source_id = root.package_id().source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => bail!("a Cargo.lock must exist before cleaning")
};
// Create a compilation context to have access to information like target
// filenames and such
let srcs = SourceMap::new();
let pkgs = PackageSet::new(&[]);
let dest = if opts.release {"release"} else {"debug"};
let host_layout = Layout::new(opts.config, &root, None, dest);
let target_layout = opts.target.map(|target| {
Layout::new(opts.config, &root, Some(target), dest)
});
let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config,
host_layout, target_layout,
BuildConfig::default(),
root.manifest().profiles()));
let mut registry = PackageRegistry::new(opts.config);
// resolve package specs and remove the corresponding packages
for spec in opts.spec {
let pkgid = try!(resolve.query(spec));
// Translate the PackageId to a Package
let pkg = {
try!(registry.add_sources(&[pkgid.source_id().clone()]));
(try!(registry.get(&[pkgid.clone()]))).into_iter().next().unwrap()
};
// And finally, clean everything out!
for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() {
let layout = cx.layout(&pkg, *kind);
try!(rm_rf(&layout.proxy().fingerprint(&pkg)));
try!(rm_rf(&layout.build(&pkg)));
let Profiles {
ref release, ref dev, ref test, ref bench, ref doc,
ref custom_build,
} = *root.manifest().profiles();
for profile in [release, dev, test, bench, doc, custom_build].iter() {
let unit = Unit {
pkg: &pkg,
target: target,
profile: profile,
kind: *kind,
};
let root = cx.out_dir(&unit);
for filename in try!(cx.target_filenames(&unit)).iter() {
try!(rm_rf(&root.join(&filename)));
}
}
}
}
}
Ok(())
}
fn rm_rf(path: &Path) -> CargoResult<()> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
try!(fs::remove_dir_all(path).chain_error(|| {
human("could not remove build directory")
}));
} else if m.is_ok() {
try!(fs::remove_file(path).chain_error(|| {
human("failed to remove build artifact")
}));
}
Ok(())
}
|
CleanOptions
|
identifier_name
|
cargo_clean.rs
|
use std::default::Default;
use std::fs;
use std::io::prelude::*;
use std::path::Path;
use core::{Package, PackageSet, Profiles};
use core::source::{Source, SourceMap};
use core::registry::PackageRegistry;
use util::{CargoResult, human, ChainError, Config};
use ops::{self, Layout, Context, BuildConfig, Kind, Unit};
pub struct CleanOptions<'a> {
pub spec: &'a [String],
pub target: Option<&'a str>,
pub config: &'a Config,
pub release: bool,
}
/// Cleans the project from build artifacts.
pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
let root = try!(Package::for_path(manifest_path, opts.config));
let target_dir = opts.config.target_dir(&root);
// If we have a spec, then we need to delete some packages, otherwise, just
// remove the whole target directory and be done with it!
if opts.spec.len() == 0 {
return rm_rf(&target_dir);
}
// Load the lockfile (if one's available)
let lockfile = root.root().join("Cargo.lock");
let source_id = root.package_id().source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => bail!("a Cargo.lock must exist before cleaning")
};
// Create a compilation context to have access to information like target
// filenames and such
let srcs = SourceMap::new();
let pkgs = PackageSet::new(&[]);
let dest = if opts.release {"release"} else {"debug"};
let host_layout = Layout::new(opts.config, &root, None, dest);
let target_layout = opts.target.map(|target| {
Layout::new(opts.config, &root, Some(target), dest)
});
let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config,
host_layout, target_layout,
BuildConfig::default(),
root.manifest().profiles()));
let mut registry = PackageRegistry::new(opts.config);
// resolve package specs and remove the corresponding packages
for spec in opts.spec {
let pkgid = try!(resolve.query(spec));
// Translate the PackageId to a Package
let pkg = {
try!(registry.add_sources(&[pkgid.source_id().clone()]));
(try!(registry.get(&[pkgid.clone()]))).into_iter().next().unwrap()
};
// And finally, clean everything out!
for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() {
let layout = cx.layout(&pkg, *kind);
try!(rm_rf(&layout.proxy().fingerprint(&pkg)));
try!(rm_rf(&layout.build(&pkg)));
let Profiles {
ref release, ref dev, ref test, ref bench, ref doc,
ref custom_build,
} = *root.manifest().profiles();
for profile in [release, dev, test, bench, doc, custom_build].iter() {
let unit = Unit {
pkg: &pkg,
target: target,
profile: profile,
kind: *kind,
};
let root = cx.out_dir(&unit);
for filename in try!(cx.target_filenames(&unit)).iter() {
try!(rm_rf(&root.join(&filename)));
}
}
}
}
}
Ok(())
}
fn rm_rf(path: &Path) -> CargoResult<()>
|
{
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
try!(fs::remove_dir_all(path).chain_error(|| {
human("could not remove build directory")
}));
} else if m.is_ok() {
try!(fs::remove_file(path).chain_error(|| {
human("failed to remove build artifact")
}));
}
Ok(())
}
|
identifier_body
|
|
cargo_clean.rs
|
use std::default::Default;
use std::fs;
use std::io::prelude::*;
use std::path::Path;
use core::{Package, PackageSet, Profiles};
use core::source::{Source, SourceMap};
use core::registry::PackageRegistry;
use util::{CargoResult, human, ChainError, Config};
use ops::{self, Layout, Context, BuildConfig, Kind, Unit};
pub struct CleanOptions<'a> {
pub spec: &'a [String],
pub target: Option<&'a str>,
pub config: &'a Config,
pub release: bool,
}
/// Cleans the project from build artifacts.
pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
let root = try!(Package::for_path(manifest_path, opts.config));
let target_dir = opts.config.target_dir(&root);
// If we have a spec, then we need to delete some packages, otherwise, just
// remove the whole target directory and be done with it!
if opts.spec.len() == 0 {
return rm_rf(&target_dir);
}
// Load the lockfile (if one's available)
let lockfile = root.root().join("Cargo.lock");
let source_id = root.package_id().source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => bail!("a Cargo.lock must exist before cleaning")
};
// Create a compilation context to have access to information like target
// filenames and such
let srcs = SourceMap::new();
let pkgs = PackageSet::new(&[]);
let dest = if opts.release {"release"} else {"debug"};
let host_layout = Layout::new(opts.config, &root, None, dest);
let target_layout = opts.target.map(|target| {
Layout::new(opts.config, &root, Some(target), dest)
});
let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config,
host_layout, target_layout,
BuildConfig::default(),
root.manifest().profiles()));
let mut registry = PackageRegistry::new(opts.config);
// resolve package specs and remove the corresponding packages
for spec in opts.spec {
let pkgid = try!(resolve.query(spec));
// Translate the PackageId to a Package
let pkg = {
try!(registry.add_sources(&[pkgid.source_id().clone()]));
(try!(registry.get(&[pkgid.clone()]))).into_iter().next().unwrap()
};
// And finally, clean everything out!
for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() {
let layout = cx.layout(&pkg, *kind);
try!(rm_rf(&layout.proxy().fingerprint(&pkg)));
try!(rm_rf(&layout.build(&pkg)));
let Profiles {
ref release, ref dev, ref test, ref bench, ref doc,
ref custom_build,
} = *root.manifest().profiles();
for profile in [release, dev, test, bench, doc, custom_build].iter() {
let unit = Unit {
pkg: &pkg,
target: target,
profile: profile,
kind: *kind,
};
let root = cx.out_dir(&unit);
for filename in try!(cx.target_filenames(&unit)).iter() {
try!(rm_rf(&root.join(&filename)));
}
}
}
}
}
Ok(())
}
fn rm_rf(path: &Path) -> CargoResult<()> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
try!(fs::remove_dir_all(path).chain_error(|| {
human("could not remove build directory")
|
human("failed to remove build artifact")
}));
}
Ok(())
}
|
}));
} else if m.is_ok() {
try!(fs::remove_file(path).chain_error(|| {
|
random_line_split
|
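// --- Illustrative sketch (not part of the dataset rows; added for clarity) ---
// The cargo_clean rows above use try!()-era error chaining in rm_rf. This is
// a hedged re-rendering of the same logic with std only and the `?`-style
// Result flow: remove a directory tree, remove a single file, or treat a
// missing path as already removed (exactly what the original's
// unwrap_or(false) / is_ok() checks amount to).
use std::fs;
use std::io;
use std::path::Path;
fn rm_rf_sketch(path: &Path) -> io::Result<()> {
    match fs::metadata(path) {
        // A directory gets removed recursively.
        Ok(m) if m.is_dir() => fs::remove_dir_all(path),
        // Anything else that exists is removed as a file.
        Ok(_) => fs::remove_file(path),
        // Nothing at `path` means there is nothing to clean.
        Err(ref e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
        Err(e) => Err(e),
    }
}
// --- end sketch ---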
types.rs
|
//! Exports Rust counterparts for all the common GLSL types, along with a few marker traits
use rasen::prelude::{Dim, TypeName};
use std::ops::{Add, Div, Index, Mul, Rem, Sub};
use crate::{
context::{Container, Context},
value::{IntoValue, Value},
};
pub trait AsTypeName {
const TYPE_NAME: &'static TypeName;
}
pub trait GenType: Copy {
fn zero() -> Self;
fn one() -> Self;
fn min(self, rhs: Self) -> Self;
fn max(self, rhs: Self) -> Self;
}
pub trait Numerical: GenType {
fn pow(self, rhs: Self) -> Self;
}
pub trait Floating: Numerical {
fn sqrt(self) -> Self;
fn floor(self) -> Self;
fn ceil(self) -> Self;
fn round(self) -> Self;
fn sin(self) -> Self;
fn cos(self) -> Self;
fn tan(self) -> Self;
fn ln(self) -> Self;
fn abs(self) -> Self;
}
pub trait Vector: GenType {
type Scalar: Numerical;
fn spread(v: Self::Scalar) -> Self;
}
pub trait VectorFloating: Vector
where
Self::Scalar: Floating,
{
fn dot(&self, rhs: &Self) -> Self::Scalar;
fn normalize(&self) -> Self;
fn length_squared(&self) -> Self::Scalar;
fn length(&self) -> Self::Scalar {
self.length_squared().sqrt()
}
}
pub trait Vector3: Vector {
fn cross(&self, rhs: &Self) -> Self;
}
pub trait Matrix {
fn inverse(self) -> Self;
}
include!(concat!(env!("OUT_DIR"), "/types.rs"));
#[derive(Copy, Clone, Debug)]
pub struct
|
<V>(pub V);
impl<V: Vector> AsTypeName for Sampler<V>
where
<V as Vector>::Scalar: AsTypeName,
{
const TYPE_NAME: &'static TypeName =
&TypeName::Sampler(<<V as Vector>::Scalar as AsTypeName>::TYPE_NAME, Dim::Dim2D);
}
|
Sampler
|
identifier_name
|
types.rs
|
//! Exports Rust counterparts for all the common GLSL types, along with a few marker traits
use rasen::prelude::{Dim, TypeName};
use std::ops::{Add, Div, Index, Mul, Rem, Sub};
use crate::{
context::{Container, Context},
value::{IntoValue, Value},
};
pub trait AsTypeName {
const TYPE_NAME: &'static TypeName;
}
pub trait GenType: Copy {
fn zero() -> Self;
fn one() -> Self;
fn min(self, rhs: Self) -> Self;
fn max(self, rhs: Self) -> Self;
}
pub trait Numerical: GenType {
fn pow(self, rhs: Self) -> Self;
}
pub trait Floating: Numerical {
fn sqrt(self) -> Self;
fn floor(self) -> Self;
fn ceil(self) -> Self;
fn round(self) -> Self;
fn sin(self) -> Self;
fn cos(self) -> Self;
fn tan(self) -> Self;
fn ln(self) -> Self;
fn abs(self) -> Self;
}
pub trait Vector: GenType {
type Scalar: Numerical;
fn spread(v: Self::Scalar) -> Self;
}
pub trait VectorFloating: Vector
where
Self::Scalar: Floating,
{
fn dot(&self, rhs: &Self) -> Self::Scalar;
fn normalize(&self) -> Self;
fn length_squared(&self) -> Self::Scalar;
fn length(&self) -> Self::Scalar
|
}
pub trait Vector3: Vector {
fn cross(&self, rhs: &Self) -> Self;
}
pub trait Matrix {
fn inverse(self) -> Self;
}
include!(concat!(env!("OUT_DIR"), "/types.rs"));
#[derive(Copy, Clone, Debug)]
pub struct Sampler<V>(pub V);
impl<V: Vector> AsTypeName for Sampler<V>
where
<V as Vector>::Scalar: AsTypeName,
{
const TYPE_NAME: &'static TypeName =
&TypeName::Sampler(<<V as Vector>::Scalar as AsTypeName>::TYPE_NAME, Dim::Dim2D);
}
|
{
self.length_squared().sqrt()
}
|
identifier_body
|
types.rs
|
//! Exports Rust counterparts for all the common GLSL types, along with a few marker traits
use rasen::prelude::{Dim, TypeName};
use std::ops::{Add, Div, Index, Mul, Rem, Sub};
use crate::{
context::{Container, Context},
value::{IntoValue, Value},
};
pub trait AsTypeName {
const TYPE_NAME: &'static TypeName;
}
pub trait GenType: Copy {
fn zero() -> Self;
fn one() -> Self;
fn min(self, rhs: Self) -> Self;
fn max(self, rhs: Self) -> Self;
}
pub trait Numerical: GenType {
fn pow(self, rhs: Self) -> Self;
}
pub trait Floating: Numerical {
fn sqrt(self) -> Self;
fn floor(self) -> Self;
fn ceil(self) -> Self;
fn round(self) -> Self;
fn sin(self) -> Self;
fn cos(self) -> Self;
fn tan(self) -> Self;
fn ln(self) -> Self;
fn abs(self) -> Self;
}
pub trait Vector: GenType {
type Scalar: Numerical;
fn spread(v: Self::Scalar) -> Self;
}
pub trait VectorFloating: Vector
where
Self::Scalar: Floating,
{
fn dot(&self, rhs: &Self) -> Self::Scalar;
fn normalize(&self) -> Self;
fn length_squared(&self) -> Self::Scalar;
fn length(&self) -> Self::Scalar {
self.length_squared().sqrt()
}
}
pub trait Vector3: Vector {
fn cross(&self, rhs: &Self) -> Self;
}
pub trait Matrix {
fn inverse(self) -> Self;
}
include!(concat!(env!("OUT_DIR"), "/types.rs"));
#[derive(Copy, Clone, Debug)]
pub struct Sampler<V>(pub V);
impl<V: Vector> AsTypeName for Sampler<V>
|
&TypeName::Sampler(<<V as Vector>::Scalar as AsTypeName>::TYPE_NAME, Dim::Dim2D);
}
|
where
<V as Vector>::Scalar: AsTypeName,
{
const TYPE_NAME: &'static TypeName =
|
random_line_split
|
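// --- Illustrative sketch (not part of the dataset rows; added for clarity) ---
// VectorFloating above gives `length` a default body in terms of the
// required `length_squared`, so implementors only supply the squared form.
// The same default-method pattern on a toy 2D vector type:
trait Length {
    fn length_squared(&self) -> f32;
    // Default method: derived from the required one, overridable if an
    // implementor has a cheaper direct computation.
    fn length(&self) -> f32 {
        self.length_squared().sqrt()
    }
}
struct Vec2 { x: f32, y: f32 }
impl Length for Vec2 {
    fn length_squared(&self) -> f32 {
        self.x * self.x + self.y * self.y
    }
    // `length` comes for free from the default implementation.
}
fn _demo_length() {
    let v = Vec2 { x: 3.0, y: 4.0 };
    assert_eq!(v.length(), 5.0); // sqrt(9 + 16)
}
// --- end sketch ---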
test_util.rs
|
extern crate mazth;
#[allow(unused_imports)]
use std::ops::Div;
#[allow(unused_imports)]
use std::cmp::Ordering;
use self::mazth::i_comparable::IComparableError;
use self::mazth::mat::{Mat3x1, Mat4};
use implement::math::util;
#[test]
fn
|
(){
//look_at
{
let eye : Mat3x1<f32> = Mat3x1 { _val: [5.0,5.0,5.0] };
let center : Mat3x1<f32> = Mat3x1 { _val: [0.0,0.0,0.0] };
let up : Mat3x1<f32> = Mat3x1 { _val: [0.0,1.0,0.0] };
let lookat = util::look_at( eye, center, up );
assert!( lookat.is_equal( &Mat4{ _val: [ 0.70711, 0.0, -0.70711, 0.0,
-0.40825, 0.81650, -0.40825, 0.0,
0.57735, 0.57735, 0.57735, -8.66025,
0.0, 0.0, 0.0, 1.0 ], _is_row_major: true }, 0.0001f32 ).expect("look_at result unexpected") );
}
//perspective transform
{
let fov = 90.0;
let aspect = 1.0;
let near = 0.1;
let far = 100.0;
let persp = util::perspective( fov, aspect, near, far );
println!( "{:?}", persp );
assert!( persp.is_equal( &Mat4{ _val: [ 1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, -1.0, -0.2,
0.0, 0.0, -1.0, 0.0 ], _is_row_major: true }, 0.01f32 ).expect("perspective result unexpected") );
}
}
|
test_math_util
|
identifier_name
|
test_util.rs
|
extern crate mazth;
#[allow(unused_imports)]
use std::ops::Div;
#[allow(unused_imports)]
use std::cmp::Ordering;
use self::mazth::i_comparable::IComparableError;
use self::mazth::mat::{Mat3x1, Mat4};
use implement::math::util;
#[test]
fn test_math_util()
|
let far = 100.0;
let persp = util::perspective( fov, aspect, near, far );
println!( "{:?}", persp );
assert!( persp.is_equal( &Mat4{ _val: [ 1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, -1.0, -0.2,
0.0, 0.0, -1.0, 0.0 ], _is_row_major: true }, 0.01f32 ).expect("perspective result unexpected") );
}
}
|
{
//look_at
{
let eye : Mat3x1<f32> = Mat3x1 { _val: [5.0,5.0,5.0] };
let center : Mat3x1<f32> = Mat3x1 { _val: [0.0,0.0,0.0] };
let up : Mat3x1<f32> = Mat3x1 { _val: [0.0,1.0,0.0] };
let lookat = util::look_at( eye, center, up );
assert!( lookat.is_equal( &Mat4{ _val: [ 0.70711, 0.0, -0.70711, 0.0,
-0.40825, 0.81650, -0.40825, 0.0,
0.57735, 0.57735, 0.57735, -8.66025,
0.0, 0.0, 0.0, 1.0 ], _is_row_major: true }, 0.0001f32 ).expect("look_at result unexpected") );
}
//perspective transform
{
let fov = 90.0;
let aspect = 1.0;
let near = 0.1;
|
identifier_body
|
test_util.rs
|
extern crate mazth;
#[allow(unused_imports)]
use std::ops::Div;
#[allow(unused_imports)]
use std::cmp::Ordering;
use self::mazth::i_comparable::IComparableError;
use self::mazth::mat::{Mat3x1, Mat4};
use implement::math::util;
#[test]
fn test_math_util(){
//look_at
{
let eye : Mat3x1<f32> = Mat3x1 { _val: [5.0,5.0,5.0] };
let center : Mat3x1<f32> = Mat3x1 { _val: [0.0,0.0,0.0] };
let up : Mat3x1<f32> = Mat3x1 { _val: [0.0,1.0,0.0] };
|
0.0, 0.0, 0.0, 1.0 ], _is_row_major: true }, 0.0001f32 ).expect("look_at result unexpected") );
}
//perspective transform
{
let fov = 90.0;
let aspect = 1.0;
let near = 0.1;
let far = 100.0;
let persp = util::perspective( fov, aspect, near, far );
println!( "{:?}", persp );
assert!( persp.is_equal( &Mat4{ _val: [ 1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, -1.0, -0.2,
0.0, 0.0, -1.0, 0.0 ], _is_row_major: true }, 0.01f32 ).expect("perspective result unexpected") );
}
}
|
let lookat = util::look_at( eye, center, up );
assert!( lookat.is_equal( &Mat4{ _val: [ 0.70711, 0.0, -0.70711, 0.0,
-0.40825, 0.81650, -0.40825, 0.0,
0.57735, 0.57735, 0.57735, -8.66025,
|
random_line_split
|
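// --- Illustrative sketch (not part of the dataset rows; added for clarity) ---
// The test_util rows above assert a concrete look-at matrix. This is the
// textbook right-handed construction that reproduces exactly those numbers
// for eye=(5,5,5), center=(0,0,0), up=(0,1,0); it is assumed, not verified
// here, to match implement::math::util::look_at.
fn cross(a: [f32; 3], b: [f32; 3]) -> [f32; 3] {
    [a[1] * b[2] - a[2] * b[1],
     a[2] * b[0] - a[0] * b[2],
     a[0] * b[1] - a[1] * b[0]]
}
fn dot(a: [f32; 3], b: [f32; 3]) -> f32 {
    a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
}
fn normalize(v: [f32; 3]) -> [f32; 3] {
    let len = dot(v, v).sqrt();
    [v[0] / len, v[1] / len, v[2] / len]
}
// Row-major 4x4, matching the `_is_row_major: true` layout in the test.
fn look_at_sketch(eye: [f32; 3], center: [f32; 3], up: [f32; 3]) -> [[f32; 4]; 4] {
    // f points from the eye toward the target; s is the camera's right
    // axis; u is the recomputed orthogonal up axis.
    let f = normalize([center[0] - eye[0], center[1] - eye[1], center[2] - eye[2]]);
    let s = normalize(cross(f, up));
    let u = cross(s, f);
    [[s[0], s[1], s[2], -dot(s, eye)],
     [u[0], u[1], u[2], -dot(u, eye)],
     [-f[0], -f[1], -f[2], dot(f, eye)],
     [0.0, 0.0, 0.0, 1.0]]
}
// --- end sketch ---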
server.rs
|
use std::collections::HashMap;
use std::cmp::max;
use iron::{Iron, Chain};
use router::Router;
use persistent::State;
use chrono::{Duration, NaiveDate, NaiveDateTime};
use serde_json::builder::ObjectBuilder;
use serde_json::Value;
use serde;
use SERVER_ADDRESS;
use errors::*;
use load::{SummarizedWeek, Kind, TestRun, InputData, Timing};
use util::{start_idx, end_idx};
const JS_DATE_FORMAT: &'static str = "%Y-%m-%dT%H:%M:%S.000Z";
// Boilerplate for parsing and responding to both GET and POST requests.
mod handler {
use std::ops::Deref;
use std::io::Read;
use serde;
use serde_json::{self, Value};
use iron::prelude::*;
use iron::status;
use persistent::State;
use load::InputData;
use errors::*;
fn respond(res: Result<Value>) -> IronResult<Response> {
use iron::headers::{ContentType, AccessControlAllowOrigin};
use iron::mime::{Mime, TopLevel, SubLevel};
use iron::modifiers::Header;
let mut resp = match res {
Ok(json) =>
|
,
Err(err) => {
// TODO: Print to stderr
println!("An error occurred: {:?}", err);
Response::with((status::InternalServerError, err.to_string()))
}
};
resp.set_mut(Header(AccessControlAllowOrigin::Any));
Ok(resp)
}
pub trait PostHandler: Sized {
fn handle(_body: Self, _data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response>
where Self: serde::Deserialize {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
let mut buf = String::new();
let res = match req.body.read_to_string(&mut buf).unwrap() {
0 => Err("POST handler with 0 length body.".into()),
_ => Self::handle(serde_json::from_str(&buf).unwrap(), data.deref())
};
respond(res)
}
}
pub trait GetHandler: Sized {
fn handle(_data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response> {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
respond(Self::handle(data.deref()))
}
}
}
use self::handler::{PostHandler, GetHandler};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum GroupBy {
Crate,
Phase,
}
impl serde::Deserialize for GroupBy {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<GroupBy, D::Error>
where D: serde::de::Deserializer
{
struct GroupByVisitor;
impl serde::de::Visitor for GroupByVisitor {
type Value = GroupBy;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<GroupBy, E>
where E: serde::de::Error
{
match value {
"crate" => Ok(GroupBy::Crate),
"phase" => Ok(GroupBy::Phase),
_ => {
let msg = format!("unexpected {} value for group by", value);
Err(serde::de::Error::custom(msg))
}
}
}
}
deserializer.deserialize(GroupByVisitor)
}
}
enum OptionalDate {
Date(NaiveDateTime),
CouldNotParse(String),
}
impl OptionalDate {
fn as_start(&self, data: &InputData) -> NaiveDateTime {
// Handle missing start by returning 30 days before end.
if let OptionalDate::Date(date) = *self {
date
} else {
let end = self.as_end(data);
let start = (end - Duration::days(30)).timestamp();
NaiveDateTime::from_timestamp(start, 0)
}
}
fn as_end(&self, data: &InputData) -> NaiveDateTime {
// Handle missing end by using the last available date.
if let OptionalDate::Date(date) = *self {
date
} else {
data.last_date
}
}
}
impl serde::Deserialize for OptionalDate {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<OptionalDate, D::Error>
where D: serde::de::Deserializer
{
struct DateVisitor;
impl serde::de::Visitor for DateVisitor {
type Value = OptionalDate;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<OptionalDate, E>
where E: serde::de::Error
{
match NaiveDate::parse_from_str(value, "%a %b %d %Y") {
Ok(date) => Ok(OptionalDate::Date(date.and_hms(0, 0, 0))),
Err(err) => {
                        if !value.is_empty() {
println!("bad date {:?}: {:?}", value, err);
}
Ok(OptionalDate::CouldNotParse(value.to_string()))
}
}
}
}
deserializer.deserialize(DateVisitor)
}
}
struct Summary;
impl GetHandler for Summary {
fn handle(data: &InputData) -> Result<Value> {
let dates = data.summary_rustc.summary.iter()
.map(|s| s.date.format(JS_DATE_FORMAT).to_string())
.collect::<Vec<_>>();
fn summarize(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> String {
let mut sum = 0.0;
let mut count = 0;
for krate in benchmark.by_crate.values() {
if krate.contains_key("total") {
sum += krate["total"];
count += 1;
}
}
if rustc.by_crate["total"].contains_key("total") {
sum += 2.0 * rustc.by_crate["total"]["total"];
count += 2;
}
format!("{:.1}", sum / (count as f64))
}
// overall number for each week
let summaries = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
summarize(s, &data.summary_rustc.summary[i])
}).collect::<Vec<_>>();
fn breakdown(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> Value {
let mut per_bench = ObjectBuilder::new();
for (crate_name, krate) in &benchmark.by_crate {
let val = krate.get("total").cloned().unwrap_or(0.0);
per_bench = per_bench.insert(crate_name.as_str(), format!("{:.1}", val));
}
let bootstrap = if rustc.by_crate["total"].contains_key("total") {
rustc.by_crate["total"]["total"]
} else {
0.0
};
per_bench = per_bench.insert("bootstrap", format!("{:.1}", bootstrap));
per_bench.build()
}
// per benchmark, per week
let breakdown_data = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
breakdown(s, &data.summary_rustc.summary[i])
}).collect::<Vec<Value>>();
Ok(ObjectBuilder::new()
.insert("total_summary", summarize(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("total_breakdown", breakdown(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("breakdown", breakdown_data)
.insert("summaries", summaries)
.insert("dates", dates)
.build())
}
}
struct Info;
impl GetHandler for Info {
fn handle(data: &InputData) -> Result<Value> {
Ok(ObjectBuilder::new()
.insert("crates", &data.crate_list)
.insert("phases", &data.phase_list)
.insert("benchmarks", &data.benchmarks)
.build())
}
}
fn get_data_for_date(day: &TestRun, crate_names: &[String], phases: &[String], group_by: GroupBy) -> Value {
#[derive(Serialize)]
struct Recording { // TODO better name (can't use Timing since we don't have a percent...)
time: f64,
rss: u64,
}
impl Recording {
fn new() -> Recording {
Recording {
time: 0.0,
rss: 0,
}
}
fn record(&mut self, phase: Option<&Timing>) {
if let Some(phase) = phase {
self.time += phase.time;
self.rss = max(self.rss, phase.rss.unwrap());
}
}
}
let crates = crate_names.into_iter().filter_map(|crate_name| {
day.by_crate.get(crate_name).map(|krate| {
(crate_name, krate)
})
}).collect::<Vec<_>>();
let mut data = HashMap::new();
for phase_name in phases {
for &(crate_name, krate) in &crates {
let entry = match group_by {
GroupBy::Crate => data.entry(crate_name),
GroupBy::Phase => data.entry(phase_name),
};
entry.or_insert(Recording::new()).record(krate.get(phase_name));
}
}
ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", day.commit.clone())
.insert("data", data)
.build()
}
#[derive(Deserialize)]
struct Data { // XXX naming
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
kind: Kind,
group_by: GroupBy,
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Data {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let mut result = Vec::new();
let mut first_idx = None;
let mut last_idx = 0;
// Iterate over date range.
let start_idx = start_idx(data.by_kind(body.kind), body.start_date.as_start(data));
let end_idx = end_idx(data.by_kind(body.kind), body.end_date.as_end(data));
for i in start_idx..(end_idx + 1) {
let today_data = get_data_for_date(
&data.by_kind(body.kind)[i],
&body.crates,
&body.phases,
body.group_by
);
            if !today_data.find("data").unwrap().as_object().unwrap().is_empty() {
last_idx = i - start_idx;
if first_idx == None {
first_idx = Some(i - start_idx);
}
}
result.push(today_data);
}
// Trim the data
let result = result.drain(first_idx.unwrap()..(last_idx+1)).collect::<Vec<_>>();
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Tabular { // XXX naming
kind: Kind,
date: OptionalDate,
}
impl PostHandler for Tabular {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let kind_data = data.by_kind(body.kind);
let day = &kind_data[end_idx(kind_data, body.date.as_end(data))];
Ok(ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", &day.commit)
.insert("data", &day.by_crate)
.build())
}
}
#[derive(Deserialize)]
struct Days { // XXX naming
kind: Kind,
dates: Vec<OptionalDate>,
crates: Vec<String>,
phases: Vec<String>,
group_by: GroupBy,
}
impl PostHandler for Days {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let data = data.by_kind(body.kind);
let mut result = Vec::new();
for date in body.dates {
if let OptionalDate::Date(date) = date {
let day = get_data_for_date(
&data[end_idx(data, date)],
&body.crates,
&body.phases,
body.group_by
);
result.push(day);
}
}
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Stats { // XXX naming
kind: Kind,
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
// kind rustc only: crate or phase can be 'total'
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Stats {
fn handle(body: Self, data: &InputData) -> Result<Value> {
if body.kind == Kind::Benchmarks && body.crates.iter().any(|s| s == "total") {
return Err("unexpected total crate with benchmarks kind".into());
}
let kinded_data = data.by_kind(body.kind);
let mut start_date = body.start_date.as_start(data);
let mut end_date = body.end_date.as_end(data);
let mut counted = Vec::new();
// Iterate over date range.
let start_idx = start_idx(kinded_data, start_date);
let end_idx = end_idx(kinded_data, end_date);
for i in start_idx..(end_idx + 1) {
let today_data = &kinded_data[i];
            if !today_data.by_crate.is_empty() {
if counted.is_empty() {
start_date = today_data.date;
}
end_date = today_data.date;
counted.push(today_data);
}
}
let mut crates = ObjectBuilder::new();
for crate_name in body.crates {
let stats = mk_stats(&counted, &crate_name, &body.phases);
crates = crates.insert(crate_name, stats);
}
Ok(ObjectBuilder::new()
.insert("startDate", start_date.format(JS_DATE_FORMAT).to_string())
.insert("endDate", end_date.format(JS_DATE_FORMAT).to_string())
.insert("crates", crates.build())
.build())
}
}
fn mk_stats(data: &[&TestRun], crate_name: &str, phases: &[String]) -> Value {
let sums = data.iter()
.filter(|day| if let Some(krate) = day.by_crate.get(crate_name) {
!krate.is_empty()
} else {
false
})
.map(|day| {
let krate = &day.by_crate[crate_name];
let mut sum = 0.0;
for phase in phases {
sum += krate[phase].time;
}
sum
})
.collect::<Vec<_>>();
if sums.is_empty() {
return ObjectBuilder::new()
.insert("first", 0)
.insert("last", 0)
.insert("min", 0)
.insert("max", 0)
.insert("mean", 0)
.insert("variance", 0)
.insert("trend", 0)
.insert("trend_b", 0)
.insert("n", 0)
.build();
}
let first = sums[0];
let last = *sums.last().unwrap();
let mut min = first;
let mut max = first;
let q1_idx = data.len() / 4;
let q4_idx = 3 * data.len() / 4;
let mut total = 0.0;
let mut q1_total = 0.0;
let mut q4_total = 0.0;
for (i, &cur) in sums.iter().enumerate() {
min = min.min(cur);
max = max.max(cur);
total += cur;
if i < q1_idx { // Within the first quartile
q1_total += cur;
}
if i >= q4_idx { // Within the fourth quartile
q4_total += cur;
}
}
// Calculate the variance
let mean = total / (sums.len() as f64);
let mut var_total = 0.0;
for sum in &sums {
let diff = sum - mean;
var_total += diff * diff;
}
let variance = var_total / ((sums.len() - 1) as f64);
let trend = if sums.len() >= 10 && sums.len() == data.len() {
let q1_mean = q1_total / (q1_idx as f64);
let q4_mean = q4_total / ((data.len() - q4_idx) as f64);
100.0 * ((q4_mean - q1_mean) / first)
} else {
0.0
};
let trend_b = 100.0 * ((last - first) / first);
ObjectBuilder::new()
.insert("first", first)
.insert("last", last)
.insert("min", min)
.insert("max", max)
.insert("mean", mean)
.insert("variance", variance)
.insert("trend", trend)
.insert("trend_b", trend_b)
.insert("n", sums.len())
.build()
}
pub fn start(data: InputData) {
let mut router = Router::new();
router.get("/summary", Summary::handler);
router.get("/info", Info::handler);
router.post("/data", Data::handler);
router.post("/get_tabular", Tabular::handler);
router.post("/get", Days::handler);
router.post("/stats", Stats::handler);
let mut chain = Chain::new(router);
chain.link(State::<InputData>::both(data));
Iron::new(chain).http(SERVER_ADDRESS).unwrap();
}
|
{
let mut resp = Response::with((status::Ok, serde_json::to_string(&json).unwrap()));
resp.set_mut(Header(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![]))));
resp
}
|
conditional_block
|
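// --- Illustrative sketch (not part of the dataset rows; added for clarity) ---
// mk_stats in the server.rs rows computes its "trend" by comparing the mean
// of the first quartile of runs against the mean of the fourth, scaled by
// the first sample. A standalone rendering of just that metric (the original
// additionally requires sums.len() == data.len()); the sample timings below
// are fictitious:
fn trend_sketch(sums: &[f64]) -> f64 {
    // Mirrors the guard in mk_stats: too few points means no trend.
    if sums.len() < 10 {
        return 0.0;
    }
    let q1_idx = sums.len() / 4;
    let q4_idx = 3 * sums.len() / 4;
    let q1_mean = sums[..q1_idx].iter().sum::<f64>() / q1_idx as f64;
    let q4_mean = sums[q4_idx..].iter().sum::<f64>() / (sums.len() - q4_idx) as f64;
    100.0 * ((q4_mean - q1_mean) / sums[0])
}
fn _demo_trend() {
    // Twelve fictitious per-day compile times, slowly regressing.
    let sums = [10.0, 10.1, 10.0, 10.2, 10.3, 10.4, 10.5, 10.6, 10.8, 10.9, 11.0, 11.1];
    // q1 mean ~10.03, q4 mean 11.0 -> roughly a +9.7% trend.
    println!("trend = {:.1}%", trend_sketch(&sums));
}
// --- end sketch ---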
server.rs
|
use std::collections::HashMap;
use std::cmp::max;
use iron::{Iron, Chain};
use router::Router;
use persistent::State;
use chrono::{Duration, NaiveDate, NaiveDateTime};
use serde_json::builder::ObjectBuilder;
use serde_json::Value;
use serde;
use SERVER_ADDRESS;
use errors::*;
use load::{SummarizedWeek, Kind, TestRun, InputData, Timing};
use util::{start_idx, end_idx};
const JS_DATE_FORMAT: &'static str = "%Y-%m-%dT%H:%M:%S.000Z";
// Boilerplate for parsing and responding to both GET and POST requests.
mod handler {
use std::ops::Deref;
use std::io::Read;
use serde;
use serde_json::{self, Value};
use iron::prelude::*;
use iron::status;
use persistent::State;
use load::InputData;
use errors::*;
fn respond(res: Result<Value>) -> IronResult<Response> {
use iron::headers::{ContentType, AccessControlAllowOrigin};
use iron::mime::{Mime, TopLevel, SubLevel};
use iron::modifiers::Header;
let mut resp = match res {
Ok(json) => {
let mut resp = Response::with((status::Ok, serde_json::to_string(&json).unwrap()));
resp.set_mut(Header(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![]))));
resp
},
Err(err) => {
// TODO: Print to stderr
println!("An error occurred: {:?}", err);
Response::with((status::InternalServerError, err.to_string()))
}
};
resp.set_mut(Header(AccessControlAllowOrigin::Any));
Ok(resp)
}
pub trait PostHandler: Sized {
fn handle(_body: Self, _data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response>
where Self: serde::Deserialize {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
let mut buf = String::new();
let res = match req.body.read_to_string(&mut buf).unwrap() {
0 => Err("POST handler with 0 length body.".into()),
_ => Self::handle(serde_json::from_str(&buf).unwrap(), data.deref())
};
respond(res)
}
}
pub trait GetHandler: Sized {
fn handle(_data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response> {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
respond(Self::handle(data.deref()))
}
}
}
use self::handler::{PostHandler, GetHandler};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum
|
{
Crate,
Phase,
}
impl serde::Deserialize for GroupBy {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<GroupBy, D::Error>
where D: serde::de::Deserializer
{
struct GroupByVisitor;
impl serde::de::Visitor for GroupByVisitor {
type Value = GroupBy;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<GroupBy, E>
where E: serde::de::Error
{
match value {
"crate" => Ok(GroupBy::Crate),
"phase" => Ok(GroupBy::Phase),
_ => {
let msg = format!("unexpected {} value for group by", value);
Err(serde::de::Error::custom(msg))
}
}
}
}
deserializer.deserialize(GroupByVisitor)
}
}
enum OptionalDate {
Date(NaiveDateTime),
CouldNotParse(String),
}
impl OptionalDate {
fn as_start(&self, data: &InputData) -> NaiveDateTime {
// Handle missing start by returning 30 days before end.
if let OptionalDate::Date(date) = *self {
date
} else {
let end = self.as_end(data);
let start = (end - Duration::days(30)).timestamp();
NaiveDateTime::from_timestamp(start, 0)
}
}
fn as_end(&self, data: &InputData) -> NaiveDateTime {
// Handle missing end by using the last available date.
if let OptionalDate::Date(date) = *self {
date
} else {
data.last_date
}
}
}
impl serde::Deserialize for OptionalDate {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<OptionalDate, D::Error>
where D: serde::de::Deserializer
{
struct DateVisitor;
impl serde::de::Visitor for DateVisitor {
type Value = OptionalDate;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<OptionalDate, E>
where E: serde::de::Error
{
match NaiveDate::parse_from_str(value, "%a %b %d %Y") {
Ok(date) => Ok(OptionalDate::Date(date.and_hms(0, 0, 0))),
Err(err) => {
                        if !value.is_empty() {
println!("bad date {:?}: {:?}", value, err);
}
Ok(OptionalDate::CouldNotParse(value.to_string()))
}
}
}
}
deserializer.deserialize(DateVisitor)
}
}
struct Summary;
impl GetHandler for Summary {
fn handle(data: &InputData) -> Result<Value> {
let dates = data.summary_rustc.summary.iter()
.map(|s| s.date.format(JS_DATE_FORMAT).to_string())
.collect::<Vec<_>>();
fn summarize(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> String {
let mut sum = 0.0;
let mut count = 0;
for krate in benchmark.by_crate.values() {
if krate.contains_key("total") {
sum += krate["total"];
count += 1;
}
}
if rustc.by_crate["total"].contains_key("total") {
sum += 2.0 * rustc.by_crate["total"]["total"];
count += 2;
}
format!("{:.1}", sum / (count as f64))
}
// overall number for each week
let summaries = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
summarize(s, &data.summary_rustc.summary[i])
}).collect::<Vec<_>>();
fn breakdown(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> Value {
let mut per_bench = ObjectBuilder::new();
for (crate_name, krate) in &benchmark.by_crate {
let val = krate.get("total").cloned().unwrap_or(0.0);
per_bench = per_bench.insert(crate_name.as_str(), format!("{:.1}", val));
}
let bootstrap = if rustc.by_crate["total"].contains_key("total") {
rustc.by_crate["total"]["total"]
} else {
0.0
};
per_bench = per_bench.insert("bootstrap", format!("{:.1}", bootstrap));
per_bench.build()
}
// per benchmark, per week
let breakdown_data = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
breakdown(s, &data.summary_rustc.summary[i])
}).collect::<Vec<Value>>();
Ok(ObjectBuilder::new()
.insert("total_summary", summarize(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("total_breakdown", breakdown(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("breakdown", breakdown_data)
.insert("summaries", summaries)
.insert("dates", dates)
.build())
}
}
struct Info;
impl GetHandler for Info {
fn handle(data: &InputData) -> Result<Value> {
Ok(ObjectBuilder::new()
.insert("crates", &data.crate_list)
.insert("phases", &data.phase_list)
.insert("benchmarks", &data.benchmarks)
.build())
}
}
fn get_data_for_date(day: &TestRun, crate_names: &[String], phases: &[String], group_by: GroupBy) -> Value {
#[derive(Serialize)]
struct Recording { // TODO better name (can't use Timing since we don't have a percent...)
time: f64,
rss: u64,
}
impl Recording {
fn new() -> Recording {
Recording {
time: 0.0,
rss: 0,
}
}
fn record(&mut self, phase: Option<&Timing>) {
if let Some(phase) = phase {
self.time += phase.time;
self.rss = max(self.rss, phase.rss.unwrap());
}
}
}
let crates = crate_names.into_iter().filter_map(|crate_name| {
day.by_crate.get(crate_name).map(|krate| {
(crate_name, krate)
})
}).collect::<Vec<_>>();
let mut data = HashMap::new();
for phase_name in phases {
for &(crate_name, krate) in &crates {
let entry = match group_by {
GroupBy::Crate => data.entry(crate_name),
GroupBy::Phase => data.entry(phase_name),
};
entry.or_insert(Recording::new()).record(krate.get(phase_name));
}
}
ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", day.commit.clone())
.insert("data", data)
.build()
}
#[derive(Deserialize)]
struct Data { // XXX naming
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
kind: Kind,
group_by: GroupBy,
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Data {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let mut result = Vec::new();
let mut first_idx = None;
let mut last_idx = 0;
// Iterate over date range.
let start_idx = start_idx(data.by_kind(body.kind), body.start_date.as_start(data));
let end_idx = end_idx(data.by_kind(body.kind), body.end_date.as_end(data));
for i in start_idx..(end_idx + 1) {
let today_data = get_data_for_date(
&data.by_kind(body.kind)[i],
&body.crates,
&body.phases,
body.group_by
);
            if !today_data.find("data").unwrap().as_object().unwrap().is_empty() {
last_idx = i - start_idx;
if first_idx == None {
first_idx = Some(i - start_idx);
}
}
result.push(today_data);
}
// Trim the data
let result = result.drain(first_idx.unwrap()..(last_idx+1)).collect::<Vec<_>>();
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Tabular { // XXX naming
kind: Kind,
date: OptionalDate,
}
impl PostHandler for Tabular {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let kind_data = data.by_kind(body.kind);
let day = &kind_data[end_idx(kind_data, body.date.as_end(data))];
Ok(ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", &day.commit)
.insert("data", &day.by_crate)
.build())
}
}
#[derive(Deserialize)]
struct Days { // XXX naming
kind: Kind,
dates: Vec<OptionalDate>,
crates: Vec<String>,
phases: Vec<String>,
group_by: GroupBy,
}
impl PostHandler for Days {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let data = data.by_kind(body.kind);
let mut result = Vec::new();
for date in body.dates {
if let OptionalDate::Date(date) = date {
let day = get_data_for_date(
&data[end_idx(data, date)],
&body.crates,
&body.phases,
body.group_by
);
result.push(day);
}
}
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Stats { // XXX naming
kind: Kind,
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
// kind rustc only: crate or phase can be 'total'
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Stats {
fn handle(body: Self, data: &InputData) -> Result<Value> {
if body.kind == Kind::Benchmarks && body.crates.iter().any(|s| s == "total") {
return Err("unexpected total crate with benchmarks kind".into());
}
let kinded_data = data.by_kind(body.kind);
let mut start_date = body.start_date.as_start(data);
let mut end_date = body.end_date.as_end(data);
let mut counted = Vec::new();
// Iterate over date range.
let start_idx = start_idx(kinded_data, start_date);
let end_idx = end_idx(kinded_data, end_date);
for i in start_idx..(end_idx + 1) {
let today_data = &kinded_data[i];
            if !today_data.by_crate.is_empty() {
if counted.is_empty() {
start_date = today_data.date;
}
end_date = today_data.date;
counted.push(today_data);
}
}
let mut crates = ObjectBuilder::new();
for crate_name in body.crates {
let stats = mk_stats(&counted, &crate_name, &body.phases);
crates = crates.insert(crate_name, stats);
}
Ok(ObjectBuilder::new()
.insert("startDate", start_date.format(JS_DATE_FORMAT).to_string())
.insert("endDate", end_date.format(JS_DATE_FORMAT).to_string())
.insert("crates", crates.build())
.build())
}
}
fn mk_stats(data: &[&TestRun], crate_name: &str, phases: &[String]) -> Value {
let sums = data.iter()
.filter(|day| if let Some(krate) = day.by_crate.get(crate_name) {
!krate.is_empty()
} else {
false
})
.map(|day| {
let krate = &day.by_crate[crate_name];
let mut sum = 0.0;
for phase in phases {
sum += krate[phase].time;
}
sum
})
.collect::<Vec<_>>();
if sums.is_empty() {
return ObjectBuilder::new()
.insert("first", 0)
.insert("last", 0)
.insert("min", 0)
.insert("max", 0)
.insert("mean", 0)
.insert("variance", 0)
.insert("trend", 0)
.insert("trend_b", 0)
.insert("n", 0)
.build();
}
let first = sums[0];
let last = *sums.last().unwrap();
let mut min = first;
let mut max = first;
let q1_idx = data.len() / 4;
let q4_idx = 3 * data.len() / 4;
let mut total = 0.0;
let mut q1_total = 0.0;
let mut q4_total = 0.0;
for (i, &cur) in sums.iter().enumerate() {
min = min.min(cur);
max = max.max(cur);
total += cur;
if i < q1_idx { // Within the first quartile
q1_total += cur;
}
if i >= q4_idx { // Within the fourth quartile
q4_total += cur;
}
}
// Calculate the variance
let mean = total / (sums.len() as f64);
let mut var_total = 0.0;
for sum in &sums {
let diff = sum - mean;
var_total += diff * diff;
}
let variance = var_total / ((sums.len() - 1) as f64);
let trend = if sums.len() >= 10 && sums.len() == data.len() {
let q1_mean = q1_total / (q1_idx as f64);
let q4_mean = q4_total / ((data.len() - q4_idx) as f64);
100.0 * ((q4_mean - q1_mean) / first)
} else {
0.0
};
let trend_b = 100.0 * ((last - first) / first);
ObjectBuilder::new()
.insert("first", first)
.insert("last", last)
.insert("min", min)
.insert("max", max)
.insert("mean", mean)
.insert("variance", variance)
.insert("trend", trend)
.insert("trend_b", trend_b)
.insert("n", sums.len())
.build()
}
pub fn start(data: InputData) {
let mut router = Router::new();
router.get("/summary", Summary::handler);
router.get("/info", Info::handler);
router.post("/data", Data::handler);
router.post("/get_tabular", Tabular::handler);
router.post("/get", Days::handler);
router.post("/stats", Stats::handler);
let mut chain = Chain::new(router);
chain.link(State::<InputData>::both(data));
Iron::new(chain).http(SERVER_ADDRESS).unwrap();
}
|
GroupBy
|
identifier_name
|
server.rs
|
use std::collections::HashMap;
use std::cmp::max;
use iron::{Iron, Chain};
use router::Router;
use persistent::State;
use chrono::{Duration, NaiveDate, NaiveDateTime};
use serde_json::builder::ObjectBuilder;
use serde_json::Value;
use serde;
use SERVER_ADDRESS;
use errors::*;
use load::{SummarizedWeek, Kind, TestRun, InputData, Timing};
use util::{start_idx, end_idx};
const JS_DATE_FORMAT: &'static str = "%Y-%m-%dT%H:%M:%S.000Z";
// Boilerplate for parsing and responding to both GET and POST requests.
mod handler {
use std::ops::Deref;
use std::io::Read;
use serde;
use serde_json::{self, Value};
use iron::prelude::*;
use iron::status;
use persistent::State;
use load::InputData;
use errors::*;
fn respond(res: Result<Value>) -> IronResult<Response> {
use iron::headers::{ContentType, AccessControlAllowOrigin};
use iron::mime::{Mime, TopLevel, SubLevel};
use iron::modifiers::Header;
let mut resp = match res {
Ok(json) => {
let mut resp = Response::with((status::Ok, serde_json::to_string(&json).unwrap()));
resp.set_mut(Header(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![]))));
resp
},
Err(err) => {
// TODO: Print to stderr
println!("An error occurred: {:?}", err);
Response::with((status::InternalServerError, err.to_string()))
}
};
resp.set_mut(Header(AccessControlAllowOrigin::Any));
Ok(resp)
}
pub trait PostHandler: Sized {
fn handle(_body: Self, _data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response>
where Self: serde::Deserialize {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
let mut buf = String::new();
let res = match req.body.read_to_string(&mut buf).unwrap() {
0 => Err("POST handler with 0 length body.".into()),
_ => Self::handle(serde_json::from_str(&buf).unwrap(), data.deref())
};
respond(res)
}
}
pub trait GetHandler: Sized {
fn handle(_data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response> {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
respond(Self::handle(data.deref()))
}
}
}
use self::handler::{PostHandler, GetHandler};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum GroupBy {
Crate,
Phase,
}
impl serde::Deserialize for GroupBy {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<GroupBy, D::Error>
where D: serde::de::Deserializer
{
struct GroupByVisitor;
impl serde::de::Visitor for GroupByVisitor {
type Value = GroupBy;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<GroupBy, E>
where E: serde::de::Error
{
match value {
"crate" => Ok(GroupBy::Crate),
"phase" => Ok(GroupBy::Phase),
_ => {
let msg = format!("unexpected {} value for group by", value);
Err(serde::de::Error::custom(msg))
}
}
}
}
deserializer.deserialize(GroupByVisitor)
}
}
enum OptionalDate {
Date(NaiveDateTime),
CouldNotParse(String),
}
impl OptionalDate {
fn as_start(&self, data: &InputData) -> NaiveDateTime {
// Handle missing start by returning 30 days before end.
if let OptionalDate::Date(date) = *self {
date
} else {
let end = self.as_end(data);
let start = (end - Duration::days(30)).timestamp();
NaiveDateTime::from_timestamp(start, 0)
}
}
fn as_end(&self, data: &InputData) -> NaiveDateTime {
// Handle missing end by using the last available date.
if let OptionalDate::Date(date) = *self {
date
} else {
data.last_date
}
}
}
impl serde::Deserialize for OptionalDate {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<OptionalDate, D::Error>
where D: serde::de::Deserializer
{
struct DateVisitor;
impl serde::de::Visitor for DateVisitor {
type Value = OptionalDate;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<OptionalDate, E>
where E: serde::de::Error
{
match NaiveDate::parse_from_str(value, "%a %b %d %Y") {
Ok(date) => Ok(OptionalDate::Date(date.and_hms(0, 0, 0))),
Err(err) => {
if !value.is_empty() {
println!("bad date {:?}: {:?}", value, err);
}
Ok(OptionalDate::CouldNotParse(value.to_string()))
}
}
}
}
deserializer.deserialize(DateVisitor)
}
}
struct Summary;
impl GetHandler for Summary {
fn handle(data: &InputData) -> Result<Value> {
let dates = data.summary_rustc.summary.iter()
.map(|s| s.date.format(JS_DATE_FORMAT).to_string())
.collect::<Vec<_>>();
fn summarize(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> String {
|
for krate in benchmark.by_crate.values() {
if krate.contains_key("total") {
sum += krate["total"];
count += 1;
}
}
if rustc.by_crate["total"].contains_key("total") {
sum += 2.0 * rustc.by_crate["total"]["total"];
count += 2;
}
format!("{:.1}", sum / (count as f64))
}
// overall number for each week
let summaries = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
summarize(s, &data.summary_rustc.summary[i])
}).collect::<Vec<_>>();
fn breakdown(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> Value {
let mut per_bench = ObjectBuilder::new();
for (crate_name, krate) in &benchmark.by_crate {
let val = krate.get("total").cloned().unwrap_or(0.0);
per_bench = per_bench.insert(crate_name.as_str(), format!("{:.1}", val));
}
let bootstrap = if rustc.by_crate["total"].contains_key("total") {
rustc.by_crate["total"]["total"]
} else {
0.0
};
per_bench = per_bench.insert("bootstrap", format!("{:.1}", bootstrap));
per_bench.build()
}
// per benchmark, per week
let breakdown_data = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
breakdown(s, &data.summary_rustc.summary[i])
}).collect::<Vec<Value>>();
Ok(ObjectBuilder::new()
.insert("total_summary", summarize(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("total_breakdown", breakdown(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("breakdown", breakdown_data)
.insert("summaries", summaries)
.insert("dates", dates)
.build())
}
}
struct Info;
impl GetHandler for Info {
fn handle(data: &InputData) -> Result<Value> {
Ok(ObjectBuilder::new()
.insert("crates", &data.crate_list)
.insert("phases", &data.phase_list)
.insert("benchmarks", &data.benchmarks)
.build())
}
}
fn get_data_for_date(day: &TestRun, crate_names: &[String], phases: &[String], group_by: GroupBy) -> Value {
#[derive(Serialize)]
struct Recording { // TODO better name (can't use Timing since we don't have a percent...)
time: f64,
rss: u64,
}
impl Recording {
fn new() -> Recording {
Recording {
time: 0.0,
rss: 0,
}
}
fn record(&mut self, phase: Option<&Timing>) {
if let Some(phase) = phase {
self.time += phase.time;
self.rss = max(self.rss, phase.rss.unwrap());
}
}
}
let crates = crate_names.into_iter().filter_map(|crate_name| {
day.by_crate.get(crate_name).map(|krate| {
(crate_name, krate)
})
}).collect::<Vec<_>>();
let mut data = HashMap::new();
for phase_name in phases {
for &(crate_name, krate) in &crates {
let entry = match group_by {
GroupBy::Crate => data.entry(crate_name),
GroupBy::Phase => data.entry(phase_name),
};
entry.or_insert(Recording::new()).record(krate.get(phase_name));
}
}
ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", day.commit.clone())
.insert("data", data)
.build()
}
#[derive(Deserialize)]
struct Data { // XXX naming
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
kind: Kind,
group_by: GroupBy,
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Data {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let mut result = Vec::new();
let mut first_idx = None;
let mut last_idx = 0;
// Iterate over date range.
let start_idx = start_idx(data.by_kind(body.kind), body.start_date.as_start(data));
let end_idx = end_idx(data.by_kind(body.kind), body.end_date.as_end(data));
for i in start_idx..(end_idx + 1) {
let today_data = get_data_for_date(
&data.by_kind(body.kind)[i],
&body.crates,
&body.phases,
body.group_by
);
if !today_data.find("data").unwrap().as_object().unwrap().is_empty() {
last_idx = i - start_idx;
if first_idx.is_none() {
first_idx = Some(i - start_idx);
}
}
result.push(today_data);
}
// Trim the data
let result = result.drain(first_idx.unwrap()..(last_idx+1)).collect::<Vec<_>>();
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Tabular { // XXX naming
kind: Kind,
date: OptionalDate,
}
impl PostHandler for Tabular {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let kind_data = data.by_kind(body.kind);
let day = &kind_data[end_idx(kind_data, body.date.as_end(data))];
Ok(ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", &day.commit)
.insert("data", &day.by_crate)
.build())
}
}
#[derive(Deserialize)]
struct Days { // XXX naming
kind: Kind,
dates: Vec<OptionalDate>,
crates: Vec<String>,
phases: Vec<String>,
group_by: GroupBy,
}
impl PostHandler for Days {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let data = data.by_kind(body.kind);
let mut result = Vec::new();
for date in body.dates {
if let OptionalDate::Date(date) = date {
let day = get_data_for_date(
&data[end_idx(data, date)],
&body.crates,
&body.phases,
body.group_by
);
result.push(day);
}
}
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Stats { // XXX naming
kind: Kind,
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
// kind rustc only: crate or phase can be 'total'
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Stats {
fn handle(body: Self, data: &InputData) -> Result<Value> {
if body.kind == Kind::Benchmarks && body.crates.iter().any(|s| s == "total") {
return Err("unexpected total crate with benchmarks kind".into());
}
let kinded_data = data.by_kind(body.kind);
let mut start_date = body.start_date.as_start(data);
let mut end_date = body.end_date.as_end(data);
let mut counted = Vec::new();
// Iterate over date range.
let start_idx = start_idx(kinded_data, start_date);
let end_idx = end_idx(kinded_data, end_date);
for i in start_idx..(end_idx + 1) {
let today_data = &kinded_data[i];
if !today_data.by_crate.is_empty() {
if counted.is_empty() {
start_date = today_data.date;
}
end_date = today_data.date;
counted.push(today_data);
}
}
let mut crates = ObjectBuilder::new();
for crate_name in body.crates {
let stats = mk_stats(&counted, &crate_name, &body.phases);
crates = crates.insert(crate_name, stats);
}
Ok(ObjectBuilder::new()
.insert("startDate", start_date.format(JS_DATE_FORMAT).to_string())
.insert("endDate", end_date.format(JS_DATE_FORMAT).to_string())
.insert("crates", crates.build())
.build())
}
}
fn mk_stats(data: &[&TestRun], crate_name: &str, phases: &[String]) -> Value {
let sums = data.iter()
.filter(|day| if let Some(krate) = day.by_crate.get(crate_name) {
!krate.is_empty()
} else {
false
})
.map(|day| {
let krate = &day.by_crate[crate_name];
let mut sum = 0.0;
for phase in phases {
sum += krate[phase].time;
}
sum
})
.collect::<Vec<_>>();
if sums.is_empty() {
return ObjectBuilder::new()
.insert("first", 0)
.insert("last", 0)
.insert("min", 0)
.insert("max", 0)
.insert("mean", 0)
.insert("variance", 0)
.insert("trend", 0)
.insert("trend_b", 0)
.insert("n", 0)
.build();
}
let first = sums[0];
let last = *sums.last().unwrap();
let mut min = first;
let mut max = first;
let q1_idx = data.len() / 4;
let q4_idx = 3 * data.len() / 4;
let mut total = 0.0;
let mut q1_total = 0.0;
let mut q4_total = 0.0;
for (i, &cur) in sums.iter().enumerate() {
min = min.min(cur);
max = max.max(cur);
total += cur;
if i < q1_idx { // Within the first quartile
q1_total += cur;
}
if i >= q4_idx { // Within the fourth quartile
q4_total += cur;
}
}
// Calculate the variance
let mean = total / (sums.len() as f64);
let mut var_total = 0.0;
for sum in &sums {
let diff = sum - mean;
var_total += diff * diff;
}
let variance = var_total / ((sums.len() - 1) as f64);
let trend = if sums.len() >= 10 && sums.len() == data.len() {
let q1_mean = q1_total / (q1_idx as f64);
let q4_mean = q4_total / ((data.len() - q4_idx) as f64);
100.0 * ((q4_mean - q1_mean) / first)
} else {
0.0
};
let trend_b = 100.0 * ((last - first) / first);
ObjectBuilder::new()
.insert("first", first)
.insert("last", last)
.insert("min", min)
.insert("max", max)
.insert("mean", mean)
.insert("variance", variance)
.insert("trend", trend)
.insert("trend_b", trend_b)
.insert("n", sums.len())
.build()
}
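// Standalone sketch of the quartile trend computed in mk_stats above:
// the mean of the first quarter of the samples is compared against the
// mean of the last quarter, expressed as a percentage of the first
// sample. Keeps the len() >= 10 guard; assumes a non-empty slice.
fn quartile_trend(sums: &[f64]) -> f64 {
    if sums.len() < 10 {
        return 0.0;
    }
    let q1_idx = sums.len() / 4;
    let q4_idx = 3 * sums.len() / 4;
    let q1_mean = sums[..q1_idx].iter().sum::<f64>() / (q1_idx as f64);
    let q4_mean = sums[q4_idx..].iter().sum::<f64>() / ((sums.len() - q4_idx) as f64);
    100.0 * ((q4_mean - q1_mean) / sums[0])
}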
pub fn start(data: InputData) {
let mut router = Router::new();
router.get("/summary", Summary::handler);
router.get("/info", Info::handler);
router.post("/data", Data::handler);
router.post("/get_tabular", Tabular::handler);
router.post("/get", Days::handler);
router.post("/stats", Stats::handler);
let mut chain = Chain::new(router);
chain.link(State::<InputData>::both(data));
Iron::new(chain).http(SERVER_ADDRESS).unwrap();
}
|
let mut sum = 0.0;
let mut count = 0;
|
random_line_split
|
server.rs
|
use std::collections::HashMap;
use std::cmp::max;
use iron::{Iron, Chain};
use router::Router;
use persistent::State;
use chrono::{Duration, NaiveDate, NaiveDateTime};
use serde_json::builder::ObjectBuilder;
use serde_json::Value;
use serde;
use SERVER_ADDRESS;
use errors::*;
use load::{SummarizedWeek, Kind, TestRun, InputData, Timing};
use util::{start_idx, end_idx};
const JS_DATE_FORMAT: &'static str = "%Y-%m-%dT%H:%M:%S.000Z";
// Boilerplate for parsing and responding to both GET and POST requests.
mod handler {
use std::ops::Deref;
use std::io::Read;
use serde;
use serde_json::{self, Value};
use iron::prelude::*;
use iron::status;
use persistent::State;
use load::InputData;
use errors::*;
fn respond(res: Result<Value>) -> IronResult<Response> {
use iron::headers::{ContentType, AccessControlAllowOrigin};
use iron::mime::{Mime, TopLevel, SubLevel};
use iron::modifiers::Header;
let mut resp = match res {
Ok(json) => {
let mut resp = Response::with((status::Ok, serde_json::to_string(&json).unwrap()));
resp.set_mut(Header(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![]))));
resp
},
Err(err) => {
// TODO: Print to stderr
println!("An error occurred: {:?}", err);
Response::with((status::InternalServerError, err.to_string()))
}
};
resp.set_mut(Header(AccessControlAllowOrigin::Any));
Ok(resp)
}
pub trait PostHandler: Sized {
fn handle(_body: Self, _data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response>
where Self: serde::Deserialize {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
let mut buf = String::new();
let res = match req.body.read_to_string(&mut buf).unwrap() {
0 => Err("POST handler with 0 length body.".into()),
_ => Self::handle(serde_json::from_str(&buf).unwrap(), data.deref())
};
respond(res)
}
}
pub trait GetHandler: Sized {
fn handle(_data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response> {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
respond(Self::handle(data.deref()))
}
}
}
use self::handler::{PostHandler, GetHandler};
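// Usage sketch for the handler boilerplate above: a hypothetical `Echo`
// endpoint (illustrative only, not part of this server) that deserializes
// its POST body and answers with JSON, in the same style as the handlers
// below.
#[derive(Deserialize)]
struct Echo {
    message: String,
}
impl PostHandler for Echo {
    fn handle(body: Self, _data: &InputData) -> Result<Value> {
        Ok(ObjectBuilder::new().insert("echo", body.message).build())
    }
}
// It would be registered like the other routes:
//     router.post("/echo", Echo::handler);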
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum GroupBy {
Crate,
Phase,
}
impl serde::Deserialize for GroupBy {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<GroupBy, D::Error>
where D: serde::de::Deserializer
{
struct GroupByVisitor;
impl serde::de::Visitor for GroupByVisitor {
type Value = GroupBy;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<GroupBy, E>
where E: serde::de::Error
{
match value {
"crate" => Ok(GroupBy::Crate),
"phase" => Ok(GroupBy::Phase),
_ => {
let msg = format!("unexpected {} value for group by", value);
Err(serde::de::Error::custom(msg))
}
}
}
}
deserializer.deserialize(GroupByVisitor)
}
}
enum OptionalDate {
Date(NaiveDateTime),
CouldNotParse(String),
}
impl OptionalDate {
fn as_start(&self, data: &InputData) -> NaiveDateTime {
// Handle missing start by returning 30 days before end.
if let OptionalDate::Date(date) = *self {
date
} else {
let end = self.as_end(data);
let start = (end - Duration::days(30)).timestamp();
NaiveDateTime::from_timestamp(start, 0)
}
}
fn as_end(&self, data: &InputData) -> NaiveDateTime {
// Handle missing end by using the last available date.
if let OptionalDate::Date(date) = *self {
date
} else {
data.last_date
}
}
}
impl serde::Deserialize for OptionalDate {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<OptionalDate, D::Error>
where D: serde::de::Deserializer
{
struct DateVisitor;
impl serde::de::Visitor for DateVisitor {
type Value = OptionalDate;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<OptionalDate, E>
where E: serde::de::Error
{
match NaiveDate::parse_from_str(value, "%a %b %d %Y") {
Ok(date) => Ok(OptionalDate::Date(date.and_hms(0, 0, 0))),
Err(err) => {
if !value.is_empty() {
println!("bad date {:?}: {:?}", value, err);
}
Ok(OptionalDate::CouldNotParse(value.to_string()))
}
}
}
}
deserializer.deserialize(DateVisitor)
}
}
struct Summary;
impl GetHandler for Summary {
fn handle(data: &InputData) -> Result<Value> {
let dates = data.summary_rustc.summary.iter()
.map(|s| s.date.format(JS_DATE_FORMAT).to_string())
.collect::<Vec<_>>();
fn summarize(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> String {
let mut sum = 0.0;
let mut count = 0;
for krate in benchmark.by_crate.values() {
if krate.contains_key("total") {
sum += krate["total"];
count += 1;
}
}
if rustc.by_crate["total"].contains_key("total") {
sum += 2.0 * rustc.by_crate["total"]["total"];
count += 2;
}
format!("{:.1}", sum / (count as f64))
}
// overall number for each week
let summaries = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
summarize(s, &data.summary_rustc.summary[i])
}).collect::<Vec<_>>();
fn breakdown(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> Value {
let mut per_bench = ObjectBuilder::new();
for (crate_name, krate) in &benchmark.by_crate {
let val = krate.get("total").cloned().unwrap_or(0.0);
per_bench = per_bench.insert(crate_name.as_str(), format!("{:.1}", val));
}
let bootstrap = if rustc.by_crate["total"].contains_key("total") {
rustc.by_crate["total"]["total"]
} else {
0.0
};
per_bench = per_bench.insert("bootstrap", format!("{:.1}", bootstrap));
per_bench.build()
}
// per benchmark, per week
let breakdown_data = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
breakdown(s, &data.summary_rustc.summary[i])
}).collect::<Vec<Value>>();
Ok(ObjectBuilder::new()
.insert("total_summary", summarize(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("total_breakdown", breakdown(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("breakdown", breakdown_data)
.insert("summaries", summaries)
.insert("dates", dates)
.build())
}
}
struct Info;
impl GetHandler for Info {
fn handle(data: &InputData) -> Result<Value> {
Ok(ObjectBuilder::new()
.insert("crates", &data.crate_list)
.insert("phases", &data.phase_list)
.insert("benchmarks", &data.benchmarks)
.build())
}
}
fn get_data_for_date(day: &TestRun, crate_names: &[String], phases: &[String], group_by: GroupBy) -> Value {
#[derive(Serialize)]
struct Recording { // TODO better name (can't use Timing since we don't have a percent...)
time: f64,
rss: u64,
}
impl Recording {
fn new() -> Recording {
Recording {
time: 0.0,
rss: 0,
}
}
fn record(&mut self, phase: Option<&Timing>) {
if let Some(phase) = phase {
self.time += phase.time;
self.rss = max(self.rss, phase.rss.unwrap());
}
}
}
let crates = crate_names.into_iter().filter_map(|crate_name| {
day.by_crate.get(crate_name).map(|krate| {
(crate_name, krate)
})
}).collect::<Vec<_>>();
let mut data = HashMap::new();
for phase_name in phases {
for &(crate_name, krate) in &crates {
let entry = match group_by {
GroupBy::Crate => data.entry(crate_name),
GroupBy::Phase => data.entry(phase_name),
};
entry.or_insert(Recording::new()).record(krate.get(phase_name));
}
}
ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", day.commit.clone())
.insert("data", data)
.build()
}
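// The loop above folds timings into a HashMap keyed by either crate or
// phase via the entry API. The same accumulate-by-key pattern as a
// standalone sketch, with plain f64 totals instead of Recording:
fn accumulate<'a>(pairs: &[(&'a str, f64)]) -> HashMap<&'a str, f64> {
    let mut totals = HashMap::new();
    for &(key, value) in pairs {
        *totals.entry(key).or_insert(0.0) += value;
    }
    totals
}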
#[derive(Deserialize)]
struct Data { // XXX naming
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
kind: Kind,
group_by: GroupBy,
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Data {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let mut result = Vec::new();
let mut first_idx = None;
let mut last_idx = 0;
// Iterate over date range.
let start_idx = start_idx(data.by_kind(body.kind), body.start_date.as_start(data));
let end_idx = end_idx(data.by_kind(body.kind), body.end_date.as_end(data));
for i in start_idx..(end_idx + 1) {
let today_data = get_data_for_date(
&data.by_kind(body.kind)[i],
&body.crates,
&body.phases,
body.group_by
);
if !today_data.find("data").unwrap().as_object().unwrap().is_empty() {
last_idx = i - start_idx;
if first_idx.is_none() {
first_idx = Some(i - start_idx);
}
}
result.push(today_data);
}
// Trim the data
let result = result.drain(first_idx.unwrap()..(last_idx+1)).collect::<Vec<_>>();
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Tabular { // XXX naming
kind: Kind,
date: OptionalDate,
}
impl PostHandler for Tabular {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let kind_data = data.by_kind(body.kind);
let day = &kind_data[end_idx(kind_data, body.date.as_end(data))];
Ok(ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", &day.commit)
.insert("data", &day.by_crate)
.build())
}
}
#[derive(Deserialize)]
struct Days { // XXX naming
kind: Kind,
dates: Vec<OptionalDate>,
crates: Vec<String>,
phases: Vec<String>,
group_by: GroupBy,
}
impl PostHandler for Days {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let data = data.by_kind(body.kind);
let mut result = Vec::new();
for date in body.dates {
if let OptionalDate::Date(date) = date {
let day = get_data_for_date(
&data[end_idx(data, date)],
&body.crates,
&body.phases,
body.group_by
);
result.push(day);
}
}
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Stats { // XXX naming
kind: Kind,
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
// kind rustc only: crate or phase can be 'total'
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Stats {
fn handle(body: Self, data: &InputData) -> Result<Value> {
if body.kind == Kind::Benchmarks && body.crates.iter().any(|s| s == "total") {
return Err("unexpected total crate with benchmarks kind".into());
}
let kinded_data = data.by_kind(body.kind);
let mut start_date = body.start_date.as_start(data);
let mut end_date = body.end_date.as_end(data);
let mut counted = Vec::new();
// Iterate over date range.
let start_idx = start_idx(kinded_data, start_date);
let end_idx = end_idx(kinded_data, end_date);
for i in start_idx..(end_idx + 1) {
let today_data = &kinded_data[i];
if !today_data.by_crate.is_empty() {
if counted.is_empty() {
start_date = today_data.date;
}
end_date = today_data.date;
counted.push(today_data);
}
}
let mut crates = ObjectBuilder::new();
for crate_name in body.crates {
let stats = mk_stats(&counted, &crate_name, &body.phases);
crates = crates.insert(crate_name, stats);
}
Ok(ObjectBuilder::new()
.insert("startDate", start_date.format(JS_DATE_FORMAT).to_string())
.insert("endDate", end_date.format(JS_DATE_FORMAT).to_string())
.insert("crates", crates.build())
.build())
}
}
fn mk_stats(data: &[&TestRun], crate_name: &str, phases: &[String]) -> Value {
let sums = data.iter()
.filter(|day| if let Some(krate) = day.by_crate.get(crate_name) {
!krate.is_empty()
} else {
false
})
.map(|day| {
let krate = &day.by_crate[crate_name];
let mut sum = 0.0;
for phase in phases {
sum += krate[phase].time;
}
sum
})
.collect::<Vec<_>>();
if sums.is_empty() {
return ObjectBuilder::new()
.insert("first", 0)
.insert("last", 0)
.insert("min", 0)
.insert("max", 0)
.insert("mean", 0)
.insert("variance", 0)
.insert("trend", 0)
.insert("trend_b", 0)
.insert("n", 0)
.build();
}
let first = sums[0];
let last = *sums.last().unwrap();
let mut min = first;
let mut max = first;
let q1_idx = data.len() / 4;
let q4_idx = 3 * data.len() / 4;
let mut total = 0.0;
let mut q1_total = 0.0;
let mut q4_total = 0.0;
for (i, &cur) in sums.iter().enumerate() {
min = min.min(cur);
max = max.max(cur);
total += cur;
if i < q1_idx { // Within the first quartile
q1_total += cur;
}
if i >= q4_idx { // Within the fourth quartile
q4_total += cur;
}
}
// Calculate the variance
let mean = total / (sums.len() as f64);
let mut var_total = 0.0;
for sum in &sums {
let diff = sum - mean;
var_total += diff * diff;
}
let variance = var_total / ((sums.len() - 1) as f64);
let trend = if sums.len() >= 10 && sums.len() == data.len() {
let q1_mean = q1_total / (q1_idx as f64);
let q4_mean = q4_total / ((data.len() - q4_idx) as f64);
100.0 * ((q4_mean - q1_mean) / first)
} else {
0.0
};
let trend_b = 100.0 * ((last - first) / first);
ObjectBuilder::new()
.insert("first", first)
.insert("last", last)
.insert("min", min)
.insert("max", max)
.insert("mean", mean)
.insert("variance", variance)
.insert("trend", trend)
.insert("trend_b", trend_b)
.insert("n", sums.len())
.build()
}
pub fn start(data: InputData)
|
{
let mut router = Router::new();
router.get("/summary", Summary::handler);
router.get("/info", Info::handler);
router.post("/data", Data::handler);
router.post("/get_tabular", Tabular::handler);
router.post("/get", Days::handler);
router.post("/stats", Stats::handler);
let mut chain = Chain::new(router);
chain.link(State::<InputData>::both(data));
Iron::new(chain).http(SERVER_ADDRESS).unwrap();
}
|
identifier_body
|
|
tests.rs
|
// Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::time::Duration;
use crate::{Process, ProcessResultMetadata};
use bazel_protos::gen::build::bazel::remote::execution::v2 as remexec;
use prost_types::Timestamp;
use remexec::ExecutedActionMetadata;
#[test]
fn process_equality() {
// TODO: Tests like these would be cleaner with the builder pattern for the rust-side Process API.
let process_generator = |description: String, timeout: Option<Duration>| {
let mut p = Process::new(vec![]);
p.description = description;
p.timeout = timeout;
p
};
fn hash<Hashable: Hash>(hashable: &Hashable) -> u64 {
let mut hasher = DefaultHasher::new();
hashable.hash(&mut hasher);
hasher.finish()
}
let a = process_generator("One thing".to_string(), Some(Duration::new(0, 0)));
let b = process_generator("Another".to_string(), Some(Duration::new(0, 0)));
let c = process_generator("One thing".to_string(), Some(Duration::new(5, 0)));
let d = process_generator("One thing".to_string(), None);
// Process should derive a PartialEq and Hash that ignores the description
assert_eq!(a, b);
assert_eq!(hash(&a), hash(&b));
// ...but not other fields.
assert_ne!(a, c);
assert_ne!(hash(&a), hash(&c));
// Absence of timeout is included in hash.
assert_ne!(a, d);
assert_ne!(hash(&a), hash(&d));
}
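// Minimal hand-rolled sketch of the property asserted above: PartialEq
// and Hash implementations that deliberately skip one field. `P` is
// illustrative only, not the real Process type.
struct P {
    description: String, // intentionally excluded from eq/hash below
    timeout: Option<Duration>,
}
impl PartialEq for P {
    fn eq(&self, other: &Self) -> bool {
        self.timeout == other.timeout // description does not participate
    }
}
impl Hash for P {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.timeout.hash(state); // description does not participate
    }
}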
#[test]
fn process_result_metadata_to_and_from_executed_action_metadata() {
let action_metadata = ExecutedActionMetadata {
worker_start_timestamp: Some(Timestamp {
seconds: 100,
nanos: 20,
}),
worker_completed_timestamp: Some(Timestamp {
seconds: 120,
nanos: 50,
}),
..ExecutedActionMetadata::default()
};
let converted_process_result: ProcessResultMetadata = action_metadata.into();
assert_eq!(
converted_process_result,
ProcessResultMetadata::new(Some(concrete_time::Duration::new(20, 30)))
);
// The conversion from `ExecutedActionMetadata` to `ProcessResultMetadata` is lossy.
let restored_action_metadata: ExecutedActionMetadata = converted_process_result.into();
assert_eq!(
|
restored_action_metadata,
ExecutedActionMetadata {
worker_start_timestamp: Some(Timestamp {
seconds: 0,
nanos: 0,
}),
worker_completed_timestamp: Some(Timestamp {
seconds: 20,
nanos: 30,
}),
..ExecutedActionMetadata::default()
}
);
// The relevant metadata may be missing from either type.
let action_metadata_missing: ProcessResultMetadata = ExecutedActionMetadata::default().into();
assert_eq!(action_metadata_missing, ProcessResultMetadata::default());
let process_result_missing: ExecutedActionMetadata = ProcessResultMetadata::default().into();
assert_eq!(process_result_missing, ExecutedActionMetadata::default());
}
#[test]
fn process_result_metadata_time_saved_from_cache() {
let metadata = ProcessResultMetadata::new(Some(concrete_time::Duration::new(5, 150)));
let time_saved = metadata.time_saved_from_cache(Duration::new(1, 100));
assert_eq!(time_saved, Some(Duration::new(4, 50)));
// If the cache lookup took more time than the process, we return 0.
let metadata = ProcessResultMetadata::new(Some(concrete_time::Duration::new(1, 0)));
let time_saved = metadata.time_saved_from_cache(Duration::new(5, 0));
assert_eq!(time_saved, Some(Duration::new(0, 0)));
// If the original process time wasn't recorded, we can't compute the time saved.
assert_eq!(
ProcessResultMetadata::default().time_saved_from_cache(Duration::new(1, 100)),
None
);
}
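// time_saved_from_cache behaves like a saturating subtraction of two
// durations, per the three assertions above. A standalone sketch of that
// behaviour (assumed semantics, not the crate's actual implementation);
// the None case above corresponds to a missing recorded total, handled
// before any subtraction takes place.
fn time_saved(total: Duration, cache_lookup: Duration) -> Duration {
    total.checked_sub(cache_lookup).unwrap_or(Duration::new(0, 0))
}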
|
random_line_split
|
|
tests.rs
|
// Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::time::Duration;
use crate::{Process, ProcessResultMetadata};
use bazel_protos::gen::build::bazel::remote::execution::v2 as remexec;
use prost_types::Timestamp;
use remexec::ExecutedActionMetadata;
#[test]
fn process_equality()
|
// Process should derive a PartialEq and Hash that ignores the description
assert_eq!(a, b);
assert_eq!(hash(&a), hash(&b));
// ...but not other fields.
assert_ne!(a, c);
assert_ne!(hash(&a), hash(&c));
// Absence of timeout is included in hash.
assert_ne!(a, d);
assert_ne!(hash(&a), hash(&d));
}
#[test]
fn process_result_metadata_to_and_from_executed_action_metadata() {
let action_metadata = ExecutedActionMetadata {
worker_start_timestamp: Some(Timestamp {
seconds: 100,
nanos: 20,
}),
worker_completed_timestamp: Some(Timestamp {
seconds: 120,
nanos: 50,
}),
..ExecutedActionMetadata::default()
};
let converted_process_result: ProcessResultMetadata = action_metadata.into();
assert_eq!(
converted_process_result,
ProcessResultMetadata::new(Some(concrete_time::Duration::new(20, 30)))
);
// The conversion from `ExecutedActionMetadata` to `ProcessResultMetadata` is lossy.
let restored_action_metadata: ExecutedActionMetadata = converted_process_result.into();
assert_eq!(
restored_action_metadata,
ExecutedActionMetadata {
worker_start_timestamp: Some(Timestamp {
seconds: 0,
nanos: 0,
}),
worker_completed_timestamp: Some(Timestamp {
seconds: 20,
nanos: 30,
}),
..ExecutedActionMetadata::default()
}
);
// The relevant metadata may be missing from either type.
let action_metadata_missing: ProcessResultMetadata = ExecutedActionMetadata::default().into();
assert_eq!(action_metadata_missing, ProcessResultMetadata::default());
let process_result_missing: ExecutedActionMetadata = ProcessResultMetadata::default().into();
assert_eq!(process_result_missing, ExecutedActionMetadata::default());
}
#[test]
fn process_result_metadata_time_saved_from_cache() {
let metadata = ProcessResultMetadata::new(Some(concrete_time::Duration::new(5, 150)));
let time_saved = metadata.time_saved_from_cache(Duration::new(1, 100));
assert_eq!(time_saved, Some(Duration::new(4, 50)));
// If the cache lookup took more time than the process, we return 0.
let metadata = ProcessResultMetadata::new(Some(concrete_time::Duration::new(1, 0)));
let time_saved = metadata.time_saved_from_cache(Duration::new(5, 0));
assert_eq!(time_saved, Some(Duration::new(0, 0)));
// If the original process time wasn't recorded, we can't compute the time saved.
assert_eq!(
ProcessResultMetadata::default().time_saved_from_cache(Duration::new(1, 100)),
None
);
}
|
{
// TODO: Tests like these would be cleaner with the builder pattern for the rust-side Process API.
let process_generator = |description: String, timeout: Option<Duration>| {
let mut p = Process::new(vec![]);
p.description = description;
p.timeout = timeout;
p
};
fn hash<Hashable: Hash>(hashable: &Hashable) -> u64 {
let mut hasher = DefaultHasher::new();
hashable.hash(&mut hasher);
hasher.finish()
}
let a = process_generator("One thing".to_string(), Some(Duration::new(0, 0)));
let b = process_generator("Another".to_string(), Some(Duration::new(0, 0)));
let c = process_generator("One thing".to_string(), Some(Duration::new(5, 0)));
let d = process_generator("One thing".to_string(), None);
|
identifier_body
|
tests.rs
|
// Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::time::Duration;
use crate::{Process, ProcessResultMetadata};
use bazel_protos::gen::build::bazel::remote::execution::v2 as remexec;
use prost_types::Timestamp;
use remexec::ExecutedActionMetadata;
#[test]
fn process_equality() {
// TODO: Tests like these would be cleaner with the builder pattern for the rust-side Process API.
let process_generator = |description: String, timeout: Option<Duration>| {
let mut p = Process::new(vec![]);
p.description = description;
p.timeout = timeout;
p
};
fn hash<Hashable: Hash>(hashable: &Hashable) -> u64 {
let mut hasher = DefaultHasher::new();
hashable.hash(&mut hasher);
hasher.finish()
}
let a = process_generator("One thing".to_string(), Some(Duration::new(0, 0)));
let b = process_generator("Another".to_string(), Some(Duration::new(0, 0)));
let c = process_generator("One thing".to_string(), Some(Duration::new(5, 0)));
let d = process_generator("One thing".to_string(), None);
// Process should derive a PartialEq and Hash that ignores the description
assert_eq!(a, b);
assert_eq!(hash(&a), hash(&b));
// ...but not other fields.
assert_ne!(a, c);
assert_ne!(hash(&a), hash(&c));
// Absence of timeout is included in hash.
assert_ne!(a, d);
assert_ne!(hash(&a), hash(&d));
}
#[test]
fn process_result_metadata_to_and_from_executed_action_metadata() {
let action_metadata = ExecutedActionMetadata {
worker_start_timestamp: Some(Timestamp {
seconds: 100,
nanos: 20,
}),
worker_completed_timestamp: Some(Timestamp {
seconds: 120,
nanos: 50,
}),
..ExecutedActionMetadata::default()
};
let converted_process_result: ProcessResultMetadata = action_metadata.into();
assert_eq!(
converted_process_result,
ProcessResultMetadata::new(Some(concrete_time::Duration::new(20, 30)))
);
// The conversion from `ExecutedActionMetadata` to `ProcessResultMetadata` is lossy.
let restored_action_metadata: ExecutedActionMetadata = converted_process_result.into();
assert_eq!(
restored_action_metadata,
ExecutedActionMetadata {
worker_start_timestamp: Some(Timestamp {
seconds: 0,
nanos: 0,
}),
worker_completed_timestamp: Some(Timestamp {
seconds: 20,
nanos: 30,
}),
..ExecutedActionMetadata::default()
}
);
// The relevant metadata may be missing from either type.
let action_metadata_missing: ProcessResultMetadata = ExecutedActionMetadata::default().into();
assert_eq!(action_metadata_missing, ProcessResultMetadata::default());
let process_result_missing: ExecutedActionMetadata = ProcessResultMetadata::default().into();
assert_eq!(process_result_missing, ExecutedActionMetadata::default());
}
#[test]
fn
|
() {
let metadata = ProcessResultMetadata::new(Some(concrete_time::Duration::new(5, 150)));
let time_saved = metadata.time_saved_from_cache(Duration::new(1, 100));
assert_eq!(time_saved, Some(Duration::new(4, 50)));
// If the cache lookup took more time than the process, we return 0.
let metadata = ProcessResultMetadata::new(Some(concrete_time::Duration::new(1, 0)));
let time_saved = metadata.time_saved_from_cache(Duration::new(5, 0));
assert_eq!(time_saved, Some(Duration::new(0, 0)));
// If the original process time wasn't recorded, we can't compute the time saved.
assert_eq!(
ProcessResultMetadata::default().time_saved_from_cache(Duration::new(1, 100)),
None
);
}
|
process_result_metadata_time_saved_from_cache
|
identifier_name
|
mod.rs
|
use std::sync::atomic::AtomicBool;
use std::ptr;
use libc;
use {CreationError, Event};
use BuilderAttribs;
pub use self::monitor::{MonitorID, get_available_monitors, get_primary_monitor};
use winapi;
mod event;
mod gl;
mod init;
mod monitor;
///
pub struct HeadlessContext(Window);
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(builder: BuilderAttribs) -> Result<HeadlessContext, CreationError> {
let BuilderAttribs { dimensions, gl_version, gl_debug, .. } = builder;
init::new_window(dimensions, "".to_string(), None, gl_version, gl_debug, false, true,
None, None)
.map(|w| HeadlessContext(w))
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) {
self.0.make_current()
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, addr: &str) -> *const () {
self.0.get_proc_address(addr)
}
/// See the docs in the crate root file.
pub fn get_api(&self) -> ::Api {
::Api::OpenGl
}
pub fn set_window_resize_callback(&mut self, _: Option<fn(uint, uint)>) {
}
}
#[cfg(feature = "headless")]
unsafe impl Send for HeadlessContext {}
#[cfg(feature = "headless")]
unsafe impl Sync for HeadlessContext {}
/// The Win32 implementation of the main `Window` object.
pub struct
|
{
/// Main handle for the window.
window: winapi::HWND,
/// This represents a "draw context" for the surface of the window.
hdc: winapi::HDC,
/// OpenGL context.
context: winapi::HGLRC,
/// Bound to `opengl32.dll`.
///
/// `wglGetProcAddress` returns null for GL 1.1 functions because they are
/// already defined by the system. This module contains them.
gl_library: winapi::HMODULE,
/// Receiver for the events dispatched by the window callback.
events_receiver: Receiver<Event>,
/// True if a `Closed` event has been received.
is_closed: AtomicBool,
}
unsafe impl Send for Window {}
unsafe impl Sync for Window {}
impl Window {
/// See the docs in the crate root file.
pub fn new(builder: BuilderAttribs) -> Result<Window, CreationError> {
let BuilderAttribs { dimensions, title, monitor, gl_version,
gl_debug, vsync, visible, sharing, multisampling, .. } = builder;
init::new_window(dimensions, title, monitor, gl_version, gl_debug, vsync,
!visible, sharing.map(|w| init::ContextHack(w.context)),
multisampling)
}
}
#[deriving(Clone)]
pub struct WindowProxy;
impl WindowProxy {
pub fn wakeup_event_loop(&self) {
unimplemented!()
}
}
impl Window {
/// See the docs in the crate root file.
pub fn is_closed(&self) -> bool {
use std::sync::atomic::Relaxed;
self.is_closed.load(Relaxed)
}
/// See the docs in the crate root file.
///
/// Calls SetWindowText on the HWND.
pub fn set_title(&self, text: &str) {
unsafe {
winapi::SetWindowTextW(self.window,
text.utf16_units().chain(Some(0).into_iter())
.collect::<Vec<u16>>().as_ptr() as winapi::LPCWSTR);
}
}
pub fn show(&self) {
unsafe {
winapi::ShowWindow(self.window, winapi::SW_SHOW);
}
}
pub fn hide(&self) {
unsafe {
winapi::ShowWindow(self.window, winapi::SW_HIDE);
}
}
/// See the docs in the crate root file.
pub fn get_position(&self) -> Option<(int, int)> {
use std::mem;
let mut placement: winapi::WINDOWPLACEMENT = unsafe { mem::zeroed() };
placement.length = mem::size_of::<winapi::WINDOWPLACEMENT>() as winapi::UINT;
if unsafe { winapi::GetWindowPlacement(self.window, &mut placement) } == 0 {
return None
}
let ref rect = placement.rcNormalPosition;
Some((rect.left as int, rect.top as int))
}
/// See the docs in the crate root file.
pub fn set_position(&self, x: int, y: int) {
use libc;
unsafe {
winapi::SetWindowPos(self.window, ptr::null_mut(), x as libc::c_int, y as libc::c_int,
0, 0, winapi::SWP_NOZORDER | winapi::SWP_NOSIZE);
winapi::UpdateWindow(self.window);
}
}
/// See the docs in the crate root file.
pub fn get_inner_size(&self) -> Option<(uint, uint)> {
use std::mem;
let mut rect: winapi::RECT = unsafe { mem::uninitialized() };
if unsafe { winapi::GetClientRect(self.window, &mut rect) } == 0 {
return None
}
Some((
(rect.right - rect.left) as uint,
(rect.bottom - rect.top) as uint
))
}
/// See the docs in the crate root file.
pub fn get_outer_size(&self) -> Option<(uint, uint)> {
use std::mem;
let mut rect: winapi::RECT = unsafe { mem::uninitialized() };
if unsafe { winapi::GetWindowRect(self.window, &mut rect) } == 0 {
return None
}
Some((
(rect.right - rect.left) as uint,
(rect.bottom - rect.top) as uint
))
}
/// See the docs in the crate root file.
pub fn set_inner_size(&self, x: uint, y: uint) {
use libc;
unsafe {
winapi::SetWindowPos(self.window, ptr::null_mut(), 0, 0, x as libc::c_int,
y as libc::c_int, winapi::SWP_NOZORDER | winapi::SWP_NOREPOSITION);
winapi::UpdateWindow(self.window);
}
}
pub fn create_window_proxy(&self) -> WindowProxy {
WindowProxy
}
/// See the docs in the crate root file.
// TODO: return iterator
pub fn poll_events(&self) -> Vec<Event> {
let mut events = Vec::new();
loop {
match self.events_receiver.try_recv() {
Ok(ev) => events.push(ev),
Err(_) => break
}
}
// if one of the received events is `Closed`, set `is_closed` to true
if events.iter().any(|e| match e { &::events::Event::Closed => true, _ => false }) {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
}
events
}
/// See the docs in the crate root file.
// TODO: return iterator
pub fn wait_events(&self) -> Vec<Event> {
match self.events_receiver.recv_opt() {
Ok(ev) => {
// if the received event is `Closed`, set `is_closed` to true
match ev {
::events::Event::Closed => {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
},
_ => ()
};
// looking for other possible events in the queue
let mut result = self.poll_events();
result.insert(0, ev);
result
},
Err(_) => {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
vec![]
}
}
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) {
// TODO: check return value
gl::wgl::MakeCurrent(self.hdc as *const libc::c_void, self.context as *const libc::c_void);
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, addr: &str) -> *const () {
use std::c_str::ToCStr;
unsafe {
addr.with_c_str(|s| {
let p = gl::wgl::GetProcAddress(s) as *const ();
if !p.is_null() { return p; }
winapi::GetProcAddress(self.gl_library, s) as *const ()
})
}
}
/// See the docs in the crate root file.
pub fn swap_buffers(&self) {
unsafe {
winapi::SwapBuffers(self.hdc);
}
}
pub fn platform_display(&self) -> *mut libc::c_void {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn get_api(&self) -> ::Api {
::Api::OpenGl
}
pub fn set_window_resize_callback(&mut self, _: Option<fn(uint, uint)>) {
}
}
#[unsafe_destructor]
impl Drop for Window {
fn drop(&mut self) {
use std::ptr;
unsafe { winapi::PostMessageW(self.window, winapi::WM_DESTROY, 0, 0); }
unsafe { gl::wgl::MakeCurrent(ptr::null(), ptr::null()); }
unsafe { gl::wgl::DeleteContext(self.context as *const libc::c_void); }
unsafe { winapi::DestroyWindow(self.window); }
}
}
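// set_title above builds a NUL-terminated UTF-16 buffer inline with the
// pre-1.0 utf16_units API. The same conversion as a standalone helper,
// written against the current std API (encode_utf16) as an assumption:
fn to_wide(text: &str) -> Vec<u16> {
    text.encode_utf16().chain(std::iter::once(0)).collect()
}
// Usage sketch: pass to_wide("title").as_ptr() wherever an LPCWSTR is needed.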
|
Window
|
identifier_name
|
mod.rs
|
use std::sync::atomic::AtomicBool;
use std::ptr;
use libc;
use {CreationError, Event};
use BuilderAttribs;
pub use self::monitor::{MonitorID, get_available_monitors, get_primary_monitor};
use winapi;
mod event;
mod gl;
mod init;
mod monitor;
///
pub struct HeadlessContext(Window);
|
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(builder: BuilderAttribs) -> Result<HeadlessContext, CreationError> {
let BuilderAttribs { dimensions, gl_version, gl_debug, .. } = builder;
init::new_window(dimensions, "".to_string(), None, gl_version, gl_debug, false, true,
None, None)
.map(|w| HeadlessContext(w))
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) {
self.0.make_current()
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, addr: &str) -> *const () {
self.0.get_proc_address(addr)
}
/// See the docs in the crate root file.
pub fn get_api(&self) -> ::Api {
::Api::OpenGl
}
pub fn set_window_resize_callback(&mut self, _: Option<fn(uint, uint)>) {
}
}
#[cfg(feature = "headless")]
unsafe impl Send for HeadlessContext {}
#[cfg(feature = "headless")]
unsafe impl Sync for HeadlessContext {}
/// The Win32 implementation of the main `Window` object.
pub struct Window {
/// Main handle for the window.
window: winapi::HWND,
/// This represents a "draw context" for the surface of the window.
hdc: winapi::HDC,
/// OpenGL context.
context: winapi::HGLRC,
/// Bound to `opengl32.dll`.
///
/// `wglGetProcAddress` returns null for GL 1.1 functions because they are
/// already defined by the system. This module contains them.
gl_library: winapi::HMODULE,
/// Receiver for the events dispatched by the window callback.
events_receiver: Receiver<Event>,
/// True if a `Closed` event has been received.
is_closed: AtomicBool,
}
unsafe impl Send for Window {}
unsafe impl Sync for Window {}
impl Window {
/// See the docs in the crate root file.
pub fn new(builder: BuilderAttribs) -> Result<Window, CreationError> {
let BuilderAttribs { dimensions, title, monitor, gl_version,
gl_debug, vsync, visible, sharing, multisampling, .. } = builder;
init::new_window(dimensions, title, monitor, gl_version, gl_debug, vsync,
!visible, sharing.map(|w| init::ContextHack(w.context)),
multisampling)
}
}
#[deriving(Clone)]
pub struct WindowProxy;
impl WindowProxy {
pub fn wakeup_event_loop(&self) {
unimplemented!()
}
}
impl Window {
/// See the docs in the crate root file.
pub fn is_closed(&self) -> bool {
use std::sync::atomic::Relaxed;
self.is_closed.load(Relaxed)
}
/// See the docs in the crate root file.
///
/// Calls SetWindowText on the HWND.
pub fn set_title(&self, text: &str) {
unsafe {
winapi::SetWindowTextW(self.window,
text.utf16_units().chain(Some(0).into_iter())
.collect::<Vec<u16>>().as_ptr() as winapi::LPCWSTR);
}
}
pub fn show(&self) {
unsafe {
winapi::ShowWindow(self.window, winapi::SW_SHOW);
}
}
pub fn hide(&self) {
unsafe {
winapi::ShowWindow(self.window, winapi::SW_HIDE);
}
}
/// See the docs in the crate root file.
pub fn get_position(&self) -> Option<(int, int)> {
use std::mem;
let mut placement: winapi::WINDOWPLACEMENT = unsafe { mem::zeroed() };
placement.length = mem::size_of::<winapi::WINDOWPLACEMENT>() as winapi::UINT;
if unsafe { winapi::GetWindowPlacement(self.window, &mut placement) } == 0 {
return None
}
let ref rect = placement.rcNormalPosition;
Some((rect.left as int, rect.top as int))
}
/// See the docs in the crate root file.
pub fn set_position(&self, x: int, y: int) {
use libc;
unsafe {
winapi::SetWindowPos(self.window, ptr::null_mut(), x as libc::c_int, y as libc::c_int,
0, 0, winapi::SWP_NOZORDER | winapi::SWP_NOSIZE);
winapi::UpdateWindow(self.window);
}
}
/// See the docs in the crate root file.
pub fn get_inner_size(&self) -> Option<(uint, uint)> {
use std::mem;
let mut rect: winapi::RECT = unsafe { mem::uninitialized() };
if unsafe { winapi::GetClientRect(self.window, &mut rect) } == 0 {
return None
}
Some((
(rect.right - rect.left) as uint,
(rect.bottom - rect.top) as uint
))
}
/// See the docs in the crate root file.
pub fn get_outer_size(&self) -> Option<(uint, uint)> {
use std::mem;
let mut rect: winapi::RECT = unsafe { mem::uninitialized() };
if unsafe { winapi::GetWindowRect(self.window, &mut rect) } == 0 {
return None
}
Some((
(rect.right - rect.left) as uint,
(rect.bottom - rect.top) as uint
))
}
/// See the docs in the crate root file.
pub fn set_inner_size(&self, x: uint, y: uint) {
use libc;
unsafe {
winapi::SetWindowPos(self.window, ptr::null_mut(), 0, 0, x as libc::c_int,
y as libc::c_int, winapi::SWP_NOZORDER | winapi::SWP_NOREPOSITION);
winapi::UpdateWindow(self.window);
}
}
pub fn create_window_proxy(&self) -> WindowProxy {
WindowProxy
}
/// See the docs in the crate root file.
// TODO: return iterator
pub fn poll_events(&self) -> Vec<Event> {
let mut events = Vec::new();
loop {
match self.events_receiver.try_recv() {
Ok(ev) => events.push(ev),
Err(_) => break
}
}
// if one of the received events is `Closed`, set `is_closed` to true
if events.iter().any(|e| match e { &::events::Event::Closed => true, _ => false }) {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
}
events
}
/// See the docs in the crate root file.
// TODO: return iterator
pub fn wait_events(&self) -> Vec<Event> {
match self.events_receiver.recv_opt() {
Ok(ev) => {
// if the received event is `Closed`, set `is_closed` to true
match ev {
::events::Event::Closed => {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
},
_ => ()
};
// looking for other possible events in the queue
let mut result = self.poll_events();
result.insert(0, ev);
result
},
Err(_) => {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
vec![]
}
}
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) {
// TODO: check return value
gl::wgl::MakeCurrent(self.hdc as *const libc::c_void, self.context as *const libc::c_void);
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, addr: &str) -> *const () {
use std::c_str::ToCStr;
unsafe {
addr.with_c_str(|s| {
let p = gl::wgl::GetProcAddress(s) as *const ();
if !p.is_null() { return p; }
winapi::GetProcAddress(self.gl_library, s) as *const ()
})
}
}
/// See the docs in the crate root file.
pub fn swap_buffers(&self) {
unsafe {
winapi::SwapBuffers(self.hdc);
}
}
pub fn platform_display(&self) -> *mut libc::c_void {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn get_api(&self) -> ::Api {
::Api::OpenGl
}
pub fn set_window_resize_callback(&mut self, _: Option<fn(uint, uint)>) {
}
}
#[unsafe_destructor]
impl Drop for Window {
fn drop(&mut self) {
use std::ptr;
unsafe { winapi::PostMessageW(self.window, winapi::WM_DESTROY, 0, 0); }
unsafe { gl::wgl::MakeCurrent(ptr::null(), ptr::null()); }
unsafe { gl::wgl::DeleteContext(self.context as *const libc::c_void); }
unsafe { winapi::DestroyWindow(self.window); }
}
}
|
random_line_split
|
|
mod.rs
|
use std::sync::atomic::AtomicBool;
use std::ptr;
use libc;
use {CreationError, Event};
use BuilderAttribs;
pub use self::monitor::{MonitorID, get_available_monitors, get_primary_monitor};
use winapi;
mod event;
mod gl;
mod init;
mod monitor;
///
pub struct HeadlessContext(Window);
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(builder: BuilderAttribs) -> Result<HeadlessContext, CreationError> {
let BuilderAttribs { dimensions, gl_version, gl_debug, .. } = builder;
init::new_window(dimensions, "".to_string(), None, gl_version, gl_debug, false, true,
None, None)
.map(|w| HeadlessContext(w))
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) {
self.0.make_current()
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, addr: &str) -> *const () {
self.0.get_proc_address(addr)
}
/// See the docs in the crate root file.
pub fn get_api(&self) -> ::Api {
::Api::OpenGl
}
pub fn set_window_resize_callback(&mut self, _: Option<fn(uint, uint)>) {
}
}
#[cfg(feature = "headless")]
unsafe impl Send for HeadlessContext {}
#[cfg(feature = "headless")]
unsafe impl Sync for HeadlessContext {}
/// The Win32 implementation of the main `Window` object.
pub struct Window {
/// Main handle for the window.
window: winapi::HWND,
/// This represents a "draw context" for the surface of the window.
hdc: winapi::HDC,
/// OpenGL context.
context: winapi::HGLRC,
/// Bound to `opengl32.dll`.
///
/// `wglGetProcAddress` returns null for GL 1.1 functions because they are
/// already defined by the system. This module contains them.
gl_library: winapi::HMODULE,
/// Receiver for the events dispatched by the window callback.
events_receiver: Receiver<Event>,
/// True if a `Closed` event has been received.
is_closed: AtomicBool,
}
unsafe impl Send for Window {}
unsafe impl Sync for Window {}
impl Window {
/// See the docs in the crate root file.
pub fn new(builder: BuilderAttribs) -> Result<Window, CreationError> {
let BuilderAttribs { dimensions, title, monitor, gl_version,
gl_debug, vsync, visible, sharing, multisampling, .. } = builder;
init::new_window(dimensions, title, monitor, gl_version, gl_debug, vsync,
!visible, sharing.map(|w| init::ContextHack(w.context)),
multisampling)
}
}
#[deriving(Clone)]
pub struct WindowProxy;
impl WindowProxy {
pub fn wakeup_event_loop(&self) {
unimplemented!()
}
}
impl Window {
/// See the docs in the crate root file.
pub fn is_closed(&self) -> bool {
use std::sync::atomic::Relaxed;
self.is_closed.load(Relaxed)
}
/// See the docs in the crate root file.
///
/// Calls SetWindowText on the HWND.
pub fn set_title(&self, text: &str) {
unsafe {
winapi::SetWindowTextW(self.window,
text.utf16_units().chain(Some(0).into_iter())
.collect::<Vec<u16>>().as_ptr() as winapi::LPCWSTR);
}
}
pub fn show(&self) {
unsafe {
winapi::ShowWindow(self.window, winapi::SW_SHOW);
}
}
pub fn hide(&self) {
unsafe {
winapi::ShowWindow(self.window, winapi::SW_HIDE);
}
}
/// See the docs in the crate root file.
pub fn get_position(&self) -> Option<(int, int)> {
use std::mem;
let mut placement: winapi::WINDOWPLACEMENT = unsafe { mem::zeroed() };
placement.length = mem::size_of::<winapi::WINDOWPLACEMENT>() as winapi::UINT;
if unsafe { winapi::GetWindowPlacement(self.window, &mut placement) } == 0 {
return None
}
let ref rect = placement.rcNormalPosition;
Some((rect.left as int, rect.top as int))
}
/// See the docs in the crate root file.
pub fn set_position(&self, x: int, y: int) {
use libc;
unsafe {
winapi::SetWindowPos(self.window, ptr::null_mut(), x as libc::c_int, y as libc::c_int,
0, 0, winapi::SWP_NOZORDER | winapi::SWP_NOSIZE);
winapi::UpdateWindow(self.window);
}
}
/// See the docs in the crate root file.
pub fn get_inner_size(&self) -> Option<(uint, uint)>
|
/// See the docs in the crate root file.
pub fn get_outer_size(&self) -> Option<(uint, uint)> {
use std::mem;
let mut rect: winapi::RECT = unsafe { mem::uninitialized() };
if unsafe { winapi::GetWindowRect(self.window, &mut rect) } == 0 {
return None
}
Some((
(rect.right - rect.left) as uint,
(rect.bottom - rect.top) as uint
))
}
/// See the docs in the crate root file.
pub fn set_inner_size(&self, x: uint, y: uint) {
use libc;
unsafe {
winapi::SetWindowPos(self.window, ptr::null_mut(), 0, 0, x as libc::c_int,
y as libc::c_int, winapi::SWP_NOZORDER | winapi::SWP_NOREPOSITION);
winapi::UpdateWindow(self.window);
}
}
pub fn create_window_proxy(&self) -> WindowProxy {
WindowProxy
}
/// See the docs in the crate root file.
// TODO: return iterator
pub fn poll_events(&self) -> Vec<Event> {
let mut events = Vec::new();
loop {
match self.events_receiver.try_recv() {
Ok(ev) => events.push(ev),
Err(_) => break
}
}
// if one of the received events is `Closed`, set `is_closed` to true
if events.iter().any(|e| match e { &::events::Event::Closed => true, _ => false }) {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
}
events
}
/// See the docs in the crate root file.
// TODO: return iterator
pub fn wait_events(&self) -> Vec<Event> {
match self.events_receiver.recv_opt() {
Ok(ev) => {
// if the received event is `Closed`, set `is_closed` to true
match ev {
::events::Event::Closed => {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
},
_ => ()
};
// looking for other possible events in the queue
let mut result = self.poll_events();
result.insert(0, ev);
result
},
Err(_) => {
use std::sync::atomic::Relaxed;
self.is_closed.store(true, Relaxed);
vec![]
}
}
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) {
// TODO: check return value
gl::wgl::MakeCurrent(self.hdc as *const libc::c_void, self.context as *const libc::c_void);
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, addr: &str) -> *const () {
use std::c_str::ToCStr;
unsafe {
addr.with_c_str(|s| {
let p = gl::wgl::GetProcAddress(s) as *const ();
if !p.is_null() { return p; }
winapi::GetProcAddress(self.gl_library, s) as *const ()
})
}
}
/// See the docs in the crate root file.
pub fn swap_buffers(&self) {
unsafe {
winapi::SwapBuffers(self.hdc);
}
}
pub fn platform_display(&self) -> *mut libc::c_void {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn get_api(&self) -> ::Api {
::Api::OpenGl
}
pub fn set_window_resize_callback(&mut self, _: Option<fn(uint, uint)>) {
}
}
#[unsafe_destructor]
impl Drop for Window {
fn drop(&mut self) {
use std::ptr;
unsafe { winapi::PostMessageW(self.window, winapi::WM_DESTROY, 0, 0); }
unsafe { gl::wgl::MakeCurrent(ptr::null(), ptr::null()); }
unsafe { gl::wgl::DeleteContext(self.context as *const libc::c_void); }
unsafe { winapi::DestroyWindow(self.window); }
}
}
|
{
use std::mem;
let mut rect: winapi::RECT = unsafe { mem::uninitialized() };
if unsafe { winapi::GetClientRect(self.window, &mut rect) } == 0 {
return None
}
Some((
(rect.right - rect.left) as uint,
(rect.bottom - rect.top) as uint
))
}
|
identifier_body
|
skills_scene.rs
|
use std::path::Path;
use uorustlibs::skills::Skills;
use cgmath::Point2;
use ggez::event::{KeyCode, KeyMods};
use ggez::graphics::{self, Text};
use ggez::{Context, GameResult};
use scene::{BoxedScene, Scene, SceneChangeEvent, SceneName};
pub struct SkillsScene {
pages: Vec<Text>,
exiting: bool,
}
impl<'a> SkillsScene {
pub fn new() -> BoxedScene<'a, SceneName, ()> {
let skills = Skills::new(
&Path::new("./assets/skills.idx"),
&Path::new("./assets/skills.mul"),
);
let text = match skills {
Ok(skills) => {
let items: Vec<Text> = skills
.skills
.chunks(30)
.map(|chunk| {
let skills: Vec<String> = chunk
.iter()
.map(|skill| {
let glyph = if skill.clickable { "+" } else { "-" };
format!("{} {}", glyph, skill.name)
})
.collect();
Text::new(skills.join("\n"))
})
.collect();
items
}
Err(error) => {
let text = format!("{}", error);
let texture = Text::new(text);
vec![texture]
}
};
Box::new(SkillsScene {
pages: text,
exiting: false,
})
}
}
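// The pagination above relies on chunks(30): thirty skill lines per page,
// each page joined into a single string before becoming a Text. The same
// idea as a standalone sketch, with no ggez types involved:
fn paginate(lines: &[String]) -> Vec<String> {
    lines.chunks(30).map(|chunk| chunk.join("\n")).collect()
}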
impl Scene<SceneName, ()> for SkillsScene {
fn draw(&mut self, ctx: &mut Context, _engine_data: &mut ()) -> GameResult<()> {
graphics::clear(ctx, graphics::BLACK);
let mut last_width = 0;
for page in self.pages.iter() {
let width = page.width(ctx);
graphics::draw(
ctx,
page,
(Point2::new(last_width as f32, 0.0), graphics::WHITE),
)?;
last_width += width as i32;
}
Ok(())
}
fn update(
&mut self,
_ctx: &mut Context,
_engine_data: &mut (),
) -> GameResult<Option<SceneChangeEvent<SceneName>>> {
if self.exiting {
Ok(Some(SceneChangeEvent::PopScene))
} else {
Ok(None)
}
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
_keymods: KeyMods,
_repeat: bool,
_engine_data: &mut (),
)
|
}
|
{
match keycode {
KeyCode::Escape => self.exiting = true,
_ => (),
}
}
|
identifier_body
|
skills_scene.rs
|
use std::path::Path;
use uorustlibs::skills::Skills;
use cgmath::Point2;
use ggez::event::{KeyCode, KeyMods};
use ggez::graphics::{self, Text};
use ggez::{Context, GameResult};
use scene::{BoxedScene, Scene, SceneChangeEvent, SceneName};
pub struct SkillsScene {
pages: Vec<Text>,
exiting: bool,
}
impl<'a> SkillsScene {
pub fn new() -> BoxedScene<'a, SceneName, ()> {
let skills = Skills::new(
&Path::new("./assets/skills.idx"),
&Path::new("./assets/skills.mul"),
);
let text = match skills {
Ok(skills) => {
let items: Vec<Text> = skills
.skills
.chunks(30)
.map(|chunk| {
let skills: Vec<String> = chunk
.iter()
.map(|skill| {
let glyph = if skill.clickable { "+" } else
|
;
format!("{} {}", glyph, skill.name)
})
.collect();
Text::new(skills.join("\n"))
})
.collect();
items
}
Err(error) => {
let text = format!("{}", error);
let texture = Text::new(text);
vec![texture]
}
};
Box::new(SkillsScene {
pages: text,
exiting: false,
})
}
}
impl Scene<SceneName, ()> for SkillsScene {
fn draw(&mut self, ctx: &mut Context, _engine_data: &mut ()) -> GameResult<()> {
graphics::clear(ctx, graphics::BLACK);
let mut last_width = 0;
for page in self.pages.iter() {
let width = page.width(ctx);
graphics::draw(
ctx,
page,
(Point2::new(last_width as f32, 0.0), graphics::WHITE),
)?;
last_width += width as i32;
}
Ok(())
}
fn update(
&mut self,
_ctx: &mut Context,
_engine_data: &mut (),
) -> GameResult<Option<SceneChangeEvent<SceneName>>> {
if self.exiting {
Ok(Some(SceneChangeEvent::PopScene))
} else {
Ok(None)
}
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
_keymods: KeyMods,
_repeat: bool,
_engine_data: &mut (),
) {
match keycode {
KeyCode::Escape => self.exiting = true,
_ => (),
}
}
}
|
{ "-" }
|
conditional_block
|
skills_scene.rs
|
use std::path::Path;
use uorustlibs::skills::Skills;
use cgmath::Point2;
use ggez::event::{KeyCode, KeyMods};
use ggez::graphics::{self, Text};
use ggez::{Context, GameResult};
use scene::{BoxedScene, Scene, SceneChangeEvent, SceneName};
pub struct SkillsScene {
pages: Vec<Text>,
exiting: bool,
}
impl<'a> SkillsScene {
pub fn new() -> BoxedScene<'a, SceneName, ()> {
let skills = Skills::new(
&Path::new("./assets/skills.idx"),
&Path::new("./assets/skills.mul"),
);
let text = match skills {
Ok(skills) => {
let items: Vec<Text> = skills
.skills
.chunks(30)
.map(|chunk| {
let skills: Vec<String> = chunk
.iter()
.map(|skill| {
let glyph = if skill.clickable { "+" } else { "-" };
format!("{} {}", glyph, skill.name)
})
.collect();
Text::new(skills.join("\n"))
})
.collect();
items
}
Err(error) => {
let text = format!("{}", error);
let texture = Text::new(text);
vec![texture]
}
};
Box::new(SkillsScene {
pages: text,
exiting: false,
})
}
}
impl Scene<SceneName, ()> for SkillsScene {
fn draw(&mut self, ctx: &mut Context, _engine_data: &mut ()) -> GameResult<()> {
graphics::clear(ctx, graphics::BLACK);
let mut last_width = 0;
for page in self.pages.iter() {
let width = page.width(ctx);
graphics::draw(
ctx,
page,
(Point2::new(last_width as f32, 0.0), graphics::WHITE),
)?;
last_width += width as i32;
}
|
fn update(
&mut self,
_ctx: &mut Context,
_engine_data: &mut (),
) -> GameResult<Option<SceneChangeEvent<SceneName>>> {
if self.exiting {
Ok(Some(SceneChangeEvent::PopScene))
} else {
Ok(None)
}
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
_keymods: KeyMods,
_repeat: bool,
_engine_data: &mut (),
) {
match keycode {
KeyCode::Escape => self.exiting = true,
_ => (),
}
}
}
|
Ok(())
}
|
random_line_split
|
skills_scene.rs
|
use std::path::Path;
use uorustlibs::skills::Skills;
use cgmath::Point2;
use ggez::event::{KeyCode, KeyMods};
use ggez::graphics::{self, Text};
use ggez::{Context, GameResult};
use scene::{BoxedScene, Scene, SceneChangeEvent, SceneName};
pub struct
|
{
pages: Vec<Text>,
exiting: bool,
}
impl<'a> SkillsScene {
pub fn new() -> BoxedScene<'a, SceneName, ()> {
let skills = Skills::new(
&Path::new("./assets/skills.idx"),
&Path::new("./assets/skills.mul"),
);
let text = match skills {
Ok(skills) => {
let items: Vec<Text> = skills
.skills
.chunks(30)
.map(|chunk| {
let skills: Vec<String> = chunk
.iter()
.map(|skill| {
let glyph = if skill.clickable { "+" } else { "-" };
format!("{} {}", glyph, skill.name)
})
.collect();
Text::new(skills.join("\n"))
})
.collect();
items
}
Err(error) => {
let text = format!("{}", error);
let texture = Text::new(text);
vec![texture]
}
};
Box::new(SkillsScene {
pages: text,
exiting: false,
})
}
}
impl Scene<SceneName, ()> for SkillsScene {
fn draw(&mut self, ctx: &mut Context, _engine_data: &mut ()) -> GameResult<()> {
graphics::clear(ctx, graphics::BLACK);
let mut last_width = 0;
for page in self.pages.iter() {
let width = page.width(ctx);
graphics::draw(
ctx,
page,
(Point2::new(last_width as f32, 0.0), graphics::WHITE),
)?;
last_width += width as i32;
}
Ok(())
}
fn update(
&mut self,
_ctx: &mut Context,
_engine_data: &mut (),
) -> GameResult<Option<SceneChangeEvent<SceneName>>> {
if self.exiting {
Ok(Some(SceneChangeEvent::PopScene))
} else {
Ok(None)
}
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
_keymods: KeyMods,
_repeat: bool,
_engine_data: &mut (),
) {
match keycode {
KeyCode::Escape => self.exiting = true,
_ => (),
}
}
}
|
SkillsScene
|
identifier_name
|
and.rs
|
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//
//! AND implementation.
//!
//! Will be automatically included when including `filter::Filter`, so importing this module
//! shouldn't be necessary.
//!
use failable::filter::FailableFilter;
#[must_use = "filters are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct
|
<T, U>(T, U);
impl<T, U> FailableAnd<T, U> {
pub fn new(a: T, b: U) -> FailableAnd<T, U> {
FailableAnd(a, b)
}
}
impl<N, T, U, E> FailableFilter<N> for FailableAnd<T, U>
where T: FailableFilter<N, Error = E>,
U: FailableFilter<N, Error = E>
{
type Error = E;
fn filter(&self, e: &N) -> Result<bool, Self::Error> {
Ok(self.0.filter(e)? && self.1.filter(e)?)
}
}
|
FailableAnd
|
identifier_name
|
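FailableAnd::filter above combines two failable filters with `?` and `&&`: the first Err propagates out, and a false left-hand side short-circuits so the right filter never runs. A self-contained sketch of the same pattern using plain closures (hypothetical filters, not the crate's API):

fn main() {
    // Two "failable filters" modeled as closures returning Result<bool, String>.
    let non_empty = |s: &str| -> Result<bool, String> { Ok(!s.is_empty()) };
    let positive_int = |s: &str| -> Result<bool, String> {
        s.parse::<i32>().map(|n| n > 0).map_err(|e| e.to_string())
    };
    // AND combinator: `?` propagates the first error, `&&` short-circuits.
    let and = |s: &str| -> Result<bool, String> { Ok(non_empty(s)? && positive_int(s)?) };
    assert_eq!(and("42"), Ok(true));
    assert_eq!(and(""), Ok(false));        // positive_int never runs
    assert!(and("not a number").is_err()); // parse error propagates
}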
and.rs
|
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//
//! AND implementation.
//!
//! Will be automatically included when including `filter::Filter`, so importing this module
//! shouldn't be necessary.
//!
use failable::filter::FailableFilter;
#[must_use = "filters are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct FailableAnd<T, U>(T, U);
impl<T, U> FailableAnd<T, U> {
pub fn new(a: T, b: U) -> FailableAnd<T, U> {
FailableAnd(a, b)
}
}
impl<N, T, U, E> FailableFilter<N> for FailableAnd<T, U>
where T: FailableFilter<N, Error = E>,
U: FailableFilter<N, Error = E>
{
type Error = E;
fn filter(&self, e: &N) -> Result<bool, Self::Error>
|
}
|
{
Ok(self.0.filter(e)? && self.1.filter(e)?)
}
|
identifier_body
|
and.rs
|
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//
//! AND implementation.
//!
//! Will be automatically included when including `filter::Filter`, so importing this module
//! shouldn't be necessary.
//!
use failable::filter::FailableFilter;
#[must_use = "filters are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct FailableAnd<T, U>(T, U);
impl<T, U> FailableAnd<T, U> {
pub fn new(a: T, b: U) -> FailableAnd<T, U> {
FailableAnd(a, b)
}
}
impl<N, T, U, E> FailableFilter<N> for FailableAnd<T, U>
where T: FailableFilter<N, Error = E>,
U: FailableFilter<N, Error = E>
{
type Error = E;
fn filter(&self, e: &N) -> Result<bool, Self::Error> {
Ok(self.0.filter(e)? && self.1.filter(e)?)
|
}
|
}
|
random_line_split
|
thread-local-in-ctfe.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(const_fn, thread_local)]
#[thread_local]
static A: u32 = 1;
static B: u32 = A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
static C: &u32 = &A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const D: u32 = A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const E: &u32 = &A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const fn
|
() -> u32 {
A
//~^ ERROR thread-local statics cannot be accessed at compile-time
}
fn main() {}
|
f
|
identifier_name
|
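The test above pins down why CTFE rejects thread-local statics: every thread gets its own copy, so there is no single compile-time value or address to bake into a const or static. Runtime access is fine, as this sketch using the stable thread_local! macro (a stand-in here for the unstable #[thread_local] attribute) shows:

use std::cell::Cell;

thread_local! {
    // Each thread that touches A gets an independent Cell initialized to 1.
    static A: Cell<u32> = Cell::new(1);
}

fn main() {
    // Reading at runtime is allowed; `const D: u32 = ...` initialized from a
    // thread-local would not be, since the value is per-thread.
    A.with(|a| assert_eq!(a.get(), 1));
}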
thread-local-in-ctfe.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
#[thread_local]
static A: u32 = 1;
static B: u32 = A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
static C: &u32 = &A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const D: u32 = A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const E: &u32 = &A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const fn f() -> u32 {
A
//~^ ERROR thread-local statics cannot be accessed at compile-time
}
fn main() {}
|
#![feature(const_fn, thread_local)]
|
random_line_split
|
thread-local-in-ctfe.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(const_fn, thread_local)]
#[thread_local]
static A: u32 = 1;
static B: u32 = A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
static C: &u32 = &A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const D: u32 = A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const E: &u32 = &A;
//~^ ERROR thread-local statics cannot be accessed at compile-time
const fn f() -> u32 {
A
//~^ ERROR thread-local statics cannot be accessed at compile-time
}
fn main()
|
{}
|
identifier_body
|
|
lib.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_id = "rustdoc#0.11.0-pre"]
#![experimental]
#![desc = "rustdoc, the Rust documentation extractor"]
#![license = "MIT/ASL2"]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![feature(globs, struct_variant, managed_boxes, macro_rules, phase)]
extern crate debug;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate serialize;
extern crate syntax;
extern crate testing = "test";
extern crate time;
#[phase(plugin, link)] extern crate log;
use std::io;
use std::io::{File, MemWriter};
use std::str;
use std::gc::Gc;
use serialize::{json, Decodable, Encodable};
use externalfiles::ExternalHtml;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
pub mod clean;
pub mod core;
pub mod doctree;
#[macro_escape]
pub mod externalfiles;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
static PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
static DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
local_data_key!(pub ctxtkey: Gc<core::DocContext>)
local_data_key!(pub analysiskey: core::CrateAnalysis)
type Output = (clean::Crate, Vec<plugins::PluginJson> );
pub fn main() {
std::os::set_exit_status(main_args(std::os::args().as_slice()));
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflagopt("", "version", "print rustdoc's version", "verbose"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "space separated list of passes to also run, a \
value of `list` will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL")
)
}
pub fn usage(argv0: &str)
|
pub fn main_args(args: &[String]) -> int {
let matches = match getopts::getopts(args.tail(), opts().as_slice()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(args[0].as_slice());
return 0;
} else if matches.opt_present("version") {
match rustc::driver::version("rustdoc", &matches) {
Some(err) => {
println!("{}", err);
return 1
},
None => return 0
}
}
if matches.free.len() == 0 {
println!("expected an input file to act on");
return 1;
} if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = matches.free.get(0).as_slice();
let libs = matches.opt_strs("L").iter().map(|s| Path::new(s.as_slice())).collect();
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.as_slice().words())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| Path::new(s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
matches.opt_strs("html-in-header").as_slice(),
matches.opt_strs("html-before-content").as_slice(),
matches.opt_strs("html-after-content").as_slice()) {
Some(eh) => eh,
None => return 3
};
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, test_args)
}
(false, true) => return markdown::render(input, output.unwrap_or(Path::new("doc")),
&matches, &external_html),
(false, false) => {}
}
if matches.opt_strs("passes").as_slice() == &["list".to_string()] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES.iter() {
println!("{:>20s} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES.iter() {
println!("{:>20s}", name);
}
return 0;
}
let (krate, res) = match acquire_input(input, &matches) {
Ok(pair) => pair,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
info!("going to format");
let started = time::precise_time_ns();
match matches.opt_str("w").as_ref().map(|s| s.as_slice()) {
Some("html") | None => {
match html::render::run(krate, &external_html, output.unwrap_or(Path::new("doc"))) {
Ok(()) => {}
Err(e) => fail!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, res, output.unwrap_or(Path::new("doc.json"))) {
Ok(()) => {}
Err(e) => fail!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
let ended = time::precise_time_ns();
info!("Took {:.03f}s", (ended as f64 - started as f64) / 1e9f64);
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| s.as_slice()) {
Some("rust") => Ok(rust_input(input, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, matches))
}
}
}
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
    let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let libs: Vec<Path> = matches.opt_strs("L")
.iter()
.map(|s| Path::new(s.as_slice()))
.collect();
let cfgs = matches.opt_strs("cfg");
let cr = Path::new(cratefile);
info!("starting to run rustc");
let (krate, analysis) = std::task::try(proc() {
let cr = cr;
core::run_core(libs.move_iter().map(|x| x.clone()).collect(),
cfgs,
&cr)
}).map_err(|boxed_any|format!("{:?}", boxed_any)).unwrap();
info!("finished with rustc");
analysiskey.replace(Some(analysis));
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.get_ref().doc_list() {
Some(nested) => {
for inner in nested.iter() {
match *inner {
clean::Word(ref x)
if "no_default_passes" == x.as_slice() => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == x.as_slice() => {
for pass in value.as_slice().words() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == x.as_slice() => {
for p in value.as_slice().words() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.unshift(name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(Path::new(path));
for pass in passes.iter() {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == pass.as_slice()
}) {
Some(i) => PASSES[i].val1(),
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins.move_iter() {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
return pm.run_plugins(krate);
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut input = match File::open(&Path::new(input)) {
Ok(f) => f,
Err(e) => {
return Err(format!("couldn't open {}: {}", input, e))
}
};
match json::from_reader(&mut input) {
Err(s) => Err(s.to_str()),
Ok(json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.pop(&"schema".to_string()) {
Some(json::String(version)) => {
                if version.as_slice() != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.pop(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok((krate, plugin_output))
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: Path) -> io::IoResult<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::TreeMap::new();
json.insert("schema".to_string(), json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.move_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
}
}
}).collect();
    // FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation.
let crate_json_str = {
let mut w = MemWriter::new();
{
let mut encoder = json::Encoder::new(&mut w as &mut io::Writer);
krate.encode(&mut encoder).unwrap();
}
str::from_utf8_owned(w.unwrap()).unwrap()
};
let crate_json = match json::from_str(crate_json_str.as_slice()) {
Ok(j) => j,
Err(e) => fail!("Rust generated JSON is invalid: {:?}", e)
};
json.insert("crate".to_string(), crate_json);
json.insert("plugins".to_string(), json::Object(plugins_json));
let mut file = try!(File::create(&dst));
json::Object(json).to_writer(&mut file)
}
|
{
println!("{}",
getopts::usage(format!("{} [options] <input>", argv0).as_slice(),
opts().as_slice()));
}
|
identifier_body
|
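rustdoc's PASSES table above is a static registry of (name, fn, description) triples, and pass resolution is a position() search by name followed by extracting the function pointer. A minimal modern-Rust sketch of the same lookup shape, with hypothetical passes standing in for the real ones:

// Same (name, fn, description) triple shape as rustdoc's Pass type.
type Pass = (&'static str, fn(u32) -> u32, &'static str);

fn double(x: u32) -> u32 { x * 2 }
fn incr(x: u32) -> u32 { x + 1 }

static PASSES: &[Pass] = &[
    ("double", double, "doubles the value"),
    ("incr", incr, "adds one"),
];

fn main() {
    // Resolve a requested pass by name, skipping unknown names, as the
    // plugin-loading loop above does.
    let wanted = "incr";
    match PASSES.iter().position(|&(name, _, _)| name == wanted) {
        Some(i) => {
            let (_, f, _) = PASSES[i];
            assert_eq!(f(41), 42);
        }
        None => eprintln!("unknown pass {}, skipping", wanted),
    }
}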
lib.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_id = "rustdoc#0.11.0-pre"]
#![experimental]
#![desc = "rustdoc, the Rust documentation extractor"]
#![license = "MIT/ASL2"]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![feature(globs, struct_variant, managed_boxes, macro_rules, phase)]
extern crate debug;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate serialize;
extern crate syntax;
extern crate testing = "test";
extern crate time;
#[phase(plugin, link)] extern crate log;
use std::io;
use std::io::{File, MemWriter};
use std::str;
use std::gc::Gc;
use serialize::{json, Decodable, Encodable};
use externalfiles::ExternalHtml;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
pub mod clean;
pub mod core;
pub mod doctree;
#[macro_escape]
pub mod externalfiles;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
static PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
static DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
local_data_key!(pub ctxtkey: Gc<core::DocContext>)
local_data_key!(pub analysiskey: core::CrateAnalysis)
type Output = (clean::Crate, Vec<plugins::PluginJson> );
pub fn main() {
std::os::set_exit_status(main_args(std::os::args().as_slice()));
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflagopt("", "version", "print rustdoc's version", "verbose"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "space separated list of passes to also run, a \
value of `list` will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(format!("{} [options] <input>", argv0).as_slice(),
opts().as_slice()));
}
pub fn main_args(args: &[String]) -> int {
let matches = match getopts::getopts(args.tail(), opts().as_slice()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(args[0].as_slice());
return 0;
} else if matches.opt_present("version") {
match rustc::driver::version("rustdoc", &matches) {
Some(err) => {
println!("{}", err);
return 1
},
None => return 0
}
}
if matches.free.len() == 0 {
println!("expected an input file to act on");
return 1;
} if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = matches.free.get(0).as_slice();
let libs = matches.opt_strs("L").iter().map(|s| Path::new(s.as_slice())).collect();
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.as_slice().words())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| Path::new(s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
matches.opt_strs("html-in-header").as_slice(),
matches.opt_strs("html-before-content").as_slice(),
matches.opt_strs("html-after-content").as_slice()) {
Some(eh) => eh,
None => return 3
};
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, test_args)
}
(false, true) => return markdown::render(input, output.unwrap_or(Path::new("doc")),
&matches, &external_html),
(false, false) => {}
}
if matches.opt_strs("passes").as_slice() == &["list".to_string()] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES.iter() {
println!("{:>20s} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES.iter() {
println!("{:>20s}", name);
}
return 0;
}
let (krate, res) = match acquire_input(input, &matches) {
Ok(pair) => pair,
Err(s) =>
|
};
info!("going to format");
let started = time::precise_time_ns();
match matches.opt_str("w").as_ref().map(|s| s.as_slice()) {
Some("html") | None => {
match html::render::run(krate, &external_html, output.unwrap_or(Path::new("doc"))) {
Ok(()) => {}
Err(e) => fail!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, res, output.unwrap_or(Path::new("doc.json"))) {
Ok(()) => {}
Err(e) => fail!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
let ended = time::precise_time_ns();
info!("Took {:.03f}s", (ended as f64 - started as f64) / 1e9f64);
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| s.as_slice()) {
Some("rust") => Ok(rust_input(input, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, matches))
}
}
}
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
    let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let libs: Vec<Path> = matches.opt_strs("L")
.iter()
.map(|s| Path::new(s.as_slice()))
.collect();
let cfgs = matches.opt_strs("cfg");
let cr = Path::new(cratefile);
info!("starting to run rustc");
let (krate, analysis) = std::task::try(proc() {
let cr = cr;
core::run_core(libs.move_iter().map(|x| x.clone()).collect(),
cfgs,
&cr)
}).map_err(|boxed_any|format!("{:?}", boxed_any)).unwrap();
info!("finished with rustc");
analysiskey.replace(Some(analysis));
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.get_ref().doc_list() {
Some(nested) => {
for inner in nested.iter() {
match *inner {
clean::Word(ref x)
if "no_default_passes" == x.as_slice() => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == x.as_slice() => {
for pass in value.as_slice().words() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == x.as_slice() => {
for p in value.as_slice().words() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.unshift(name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(Path::new(path));
for pass in passes.iter() {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == pass.as_slice()
}) {
Some(i) => PASSES[i].val1(),
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins.move_iter() {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
return pm.run_plugins(krate);
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut input = match File::open(&Path::new(input)) {
Ok(f) => f,
Err(e) => {
return Err(format!("couldn't open {}: {}", input, e))
}
};
match json::from_reader(&mut input) {
Err(s) => Err(s.to_str()),
Ok(json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.pop(&"schema".to_string()) {
Some(json::String(version)) => {
                if version.as_slice() != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.pop(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok((krate, plugin_output))
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: Path) -> io::IoResult<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::TreeMap::new();
json.insert("schema".to_string(), json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.move_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
}
}
}).collect();
    // FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation.
let crate_json_str = {
let mut w = MemWriter::new();
{
let mut encoder = json::Encoder::new(&mut w as &mut io::Writer);
krate.encode(&mut encoder).unwrap();
}
str::from_utf8_owned(w.unwrap()).unwrap()
};
let crate_json = match json::from_str(crate_json_str.as_slice()) {
Ok(j) => j,
Err(e) => fail!("Rust generated JSON is invalid: {:?}", e)
};
json.insert("crate".to_string(), crate_json);
json.insert("plugins".to_string(), json::Object(plugins_json));
let mut file = try!(File::create(&dst));
json::Object(json).to_writer(&mut file)
}
|
{
println!("input error: {}", s);
return 1;
}
|
conditional_block
|
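json_output above assembles a single blob of the shape { "schema": ..., "crate": ..., "plugins": ... } before writing it to dst. A sketch of that shape using serde_json, which is an assumption for illustration only; the original predates serde and goes through the old serialize crate's Encoder:

// Requires the serde_json crate; the field names mirror the blob that
// json_output writes, with placeholder values.
use serde_json::json;

fn main() {
    let blob = json!({
        "schema": "0.8.3",           // stand-in for SCHEMA_VERSION
        "crate": { "name": "demo" }, // the cleaned crate would go here
        "plugins": {}                // per-plugin JSON output, keyed by name
    });
    println!("{}", serde_json::to_string_pretty(&blob).unwrap());
}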
lib.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_id = "rustdoc#0.11.0-pre"]
#![experimental]
#![desc = "rustdoc, the Rust documentation extractor"]
#![license = "MIT/ASL2"]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![feature(globs, struct_variant, managed_boxes, macro_rules, phase)]
extern crate debug;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate serialize;
extern crate syntax;
extern crate testing = "test";
extern crate time;
#[phase(plugin, link)] extern crate log;
use std::io;
use std::io::{File, MemWriter};
use std::str;
use std::gc::Gc;
use serialize::{json, Decodable, Encodable};
use externalfiles::ExternalHtml;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
pub mod clean;
pub mod core;
pub mod doctree;
#[macro_escape]
pub mod externalfiles;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
static PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
static DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
local_data_key!(pub ctxtkey: Gc<core::DocContext>)
local_data_key!(pub analysiskey: core::CrateAnalysis)
type Output = (clean::Crate, Vec<plugins::PluginJson> );
pub fn main() {
std::os::set_exit_status(main_args(std::os::args().as_slice()));
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflagopt("", "version", "print rustdoc's version", "verbose"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "space separated list of passes to also run, a \
value of `list` will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(format!("{} [options] <input>", argv0).as_slice(),
opts().as_slice()));
}
pub fn
|
(args: &[String]) -> int {
let matches = match getopts::getopts(args.tail(), opts().as_slice()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(args[0].as_slice());
return 0;
} else if matches.opt_present("version") {
match rustc::driver::version("rustdoc", &matches) {
Some(err) => {
println!("{}", err);
return 1
},
None => return 0
}
}
if matches.free.len() == 0 {
println!("expected an input file to act on");
return 1;
} if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = matches.free.get(0).as_slice();
let libs = matches.opt_strs("L").iter().map(|s| Path::new(s.as_slice())).collect();
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.as_slice().words())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| Path::new(s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
matches.opt_strs("html-in-header").as_slice(),
matches.opt_strs("html-before-content").as_slice(),
matches.opt_strs("html-after-content").as_slice()) {
Some(eh) => eh,
None => return 3
};
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, test_args)
}
(false, true) => return markdown::render(input, output.unwrap_or(Path::new("doc")),
&matches, &external_html),
(false, false) => {}
}
if matches.opt_strs("passes").as_slice() == &["list".to_string()] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES.iter() {
println!("{:>20s} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES.iter() {
println!("{:>20s}", name);
}
return 0;
}
let (krate, res) = match acquire_input(input, &matches) {
Ok(pair) => pair,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
info!("going to format");
let started = time::precise_time_ns();
match matches.opt_str("w").as_ref().map(|s| s.as_slice()) {
Some("html") | None => {
match html::render::run(krate, &external_html, output.unwrap_or(Path::new("doc"))) {
Ok(()) => {}
Err(e) => fail!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, res, output.unwrap_or(Path::new("doc.json"))) {
Ok(()) => {}
Err(e) => fail!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
let ended = time::precise_time_ns();
info!("Took {:.03f}s", (ended as f64 - started as f64) / 1e9f64);
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| s.as_slice()) {
Some("rust") => Ok(rust_input(input, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, matches))
}
}
}
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
    let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let libs: Vec<Path> = matches.opt_strs("L")
.iter()
.map(|s| Path::new(s.as_slice()))
.collect();
let cfgs = matches.opt_strs("cfg");
let cr = Path::new(cratefile);
info!("starting to run rustc");
let (krate, analysis) = std::task::try(proc() {
let cr = cr;
core::run_core(libs.move_iter().map(|x| x.clone()).collect(),
cfgs,
&cr)
}).map_err(|boxed_any|format!("{:?}", boxed_any)).unwrap();
info!("finished with rustc");
analysiskey.replace(Some(analysis));
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.get_ref().doc_list() {
Some(nested) => {
for inner in nested.iter() {
match *inner {
clean::Word(ref x)
if "no_default_passes" == x.as_slice() => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == x.as_slice() => {
for pass in value.as_slice().words() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == x.as_slice() => {
for p in value.as_slice().words() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.unshift(name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(Path::new(path));
for pass in passes.iter() {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == pass.as_slice()
}) {
Some(i) => PASSES[i].val1(),
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins.move_iter() {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
return pm.run_plugins(krate);
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut input = match File::open(&Path::new(input)) {
Ok(f) => f,
Err(e) => {
return Err(format!("couldn't open {}: {}", input, e))
}
};
match json::from_reader(&mut input) {
Err(s) => Err(s.to_str()),
Ok(json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.pop(&"schema".to_string()) {
Some(json::String(version)) => {
                if version.as_slice() != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.pop(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok((krate, plugin_output))
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: Path) -> io::IoResult<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::TreeMap::new();
json.insert("schema".to_string(), json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.move_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
}
}
}).collect();
    // FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation.
let crate_json_str = {
let mut w = MemWriter::new();
{
let mut encoder = json::Encoder::new(&mut w as &mut io::Writer);
krate.encode(&mut encoder).unwrap();
}
str::from_utf8_owned(w.unwrap()).unwrap()
};
let crate_json = match json::from_str(crate_json_str.as_slice()) {
Ok(j) => j,
Err(e) => fail!("Rust generated JSON is invalid: {:?}", e)
};
json.insert("crate".to_string(), crate_json);
json.insert("plugins".to_string(), json::Object(plugins_json));
let mut file = try!(File::create(&dst));
json::Object(json).to_writer(&mut file)
}
|
main_args
|
identifier_name
|
lib.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_id = "rustdoc#0.11.0-pre"]
#![experimental]
#![desc = "rustdoc, the Rust documentation extractor"]
#![license = "MIT/ASL2"]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![feature(globs, struct_variant, managed_boxes, macro_rules, phase)]
extern crate debug;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate serialize;
extern crate syntax;
extern crate testing = "test";
extern crate time;
#[phase(plugin, link)] extern crate log;
use std::io;
use std::io::{File, MemWriter};
use std::str;
use std::gc::Gc;
use serialize::{json, Decodable, Encodable};
use externalfiles::ExternalHtml;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
pub mod clean;
pub mod core;
pub mod doctree;
#[macro_escape]
pub mod externalfiles;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
static PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
static DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
local_data_key!(pub ctxtkey: Gc<core::DocContext>)
local_data_key!(pub analysiskey: core::CrateAnalysis)
type Output = (clean::Crate, Vec<plugins::PluginJson> );
pub fn main() {
std::os::set_exit_status(main_args(std::os::args().as_slice()));
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflagopt("", "version", "print rustdoc's version", "verbose"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "space separated list of passes to also run, a \
value of `list` will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(format!("{} [options] <input>", argv0).as_slice(),
opts().as_slice()));
}
pub fn main_args(args: &[String]) -> int {
let matches = match getopts::getopts(args.tail(), opts().as_slice()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(args[0].as_slice());
return 0;
} else if matches.opt_present("version") {
match rustc::driver::version("rustdoc", &matches) {
Some(err) => {
println!("{}", err);
return 1
},
None => return 0
}
}
if matches.free.len() == 0 {
println!("expected an input file to act on");
return 1;
} if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = matches.free.get(0).as_slice();
let libs = matches.opt_strs("L").iter().map(|s| Path::new(s.as_slice())).collect();
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.as_slice().words())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| Path::new(s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
matches.opt_strs("html-in-header").as_slice(),
matches.opt_strs("html-before-content").as_slice(),
matches.opt_strs("html-after-content").as_slice()) {
Some(eh) => eh,
None => return 3
};
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, test_args)
}
(false, true) => return markdown::render(input, output.unwrap_or(Path::new("doc")),
&matches, &external_html),
(false, false) => {}
}
if matches.opt_strs("passes").as_slice() == &["list".to_string()] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES.iter() {
println!("{:>20s} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES.iter() {
println!("{:>20s}", name);
}
return 0;
}
let (krate, res) = match acquire_input(input, &matches) {
Ok(pair) => pair,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
info!("going to format");
let started = time::precise_time_ns();
match matches.opt_str("w").as_ref().map(|s| s.as_slice()) {
Some("html") | None => {
match html::render::run(krate, &external_html, output.unwrap_or(Path::new("doc"))) {
Ok(()) => {}
Err(e) => fail!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, res, output.unwrap_or(Path::new("doc.json"))) {
Ok(()) => {}
Err(e) => fail!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
let ended = time::precise_time_ns();
info!("Took {:.03f}s", (ended as f64 - started as f64) / 1e9f64);
return 0;
}
|
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| s.as_slice()) {
Some("rust") => Ok(rust_input(input, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, matches))
}
}
}
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
    let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let libs: Vec<Path> = matches.opt_strs("L")
.iter()
.map(|s| Path::new(s.as_slice()))
.collect();
let cfgs = matches.opt_strs("cfg");
let cr = Path::new(cratefile);
info!("starting to run rustc");
let (krate, analysis) = std::task::try(proc() {
let cr = cr;
core::run_core(libs.move_iter().map(|x| x.clone()).collect(),
cfgs,
&cr)
}).map_err(|boxed_any|format!("{:?}", boxed_any)).unwrap();
info!("finished with rustc");
analysiskey.replace(Some(analysis));
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.get_ref().doc_list() {
Some(nested) => {
for inner in nested.iter() {
match *inner {
clean::Word(ref x)
if "no_default_passes" == x.as_slice() => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == x.as_slice() => {
for pass in value.as_slice().words() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == x.as_slice() => {
for p in value.as_slice().words() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.unshift(name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(Path::new(path));
for pass in passes.iter() {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == pass.as_slice()
}) {
Some(i) => PASSES[i].val1(),
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins.move_iter() {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
return pm.run_plugins(krate);
}
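// For illustration only: the attribute loop in `rust_input` reacts to `doc`
// attributes of the shapes below in the documented crate. The pass and plugin
// names here are invented for the example.
//
//     #![doc(no_default_passes)]
//     #![doc(passes = "strip-hidden collapse-docs")]
//     #![doc(plugins = "my_doc_plugin")]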
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut input = match File::open(&Path::new(input)) {
Ok(f) => f,
Err(e) => {
return Err(format!("couldn't open {}: {}", input, e))
}
};
match json::from_reader(&mut input) {
Err(s) => Err(s.to_str()),
Ok(json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.pop(&"schema".to_string()) {
Some(json::String(version)) => {
                    if version.as_slice() != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.pop(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok((krate, plugin_output))
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
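// For illustration only: `json_input` expects a top-level object of the shape
// written by `json_output` below; the version string is whatever
// SCHEMA_VERSION (defined elsewhere in this crate) expands to.
//
//     {
//         "schema": "<SCHEMA_VERSION>",
//         "crate": { /* a decodable clean::Crate */ },
//         "plugins": { /* currently ignored, see the FIXME above */ }
//     }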
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: Path) -> io::IoResult<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::TreeMap::new();
json.insert("schema".to_string(), json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.move_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
}
}
}).collect();
    // FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation.
let crate_json_str = {
let mut w = MemWriter::new();
{
let mut encoder = json::Encoder::new(&mut w as &mut io::Writer);
krate.encode(&mut encoder).unwrap();
}
str::from_utf8_owned(w.unwrap()).unwrap()
};
let crate_json = match json::from_str(crate_json_str.as_slice()) {
Ok(j) => j,
Err(e) => fail!("Rust generated JSON is invalid: {:?}", e)
};
json.insert("crate".to_string(), crate_json);
json.insert("plugins".to_string(), json::Object(plugins_json));
let mut file = try!(File::create(&dst));
json::Object(json).to_writer(&mut file)
}
// set_cover_test.rs
mod bit_vector;
mod set_cover;
use set_cover::minimum_set_cover;
#[test]
fn test_disjoint() {
    test(
        vec![
            vec![0],
            vec![1],
            vec![2],
        ],
        vec![0, 1, 2],
    );
}
#[test]
fn test_one() {
test(
vec![
vec![0, 1, 2],
vec![0],
vec![1],
vec![2],
],
vec![0],
);
}
#[test]
fn test_two() {
test(
vec![
vec![0, 1],
vec![1, 2],
vec![2, 3],
],
vec![0, 2],
);
}
fn test(subsets: Vec<Vec<u8>>, expected_cover: Vec<usize>) {
let mut actual_cover = minimum_set_cover(&subsets);
actual_cover.sort();
assert_eq!(expected_cover, actual_cover);
}
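// A minimal sketch of the contract the tests above rely on: `minimum_set_cover`
// is assumed to return the indices of a smallest selection of subsets whose
// union covers every element appearing in any subset. The real implementation
// lives in the `set_cover` module (not shown); this exhaustive version is only
// illustrative and is exponential in `subsets.len()`.
#[allow(dead_code)]
fn minimum_set_cover_sketch(subsets: &[Vec<u8>]) -> Vec<usize> {
    use std::collections::HashSet;

    // The universe is every element mentioned by any subset.
    let universe: HashSet<u8> = subsets.iter().flatten().copied().collect();
    let n = subsets.len();
    let mut best: Option<Vec<usize>> = None;

    // Enumerate every selection of subsets as a bitmask and keep the smallest
    // selection whose union equals the universe.
    for mask in 0u32..(1u32 << n) {
        let chosen: Vec<usize> = (0..n).filter(|&i| mask & (1 << i) != 0).collect();
        let covered: HashSet<u8> = chosen
            .iter()
            .flat_map(|&i| subsets[i].iter().copied())
            .collect();
        if covered == universe && best.as_ref().map_or(true, |b| chosen.len() < b.len()) {
            best = Some(chosen);
        }
    }
    best.unwrap_or_default()
}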