file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
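Each row below splits one source file into three pieces. As a minimal sketch of how the columns are assumed to fit together: concatenating `prefix + middle + suffix` should reproduce the original file, and `middle` (classified by `fim_type` as an identifier name, identifier body, conditional block, or random line span) is the completion target. The `FimRow` struct and the `<fim_prefix>`/`<fim_suffix>`/`<fim_middle>` sentinels in the sketch are illustrative assumptions, not names defined by this dataset.

```rust
// Illustrative only: field names mirror the columns above; the sentinel
// strings are a common FIM convention, not something this dataset specifies.
struct FimRow {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String, // identifier_name | identifier_body | conditional_block | random_line_split
}

impl FimRow {
    /// Concatenating the three pieces is assumed to reproduce the original file.
    fn original_source(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }

    /// Arrange prefix and suffix around sentinel markers; the target completion is `middle`.
    fn fim_prompt(&self) -> String {
        format!("<fim_prefix>{}<fim_suffix>{}<fim_middle>", self.prefix, self.suffix)
    }
}

fn main() {
    // A tiny hypothetical row in the spirit of the first example below.
    let row = FimRow {
        file_name: "delete.rs".to_string(),
        prefix: "pub fn ".to_string(),
        suffix: "(ui: &mut UI) -> Result<()> { /* ... */ }\n".to_string(),
        middle: "start".to_string(),
        fim_type: "identifier_name".to_string(),
    };
    // Reassembling the pieces yields the original source text.
    assert_eq!(
        row.original_source(),
        "pub fn start(ui: &mut UI) -> Result<()> { /* ... */ }\n"
    );
    println!("{}: {}", row.file_name, row.fim_prompt());
    println!("target ({}): {}", row.fim_type, row.middle);
}
```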
delete.rs
|
// Copyright (c) 2016-2018 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use depot_client::Client as DepotClient;
use common::ui::{Status, UIWriter, UI};
use {PRODUCT, VERSION};
use error::{Error, Result};
pub fn start(ui: &mut UI, bldr_url: &str, token: &str, origin: &str, key: &str) -> Result<()>
|
{
let depot_client =
DepotClient::new(bldr_url, PRODUCT, VERSION, None).map_err(Error::DepotClient)?;
ui.status(Status::Deleting, format!("secret {}.", key))?;
depot_client
.delete_origin_secret(origin, token, key)
.map_err(Error::DepotClient)?;
ui.status(Status::Deleted, format!("secret {}.", key))?;
Ok(())
}
|
identifier_body
|
|
delete.rs
|
// Copyright (c) 2016-2018 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use depot_client::Client as DepotClient;
use common::ui::{Status, UIWriter, UI};
use {PRODUCT, VERSION};
use error::{Error, Result};
pub fn
|
(ui: &mut UI, bldr_url: &str, token: &str, origin: &str, key: &str) -> Result<()> {
let depot_client =
DepotClient::new(bldr_url, PRODUCT, VERSION, None).map_err(Error::DepotClient)?;
ui.status(Status::Deleting, format!("secret {}.", key))?;
depot_client
.delete_origin_secret(origin, token, key)
.map_err(Error::DepotClient)?;
ui.status(Status::Deleted, format!("secret {}.", key))?;
Ok(())
}
|
start
|
identifier_name
|
main.rs
|
#![crate_name = "weather-client"]
/*!
* Weather update client
* Connects SUB socket to tcp://localhost:5556
* Collects weather updates and find avg temp in zipcode
*/
extern crate zmq;
fn atoi(s: &str) -> int {
from_str(s).unwrap()
}
fn main() {
println!("Collecting updates from weather server...");
let mut context = zmq::Context::new();
let mut subscriber = context.socket(zmq::SUB).unwrap();
|
let mut total_temp = 0;
for _ in range(0i, 100i) {
let string = subscriber.recv_str(0).unwrap();
let chks: Vec<int> = string.as_slice().split(' ').map(|x| atoi(x)).collect();
let (_zipcode, temperature, _relhumidity) = (chks[0], chks[1], chks[2]);
total_temp += temperature;
}
println!("Average temperature for zipcode '{}' was {}F", filter, (total_temp / 100));
}
|
assert!(subscriber.connect("tcp://localhost:5556").is_ok());
let args = std::os::args();
let filter = if args.len() > 1 { args[1].clone() } else { "10001".to_string() };
assert!(subscriber.set_subscribe(filter.as_bytes()).is_ok());
|
random_line_split
|
main.rs
|
#![crate_name = "weather-client"]
/*!
* Weather update client
* Connects SUB socket to tcp://localhost:5556
* Collects weather updates and find avg temp in zipcode
*/
extern crate zmq;
fn
|
(s: &str) -> int {
from_str(s).unwrap()
}
fn main() {
println!("Collecting updates from weather server...");
let mut context = zmq::Context::new();
let mut subscriber = context.socket(zmq::SUB).unwrap();
assert!(subscriber.connect("tcp://localhost:5556").is_ok());
let args = std::os::args();
let filter = if args.len() > 1 { args[1].clone() } else { "10001".to_string() };
assert!(subscriber.set_subscribe(filter.as_bytes()).is_ok());
let mut total_temp = 0;
for _ in range(0i, 100i) {
let string = subscriber.recv_str(0).unwrap();
let chks: Vec<int> = string.as_slice().split(' ').map(|x| atoi(x)).collect();
let (_zipcode, temperature, _relhumidity) = (chks[0], chks[1], chks[2]);
total_temp += temperature;
}
println!("Average temperature for zipcode '{}' was {}F", filter, (total_temp / 100));
}
|
atoi
|
identifier_name
|
main.rs
|
#![crate_name = "weather-client"]
/*!
* Weather update client
* Connects SUB socket to tcp://localhost:5556
* Collects weather updates and find avg temp in zipcode
*/
extern crate zmq;
fn atoi(s: &str) -> int {
from_str(s).unwrap()
}
fn main() {
println!("Collecting updates from weather server...");
let mut context = zmq::Context::new();
let mut subscriber = context.socket(zmq::SUB).unwrap();
assert!(subscriber.connect("tcp://localhost:5556").is_ok());
let args = std::os::args();
let filter = if args.len() > 1 { args[1].clone() } else
|
;
assert!(subscriber.set_subscribe(filter.as_bytes()).is_ok());
let mut total_temp = 0;
for _ in range(0i, 100i) {
let string = subscriber.recv_str(0).unwrap();
let chks: Vec<int> = string.as_slice().split(' ').map(|x| atoi(x)).collect();
let (_zipcode, temperature, _relhumidity) = (chks[0], chks[1], chks[2]);
total_temp += temperature;
}
println!("Average temperature for zipcode '{}' was {}F", filter, (total_temp / 100));
}
|
{ "10001".to_string() }
|
conditional_block
|
main.rs
|
#![crate_name = "weather-client"]
/*!
* Weather update client
* Connects SUB socket to tcp://localhost:5556
* Collects weather updates and find avg temp in zipcode
*/
extern crate zmq;
fn atoi(s: &str) -> int
|
fn main() {
println!("Collecting updates from weather server...");
let mut context = zmq::Context::new();
let mut subscriber = context.socket(zmq::SUB).unwrap();
assert!(subscriber.connect("tcp://localhost:5556").is_ok());
let args = std::os::args();
let filter = if args.len() > 1 { args[1].clone() } else { "10001".to_string() };
assert!(subscriber.set_subscribe(filter.as_bytes()).is_ok());
let mut total_temp = 0;
for _ in range(0i, 100i) {
let string = subscriber.recv_str(0).unwrap();
let chks: Vec<int> = string.as_slice().split(' ').map(|x| atoi(x)).collect();
let (_zipcode, temperature, _relhumidity) = (chks[0], chks[1], chks[2]);
total_temp += temperature;
}
println!("Average temperature for zipcode '{}' was {}F", filter, (total_temp / 100));
}
|
{
from_str(s).unwrap()
}
|
identifier_body
|
bench.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use lz4_pyframe::compress;
use lz4_pyframe::decompress;
use minibench::bench;
use minibench::elapsed;
use rand_core::RngCore;
use rand_core::SeedableRng;
fn main()
|
{
let mut rng = rand_chacha::ChaChaRng::seed_from_u64(0);
let mut buf = vec![0u8; 100_000000];
rng.fill_bytes(&mut buf);
let compressed = compress(&buf).unwrap();
bench("compress (100M)", || {
elapsed(|| {
compress(&buf).unwrap();
})
});
bench("decompress (~100M)", || {
elapsed(|| {
decompress(&compressed).unwrap();
})
});
}
|
identifier_body
|
|
bench.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use lz4_pyframe::compress;
use lz4_pyframe::decompress;
use minibench::bench;
use minibench::elapsed;
use rand_core::RngCore;
use rand_core::SeedableRng;
fn main() {
let mut rng = rand_chacha::ChaChaRng::seed_from_u64(0);
let mut buf = vec![0u8; 100_000000];
rng.fill_bytes(&mut buf);
let compressed = compress(&buf).unwrap();
bench("compress (100M)", || {
elapsed(|| {
|
bench("decompress (~100M)", || {
elapsed(|| {
decompress(&compressed).unwrap();
})
});
}
|
compress(&buf).unwrap();
})
});
|
random_line_split
|
bench.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use lz4_pyframe::compress;
use lz4_pyframe::decompress;
use minibench::bench;
use minibench::elapsed;
use rand_core::RngCore;
use rand_core::SeedableRng;
fn
|
() {
let mut rng = rand_chacha::ChaChaRng::seed_from_u64(0);
let mut buf = vec![0u8; 100_000000];
rng.fill_bytes(&mut buf);
let compressed = compress(&buf).unwrap();
bench("compress (100M)", || {
elapsed(|| {
compress(&buf).unwrap();
})
});
bench("decompress (~100M)", || {
elapsed(|| {
decompress(&compressed).unwrap();
})
});
}
|
main
|
identifier_name
|
lib.rs
|
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The Rust parser and macro expander.
//!
//! # Note
//!
//! This API is completely unstable and subject to change.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "syntax"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(associated_consts)]
#![feature(bitset)]
#![feature(drain)]
#![feature(filling_drop)]
#![feature(libc)]
#![feature(ref_slice)]
#![feature(rustc_private)]
#![feature(set_stdio)]
#![feature(staged_api)]
#![feature(str_char)]
#![feature(str_escape)]
#![feature(unicode)]
#![feature(vec_push_all)]
extern crate fmt_macros;
extern crate serialize;
extern crate term;
extern crate libc;
#[macro_use] extern crate log;
#[macro_use] #[no_link] extern crate rustc_bitflags;
extern crate serialize as rustc_serialize; // used by deriving
// A variant of 'try!' that panics on Err(FatalError). This is used as a
// crutch on the way towards a non-panic!-prone parser. It should be used
// for fatal parsing errors; eventually we plan to convert all code using
// panictry to just use normal try
macro_rules! panictry {
($e:expr) => ({
use std::result::Result::{Ok, Err};
use diagnostic::FatalError;
match $e {
Ok(e) => e,
Err(FatalError) => panic!(FatalError)
}
})
}
pub mod util {
pub mod interner;
#[cfg(test)]
pub mod parser_testing;
pub mod small_vector;
}
pub mod diagnostics {
pub mod macros;
pub mod plugin;
pub mod registry;
pub mod metadata;
}
pub mod syntax {
pub use ext;
pub use parse;
pub use ast;
}
pub mod abi;
pub mod ast;
pub mod ast_util;
pub mod attr;
pub mod codemap;
pub mod config;
pub mod diagnostic;
pub mod feature_gate;
pub mod fold;
pub mod owned_slice;
pub mod parse;
pub mod ptr;
pub mod show_span;
pub mod std_inject;
pub mod str;
pub mod test;
pub mod visit;
pub mod print {
pub mod pp;
pub mod pprust;
}
pub mod ext {
pub mod asm;
pub mod base;
pub mod build;
pub mod cfg;
pub mod concat;
pub mod concat_idents;
pub mod deriving;
pub mod env;
pub mod expand;
pub mod format;
pub mod log_syntax;
pub mod mtwt;
pub mod quote;
pub mod pushpop_safe;
pub mod source_util;
pub mod trace_macros;
pub mod tt {
pub mod transcribe;
pub mod macro_parser;
pub mod macro_rules;
}
}
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
|
random_line_split
|
|
packed-struct-transmute.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This assumes the packed and non-packed structs are different sizes.
// the error points to the start of the file, not the line with the
|
// transmute
// error-pattern: transmute called on types with different size
#[packed]
struct Foo {
bar: u8,
baz: uint
}
struct Oof {
rab: u8,
zab: uint
}
fn main() {
let foo = Foo { bar: 1, baz: 10 };
unsafe {
let oof: Oof = cast::transmute(foo);
debug!(oof);
}
}
|
random_line_split
|
|
packed-struct-transmute.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This assumes the packed and non-packed structs are different sizes.
// the error points to the start of the file, not the line with the
// transmute
// error-pattern: transmute called on types with different size
#[packed]
struct
|
{
bar: u8,
baz: uint
}
struct Oof {
rab: u8,
zab: uint
}
fn main() {
let foo = Foo { bar: 1, baz: 10 };
unsafe {
let oof: Oof = cast::transmute(foo);
debug!(oof);
}
}
|
Foo
|
identifier_name
|
mod.rs
|
/*
* Copyright (C) 2017 AltOS-Rust Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
//! This module provides system implementation information and allows
//! configuration control and reporting of system exceptions.
mod icsr;
mod defs;
use core::ops::{Deref, DerefMut};
use ::volatile::Volatile;
use self::icsr::ICSR;
use self::defs::*;
/// Returns instance of the System Control Block.
pub fn scb() -> SCB {
SCB::scb()
}
#[derive(Copy, Clone, Debug)]
#[repr(C)]
#[doc(hidden)]
pub struct RawSCB {
cpuid: u32,
icsr: ICSR,
reserved1: u32,
aircr: u32,
scr: u32,
ccr: u32,
reserved2: u32,
shpr2: u32,
shpr3: u32,
}
|
impl SCB {
fn scb() -> Self {
unsafe {
SCB(Volatile::new(SCB_ADDR as *const _))
}
}
}
impl Deref for SCB {
type Target = RawSCB;
fn deref(&self) -> &Self::Target {
&*(self.0)
}
}
impl DerefMut for SCB {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut *(self.0)
}
}
impl RawSCB {
/// Trigger a pend_sv exception.
///
/// PendSV signals to the operating system that a context switch should occur.
pub fn set_pend_sv(&mut self) {
self.icsr.set_pend_sv();
}
/// Clear the pend_sv exception.
pub fn clear_pend_sv(&mut self) {
self.icsr.clear_pend_sv();
}
}
|
/// System Control Block
#[derive(Copy, Clone, Debug)]
pub struct SCB(Volatile<RawSCB>);
|
random_line_split
|
mod.rs
|
/*
* Copyright (C) 2017 AltOS-Rust Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
//! This module provides system implementation information and allows
//! configuration control and reporting of system exceptions.
mod icsr;
mod defs;
use core::ops::{Deref, DerefMut};
use ::volatile::Volatile;
use self::icsr::ICSR;
use self::defs::*;
/// Returns instance of the System Control Block.
pub fn scb() -> SCB {
SCB::scb()
}
#[derive(Copy, Clone, Debug)]
#[repr(C)]
#[doc(hidden)]
pub struct RawSCB {
cpuid: u32,
icsr: ICSR,
reserved1: u32,
aircr: u32,
scr: u32,
ccr: u32,
reserved2: u32,
shpr2: u32,
shpr3: u32,
}
/// System Control Block
#[derive(Copy, Clone, Debug)]
pub struct SCB(Volatile<RawSCB>);
impl SCB {
fn scb() -> Self {
unsafe {
SCB(Volatile::new(SCB_ADDR as *const _))
}
}
}
impl Deref for SCB {
type Target = RawSCB;
fn deref(&self) -> &Self::Target {
&*(self.0)
}
}
impl DerefMut for SCB {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut *(self.0)
}
}
impl RawSCB {
/// Trigger a pend_sv exception.
///
/// PendSV signals to the operating system that a context switch should occur.
pub fn
|
(&mut self) {
self.icsr.set_pend_sv();
}
/// Clear the pend_sv exception.
pub fn clear_pend_sv(&mut self) {
self.icsr.clear_pend_sv();
}
}
|
set_pend_sv
|
identifier_name
|
csssupportsrule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput};
use dom::bindings::codegen::Bindings::CSSSupportsRuleBinding;
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::cssconditionrule::CSSConditionRule;
use dom::cssrule::SpecificCSSRule;
use dom::cssstylesheet::CSSStyleSheet;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::parser::ParserContext;
use style::shared_lock::{Locked, ToCssWithGuard};
use style::stylesheets::{CssRuleType, SupportsRule};
use style::stylesheets::supports_rule::SupportsCondition;
use style_traits::{ParsingMode, ToCss};
#[dom_struct]
pub struct CSSSupportsRule {
cssconditionrule: CSSConditionRule,
#[ignore_malloc_size_of = "Arc"]
supportsrule: Arc<Locked<SupportsRule>>,
}
impl CSSSupportsRule {
fn new_inherited(parent_stylesheet: &CSSStyleSheet, supportsrule: Arc<Locked<SupportsRule>>)
-> CSSSupportsRule {
let guard = parent_stylesheet.shared_lock().read();
let list = supportsrule.read_with(&guard).rules.clone();
CSSSupportsRule {
cssconditionrule: CSSConditionRule::new_inherited(parent_stylesheet, list),
supportsrule: supportsrule,
}
}
#[allow(unrooted_must_root)]
pub fn new(window: &Window, parent_stylesheet: &CSSStyleSheet,
supportsrule: Arc<Locked<SupportsRule>>) -> DomRoot<CSSSupportsRule> {
reflect_dom_object(Box::new(CSSSupportsRule::new_inherited(parent_stylesheet, supportsrule)),
window,
CSSSupportsRuleBinding::Wrap)
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn get_condition_text(&self) -> DOMString
|
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn set_condition_text(&self, text: DOMString) {
let mut input = ParserInput::new(&text);
let mut input = Parser::new(&mut input);
let cond = SupportsCondition::parse(&mut input);
if let Ok(cond) = cond {
let global = self.global();
let win = global.as_window();
let url = win.Document().url();
let quirks_mode = win.Document().quirks_mode();
let context = ParserContext::new_for_cssom(
&url,
Some(CssRuleType::Supports),
ParsingMode::DEFAULT,
quirks_mode,
None,
);
let enabled = cond.eval(&context);
let mut guard = self.cssconditionrule.shared_lock().write();
let rule = self.supportsrule.write_with(&mut guard);
rule.condition = cond;
rule.enabled = enabled;
}
}
}
impl SpecificCSSRule for CSSSupportsRule {
fn ty(&self) -> u16 {
use dom::bindings::codegen::Bindings::CSSRuleBinding::CSSRuleConstants;
CSSRuleConstants::SUPPORTS_RULE
}
fn get_css(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
self.supportsrule.read_with(&guard).to_css_string(&guard).into()
}
}
|
{
let guard = self.cssconditionrule.shared_lock().read();
let rule = self.supportsrule.read_with(&guard);
rule.condition.to_css_string().into()
}
|
identifier_body
|
csssupportsrule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput};
use dom::bindings::codegen::Bindings::CSSSupportsRuleBinding;
|
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::cssconditionrule::CSSConditionRule;
use dom::cssrule::SpecificCSSRule;
use dom::cssstylesheet::CSSStyleSheet;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::parser::ParserContext;
use style::shared_lock::{Locked, ToCssWithGuard};
use style::stylesheets::{CssRuleType, SupportsRule};
use style::stylesheets::supports_rule::SupportsCondition;
use style_traits::{ParsingMode, ToCss};
#[dom_struct]
pub struct CSSSupportsRule {
cssconditionrule: CSSConditionRule,
#[ignore_malloc_size_of = "Arc"]
supportsrule: Arc<Locked<SupportsRule>>,
}
impl CSSSupportsRule {
fn new_inherited(parent_stylesheet: &CSSStyleSheet, supportsrule: Arc<Locked<SupportsRule>>)
-> CSSSupportsRule {
let guard = parent_stylesheet.shared_lock().read();
let list = supportsrule.read_with(&guard).rules.clone();
CSSSupportsRule {
cssconditionrule: CSSConditionRule::new_inherited(parent_stylesheet, list),
supportsrule: supportsrule,
}
}
#[allow(unrooted_must_root)]
pub fn new(window: &Window, parent_stylesheet: &CSSStyleSheet,
supportsrule: Arc<Locked<SupportsRule>>) -> DomRoot<CSSSupportsRule> {
reflect_dom_object(Box::new(CSSSupportsRule::new_inherited(parent_stylesheet, supportsrule)),
window,
CSSSupportsRuleBinding::Wrap)
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn get_condition_text(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
let rule = self.supportsrule.read_with(&guard);
rule.condition.to_css_string().into()
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn set_condition_text(&self, text: DOMString) {
let mut input = ParserInput::new(&text);
let mut input = Parser::new(&mut input);
let cond = SupportsCondition::parse(&mut input);
if let Ok(cond) = cond {
let global = self.global();
let win = global.as_window();
let url = win.Document().url();
let quirks_mode = win.Document().quirks_mode();
let context = ParserContext::new_for_cssom(
&url,
Some(CssRuleType::Supports),
ParsingMode::DEFAULT,
quirks_mode,
None,
);
let enabled = cond.eval(&context);
let mut guard = self.cssconditionrule.shared_lock().write();
let rule = self.supportsrule.write_with(&mut guard);
rule.condition = cond;
rule.enabled = enabled;
}
}
}
impl SpecificCSSRule for CSSSupportsRule {
fn ty(&self) -> u16 {
use dom::bindings::codegen::Bindings::CSSRuleBinding::CSSRuleConstants;
CSSRuleConstants::SUPPORTS_RULE
}
fn get_css(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
self.supportsrule.read_with(&guard).to_css_string(&guard).into()
}
}
|
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
|
random_line_split
|
csssupportsrule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput};
use dom::bindings::codegen::Bindings::CSSSupportsRuleBinding;
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::cssconditionrule::CSSConditionRule;
use dom::cssrule::SpecificCSSRule;
use dom::cssstylesheet::CSSStyleSheet;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::parser::ParserContext;
use style::shared_lock::{Locked, ToCssWithGuard};
use style::stylesheets::{CssRuleType, SupportsRule};
use style::stylesheets::supports_rule::SupportsCondition;
use style_traits::{ParsingMode, ToCss};
#[dom_struct]
pub struct CSSSupportsRule {
cssconditionrule: CSSConditionRule,
#[ignore_malloc_size_of = "Arc"]
supportsrule: Arc<Locked<SupportsRule>>,
}
impl CSSSupportsRule {
fn new_inherited(parent_stylesheet: &CSSStyleSheet, supportsrule: Arc<Locked<SupportsRule>>)
-> CSSSupportsRule {
let guard = parent_stylesheet.shared_lock().read();
let list = supportsrule.read_with(&guard).rules.clone();
CSSSupportsRule {
cssconditionrule: CSSConditionRule::new_inherited(parent_stylesheet, list),
supportsrule: supportsrule,
}
}
#[allow(unrooted_must_root)]
pub fn new(window: &Window, parent_stylesheet: &CSSStyleSheet,
supportsrule: Arc<Locked<SupportsRule>>) -> DomRoot<CSSSupportsRule> {
reflect_dom_object(Box::new(CSSSupportsRule::new_inherited(parent_stylesheet, supportsrule)),
window,
CSSSupportsRuleBinding::Wrap)
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn get_condition_text(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
let rule = self.supportsrule.read_with(&guard);
rule.condition.to_css_string().into()
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn set_condition_text(&self, text: DOMString) {
let mut input = ParserInput::new(&text);
let mut input = Parser::new(&mut input);
let cond = SupportsCondition::parse(&mut input);
if let Ok(cond) = cond
|
}
}
impl SpecificCSSRule for CSSSupportsRule {
fn ty(&self) -> u16 {
use dom::bindings::codegen::Bindings::CSSRuleBinding::CSSRuleConstants;
CSSRuleConstants::SUPPORTS_RULE
}
fn get_css(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
self.supportsrule.read_with(&guard).to_css_string(&guard).into()
}
}
|
{
let global = self.global();
let win = global.as_window();
let url = win.Document().url();
let quirks_mode = win.Document().quirks_mode();
let context = ParserContext::new_for_cssom(
&url,
Some(CssRuleType::Supports),
ParsingMode::DEFAULT,
quirks_mode,
None,
);
let enabled = cond.eval(&context);
let mut guard = self.cssconditionrule.shared_lock().write();
let rule = self.supportsrule.write_with(&mut guard);
rule.condition = cond;
rule.enabled = enabled;
}
|
conditional_block
|
csssupportsrule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput};
use dom::bindings::codegen::Bindings::CSSSupportsRuleBinding;
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::cssconditionrule::CSSConditionRule;
use dom::cssrule::SpecificCSSRule;
use dom::cssstylesheet::CSSStyleSheet;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::parser::ParserContext;
use style::shared_lock::{Locked, ToCssWithGuard};
use style::stylesheets::{CssRuleType, SupportsRule};
use style::stylesheets::supports_rule::SupportsCondition;
use style_traits::{ParsingMode, ToCss};
#[dom_struct]
pub struct CSSSupportsRule {
cssconditionrule: CSSConditionRule,
#[ignore_malloc_size_of = "Arc"]
supportsrule: Arc<Locked<SupportsRule>>,
}
impl CSSSupportsRule {
fn new_inherited(parent_stylesheet: &CSSStyleSheet, supportsrule: Arc<Locked<SupportsRule>>)
-> CSSSupportsRule {
let guard = parent_stylesheet.shared_lock().read();
let list = supportsrule.read_with(&guard).rules.clone();
CSSSupportsRule {
cssconditionrule: CSSConditionRule::new_inherited(parent_stylesheet, list),
supportsrule: supportsrule,
}
}
#[allow(unrooted_must_root)]
pub fn
|
(window: &Window, parent_stylesheet: &CSSStyleSheet,
supportsrule: Arc<Locked<SupportsRule>>) -> DomRoot<CSSSupportsRule> {
reflect_dom_object(Box::new(CSSSupportsRule::new_inherited(parent_stylesheet, supportsrule)),
window,
CSSSupportsRuleBinding::Wrap)
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn get_condition_text(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
let rule = self.supportsrule.read_with(&guard);
rule.condition.to_css_string().into()
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn set_condition_text(&self, text: DOMString) {
let mut input = ParserInput::new(&text);
let mut input = Parser::new(&mut input);
let cond = SupportsCondition::parse(&mut input);
if let Ok(cond) = cond {
let global = self.global();
let win = global.as_window();
let url = win.Document().url();
let quirks_mode = win.Document().quirks_mode();
let context = ParserContext::new_for_cssom(
&url,
Some(CssRuleType::Supports),
ParsingMode::DEFAULT,
quirks_mode,
None,
);
let enabled = cond.eval(&context);
let mut guard = self.cssconditionrule.shared_lock().write();
let rule = self.supportsrule.write_with(&mut guard);
rule.condition = cond;
rule.enabled = enabled;
}
}
}
impl SpecificCSSRule for CSSSupportsRule {
fn ty(&self) -> u16 {
use dom::bindings::codegen::Bindings::CSSRuleBinding::CSSRuleConstants;
CSSRuleConstants::SUPPORTS_RULE
}
fn get_css(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
self.supportsrule.read_with(&guard).to_css_string(&guard).into()
}
}
|
new
|
identifier_name
|
coherence.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! See `doc.rs` for high-level documentation */
use super::SelectionContext;
use super::Obligation;
use super::util;
use middle::subst;
use middle::subst::Subst;
use middle::ty;
use middle::typeck::infer::InferCtxt;
use syntax::ast;
use syntax::codemap::DUMMY_SP;
use util::ppaux::Repr;
pub fn impl_can_satisfy(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
{
debug!("impl_can_satisfy(\
impl1_def_id={}, \
impl2_def_id={})",
impl1_def_id.repr(infcx.tcx),
impl2_def_id.repr(infcx.tcx));
// `impl1` provides an implementation of `Foo<X,Y> for Z`.
let impl1_substs =
util::fresh_substs_for_impl(infcx, DUMMY_SP, impl1_def_id);
let impl1_trait_ref =
ty::impl_trait_ref(infcx.tcx, impl1_def_id).unwrap()
.subst(infcx.tcx, &impl1_substs);
// Determine whether `impl2` can provide an implementation for those
// same types.
let param_env = ty::empty_parameter_environment();
let mut selcx = SelectionContext::intercrate(infcx, &param_env, infcx.tcx);
let obligation = Obligation::misc(DUMMY_SP, impl1_trait_ref);
debug!("impl_can_satisfy obligation={}", obligation.repr(infcx.tcx));
selcx.evaluate_impl(impl2_def_id, &obligation)
}
pub fn impl_is_local(tcx: &ty::ctxt,
impl_def_id: ast::DefId)
-> bool
{
debug!("impl_is_local({})", impl_def_id.repr(tcx));
// We only expect this routine to be invoked on implementations
// of a trait, not inherent implementations.
let trait_ref = ty::impl_trait_ref(tcx, impl_def_id).unwrap();
debug!("trait_ref={}", trait_ref.repr(tcx));
// If the trait is local to the crate, ok.
if trait_ref.def_id.krate == ast::LOCAL_CRATE {
debug!("trait {} is local to current crate",
trait_ref.def_id.repr(tcx));
return true;
}
// Otherwise, at least one of the input types must be local to the
// crate.
trait_ref.input_types().iter().any(|&t| ty_is_local(tcx, t))
}
pub fn ty_is_local(tcx: &ty::ctxt,
ty: ty::t)
-> bool
{
debug!("ty_is_local({})", ty.repr(tcx));
match ty::get(ty).sty {
ty::ty_nil |
ty::ty_bool |
ty::ty_char |
ty::ty_int(..) |
ty::ty_uint(..) |
ty::ty_float(..) |
ty::ty_str(..) => {
false
}
ty::ty_unboxed_closure(..) => {
// This routine is invoked on types specified by users as
// part of an impl and hence an unboxed closure type
// cannot appear.
tcx.sess.bug("ty_is_local applied to unboxed closure type")
}
ty::ty_bare_fn(..) |
ty::ty_closure(..) => {
false
}
ty::ty_uniq(t) => {
let krate = tcx.lang_items.owned_box().map(|d| d.krate);
krate == Some(ast::LOCAL_CRATE) || ty_is_local(tcx, t)
}
ty::ty_vec(t, _) |
ty::ty_ptr(ty::mt { ty: t,.. }) |
|
}
ty::ty_tup(ref ts) => {
ts.iter().any(|&t| ty_is_local(tcx, t))
}
ty::ty_enum(def_id, ref substs) |
ty::ty_struct(def_id, ref substs) => {
def_id.krate == ast::LOCAL_CRATE || {
let variances = ty::item_variances(tcx, def_id);
subst::ParamSpace::all().iter().any(|&space| {
substs.types.get_slice(space).iter().enumerate().any(
|(i, &t)| {
match *variances.types.get(space, i) {
ty::Bivariant => {
// If Foo<T> is bivariant with respect to
// T, then it doesn't matter whether T is
// local or not, because `Foo<U>` for any
// U will be a subtype of T.
false
}
ty::Contravariant |
ty::Covariant |
ty::Invariant => {
ty_is_local(tcx, t)
}
}
})
})
}
}
ty::ty_trait(ref tt) => {
tt.principal.def_id.krate == ast::LOCAL_CRATE
}
// Type parameters may be bound to types that are not local to
// the crate.
ty::ty_param(..) => {
false
}
ty::ty_infer(..) |
ty::ty_open(..) |
ty::ty_err => {
tcx.sess.bug(
format!("ty_is_local invoked on unexpected type: {}",
ty.repr(tcx)).as_slice())
}
}
}
|
ty::ty_rptr(_, ty::mt { ty: t, .. }) => {
ty_is_local(tcx, t)
|
random_line_split
|
coherence.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! See `doc.rs` for high-level documentation */
use super::SelectionContext;
use super::Obligation;
use super::util;
use middle::subst;
use middle::subst::Subst;
use middle::ty;
use middle::typeck::infer::InferCtxt;
use syntax::ast;
use syntax::codemap::DUMMY_SP;
use util::ppaux::Repr;
pub fn
|
(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
{
debug!("impl_can_satisfy(\
impl1_def_id={}, \
impl2_def_id={})",
impl1_def_id.repr(infcx.tcx),
impl2_def_id.repr(infcx.tcx));
// `impl1` provides an implementation of `Foo<X,Y> for Z`.
let impl1_substs =
util::fresh_substs_for_impl(infcx, DUMMY_SP, impl1_def_id);
let impl1_trait_ref =
ty::impl_trait_ref(infcx.tcx, impl1_def_id).unwrap()
.subst(infcx.tcx, &impl1_substs);
// Determine whether `impl2` can provide an implementation for those
// same types.
let param_env = ty::empty_parameter_environment();
let mut selcx = SelectionContext::intercrate(infcx, &param_env, infcx.tcx);
let obligation = Obligation::misc(DUMMY_SP, impl1_trait_ref);
debug!("impl_can_satisfy obligation={}", obligation.repr(infcx.tcx));
selcx.evaluate_impl(impl2_def_id, &obligation)
}
pub fn impl_is_local(tcx: &ty::ctxt,
impl_def_id: ast::DefId)
-> bool
{
debug!("impl_is_local({})", impl_def_id.repr(tcx));
// We only expect this routine to be invoked on implementations
// of a trait, not inherent implementations.
let trait_ref = ty::impl_trait_ref(tcx, impl_def_id).unwrap();
debug!("trait_ref={}", trait_ref.repr(tcx));
// If the trait is local to the crate, ok.
if trait_ref.def_id.krate == ast::LOCAL_CRATE {
debug!("trait {} is local to current crate",
trait_ref.def_id.repr(tcx));
return true;
}
// Otherwise, at least one of the input types must be local to the
// crate.
trait_ref.input_types().iter().any(|&t| ty_is_local(tcx, t))
}
pub fn ty_is_local(tcx: &ty::ctxt,
ty: ty::t)
-> bool
{
debug!("ty_is_local({})", ty.repr(tcx));
match ty::get(ty).sty {
ty::ty_nil |
ty::ty_bool |
ty::ty_char |
ty::ty_int(..) |
ty::ty_uint(..) |
ty::ty_float(..) |
ty::ty_str(..) => {
false
}
ty::ty_unboxed_closure(..) => {
// This routine is invoked on types specified by users as
// part of an impl and hence an unboxed closure type
// cannot appear.
tcx.sess.bug("ty_is_local applied to unboxed closure type")
}
ty::ty_bare_fn(..) |
ty::ty_closure(..) => {
false
}
ty::ty_uniq(t) => {
let krate = tcx.lang_items.owned_box().map(|d| d.krate);
krate == Some(ast::LOCAL_CRATE) || ty_is_local(tcx, t)
}
ty::ty_vec(t, _) |
ty::ty_ptr(ty::mt { ty: t,.. }) |
ty::ty_rptr(_, ty::mt { ty: t,.. }) => {
ty_is_local(tcx, t)
}
ty::ty_tup(ref ts) => {
ts.iter().any(|&t| ty_is_local(tcx, t))
}
ty::ty_enum(def_id, ref substs) |
ty::ty_struct(def_id, ref substs) => {
def_id.krate == ast::LOCAL_CRATE || {
let variances = ty::item_variances(tcx, def_id);
subst::ParamSpace::all().iter().any(|&space| {
substs.types.get_slice(space).iter().enumerate().any(
|(i, &t)| {
match *variances.types.get(space, i) {
ty::Bivariant => {
// If Foo<T> is bivariant with respect to
// T, then it doesn't matter whether T is
// local or not, because `Foo<U>` for any
// U will be a subtype of T.
false
}
ty::Contravariant |
ty::Covariant |
ty::Invariant => {
ty_is_local(tcx, t)
}
}
})
})
}
}
ty::ty_trait(ref tt) => {
tt.principal.def_id.krate == ast::LOCAL_CRATE
}
// Type parameters may be bound to types that are not local to
// the crate.
ty::ty_param(..) => {
false
}
ty::ty_infer(..) |
ty::ty_open(..) |
ty::ty_err => {
tcx.sess.bug(
format!("ty_is_local invoked on unexpected type: {}",
ty.repr(tcx)).as_slice())
}
}
}
|
impl_can_satisfy
|
identifier_name
|
coherence.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! See `doc.rs` for high-level documentation */
use super::SelectionContext;
use super::Obligation;
use super::util;
use middle::subst;
use middle::subst::Subst;
use middle::ty;
use middle::typeck::infer::InferCtxt;
use syntax::ast;
use syntax::codemap::DUMMY_SP;
use util::ppaux::Repr;
pub fn impl_can_satisfy(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
{
debug!("impl_can_satisfy(\
impl1_def_id={}, \
impl2_def_id={})",
impl1_def_id.repr(infcx.tcx),
impl2_def_id.repr(infcx.tcx));
// `impl1` provides an implementation of `Foo<X,Y> for Z`.
let impl1_substs =
util::fresh_substs_for_impl(infcx, DUMMY_SP, impl1_def_id);
let impl1_trait_ref =
ty::impl_trait_ref(infcx.tcx, impl1_def_id).unwrap()
.subst(infcx.tcx, &impl1_substs);
// Determine whether `impl2` can provide an implementation for those
// same types.
let param_env = ty::empty_parameter_environment();
let mut selcx = SelectionContext::intercrate(infcx, &param_env, infcx.tcx);
let obligation = Obligation::misc(DUMMY_SP, impl1_trait_ref);
debug!("impl_can_satisfy obligation={}", obligation.repr(infcx.tcx));
selcx.evaluate_impl(impl2_def_id, &obligation)
}
pub fn impl_is_local(tcx: &ty::ctxt,
impl_def_id: ast::DefId)
-> bool
{
debug!("impl_is_local({})", impl_def_id.repr(tcx));
// We only expect this routine to be invoked on implementations
// of a trait, not inherent implementations.
let trait_ref = ty::impl_trait_ref(tcx, impl_def_id).unwrap();
debug!("trait_ref={}", trait_ref.repr(tcx));
// If the trait is local to the crate, ok.
if trait_ref.def_id.krate == ast::LOCAL_CRATE
|
// Otherwise, at least one of the input types must be local to the
// crate.
trait_ref.input_types().iter().any(|&t| ty_is_local(tcx, t))
}
pub fn ty_is_local(tcx: &ty::ctxt,
ty: ty::t)
-> bool
{
debug!("ty_is_local({})", ty.repr(tcx));
match ty::get(ty).sty {
ty::ty_nil |
ty::ty_bool |
ty::ty_char |
ty::ty_int(..) |
ty::ty_uint(..) |
ty::ty_float(..) |
ty::ty_str(..) => {
false
}
ty::ty_unboxed_closure(..) => {
// This routine is invoked on types specified by users as
// part of an impl and hence an unboxed closure type
// cannot appear.
tcx.sess.bug("ty_is_local applied to unboxed closure type")
}
ty::ty_bare_fn(..) |
ty::ty_closure(..) => {
false
}
ty::ty_uniq(t) => {
let krate = tcx.lang_items.owned_box().map(|d| d.krate);
krate == Some(ast::LOCAL_CRATE) || ty_is_local(tcx, t)
}
ty::ty_vec(t, _) |
ty::ty_ptr(ty::mt { ty: t,.. }) |
ty::ty_rptr(_, ty::mt { ty: t,.. }) => {
ty_is_local(tcx, t)
}
ty::ty_tup(ref ts) => {
ts.iter().any(|&t| ty_is_local(tcx, t))
}
ty::ty_enum(def_id, ref substs) |
ty::ty_struct(def_id, ref substs) => {
def_id.krate == ast::LOCAL_CRATE || {
let variances = ty::item_variances(tcx, def_id);
subst::ParamSpace::all().iter().any(|&space| {
substs.types.get_slice(space).iter().enumerate().any(
|(i, &t)| {
match *variances.types.get(space, i) {
ty::Bivariant => {
// If Foo<T> is bivariant with respect to
// T, then it doesn't matter whether T is
// local or not, because `Foo<U>` for any
// U will be a subtype of T.
false
}
ty::Contravariant |
ty::Covariant |
ty::Invariant => {
ty_is_local(tcx, t)
}
}
})
})
}
}
ty::ty_trait(ref tt) => {
tt.principal.def_id.krate == ast::LOCAL_CRATE
}
// Type parameters may be bound to types that are not local to
// the crate.
ty::ty_param(..) => {
false
}
ty::ty_infer(..) |
ty::ty_open(..) |
ty::ty_err => {
tcx.sess.bug(
format!("ty_is_local invoked on unexpected type: {}",
ty.repr(tcx)).as_slice())
}
}
}
|
{
debug!("trait {} is local to current crate",
trait_ref.def_id.repr(tcx));
return true;
}
|
conditional_block
|
coherence.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! See `doc.rs` for high-level documentation */
use super::SelectionContext;
use super::Obligation;
use super::util;
use middle::subst;
use middle::subst::Subst;
use middle::ty;
use middle::typeck::infer::InferCtxt;
use syntax::ast;
use syntax::codemap::DUMMY_SP;
use util::ppaux::Repr;
pub fn impl_can_satisfy(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
|
selcx.evaluate_impl(impl2_def_id, &obligation)
}
pub fn impl_is_local(tcx: &ty::ctxt,
impl_def_id: ast::DefId)
-> bool
{
debug!("impl_is_local({})", impl_def_id.repr(tcx));
// We only expect this routine to be invoked on implementations
// of a trait, not inherent implementations.
let trait_ref = ty::impl_trait_ref(tcx, impl_def_id).unwrap();
debug!("trait_ref={}", trait_ref.repr(tcx));
// If the trait is local to the crate, ok.
if trait_ref.def_id.krate == ast::LOCAL_CRATE {
debug!("trait {} is local to current crate",
trait_ref.def_id.repr(tcx));
return true;
}
// Otherwise, at least one of the input types must be local to the
// crate.
trait_ref.input_types().iter().any(|&t| ty_is_local(tcx, t))
}
pub fn ty_is_local(tcx: &ty::ctxt,
ty: ty::t)
-> bool
{
debug!("ty_is_local({})", ty.repr(tcx));
match ty::get(ty).sty {
ty::ty_nil |
ty::ty_bool |
ty::ty_char |
ty::ty_int(..) |
ty::ty_uint(..) |
ty::ty_float(..) |
ty::ty_str(..) => {
false
}
ty::ty_unboxed_closure(..) => {
// This routine is invoked on types specified by users as
// part of an impl and hence an unboxed closure type
// cannot appear.
tcx.sess.bug("ty_is_local applied to unboxed closure type")
}
ty::ty_bare_fn(..) |
ty::ty_closure(..) => {
false
}
ty::ty_uniq(t) => {
let krate = tcx.lang_items.owned_box().map(|d| d.krate);
krate == Some(ast::LOCAL_CRATE) || ty_is_local(tcx, t)
}
ty::ty_vec(t, _) |
ty::ty_ptr(ty::mt { ty: t,.. }) |
ty::ty_rptr(_, ty::mt { ty: t,.. }) => {
ty_is_local(tcx, t)
}
ty::ty_tup(ref ts) => {
ts.iter().any(|&t| ty_is_local(tcx, t))
}
ty::ty_enum(def_id, ref substs) |
ty::ty_struct(def_id, ref substs) => {
def_id.krate == ast::LOCAL_CRATE || {
let variances = ty::item_variances(tcx, def_id);
subst::ParamSpace::all().iter().any(|&space| {
substs.types.get_slice(space).iter().enumerate().any(
|(i, &t)| {
match *variances.types.get(space, i) {
ty::Bivariant => {
// If Foo<T> is bivariant with respect to
// T, then it doesn't matter whether T is
// local or not, because `Foo<U>` for any
// U will be a subtype of T.
false
}
ty::Contravariant |
ty::Covariant |
ty::Invariant => {
ty_is_local(tcx, t)
}
}
})
})
}
}
ty::ty_trait(ref tt) => {
tt.principal.def_id.krate == ast::LOCAL_CRATE
}
// Type parameters may be bound to types that are not local to
// the crate.
ty::ty_param(..) => {
false
}
ty::ty_infer(..) |
ty::ty_open(..) |
ty::ty_err => {
tcx.sess.bug(
format!("ty_is_local invoked on unexpected type: {}",
ty.repr(tcx)).as_slice())
}
}
}
|
{
debug!("impl_can_satisfy(\
impl1_def_id={}, \
impl2_def_id={})",
impl1_def_id.repr(infcx.tcx),
impl2_def_id.repr(infcx.tcx));
// `impl1` provides an implementation of `Foo<X,Y> for Z`.
let impl1_substs =
util::fresh_substs_for_impl(infcx, DUMMY_SP, impl1_def_id);
let impl1_trait_ref =
ty::impl_trait_ref(infcx.tcx, impl1_def_id).unwrap()
.subst(infcx.tcx, &impl1_substs);
// Determine whether `impl2` can provide an implementation for those
// same types.
let param_env = ty::empty_parameter_environment();
let mut selcx = SelectionContext::intercrate(infcx, &param_env, infcx.tcx);
let obligation = Obligation::misc(DUMMY_SP, impl1_trait_ref);
debug!("impl_can_satisfy obligation={}", obligation.repr(infcx.tcx));
|
identifier_body
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![cfg_attr(feature = "servo", feature(custom_attribute))]
#![cfg_attr(feature = "servo", feature(custom_derive))]
#![cfg_attr(feature = "servo", feature(plugin))]
#![cfg_attr(feature = "servo", plugin(heapsize_plugin))]
#![cfg_attr(feature = "servo", plugin(plugins))]
#![cfg_attr(feature = "servo", plugin(serde_macros))]
#![deny(unsafe_code)]
#![recursion_limit = "500"] // For match_ignore_ascii_case in PropertyDeclaration::parse
extern crate app_units;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate bitflags;
#[macro_use] #[no_link]
extern crate cfg_if;
extern crate core;
#[macro_use]
extern crate cssparser;
extern crate deque;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate heapsize;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate lazy_static;
extern crate libc;
#[macro_use]
extern crate log;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
extern crate num_integer;
extern crate num_traits;
#[cfg(feature = "gecko")] extern crate num_cpus;
extern crate ordered_float;
extern crate parking_lot;
extern crate quickersort;
extern crate rand;
extern crate rustc_serialize;
extern crate selectors;
#[cfg(feature = "servo")]
extern crate serde;
extern crate smallvec;
#[cfg(feature = "servo")] #[macro_use] extern crate string_cache;
#[macro_use]
extern crate style_traits;
extern crate time;
extern crate url;
extern crate util;
#[cfg(feature = "gecko")]
#[path = "./gecko_string_cache/mod.rs"]
#[macro_use] pub mod string_cache;
pub mod animation;
pub mod atomic_refcell;
pub mod attr;
pub mod bezier;
pub mod cache;
pub mod cascade_info;
pub mod context;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod element_state;
pub mod error_reporting;
pub mod font_face;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings;
pub mod keyframes;
pub mod logical_geometry;
pub mod matching;
pub mod media_queries;
pub mod parallel;
pub mod parser;
pub mod refcell;
pub mod restyle_hints;
pub mod selector_impl;
pub mod selector_matching;
pub mod sequential;
#[cfg(feature = "servo")] pub mod servo_selector_impl;
pub mod sink;
pub mod str;
pub mod stylesheets;
pub mod thread_state;
mod tid;
pub mod timer;
pub mod traversal;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
pub mod viewport;
pub mod workqueue;
use std::sync::Arc;
/// The CSS properties supported by the style system.
// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( $name: ident )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
/// Returns whether the two arguments point to the same value.
#[inline]
pub fn
|
<T:'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}
|
arc_ptr_eq
|
identifier_name
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![cfg_attr(feature = "servo", feature(custom_attribute))]
#![cfg_attr(feature = "servo", feature(custom_derive))]
#![cfg_attr(feature = "servo", feature(plugin))]
#![cfg_attr(feature = "servo", plugin(heapsize_plugin))]
#![cfg_attr(feature = "servo", plugin(plugins))]
#![cfg_attr(feature = "servo", plugin(serde_macros))]
#![deny(unsafe_code)]
#![recursion_limit = "500"] // For match_ignore_ascii_case in PropertyDeclaration::parse
extern crate app_units;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate bitflags;
#[macro_use] #[no_link]
extern crate cfg_if;
extern crate core;
#[macro_use]
extern crate cssparser;
extern crate deque;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate heapsize;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate lazy_static;
extern crate libc;
#[macro_use]
extern crate log;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
extern crate num_integer;
extern crate num_traits;
#[cfg(feature = "gecko")] extern crate num_cpus;
extern crate ordered_float;
extern crate parking_lot;
extern crate quickersort;
extern crate rand;
extern crate rustc_serialize;
extern crate selectors;
#[cfg(feature = "servo")]
extern crate serde;
extern crate smallvec;
#[cfg(feature = "servo")] #[macro_use] extern crate string_cache;
#[macro_use]
extern crate style_traits;
extern crate time;
extern crate url;
extern crate util;
#[cfg(feature = "gecko")]
#[path = "./gecko_string_cache/mod.rs"]
#[macro_use] pub mod string_cache;
pub mod animation;
pub mod atomic_refcell;
pub mod attr;
pub mod bezier;
pub mod cache;
pub mod cascade_info;
pub mod context;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod element_state;
pub mod error_reporting;
pub mod font_face;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings;
pub mod keyframes;
pub mod logical_geometry;
pub mod matching;
pub mod media_queries;
pub mod parallel;
pub mod parser;
pub mod refcell;
pub mod restyle_hints;
pub mod selector_impl;
pub mod selector_matching;
pub mod sequential;
#[cfg(feature = "servo")] pub mod servo_selector_impl;
pub mod sink;
pub mod str;
pub mod stylesheets;
pub mod thread_state;
mod tid;
pub mod timer;
pub mod traversal;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
pub mod viewport;
pub mod workqueue;
use std::sync::Arc;
/// The CSS properties supported by the style system.
// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( $name: ident )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
/// Returns whether the two arguments point to the same value.
#[inline]
pub fn arc_ptr_eq<T:'static>(a: &Arc<T>, b: &Arc<T>) -> bool
|
{
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}
|
identifier_body
|
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Calculate [specified][specified] and [computed values][computed] from a
//! tree of DOM nodes and a set of stylesheets.
//!
//! [computed]: https://drafts.csswg.org/css-cascade/#computed
//! [specified]: https://drafts.csswg.org/css-cascade/#specified
//!
//! In particular, this crate contains the definitions of supported properties,
//! the code to parse them into specified values and calculate the computed
//! values based on the specified values, as well as the code to serialize both
//! specified and computed values.
//!
//! The main entry point is [`recalc_style_at`][recalc_style_at].
//!
//! [recalc_style_at]: traversal/fn.recalc_style_at.html
//!
//! Major dependencies are the [cssparser][cssparser] and [selectors][selectors]
//! crates.
//!
//! [cssparser]:../cssparser/index.html
//! [selectors]:../selectors/index.html
#![cfg_attr(feature = "servo", feature(custom_attribute))]
#![cfg_attr(feature = "servo", feature(custom_derive))]
#![cfg_attr(feature = "servo", feature(plugin))]
#![cfg_attr(feature = "servo", plugin(heapsize_plugin))]
#![cfg_attr(feature = "servo", plugin(plugins))]
#![cfg_attr(feature = "servo", plugin(serde_macros))]
#![deny(unsafe_code)]
#![recursion_limit = "500"] // For match_ignore_ascii_case in PropertyDeclaration::parse
extern crate app_units;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate bitflags;
#[macro_use] #[no_link]
extern crate cfg_if;
extern crate core;
#[macro_use]
extern crate cssparser;
extern crate deque;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate heapsize;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate lazy_static;
extern crate libc;
#[macro_use]
extern crate log;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate matches;
extern crate num_integer;
extern crate num_traits;
#[cfg(feature = "gecko")] extern crate num_cpus;
extern crate ordered_float;
extern crate parking_lot;
extern crate quickersort;
extern crate rand;
extern crate rustc_serialize;
extern crate selectors;
#[cfg(feature = "servo")]
extern crate serde;
extern crate smallvec;
#[cfg(feature = "servo")] #[macro_use] extern crate string_cache;
#[macro_use]
extern crate style_traits;
extern crate time;
extern crate url;
extern crate util;
#[cfg(feature = "gecko")]
#[path = "./gecko_string_cache/mod.rs"]
#[macro_use] pub mod string_cache;
pub mod animation;
pub mod atomic_refcell;
pub mod attr;
pub mod bezier;
pub mod cache;
pub mod cascade_info;
pub mod context;
pub mod custom_properties;
pub mod data;
pub mod dom;
pub mod element_state;
pub mod error_reporting;
pub mod font_face;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko;
#[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings;
pub mod keyframes;
pub mod logical_geometry;
pub mod matching;
pub mod media_queries;
pub mod parallel;
pub mod parser;
pub mod refcell;
pub mod restyle_hints;
pub mod selector_impl;
pub mod selector_matching;
pub mod sequential;
#[cfg(feature = "servo")] pub mod servo_selector_impl;
pub mod sink;
pub mod str;
pub mod stylesheets;
pub mod thread_state;
mod tid;
pub mod timer;
pub mod traversal;
#[macro_use]
#[allow(non_camel_case_types)]
pub mod values;
pub mod viewport;
pub mod workqueue;
use std::sync::Arc;
/// The CSS properties supported by the style system.
// Generated from the properties.mako.rs template by build.rs
#[macro_use]
#[allow(unsafe_code)]
pub mod properties {
include!(concat!(env!("OUT_DIR"), "/properties.rs"));
}
#[cfg(feature = "gecko")]
#[allow(unsafe_code)]
pub mod gecko_properties {
include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs"));
}
macro_rules! reexport_computed_values {
( $( $name: ident )+ ) => {
/// Types for [computed values][computed].
///
/// [computed]: https://drafts.csswg.org/css-cascade/#computed
pub mod computed_values {
$(
pub use properties::longhands::$name::computed_value as $name;
)+
// Don't use a side-specific name needlessly:
pub use properties::longhands::border_top_style::computed_value as border_style;
}
}
}
longhand_properties_idents!(reexport_computed_values);
/// Returns whether the two arguments point to the same value.
#[inline]
pub fn arc_ptr_eq<T:'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
|
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}
|
random_line_split
|
|
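A note on the arc_ptr_eq helper shown in the style-crate sample above: it compares Arc handles by allocation address, not by value. Below is a minimal, std-only sketch of that distinction; the helper is restated locally purely for illustration and is not imported from the style crate.

use std::sync::Arc;

// Restated locally for illustration; mirrors the helper in the sample above.
fn arc_ptr_eq<T: 'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
    let a: &T = &**a;
    let b: &T = &**b;
    (a as *const T) == (b as *const T)
}

fn main() {
    let a = Arc::new(String::from("style"));
    let b = Arc::clone(&a);                   // same allocation
    let c = Arc::new(String::from("style"));  // equal value, different allocation

    assert!(arc_ptr_eq(&a, &b));  // true: both handles share one allocation
    assert!(!arc_ptr_eq(&a, &c)); // false: values are equal, pointers are not
}

Presumably this pointer-level check lets callers skip deeper comparisons of shared data, but that rationale is an inference, not something stated in the sample itself.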
main.rs
|
use crossbeam_channel::{bounded, select};
use crossbeam_utils::thread;
fn
|
() {
let people = vec!["Anna", "Bob", "Cody", "Dave", "Eva"];
let (s, r) = bounded(1); // Make room for one unmatched send.
// Either send my name into the channel or receive someone else's, whatever happens first.
let seek = |name, s, r| {
select! {
recv(r) -> peer => println!("{} received a message from {}.", name, peer.unwrap()),
send(s, name) -> _ => {}, // Wait for someone to receive my message.
}
};
thread::scope(|scope| {
for name in people {
let (s, r) = (s.clone(), r.clone());
scope.spawn(move |_| seek(name, s, r));
}
})
.unwrap();
// Check if there is a pending send operation.
if let Ok(name) = r.try_recv() {
println!("No one received {}’s message.", name);
}
}
|
main
|
identifier_name
|
main.rs
|
let (s, r) = bounded(1); // Make room for one unmatched send.
// Either send my name into the channel or receive someone else's, whatever happens first.
let seek = |name, s, r| {
select! {
recv(r) -> peer => println!("{} received a message from {}.", name, peer.unwrap()),
send(s, name) -> _ => {}, // Wait for someone to receive my message.
}
};
thread::scope(|scope| {
for name in people {
let (s, r) = (s.clone(), r.clone());
scope.spawn(move |_| seek(name, s, r));
}
})
.unwrap();
// Check if there is a pending send operation.
if let Ok(name) = r.try_recv() {
println!("No one received {}’s message.", name);
}
}
|
use crossbeam_channel::{bounded, select};
use crossbeam_utils::thread;
fn main() {
let people = vec!["Anna", "Bob", "Cody", "Dave", "Eva"];
|
random_line_split
|
|
main.rs
|
use crossbeam_channel::{bounded, select};
use crossbeam_utils::thread;
fn main() {
let people = vec!["Anna", "Bob", "Cody", "Dave", "Eva"];
let (s, r) = bounded(1); // Make room for one unmatched send.
// Either send my name into the channel or receive someone else's, whatever happens first.
let seek = |name, s, r| {
select! {
recv(r) -> peer => println!("{} received a message from {}.", name, peer.unwrap()),
send(s, name) -> _ => {}, // Wait for someone to receive my message.
}
};
thread::scope(|scope| {
for name in people {
let (s, r) = (s.clone(), r.clone());
scope.spawn(move |_| seek(name, s, r));
}
})
.unwrap();
// Check if there is a pending send operation.
if let Ok(name) = r.try_recv()
|
{
println!("No one received {}’s message.", name);
}
}
|
conditional_block
|
|
main.rs
|
use crossbeam_channel::{bounded, select};
use crossbeam_utils::thread;
fn main()
|
// Check if there is a pending send operation.
if let Ok(name) = r.try_recv() {
println!("No one received {}’s message.", name);
}
}
|
{
let people = vec!["Anna", "Bob", "Cody", "Dave", "Eva"];
let (s, r) = bounded(1); // Make room for one unmatched send.
// Either send my name into the channel or receive someone else's, whatever happens first.
let seek = |name, s, r| {
select! {
recv(r) -> peer => println!("{} received a message from {}.", name, peer.unwrap()),
send(s, name) -> _ => {}, // Wait for someone to receive my message.
}
};
thread::scope(|scope| {
for name in people {
let (s, r) = (s.clone(), r.clone());
scope.spawn(move |_| seek(name, s, r));
}
})
.unwrap();
|
identifier_body
|
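The crossbeam matchmaking samples above hinge on select! racing a send against a receive. As a separate, hypothetical sketch of the same primitive (assuming the crossbeam-channel crate; the worker delay and timeout values are made up for illustration), select! can also race a receive against a timeout via crossbeam_channel::after:

use std::thread;
use std::time::Duration;

use crossbeam_channel::{after, select, unbounded};

fn main() {
    let (s, r) = unbounded::<&'static str>();

    // A worker that replies after a short delay.
    thread::spawn(move || {
        thread::sleep(Duration::from_millis(50));
        let _ = s.send("done");
    });

    // Race the reply against a 200 ms timeout, mirroring the select! usage above.
    select! {
        recv(r) -> msg => println!("worker said: {:?}", msg),
        recv(after(Duration::from_millis(200))) -> _ => println!("timed out"),
    }
}

Whichever operation becomes ready first wins; the other arm is simply not executed.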
game.rs
|
use std::io;
use std::io::Stdin;
use std::error::Error;
use commands::InputCommand;
use game_state::GameState;
use settings::Setting;
use updates::Update;
use player::Player;
use action::Action;
use strategies::Strategy;
const CHARACTER: &str = "bixie";
#[derive(Debug)]
enum GameStatus {
New,
Started,
}
#[derive(Debug)]
pub struct Game {
stdin: Stdin,
character: &'static str,
status: GameStatus,
state: Option<GameState>,
strategy: Strategy,
timebank: Option<usize>,
time_per_move: Option<usize>,
player_names: Option<String>,
your_bot: Option<String>,
your_botid: Option<usize>,
field_width: Option<i32>,
field_height: Option<i32>,
max_rounds: Option<usize>,
player: Option<Player>,
enemy: Option<Player>,
round: usize,
}
impl Game {
pub fn new(stdin: io::Stdin, strategy: Strategy) -> Game {
Game {
stdin,
status: GameStatus::New,
character: CHARACTER,
state: None,
timebank: None,
time_per_move: None,
player_names: None,
your_bot: None,
your_botid: None,
field_width: None,
field_height: None,
max_rounds: None,
player: None,
enemy: None,
round: 0,
strategy,
}
}
pub fn start(&mut self) -> Result<(), Box<Error>> {
loop {
let mut command_str = String::new();
self.stdin.read_line(&mut command_str).unwrap();
self.parse_command(&command_str).unwrap();
match self.status {
GameStatus::New => (),
GameStatus::Started => {
if self.round == self.max_rounds.unwrap() {
continue;
}
}
}
}
}
fn parse_command(&mut self, cmd: &str) -> Result<(), &'static str> {
let command = InputCommand::new(cmd).unwrap();
match command {
InputCommand::Setting(val) => self.add_settings(val),
InputCommand::Update(val) => self.update_game(val),
InputCommand::Action(val) => self.perform_action(val),
}
Ok(())
}
fn add_settings(&mut self, setting: Setting) {
match setting {
Setting::YourBot(name) =>
|
Setting::YourBotID(id) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
player.set_id(id);
self.player = Some(player);
}
self.your_botid = Some(id);
}
Setting::Timebank(val) => self.timebank = Some(val),
Setting::TimePerMove(val) => self.time_per_move = Some(val),
Setting::PlayerNames(val) => self.player_names = Some(val),
Setting::FieldWidth(val) => self.field_width = Some(val),
Setting::FieldHeight(val) => self.field_height = Some(val),
Setting::MaxRounds(val) => self.max_rounds = Some(val),
}
}
fn update_game(&mut self, update: Update) {
match update {
Update::GameRound(round) => self.round = round,
Update::GameField(state) => {
match self.status {
GameStatus::Started => (),
GameStatus::New => self.status = GameStatus::Started,
}
self.update_game_state(state)
}
Update::PlayerSnippets((player_name, snippets)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.snippets = snippets;
}
                    // For now, let's ignore enemies' snippets
self.player = Some(player);
}
}
Update::PlayerBombs((player_name, bombs)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.bombs = bombs;
}
                    // For now, let's ignore enemies' bombs
self.player = Some(player);
}
}
}
}
fn perform_action(&mut self, action: Action) {
match action {
Action::Character(_) => println!("{}", self.character),
Action::Move(_) => {
// For now I'm just ignoring timebank management
// Todo: Use timebank information
self.strategy.run(self.state.clone())
}
}
}
fn update_game_state(&mut self, state: String) {
// For now, the game will replace the current state whenever
        // a new state is provided. Ideally the game should always store
        // previous states and perform a diff with the provided state,
        // so it will have context (like the direction of the entities).
//
// TODO: Implement state persistency and diff to store context
self.state = Some(GameState::new(
&state,
self.field_width.unwrap(),
self.field_height.unwrap(),
self.your_botid.unwrap())
);
}
}
|
{
self.player = Some(Player::new(name.clone()));
self.your_bot = Some(name);
}
|
conditional_block
|
game.rs
|
use std::io;
use std::io::Stdin;
use std::error::Error;
use commands::InputCommand;
use game_state::GameState;
use settings::Setting;
use updates::Update;
use player::Player;
use action::Action;
use strategies::Strategy;
const CHARACTER: &str = "bixie";
#[derive(Debug)]
enum GameStatus {
New,
Started,
}
#[derive(Debug)]
pub struct Game {
stdin: Stdin,
character: &'static str,
status: GameStatus,
state: Option<GameState>,
strategy: Strategy,
timebank: Option<usize>,
time_per_move: Option<usize>,
player_names: Option<String>,
your_bot: Option<String>,
your_botid: Option<usize>,
field_width: Option<i32>,
field_height: Option<i32>,
max_rounds: Option<usize>,
player: Option<Player>,
enemy: Option<Player>,
round: usize,
}
impl Game {
pub fn new(stdin: io::Stdin, strategy: Strategy) -> Game {
Game {
stdin,
status: GameStatus::New,
character: CHARACTER,
state: None,
timebank: None,
time_per_move: None,
player_names: None,
your_bot: None,
your_botid: None,
field_width: None,
field_height: None,
max_rounds: None,
player: None,
enemy: None,
round: 0,
strategy,
}
}
pub fn start(&mut self) -> Result<(), Box<Error>> {
loop {
let mut command_str = String::new();
|
self.parse_command(&command_str).unwrap();
match self.status {
GameStatus::New => (),
GameStatus::Started => {
if self.round == self.max_rounds.unwrap() {
continue;
}
}
}
}
}
fn parse_command(&mut self, cmd: &str) -> Result<(), &'static str> {
let command = InputCommand::new(cmd).unwrap();
match command {
InputCommand::Setting(val) => self.add_settings(val),
InputCommand::Update(val) => self.update_game(val),
InputCommand::Action(val) => self.perform_action(val),
}
Ok(())
}
fn add_settings(&mut self, setting: Setting) {
match setting {
Setting::YourBot(name) => {
self.player = Some(Player::new(name.clone()));
self.your_bot = Some(name);
}
Setting::YourBotID(id) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
player.set_id(id);
self.player = Some(player);
}
self.your_botid = Some(id);
}
Setting::Timebank(val) => self.timebank = Some(val),
Setting::TimePerMove(val) => self.time_per_move = Some(val),
Setting::PlayerNames(val) => self.player_names = Some(val),
Setting::FieldWidth(val) => self.field_width = Some(val),
Setting::FieldHeight(val) => self.field_height = Some(val),
Setting::MaxRounds(val) => self.max_rounds = Some(val),
}
}
fn update_game(&mut self, update: Update) {
match update {
Update::GameRound(round) => self.round = round,
Update::GameField(state) => {
match self.status {
GameStatus::Started => (),
GameStatus::New => self.status = GameStatus::Started,
}
self.update_game_state(state)
}
Update::PlayerSnippets((player_name, snippets)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.snippets = snippets;
}
                    // For now, let's ignore enemies' snippets
self.player = Some(player);
}
}
Update::PlayerBombs((player_name, bombs)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.bombs = bombs;
}
                    // For now, let's ignore enemies' bombs
self.player = Some(player);
}
}
}
}
fn perform_action(&mut self, action: Action) {
match action {
Action::Character(_) => println!("{}", self.character),
Action::Move(_) => {
// For now I'm just ignoring timebank management
// Todo: Use timebank information
self.strategy.run(self.state.clone())
}
}
}
fn update_game_state(&mut self, state: String) {
// For now, the game will replace the current state whenever
        // a new state is provided. Ideally the game should always store
        // previous states and perform a diff with the provided state,
        // so it will have context (like the direction of the entities).
//
// TODO: Implement state persistency and diff to store context
self.state = Some(GameState::new(
&state,
self.field_width.unwrap(),
self.field_height.unwrap(),
self.your_botid.unwrap())
);
}
}
|
self.stdin.read_line(&mut command_str).unwrap();
|
random_line_split
|
game.rs
|
use std::io;
use std::io::Stdin;
use std::error::Error;
use commands::InputCommand;
use game_state::GameState;
use settings::Setting;
use updates::Update;
use player::Player;
use action::Action;
use strategies::Strategy;
const CHARACTER: &str = "bixie";
#[derive(Debug)]
enum GameStatus {
New,
Started,
}
#[derive(Debug)]
pub struct Game {
stdin: Stdin,
character: &'static str,
status: GameStatus,
state: Option<GameState>,
strategy: Strategy,
timebank: Option<usize>,
time_per_move: Option<usize>,
player_names: Option<String>,
your_bot: Option<String>,
your_botid: Option<usize>,
field_width: Option<i32>,
field_height: Option<i32>,
max_rounds: Option<usize>,
player: Option<Player>,
enemy: Option<Player>,
round: usize,
}
impl Game {
pub fn new(stdin: io::Stdin, strategy: Strategy) -> Game {
Game {
stdin,
status: GameStatus::New,
character: CHARACTER,
state: None,
timebank: None,
time_per_move: None,
player_names: None,
your_bot: None,
your_botid: None,
field_width: None,
field_height: None,
max_rounds: None,
player: None,
enemy: None,
round: 0,
strategy,
}
}
pub fn start(&mut self) -> Result<(), Box<Error>> {
loop {
let mut command_str = String::new();
self.stdin.read_line(&mut command_str).unwrap();
self.parse_command(&command_str).unwrap();
match self.status {
GameStatus::New => (),
GameStatus::Started => {
if self.round == self.max_rounds.unwrap() {
continue;
}
}
}
}
}
fn parse_command(&mut self, cmd: &str) -> Result<(), &'static str> {
let command = InputCommand::new(cmd).unwrap();
match command {
InputCommand::Setting(val) => self.add_settings(val),
InputCommand::Update(val) => self.update_game(val),
InputCommand::Action(val) => self.perform_action(val),
}
Ok(())
}
fn add_settings(&mut self, setting: Setting) {
match setting {
Setting::YourBot(name) => {
self.player = Some(Player::new(name.clone()));
self.your_bot = Some(name);
}
Setting::YourBotID(id) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
player.set_id(id);
self.player = Some(player);
}
self.your_botid = Some(id);
}
Setting::Timebank(val) => self.timebank = Some(val),
Setting::TimePerMove(val) => self.time_per_move = Some(val),
Setting::PlayerNames(val) => self.player_names = Some(val),
Setting::FieldWidth(val) => self.field_width = Some(val),
Setting::FieldHeight(val) => self.field_height = Some(val),
Setting::MaxRounds(val) => self.max_rounds = Some(val),
}
}
fn update_game(&mut self, update: Update) {
match update {
Update::GameRound(round) => self.round = round,
Update::GameField(state) => {
match self.status {
GameStatus::Started => (),
GameStatus::New => self.status = GameStatus::Started,
}
self.update_game_state(state)
}
Update::PlayerSnippets((player_name, snippets)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.snippets = snippets;
}
                    // For now, let's ignore enemies' snippets
self.player = Some(player);
}
}
Update::PlayerBombs((player_name, bombs)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.bombs = bombs;
}
                    // For now, let's ignore enemies' bombs
self.player = Some(player);
}
}
}
}
fn perform_action(&mut self, action: Action)
|
fn update_game_state(&mut self, state: String) {
// For now, the game will replace the current state whenever
        // a new state is provided. Ideally the game should always store
        // previous states and perform a diff with the provided state,
        // so it will have context (like the direction of the entities).
//
// TODO: Implement state persistency and diff to store context
self.state = Some(GameState::new(
&state,
self.field_width.unwrap(),
self.field_height.unwrap(),
self.your_botid.unwrap())
);
}
}
|
{
match action {
Action::Character(_) => println!("{}", self.character),
Action::Move(_) => {
// For now I'm just ignoring timebank management
// Todo: Use timebank information
self.strategy.run(self.state.clone())
}
}
}
|
identifier_body
|
game.rs
|
use std::io;
use std::io::Stdin;
use std::error::Error;
use commands::InputCommand;
use game_state::GameState;
use settings::Setting;
use updates::Update;
use player::Player;
use action::Action;
use strategies::Strategy;
const CHARACTER: &str = "bixie";
#[derive(Debug)]
enum GameStatus {
New,
Started,
}
#[derive(Debug)]
pub struct Game {
stdin: Stdin,
character: &'static str,
status: GameStatus,
state: Option<GameState>,
strategy: Strategy,
timebank: Option<usize>,
time_per_move: Option<usize>,
player_names: Option<String>,
your_bot: Option<String>,
your_botid: Option<usize>,
field_width: Option<i32>,
field_height: Option<i32>,
max_rounds: Option<usize>,
player: Option<Player>,
enemy: Option<Player>,
round: usize,
}
impl Game {
pub fn new(stdin: io::Stdin, strategy: Strategy) -> Game {
Game {
stdin,
status: GameStatus::New,
character: CHARACTER,
state: None,
timebank: None,
time_per_move: None,
player_names: None,
your_bot: None,
your_botid: None,
field_width: None,
field_height: None,
max_rounds: None,
player: None,
enemy: None,
round: 0,
strategy,
}
}
pub fn start(&mut self) -> Result<(), Box<Error>> {
loop {
let mut command_str = String::new();
self.stdin.read_line(&mut command_str).unwrap();
self.parse_command(&command_str).unwrap();
match self.status {
GameStatus::New => (),
GameStatus::Started => {
if self.round == self.max_rounds.unwrap() {
continue;
}
}
}
}
}
fn
|
(&mut self, cmd: &str) -> Result<(), &'static str> {
let command = InputCommand::new(cmd).unwrap();
match command {
InputCommand::Setting(val) => self.add_settings(val),
InputCommand::Update(val) => self.update_game(val),
InputCommand::Action(val) => self.perform_action(val),
}
Ok(())
}
fn add_settings(&mut self, setting: Setting) {
match setting {
Setting::YourBot(name) => {
self.player = Some(Player::new(name.clone()));
self.your_bot = Some(name);
}
Setting::YourBotID(id) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
player.set_id(id);
self.player = Some(player);
}
self.your_botid = Some(id);
}
Setting::Timebank(val) => self.timebank = Some(val),
Setting::TimePerMove(val) => self.time_per_move = Some(val),
Setting::PlayerNames(val) => self.player_names = Some(val),
Setting::FieldWidth(val) => self.field_width = Some(val),
Setting::FieldHeight(val) => self.field_height = Some(val),
Setting::MaxRounds(val) => self.max_rounds = Some(val),
}
}
fn update_game(&mut self, update: Update) {
match update {
Update::GameRound(round) => self.round = round,
Update::GameField(state) => {
match self.status {
GameStatus::Started => (),
GameStatus::New => self.status = GameStatus::Started,
}
self.update_game_state(state)
}
Update::PlayerSnippets((player_name, snippets)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.snippets = snippets;
}
                    // For now, let's ignore enemies' snippets
self.player = Some(player);
}
}
Update::PlayerBombs((player_name, bombs)) => {
if self.player.is_some() {
let mut player = self.player.take().unwrap();
if player.is(&player_name) {
player.bombs = bombs;
}
                    // For now, let's ignore enemies' bombs
self.player = Some(player);
}
}
}
}
fn perform_action(&mut self, action: Action) {
match action {
Action::Character(_) => println!("{}", self.character),
Action::Move(_) => {
// For now I'm just ignoring timebank management
// Todo: Use timebank information
self.strategy.run(self.state.clone())
}
}
}
fn update_game_state(&mut self, state: String) {
// For now, the game will replace the current state whenever
        // a new state is provided. Ideally the game should always store
        // previous states and perform a diff with the provided state,
        // so it will have context (like the direction of the entities).
//
// TODO: Implement state persistency and diff to store context
self.state = Some(GameState::new(
&state,
self.field_width.unwrap(),
self.field_height.unwrap(),
self.your_botid.unwrap())
);
}
}
|
parse_command
|
identifier_name
|
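The add_settings and update_game methods in the game.rs samples repeatedly take the player out of its Option, mutate it, and put it back. Here is a small std-only sketch (Player is a hypothetical stand-in, not the type from the samples) contrasting that pattern with in-place mutation through Option::as_mut:

// Hypothetical stand-in for the Player type used in the samples above.
#[derive(Debug, Default)]
struct Player {
    id: Option<usize>,
}

fn main() {
    let mut slot: Option<Player> = Some(Player::default());

    // take/modify/put-back, as in add_settings:
    if slot.is_some() {
        let mut player = slot.take().unwrap();
        player.id = Some(7);
        slot = Some(player);
    }

    // Equivalent in-place mutation without moving the value out:
    if let Some(player) = slot.as_mut() {
        player.id = Some(7);
    }

    println!("{:?}", slot);
}

Both forms end in the same state; as_mut avoids the temporary move and the explicit re-assignment.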
network_listener.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! The listener that encapsulates all state for an in-progress document request.
//! Any redirects that are encountered are followed. Whenever a non-redirect
//! response is received, it is forwarded to the appropriate script thread.
use crossbeam_channel::Sender;
use http::header::LOCATION;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use msg::constellation_msg::PipelineId;
use net::http_loader::{set_default_accept, set_default_accept_language};
use net_traits::request::{Destination, Referrer, RequestBuilder};
use net_traits::response::ResponseInit;
use net_traits::{CoreResourceMsg, FetchChannels, FetchMetadata, FetchResponseMsg};
use net_traits::{IpcSend, NetworkError, ResourceThreads};
pub struct NetworkListener {
res_init: Option<ResponseInit>,
request_builder: RequestBuilder,
pipeline_id: PipelineId,
resource_threads: ResourceThreads,
sender: Sender<(PipelineId, FetchResponseMsg)>,
should_send: bool,
}
impl NetworkListener {
pub fn new(
request_builder: RequestBuilder,
pipeline_id: PipelineId,
resource_threads: ResourceThreads,
sender: Sender<(PipelineId, FetchResponseMsg)>,
) -> NetworkListener {
NetworkListener {
res_init: None,
request_builder,
pipeline_id,
resource_threads,
sender,
should_send: false,
}
}
pub fn initiate_fetch(&self, cancel_chan: Option<ipc::IpcReceiver<()>>) {
let (ipc_sender, ipc_receiver) = ipc::channel().expect("Failed to create IPC channel!");
let mut listener = NetworkListener {
res_init: self.res_init.clone(),
request_builder: self.request_builder.clone(),
resource_threads: self.resource_threads.clone(),
sender: self.sender.clone(),
pipeline_id: self.pipeline_id.clone(),
should_send: false,
};
let msg = match self.res_init {
Some(ref res_init_) => CoreResourceMsg::FetchRedirect(
self.request_builder.clone(),
res_init_.clone(),
ipc_sender,
None,
),
None => {
set_default_accept(Destination::Document, &mut listener.request_builder.headers);
set_default_accept_language(&mut listener.request_builder.headers);
CoreResourceMsg::Fetch(
listener.request_builder.clone(),
FetchChannels::ResponseMsg(ipc_sender, cancel_chan),
)
},
};
ROUTER.add_route(
ipc_receiver.to_opaque(),
Box::new(move |message| {
let msg = message.to();
match msg {
Ok(FetchResponseMsg::ProcessResponse(res)) => listener.check_redirect(res),
Ok(msg_) => listener.send(msg_),
Err(e) => warn!("Error while receiving network listener message: {}", e),
};
}),
);
if let Err(e) = self.resource_threads.sender().send(msg) {
warn!("Resource thread unavailable ({})", e);
}
}
fn check_redirect(&mut self, message: Result<(FetchMetadata), NetworkError>) {
match message {
Ok(res_metadata) => {
let metadata = match res_metadata {
FetchMetadata::Filtered { ref unsafe_,.. } => unsafe_,
FetchMetadata::Unfiltered(ref m) => m,
};
match metadata.headers {
Some(ref headers) if headers.contains_key(LOCATION) => {
if self.request_builder.url_list.is_empty() {
self.request_builder
.url_list
.push(self.request_builder.url.clone());
}
self.request_builder
.url_list
.push(metadata.final_url.clone());
self.request_builder.referrer = metadata
.referrer
.clone()
.map(|referrer_url| Referrer::ReferrerUrl(referrer_url));
self.request_builder.referrer_policy = metadata.referrer_policy;
self.res_init = Some(ResponseInit {
url: metadata.final_url.clone(),
location_url: metadata.location_url.clone(),
headers: headers.clone().into_inner(),
referrer: metadata.referrer.clone(),
status_code: metadata
.status
.as_ref()
.map(|&(code, _)| code)
.unwrap_or(200),
});
// XXXManishearth we don't have the cancel_chan anymore and
// can't use it here.
//
// Ideally the Fetch code would handle manual redirects on its own
self.initiate_fetch(None);
},
_ => {
// Response should be processed by script thread.
self.should_send = true;
self.send(FetchResponseMsg::ProcessResponse(Ok(res_metadata)));
},
};
},
Err(e) => {
self.should_send = true;
self.send(FetchResponseMsg::ProcessResponse(Err(e)))
},
};
}
fn
|
(&mut self, msg: FetchResponseMsg) {
if self.should_send {
if let Err(e) = self.sender.send((self.pipeline_id, msg)) {
warn!(
"Failed to forward network message to pipeline {}: {:?}",
self.pipeline_id, e
);
}
}
}
}
|
send
|
identifier_name
|
network_listener.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! The listener that encapsulates all state for an in-progress document request.
//! Any redirects that are encountered are followed. Whenever a non-redirect
//! response is received, it is forwarded to the appropriate script thread.
use crossbeam_channel::Sender;
use http::header::LOCATION;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use msg::constellation_msg::PipelineId;
use net::http_loader::{set_default_accept, set_default_accept_language};
use net_traits::request::{Destination, Referrer, RequestBuilder};
use net_traits::response::ResponseInit;
use net_traits::{CoreResourceMsg, FetchChannels, FetchMetadata, FetchResponseMsg};
use net_traits::{IpcSend, NetworkError, ResourceThreads};
pub struct NetworkListener {
res_init: Option<ResponseInit>,
request_builder: RequestBuilder,
pipeline_id: PipelineId,
resource_threads: ResourceThreads,
sender: Sender<(PipelineId, FetchResponseMsg)>,
should_send: bool,
}
impl NetworkListener {
pub fn new(
request_builder: RequestBuilder,
pipeline_id: PipelineId,
resource_threads: ResourceThreads,
sender: Sender<(PipelineId, FetchResponseMsg)>,
) -> NetworkListener {
NetworkListener {
res_init: None,
request_builder,
pipeline_id,
resource_threads,
sender,
should_send: false,
}
}
pub fn initiate_fetch(&self, cancel_chan: Option<ipc::IpcReceiver<()>>) {
let (ipc_sender, ipc_receiver) = ipc::channel().expect("Failed to create IPC channel!");
let mut listener = NetworkListener {
res_init: self.res_init.clone(),
request_builder: self.request_builder.clone(),
resource_threads: self.resource_threads.clone(),
sender: self.sender.clone(),
pipeline_id: self.pipeline_id.clone(),
should_send: false,
};
|
Some(ref res_init_) => CoreResourceMsg::FetchRedirect(
self.request_builder.clone(),
res_init_.clone(),
ipc_sender,
None,
),
None => {
set_default_accept(Destination::Document, &mut listener.request_builder.headers);
set_default_accept_language(&mut listener.request_builder.headers);
CoreResourceMsg::Fetch(
listener.request_builder.clone(),
FetchChannels::ResponseMsg(ipc_sender, cancel_chan),
)
},
};
ROUTER.add_route(
ipc_receiver.to_opaque(),
Box::new(move |message| {
let msg = message.to();
match msg {
Ok(FetchResponseMsg::ProcessResponse(res)) => listener.check_redirect(res),
Ok(msg_) => listener.send(msg_),
Err(e) => warn!("Error while receiving network listener message: {}", e),
};
}),
);
if let Err(e) = self.resource_threads.sender().send(msg) {
warn!("Resource thread unavailable ({})", e);
}
}
fn check_redirect(&mut self, message: Result<(FetchMetadata), NetworkError>) {
match message {
Ok(res_metadata) => {
let metadata = match res_metadata {
FetchMetadata::Filtered { ref unsafe_,.. } => unsafe_,
FetchMetadata::Unfiltered(ref m) => m,
};
match metadata.headers {
Some(ref headers) if headers.contains_key(LOCATION) => {
if self.request_builder.url_list.is_empty() {
self.request_builder
.url_list
.push(self.request_builder.url.clone());
}
self.request_builder
.url_list
.push(metadata.final_url.clone());
self.request_builder.referrer = metadata
.referrer
.clone()
.map(|referrer_url| Referrer::ReferrerUrl(referrer_url));
self.request_builder.referrer_policy = metadata.referrer_policy;
self.res_init = Some(ResponseInit {
url: metadata.final_url.clone(),
location_url: metadata.location_url.clone(),
headers: headers.clone().into_inner(),
referrer: metadata.referrer.clone(),
status_code: metadata
.status
.as_ref()
.map(|&(code, _)| code)
.unwrap_or(200),
});
// XXXManishearth we don't have the cancel_chan anymore and
// can't use it here.
//
// Ideally the Fetch code would handle manual redirects on its own
self.initiate_fetch(None);
},
_ => {
// Response should be processed by script thread.
self.should_send = true;
self.send(FetchResponseMsg::ProcessResponse(Ok(res_metadata)));
},
};
},
Err(e) => {
self.should_send = true;
self.send(FetchResponseMsg::ProcessResponse(Err(e)))
},
};
}
fn send(&mut self, msg: FetchResponseMsg) {
if self.should_send {
if let Err(e) = self.sender.send((self.pipeline_id, msg)) {
warn!(
"Failed to forward network message to pipeline {}: {:?}",
self.pipeline_id, e
);
}
}
}
}
|
let msg = match self.res_init {
|
random_line_split
|
errno.rs
|
use imp::errno as errno;
const ZMQ_HAUSNUMERO: i32 = 156384712;
pub const EACCES: i32 = errno::EACCES;
pub const EADDRINUSE: i32 = errno::EADDRINUSE;
pub const EAGAIN: i32 = errno::EAGAIN;
pub const EBUSY: i32 = errno::EBUSY;
pub const ECONNREFUSED: i32 = errno::ECONNREFUSED;
pub const EFAULT: i32 = errno::EFAULT;
pub const EINTR: i32 = errno::EINTR;
pub const EHOSTUNREACH: i32 = errno::EHOSTUNREACH;
pub const EINPROGRESS: i32 = errno::EINPROGRESS;
pub const EINVAL: i32 = errno::EINVAL;
pub const EMFILE: i32 = errno::EMFILE;
pub const EMSGSIZE: i32 = errno::EMSGSIZE;
pub const ENAMETOOLONG: i32 = errno::ENAMETOOLONG;
pub const ENODEV: i32 = errno::ENODEV;
pub const ENOENT: i32 = errno::ENOENT;
pub const ENOMEM: i32 = errno::ENOMEM;
pub const ENOTCONN: i32 = errno::ENOTCONN;
pub const ENOTSOCK: i32 = errno::ENOTSOCK;
|
pub const EPROTO: i32 = errno::EPROTO;
pub const EPROTONOSUPPORT: i32 = errno::EPROTONOSUPPORT;
#[cfg(not(target_os = "windows"))]
pub const ENOTSUP: i32 = (ZMQ_HAUSNUMERO + 1);
#[cfg(target_os = "windows")]
pub const ENOTSUP: i32 = errno::ENOTSUP;
pub const ENOBUFS: i32 = errno::ENOBUFS;
pub const ENETDOWN: i32 = errno::ENETDOWN;
pub const EADDRNOTAVAIL: i32 = errno::EADDRNOTAVAIL;
// native zmq error codes
pub const EFSM: i32 = (ZMQ_HAUSNUMERO + 51);
pub const ENOCOMPATPROTO: i32 = (ZMQ_HAUSNUMERO + 52);
pub const ETERM: i32 = (ZMQ_HAUSNUMERO + 53);
pub const EMTHREAD: i32 = (ZMQ_HAUSNUMERO + 54);
|
random_line_split
|
|
primitive_reuse_peer.rs
|
extern crate futures;
extern crate tokio_io;
use futures::future::ok;
use std::cell::RefCell;
use std::rc::Rc;
use super::{BoxedNewPeerFuture, Peer};
use std::io::{Error as IoError, Read, Write};
use tokio_io::{AsyncRead, AsyncWrite};
use super::{once, ConstructParams, PeerConstructor, Specifier};
use futures::Future;
use std::ops::DerefMut;
#[derive(Debug)]
pub struct Reuser(pub Rc<dyn Specifier>);
impl Specifier for Reuser {
fn construct(&self, p: ConstructParams) -> PeerConstructor {
let send_zero_msg_on_disconnect = p.program_options.reuser_send_zero_msg_on_disconnect;
let reuser = p.global(GlobalState::default).clone();
let mut reuser = reuser.clone();
let l2r = p.left_to_right.clone();
let inner = || self.0.construct(p).get_only_first_conn(l2r);
once(connection_reuser(
&mut reuser,
inner,
send_zero_msg_on_disconnect,
))
}
specifier_boilerplate!(singleconnect has_subspec globalstate);
self_0_is_subspecifier!(...);
}
specifier_class!(
name = ReuserClass,
target = Reuser,
prefixes = ["reuse-raw:", "raw-reuse:"],
arg_handling = subspec,
overlay = true,
MessageBoundaryStatusDependsOnInnerType,
SingleConnect,
help = r#"
Reuse subspecifier for serving multiple clients: unpredictable mode. [A]
Better used with --unidirectional, otherwise replies get directed to
a random connected client.
Example: Forward multiple parallel WebSocket connections to a single persistent TCP connection
websocat -u ws-l:0.0.0.0:8800 reuse:tcp:127.0.0.1:4567
Example (unreliable): don't disconnect SSH when websocket reconnects
websocat ws-l:[::]:8088 reuse:tcp:127.0.0.1:22
"#
);
type PeerSlot = Rc<RefCell<Option<Peer>>>;
#[derive(Default, Clone)]
pub struct GlobalState(PeerSlot);
#[derive(Clone)]
struct PeerHandle(PeerSlot, bool);
impl Read for PeerHandle {
fn read(&mut self, b: &mut [u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut()
|
else {
unreachable!()
}
}
}
impl AsyncRead for PeerHandle {}
impl Write for PeerHandle {
fn write(&mut self, b: &[u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.write(b)
} else {
unreachable!()
}
}
fn flush(&mut self) -> Result<(), IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.flush()
} else {
unreachable!()
}
}
}
impl AsyncWrite for PeerHandle {
fn shutdown(&mut self) -> futures::Poll<(), IoError> {
if self.1 {
let _ = self.write(b"");
}
if let Some(ref mut _x) = *self.0.borrow_mut().deref_mut() {
// Ignore shutdown attempts
Ok(futures::Async::Ready(()))
//_x.1.shutdown()
} else {
unreachable!()
}
}
}
pub fn connection_reuser<F: FnOnce() -> BoxedNewPeerFuture>(
s: &mut GlobalState,
inner_peer: F,
send_zero_msg_on_disconnect: bool,
) -> BoxedNewPeerFuture {
let need_init = s.0.borrow().is_none();
let rc = s.0.clone();
if need_init {
info!("Initializing");
Box::new(inner_peer().and_then(move |inner| {
{
let mut b = rc.borrow_mut();
let x: &mut Option<Peer> = b.deref_mut();
*x = Some(inner);
}
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
ok(peer)
})) as BoxedNewPeerFuture
} else {
info!("Reusing");
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
Box::new(ok(peer)) as BoxedNewPeerFuture
}
}
|
{
x.0.read(b)
}
|
conditional_block
|
primitive_reuse_peer.rs
|
extern crate futures;
extern crate tokio_io;
use futures::future::ok;
use std::cell::RefCell;
use std::rc::Rc;
use super::{BoxedNewPeerFuture, Peer};
|
use std::ops::DerefMut;
#[derive(Debug)]
pub struct Reuser(pub Rc<dyn Specifier>);
impl Specifier for Reuser {
fn construct(&self, p: ConstructParams) -> PeerConstructor {
let send_zero_msg_on_disconnect = p.program_options.reuser_send_zero_msg_on_disconnect;
let reuser = p.global(GlobalState::default).clone();
let mut reuser = reuser.clone();
let l2r = p.left_to_right.clone();
let inner = || self.0.construct(p).get_only_first_conn(l2r);
once(connection_reuser(
&mut reuser,
inner,
send_zero_msg_on_disconnect,
))
}
specifier_boilerplate!(singleconnect has_subspec globalstate);
self_0_is_subspecifier!(...);
}
specifier_class!(
name = ReuserClass,
target = Reuser,
prefixes = ["reuse-raw:", "raw-reuse:"],
arg_handling = subspec,
overlay = true,
MessageBoundaryStatusDependsOnInnerType,
SingleConnect,
help = r#"
Reuse subspecifier for serving multiple clients: unpredictable mode. [A]
Better used with --unidirectional, otherwise replies get directed to
a random connected client.
Example: Forward multiple parallel WebSocket connections to a single persistent TCP connection
websocat -u ws-l:0.0.0.0:8800 reuse:tcp:127.0.0.1:4567
Example (unreliable): don't disconnect SSH when websocket reconnects
websocat ws-l:[::]:8088 reuse:tcp:127.0.0.1:22
"#
);
type PeerSlot = Rc<RefCell<Option<Peer>>>;
#[derive(Default, Clone)]
pub struct GlobalState(PeerSlot);
#[derive(Clone)]
struct PeerHandle(PeerSlot, bool);
impl Read for PeerHandle {
fn read(&mut self, b: &mut [u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.0.read(b)
} else {
unreachable!()
}
}
}
impl AsyncRead for PeerHandle {}
impl Write for PeerHandle {
fn write(&mut self, b: &[u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.write(b)
} else {
unreachable!()
}
}
fn flush(&mut self) -> Result<(), IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.flush()
} else {
unreachable!()
}
}
}
impl AsyncWrite for PeerHandle {
fn shutdown(&mut self) -> futures::Poll<(), IoError> {
if self.1 {
let _ = self.write(b"");
}
if let Some(ref mut _x) = *self.0.borrow_mut().deref_mut() {
// Ignore shutdown attempts
Ok(futures::Async::Ready(()))
//_x.1.shutdown()
} else {
unreachable!()
}
}
}
pub fn connection_reuser<F: FnOnce() -> BoxedNewPeerFuture>(
s: &mut GlobalState,
inner_peer: F,
send_zero_msg_on_disconnect: bool,
) -> BoxedNewPeerFuture {
let need_init = s.0.borrow().is_none();
let rc = s.0.clone();
if need_init {
info!("Initializing");
Box::new(inner_peer().and_then(move |inner| {
{
let mut b = rc.borrow_mut();
let x: &mut Option<Peer> = b.deref_mut();
*x = Some(inner);
}
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
ok(peer)
})) as BoxedNewPeerFuture
} else {
info!("Reusing");
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
Box::new(ok(peer)) as BoxedNewPeerFuture
}
}
|
use std::io::{Error as IoError, Read, Write};
use tokio_io::{AsyncRead, AsyncWrite};
use super::{once, ConstructParams, PeerConstructor, Specifier};
use futures::Future;
|
random_line_split
|
primitive_reuse_peer.rs
|
extern crate futures;
extern crate tokio_io;
use futures::future::ok;
use std::cell::RefCell;
use std::rc::Rc;
use super::{BoxedNewPeerFuture, Peer};
use std::io::{Error as IoError, Read, Write};
use tokio_io::{AsyncRead, AsyncWrite};
use super::{once, ConstructParams, PeerConstructor, Specifier};
use futures::Future;
use std::ops::DerefMut;
#[derive(Debug)]
pub struct Reuser(pub Rc<dyn Specifier>);
impl Specifier for Reuser {
fn construct(&self, p: ConstructParams) -> PeerConstructor
|
specifier_boilerplate!(singleconnect has_subspec globalstate);
self_0_is_subspecifier!(...);
}
specifier_class!(
name = ReuserClass,
target = Reuser,
prefixes = ["reuse-raw:", "raw-reuse:"],
arg_handling = subspec,
overlay = true,
MessageBoundaryStatusDependsOnInnerType,
SingleConnect,
help = r#"
Reuse subspecifier for serving multiple clients: unpredictable mode. [A]
Better used with --unidirectional, otherwise replies get directed to
a random connected client.
Example: Forward multiple parallel WebSocket connections to a single persistent TCP connection
websocat -u ws-l:0.0.0.0:8800 reuse:tcp:127.0.0.1:4567
Example (unreliable): don't disconnect SSH when websocket reconnects
websocat ws-l:[::]:8088 reuse:tcp:127.0.0.1:22
"#
);
type PeerSlot = Rc<RefCell<Option<Peer>>>;
#[derive(Default, Clone)]
pub struct GlobalState(PeerSlot);
#[derive(Clone)]
struct PeerHandle(PeerSlot, bool);
impl Read for PeerHandle {
fn read(&mut self, b: &mut [u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.0.read(b)
} else {
unreachable!()
}
}
}
impl AsyncRead for PeerHandle {}
impl Write for PeerHandle {
fn write(&mut self, b: &[u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.write(b)
} else {
unreachable!()
}
}
fn flush(&mut self) -> Result<(), IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.flush()
} else {
unreachable!()
}
}
}
impl AsyncWrite for PeerHandle {
fn shutdown(&mut self) -> futures::Poll<(), IoError> {
if self.1 {
let _ = self.write(b"");
}
if let Some(ref mut _x) = *self.0.borrow_mut().deref_mut() {
// Ignore shutdown attempts
Ok(futures::Async::Ready(()))
//_x.1.shutdown()
} else {
unreachable!()
}
}
}
pub fn connection_reuser<F: FnOnce() -> BoxedNewPeerFuture>(
s: &mut GlobalState,
inner_peer: F,
send_zero_msg_on_disconnect: bool,
) -> BoxedNewPeerFuture {
let need_init = s.0.borrow().is_none();
let rc = s.0.clone();
if need_init {
info!("Initializing");
Box::new(inner_peer().and_then(move |inner| {
{
let mut b = rc.borrow_mut();
let x: &mut Option<Peer> = b.deref_mut();
*x = Some(inner);
}
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
ok(peer)
})) as BoxedNewPeerFuture
} else {
info!("Reusing");
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
Box::new(ok(peer)) as BoxedNewPeerFuture
}
}
|
{
let send_zero_msg_on_disconnect = p.program_options.reuser_send_zero_msg_on_disconnect;
let reuser = p.global(GlobalState::default).clone();
let mut reuser = reuser.clone();
let l2r = p.left_to_right.clone();
let inner = || self.0.construct(p).get_only_first_conn(l2r);
once(connection_reuser(
&mut reuser,
inner,
send_zero_msg_on_disconnect,
))
}
|
identifier_body
|
primitive_reuse_peer.rs
|
extern crate futures;
extern crate tokio_io;
use futures::future::ok;
use std::cell::RefCell;
use std::rc::Rc;
use super::{BoxedNewPeerFuture, Peer};
use std::io::{Error as IoError, Read, Write};
use tokio_io::{AsyncRead, AsyncWrite};
use super::{once, ConstructParams, PeerConstructor, Specifier};
use futures::Future;
use std::ops::DerefMut;
#[derive(Debug)]
pub struct Reuser(pub Rc<dyn Specifier>);
impl Specifier for Reuser {
fn construct(&self, p: ConstructParams) -> PeerConstructor {
let send_zero_msg_on_disconnect = p.program_options.reuser_send_zero_msg_on_disconnect;
let reuser = p.global(GlobalState::default).clone();
let mut reuser = reuser.clone();
let l2r = p.left_to_right.clone();
let inner = || self.0.construct(p).get_only_first_conn(l2r);
once(connection_reuser(
&mut reuser,
inner,
send_zero_msg_on_disconnect,
))
}
specifier_boilerplate!(singleconnect has_subspec globalstate);
self_0_is_subspecifier!(...);
}
specifier_class!(
name = ReuserClass,
target = Reuser,
prefixes = ["reuse-raw:", "raw-reuse:"],
arg_handling = subspec,
overlay = true,
MessageBoundaryStatusDependsOnInnerType,
SingleConnect,
help = r#"
Reuse subspecifier for serving multiple clients: unpredictable mode. [A]
Better used with --unidirectional, otherwise replies get directed to
a random connected client.
Example: Forward multiple parallel WebSocket connections to a single persistent TCP connection
websocat -u ws-l:0.0.0.0:8800 reuse:tcp:127.0.0.1:4567
Example (unreliable): don't disconnect SSH when websocket reconnects
websocat ws-l:[::]:8088 reuse:tcp:127.0.0.1:22
"#
);
type PeerSlot = Rc<RefCell<Option<Peer>>>;
#[derive(Default, Clone)]
pub struct GlobalState(PeerSlot);
#[derive(Clone)]
struct PeerHandle(PeerSlot, bool);
impl Read for PeerHandle {
fn read(&mut self, b: &mut [u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.0.read(b)
} else {
unreachable!()
}
}
}
impl AsyncRead for PeerHandle {}
impl Write for PeerHandle {
fn write(&mut self, b: &[u8]) -> Result<usize, IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.write(b)
} else {
unreachable!()
}
}
fn flush(&mut self) -> Result<(), IoError> {
if let Some(ref mut x) = *self.0.borrow_mut().deref_mut() {
x.1.flush()
} else {
unreachable!()
}
}
}
impl AsyncWrite for PeerHandle {
fn
|
(&mut self) -> futures::Poll<(), IoError> {
if self.1 {
let _ = self.write(b"");
}
if let Some(ref mut _x) = *self.0.borrow_mut().deref_mut() {
// Ignore shutdown attempts
Ok(futures::Async::Ready(()))
//_x.1.shutdown()
} else {
unreachable!()
}
}
}
pub fn connection_reuser<F: FnOnce() -> BoxedNewPeerFuture>(
s: &mut GlobalState,
inner_peer: F,
send_zero_msg_on_disconnect: bool,
) -> BoxedNewPeerFuture {
let need_init = s.0.borrow().is_none();
let rc = s.0.clone();
if need_init {
info!("Initializing");
Box::new(inner_peer().and_then(move |inner| {
{
let mut b = rc.borrow_mut();
let x: &mut Option<Peer> = b.deref_mut();
*x = Some(inner);
}
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
ok(peer)
})) as BoxedNewPeerFuture
} else {
info!("Reusing");
let ps: PeerSlot = rc.clone();
let ph1 = PeerHandle(ps, send_zero_msg_on_disconnect);
let ph2 = ph1.clone();
let peer = Peer::new(ph1, ph2, None /* TODO */);
Box::new(ok(peer)) as BoxedNewPeerFuture
}
}
|
shutdown
|
identifier_name
|
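The raw-reuse peer above shares one connection by handing every handle a clone of an Rc<RefCell<Option<Peer>>>. A std-only sketch of that shared-slot shape follows (a plain String stands in for Peer, so none of the websocat types are involved):

use std::cell::RefCell;
use std::rc::Rc;

// String stands in for the Peer type from the sample; the shape is what matters.
type Slot = Rc<RefCell<Option<String>>>;

fn write_through(slot: &Slot, data: &str) {
    // Borrow the shared slot and mutate the value in place if it is populated.
    if let Some(peer) = slot.borrow_mut().as_mut() {
        peer.push_str(data);
    }
}

fn main() {
    let slot: Slot = Rc::new(RefCell::new(None));

    // First use initializes the shared slot...
    *slot.borrow_mut() = Some(String::from("hello"));

    // ...and every clone of the handle reuses the same underlying value.
    let handle = Rc::clone(&slot);
    write_through(&handle, " world");

    println!("{:?}", slot.borrow());
}

The first construction fills the slot; later handles find it already populated and borrow the same value, which is the reuse behaviour the help text above describes.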
derive_from_xml_stream.rs
|
#[macro_use]
extern crate mws_derive;
#[macro_use]
extern crate mws;
extern crate chrono;
use chrono::{DateTime, Utc};
pub use mws::{result, xmlhelper};
#[test]
fn derive_struct() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct S {
a: String,
b: i32,
date: Option<DateTime<Utc>>,
}
test_decode!(
S,
r#"
<a>AAA</a>
<b>777</b>
<date>2016-11-03T00:09:40Z</date>
"#,
S {
a: "AAA".to_owned(),
b: 777,
date: Some("2016-11-03T00:09:40Z".parse().unwrap())
}
);
}
#[test]
fn derive_vec() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct S {
items: Vec<i32>,
}
test_decode!(
S,
r#"
<items>
<value>1</value>
<value>3</value>
<value>5</value>
<value>7</value>
</items>
"#,
S {
items: vec![1, 3, 5, 7],
}
);
}
#[test]
fn derive_struct_vec() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct Item {
a: String,
b: i32,
}
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct S {
items: Vec<Item>,
}
test_decode!(
S,
r#"
<items>
<item>
<a>AAA</a>
<b>777</b>
</item>
<item>
<a>BBB</a>
<b>888</b>
</item>
</items>
"#,
S {
items: vec![
Item {
a: "AAA".to_string(),
b: 777,
},
Item {
a: "BBB".to_string(),
b: 888,
},
],
}
);
}
#[test]
#[allow(non_snake_case)]
fn derive_from_elem_attr() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
|
Units: String,
#[from_xml_stream(from_content)]
Value: String,
}
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct ItemDimensions {
Height: Value,
Length: Value,
Width: Value,
}
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct Products {
ItemDimensions: ItemDimensions,
}
test_decode!(
Products,
r#"
<ItemDimensions>
<Height Units="inches">2.2</Height>
<Length Units="inches">12.8</Length>
<Width Units="inches">5.8</Width>
</ItemDimensions>
"#,
Products {
ItemDimensions: ItemDimensions {
Height: Value {
Units: "inches".to_string(),
Value: "2.2".to_string(),
},
Length: Value {
Units: "inches".to_string(),
Value: "12.8".to_string(),
},
Width: Value {
Units: "inches".to_string(),
Value: "5.8".to_string(),
},
}
}
);
}
|
struct Value {
#[from_xml_stream(from_attr)]
|
random_line_split
|
derive_from_xml_stream.rs
|
#[macro_use]
extern crate mws_derive;
#[macro_use]
extern crate mws;
extern crate chrono;
use chrono::{DateTime, Utc};
pub use mws::{result, xmlhelper};
#[test]
fn
|
() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct S {
a: String,
b: i32,
date: Option<DateTime<Utc>>,
}
test_decode!(
S,
r#"
<a>AAA</a>
<b>777</b>
<date>2016-11-03T00:09:40Z</date>
"#,
S {
a: "AAA".to_owned(),
b: 777,
date: Some("2016-11-03T00:09:40Z".parse().unwrap())
}
);
}
#[test]
fn derive_vec() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct S {
items: Vec<i32>,
}
test_decode!(
S,
r#"
<items>
<value>1</value>
<value>3</value>
<value>5</value>
<value>7</value>
</items>
"#,
S {
items: vec![1, 3, 5, 7],
}
);
}
#[test]
fn derive_struct_vec() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct Item {
a: String,
b: i32,
}
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct S {
items: Vec<Item>,
}
test_decode!(
S,
r#"
<items>
<item>
<a>AAA</a>
<b>777</b>
</item>
<item>
<a>BBB</a>
<b>888</b>
</item>
</items>
"#,
S {
items: vec![
Item {
a: "AAA".to_string(),
b: 777,
},
Item {
a: "BBB".to_string(),
b: 888,
},
],
}
);
}
#[test]
#[allow(non_snake_case)]
fn derive_from_elem_attr() {
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct Value {
#[from_xml_stream(from_attr)]
Units: String,
#[from_xml_stream(from_content)]
Value: String,
}
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct ItemDimensions {
Height: Value,
Length: Value,
Width: Value,
}
#[derive(Debug, PartialEq, Default, FromXmlStream)]
struct Products {
ItemDimensions: ItemDimensions,
}
test_decode!(
Products,
r#"
<ItemDimensions>
<Height Units="inches">2.2</Height>
<Length Units="inches">12.8</Length>
<Width Units="inches">5.8</Width>
</ItemDimensions>
"#,
Products {
ItemDimensions: ItemDimensions {
Height: Value {
Units: "inches".to_string(),
Value: "2.2".to_string(),
},
Length: Value {
Units: "inches".to_string(),
Value: "12.8".to_string(),
},
Width: Value {
Units: "inches".to_string(),
Value: "5.8".to_string(),
},
}
}
);
}
|
derive_struct
|
identifier_name
|
scmstore.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Write;
use async_runtime::block_on;
use async_runtime::stream_to_iter as block_on_stream;
use clidispatch::errors;
use configparser::config::ConfigSet;
use revisionstore::scmstore::file_to_async_key_stream;
use revisionstore::scmstore::FileAttributes;
use revisionstore::scmstore::FileStoreBuilder;
use revisionstore::scmstore::TreeStoreBuilder;
use types::Key;
use super::define_flags;
use super::Repo;
use super::Result;
use super::IO;
define_flags! {
pub struct DebugScmStoreOpts {
/// Run the python version of the command instead (actually runs mostly in rust, but uses store constructed for python, with legacy fallback).
python: bool,
/// Fetch mode (file or tree)
mode: String,
/// Input file containing keys to fetch (hgid,path separated by newlines)
path: String,
}
}
enum FetchMode {
File,
Tree,
}
pub fn run(opts: DebugScmStoreOpts, io: &IO, repo: Repo) -> Result<u8> {
if opts.python {
return Err(errors::FallbackToPython.into());
}
let mode = match opts.mode.as_ref() {
"file" => FetchMode::File,
"tree" => FetchMode::Tree,
_ => return Err(errors::Abort("'mode' must be one of 'file' or 'tree'".into()).into()),
};
let keys: Vec<_> =
block_on_stream(block_on(file_to_async_key_stream(opts.path.into()))?).collect();
let config = repo.config();
match mode {
FetchMode::File => fetch_files(io, &config, keys)?,
FetchMode::Tree => fetch_trees(io, &config, keys)?,
}
Ok(0)
}
fn fetch_files(io: &IO, config: &ConfigSet, keys: Vec<Key>) -> Result<()> {
let file_builder = FileStoreBuilder::new(&config);
let store = file_builder.build()?;
let mut stdout = io.output();
let fetch_result = store.fetch(
keys.into_iter(),
FileAttributes {
content: true,
aux_data: true,
},
);
let (found, missing, _errors) = fetch_result.consume();
for (_, file) in found.into_iter() {
write!(stdout, "Successfully fetched file: {:#?}\n", file)?;
}
for (key, _) in missing.into_iter() {
write!(stdout, "Failed to fetch file: {:#?}\n", key)?;
}
Ok(())
}
fn fetch_trees(io: &IO, config: &ConfigSet, keys: Vec<Key>) -> Result<()>
|
pub fn name() -> &'static str {
"debugscmstore"
}
pub fn doc() -> &'static str {
"test file and tree fetching using scmstore"
}
|
{
let mut tree_builder = TreeStoreBuilder::new(config);
tree_builder = tree_builder.suffix("manifests");
let store = tree_builder.build()?;
let mut stdout = io.output();
let fetch_result = store.fetch_batch(keys.into_iter())?;
let (found, missing, _errors) = fetch_result.consume();
for complete in found.into_iter() {
write!(stdout, "Successfully fetched tree: {:#?}\n", complete)?;
}
for incomplete in missing.into_iter() {
write!(stdout, "Failed to fetch tree: {:#?}\n", incomplete)?;
}
Ok(())
}
|
identifier_body
|
scmstore.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Write;
use async_runtime::block_on;
use async_runtime::stream_to_iter as block_on_stream;
use clidispatch::errors;
use configparser::config::ConfigSet;
use revisionstore::scmstore::file_to_async_key_stream;
use revisionstore::scmstore::FileAttributes;
use revisionstore::scmstore::FileStoreBuilder;
use revisionstore::scmstore::TreeStoreBuilder;
use types::Key;
use super::define_flags;
use super::Repo;
use super::Result;
use super::IO;
define_flags! {
pub struct DebugScmStoreOpts {
/// Run the python version of the command instead (actually runs mostly in rust, but uses store constructed for python, with legacy fallback).
python: bool,
/// Fetch mode (file or tree)
mode: String,
/// Input file containing keys to fetch (hgid,path separated by newlines)
path: String,
}
}
enum FetchMode {
File,
Tree,
}
pub fn
|
(opts: DebugScmStoreOpts, io: &IO, repo: Repo) -> Result<u8> {
if opts.python {
return Err(errors::FallbackToPython.into());
}
let mode = match opts.mode.as_ref() {
"file" => FetchMode::File,
"tree" => FetchMode::Tree,
_ => return Err(errors::Abort("'mode' must be one of 'file' or 'tree'".into()).into()),
};
let keys: Vec<_> =
block_on_stream(block_on(file_to_async_key_stream(opts.path.into()))?).collect();
let config = repo.config();
match mode {
FetchMode::File => fetch_files(io, &config, keys)?,
FetchMode::Tree => fetch_trees(io, &config, keys)?,
}
Ok(0)
}
fn fetch_files(io: &IO, config: &ConfigSet, keys: Vec<Key>) -> Result<()> {
let file_builder = FileStoreBuilder::new(&config);
let store = file_builder.build()?;
let mut stdout = io.output();
let fetch_result = store.fetch(
keys.into_iter(),
FileAttributes {
content: true,
aux_data: true,
},
);
let (found, missing, _errors) = fetch_result.consume();
for (_, file) in found.into_iter() {
write!(stdout, "Successfully fetched file: {:#?}\n", file)?;
}
for (key, _) in missing.into_iter() {
write!(stdout, "Failed to fetch file: {:#?}\n", key)?;
}
Ok(())
}
fn fetch_trees(io: &IO, config: &ConfigSet, keys: Vec<Key>) -> Result<()> {
let mut tree_builder = TreeStoreBuilder::new(config);
tree_builder = tree_builder.suffix("manifests");
let store = tree_builder.build()?;
let mut stdout = io.output();
let fetch_result = store.fetch_batch(keys.into_iter())?;
let (found, missing, _errors) = fetch_result.consume();
for complete in found.into_iter() {
write!(stdout, "Successfully fetched tree: {:#?}\n", complete)?;
}
for incomplete in missing.into_iter() {
write!(stdout, "Failed to fetch tree: {:#?}\n", incomplete)?;
}
Ok(())
}
pub fn name() -> &'static str {
"debugscmstore"
}
pub fn doc() -> &'static str {
"test file and tree fetching using scmstore"
}
|
run
|
identifier_name
|
scmstore.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Write;
use async_runtime::block_on;
use async_runtime::stream_to_iter as block_on_stream;
use clidispatch::errors;
use configparser::config::ConfigSet;
use revisionstore::scmstore::file_to_async_key_stream;
use revisionstore::scmstore::FileAttributes;
use revisionstore::scmstore::FileStoreBuilder;
use revisionstore::scmstore::TreeStoreBuilder;
use types::Key;
use super::define_flags;
use super::Repo;
use super::Result;
use super::IO;
define_flags! {
pub struct DebugScmStoreOpts {
/// Run the python version of the command instead (actually runs mostly in rust, but uses store constructed for python, with legacy fallback).
python: bool,
/// Fetch mode (file or tree)
mode: String,
/// Input file containing keys to fetch (hgid,path separated by newlines)
path: String,
}
}
enum FetchMode {
File,
Tree,
}
pub fn run(opts: DebugScmStoreOpts, io: &IO, repo: Repo) -> Result<u8> {
if opts.python {
return Err(errors::FallbackToPython.into());
}
let mode = match opts.mode.as_ref() {
"file" => FetchMode::File,
"tree" => FetchMode::Tree,
_ => return Err(errors::Abort("'mode' must be one of 'file' or 'tree'".into()).into()),
};
let keys: Vec<_> =
block_on_stream(block_on(file_to_async_key_stream(opts.path.into()))?).collect();
let config = repo.config();
match mode {
FetchMode::File => fetch_files(io, &config, keys)?,
FetchMode::Tree => fetch_trees(io, &config, keys)?,
}
Ok(0)
}
fn fetch_files(io: &IO, config: &ConfigSet, keys: Vec<Key>) -> Result<()> {
let file_builder = FileStoreBuilder::new(&config);
let store = file_builder.build()?;
let mut stdout = io.output();
let fetch_result = store.fetch(
keys.into_iter(),
FileAttributes {
content: true,
aux_data: true,
},
);
let (found, missing, _errors) = fetch_result.consume();
for (_, file) in found.into_iter() {
write!(stdout, "Successfully fetched file: {:#?}\n", file)?;
}
for (key, _) in missing.into_iter() {
write!(stdout, "Failed to fetch file: {:#?}\n", key)?;
}
Ok(())
}
fn fetch_trees(io: &IO, config: &ConfigSet, keys: Vec<Key>) -> Result<()> {
let mut tree_builder = TreeStoreBuilder::new(config);
tree_builder = tree_builder.suffix("manifests");
let store = tree_builder.build()?;
let mut stdout = io.output();
let fetch_result = store.fetch_batch(keys.into_iter())?;
let (found, missing, _errors) = fetch_result.consume();
for complete in found.into_iter() {
write!(stdout, "Successfully fetched tree: {:#?}\n", complete)?;
}
for incomplete in missing.into_iter() {
write!(stdout, "Failed to fetch tree: {:#?}\n", incomplete)?;
}
|
Ok(())
}
pub fn name() -> &'static str {
"debugscmstore"
}
pub fn doc() -> &'static str {
"test file and tree fetching using scmstore"
}
|
random_line_split
|
|
field.rs
|
use std::str::FromStr;
use crate::parsers::parser;
use crate::parsers::sql::Env;
use crate::parsers::sql::Expr;
use crate::parsers::value::PqlValue;
#[derive(Debug, Default, Clone, PartialEq)]
pub struct
|
{
pub expr: Expr,
pub alias: Option<String>,
}
impl FromStr for Field {
type Err = anyhow::Error;
fn from_str(s: &str) -> anyhow::Result<Self> {
match parser::expressions::parse_field(s) {
Ok((_, field)) => Ok(field),
Err(nom::Err::Error(err)) => {
eprint!("{:#?}", err);
anyhow::bail!("failed")
}
_ => todo!(),
}
}
}
impl Field {
pub fn expand_fullpath(&self, env: &Env) -> Self {
Self {
expr: self.expr.expand_fullpath(&env),
alias: self.alias.to_owned(),
}
}
pub fn evaluate(self, env: &Env) -> PqlValue {
let value = self.expr.eval(&env);
value
}
pub fn rename(self) -> (String, Expr) {
if let Some(alias) = self.alias {
(alias, self.expr)
} else {
let alias = match &self.expr {
Expr::Selector(selector) => selector.to_vec().last().unwrap().to_string(),
_ => todo!(),
};
(alias, self.expr)
}
}
}
|
Field
|
identifier_name
|
field.rs
|
use std::str::FromStr;
use crate::parsers::parser;
use crate::parsers::sql::Env;
use crate::parsers::sql::Expr;
use crate::parsers::value::PqlValue;
#[derive(Debug, Default, Clone, PartialEq)]
pub struct Field {
pub expr: Expr,
pub alias: Option<String>,
}
impl FromStr for Field {
type Err = anyhow::Error;
fn from_str(s: &str) -> anyhow::Result<Self> {
match parser::expressions::parse_field(s) {
Ok((_, field)) => Ok(field),
Err(nom::Err::Error(err)) => {
eprint!("{:#?}", err);
anyhow::bail!("failed")
}
_ => todo!(),
}
}
}
impl Field {
pub fn expand_fullpath(&self, env: &Env) -> Self {
Self {
expr: self.expr.expand_fullpath(&env),
alias: self.alias.to_owned(),
}
|
pub fn evaluate(self, env: &Env) -> PqlValue {
let value = self.expr.eval(&env);
value
}
pub fn rename(self) -> (String, Expr) {
if let Some(alias) = self.alias {
(alias, self.expr)
} else {
let alias = match &self.expr {
Expr::Selector(selector) => selector.to_vec().last().unwrap().to_string(),
_ => todo!(),
};
(alias, self.expr)
}
}
}
|
}
|
random_line_split
|
ptr_as_ptr.rs
|
// run-rustfix
#![warn(clippy::ptr_as_ptr)]
#![feature(custom_inner_attributes)]
fn main() {
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
// Make sure the lint can handle the difference in their operator precedences.
unsafe {
let ptr_ptr: *const *const u32 = &ptr;
let _ = *ptr_ptr as *const i32;
}
// Changes in mutability. Do not lint this.
let _ = ptr as *mut i32;
let _ = mut_ptr as *const i32;
// `pointer::cast` cannot perform unsized coercions unlike `as`. Do not lint this.
let ptr_of_array: *const [u32; 4] = &[1, 2, 3, 4];
let _ = ptr_of_array as *const [u32];
let _ = ptr_of_array as *const dyn std::fmt::Debug;
// Ensure the lint doesn't produce unnecessary turbofish for inferred types.
let _: *const i32 = ptr as *const _;
let _: *mut i32 = mut_ptr as _;
}
fn _msrv_1_37() {
#![clippy::msrv = "1.37"]
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
// `pointer::cast` was stabilized in 1.38. Do not lint this
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
}
fn
|
() {
#![clippy::msrv = "1.38"]
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
}
|
_msrv_1_38
|
identifier_name
|
ptr_as_ptr.rs
|
// run-rustfix
#![warn(clippy::ptr_as_ptr)]
#![feature(custom_inner_attributes)]
fn main() {
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
// Make sure the lint can handle the difference in their operator precedences.
unsafe {
let ptr_ptr: *const *const u32 = &ptr;
let _ = *ptr_ptr as *const i32;
}
// Changes in mutability. Do not lint this.
let _ = ptr as *mut i32;
let _ = mut_ptr as *const i32;
// `pointer::cast` cannot perform unsized coercions unlike `as`. Do not lint this.
let ptr_of_array: *const [u32; 4] = &[1, 2, 3, 4];
let _ = ptr_of_array as *const [u32];
let _ = ptr_of_array as *const dyn std::fmt::Debug;
// Ensure the lint doesn't produce unnecessary turbofish for inferred types.
let _: *const i32 = ptr as *const _;
let _: *mut i32 = mut_ptr as _;
}
fn _msrv_1_37() {
#![clippy::msrv = "1.37"]
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
// `pointer::cast` was stabilized in 1.38. Do not lint this
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
}
fn _msrv_1_38() {
#![clippy::msrv = "1.38"]
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
|
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
}
|
random_line_split
|
|
ptr_as_ptr.rs
|
// run-rustfix
#![warn(clippy::ptr_as_ptr)]
#![feature(custom_inner_attributes)]
fn main() {
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
// Make sure the lint can handle the difference in their operator precedences.
unsafe {
let ptr_ptr: *const *const u32 = &ptr;
let _ = *ptr_ptr as *const i32;
}
// Changes in mutability. Do not lint this.
let _ = ptr as *mut i32;
let _ = mut_ptr as *const i32;
// `pointer::cast` cannot perform unsized coercions unlike `as`. Do not lint this.
let ptr_of_array: *const [u32; 4] = &[1, 2, 3, 4];
let _ = ptr_of_array as *const [u32];
let _ = ptr_of_array as *const dyn std::fmt::Debug;
// Ensure the lint doesn't produce unnecessary turbofish for inferred types.
let _: *const i32 = ptr as *const _;
let _: *mut i32 = mut_ptr as _;
}
fn _msrv_1_37()
|
fn _msrv_1_38() {
#![clippy::msrv = "1.38"]
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
}
|
{
#![clippy::msrv = "1.37"]
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
// `pointer::cast` was stabilized in 1.38. Do not lint this
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
}
|
identifier_body
|
gdt.rs
|
use core::mem::size_of;
use core::ptr;
use core::sync::atomic::{AtomicU8, Ordering};
use x86_64::structures::gdt::SegmentSelector;
use x86_64::structures::tss::TaskStateSegment;
use x86_64::{PrivilegeLevel, VirtAddr};
pub use x86_64::structures::gdt::Descriptor;
use crate::memory::constants::GDT_ADDR;
pub const DOUBLE_FAULT_IST_INDEX: usize = 0;
/// Max size is fixed so we can have an array of these
const GDT_MAX_SIZE: usize = 8;
pub struct GdtBuilder {
addr: VirtAddr,
next_entry: usize,
}
impl GdtBuilder {
unsafe fn new(addr: VirtAddr) -> Self {
Self {
addr,
next_entry: 1, // first entry is the null descriptor, so it is not free
}
}
pub fn add_entry(&mut self, entry: Descriptor) -> SegmentSelector {
let base: *mut u64 = self.addr.as_mut_ptr();
let index = self.next_entry;
unsafe {
match entry {
Descriptor::UserSegment(value) => {
assert!(index + 1 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value);
self.next_entry += 1;
},
Descriptor::SystemSegment(value_low, value_high) =>
|
,
};
}
SegmentSelector::new(index as u16, PrivilegeLevel::Ring0)
}
pub unsafe fn load(self) {
use core::mem::size_of;
use x86_64::instructions::tables::{lgdt, DescriptorTablePointer};
let ptr = DescriptorTablePointer {
base: self.addr,
limit: (self.next_entry * size_of::<u64>() - 1) as u16,
};
lgdt(&ptr);
}
}
static USED_GDTS: AtomicU8 = AtomicU8::new(0);
/// Adds to an array of immutable GDTs, one for each processor core
pub fn create_new() -> GdtBuilder {
let index = USED_GDTS.fetch_add(1, Ordering::SeqCst);
let new_gdt_base =
GDT_ADDR.as_u64() + (index as u64) * (GDT_MAX_SIZE * size_of::<u64>()) as u64;
unsafe { GdtBuilder::new(VirtAddr::new(new_gdt_base)) }
}
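// Editor's addition (hedged usage sketch, not from the original source): how a
// caller is presumably expected to use this builder. `Descriptor::kernel_code_segment()`
// comes from the x86_64 crate and is assumed to be available in this context.
#[allow(dead_code)]
unsafe fn example_gdt_setup() {
    // Grab a fresh per-core GDT slot and add one code-segment descriptor.
    let mut gdt = create_new();
    let _code_selector = gdt.add_entry(Descriptor::kernel_code_segment());
    // Loading consumes the builder and executes `lgdt` with the built table.
    gdt.load();
}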
|
{
assert!(index + 2 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value_low);
ptr::write(base.add(self.next_entry + 1), value_high);
self.next_entry += 2;
}
|
conditional_block
|
gdt.rs
|
use core::mem::size_of;
use core::ptr;
use core::sync::atomic::{AtomicU8, Ordering};
use x86_64::structures::gdt::SegmentSelector;
use x86_64::structures::tss::TaskStateSegment;
use x86_64::{PrivilegeLevel, VirtAddr};
pub use x86_64::structures::gdt::Descriptor;
use crate::memory::constants::GDT_ADDR;
pub const DOUBLE_FAULT_IST_INDEX: usize = 0;
/// Max size is fixed so we can have an array of these
const GDT_MAX_SIZE: usize = 8;
pub struct GdtBuilder {
addr: VirtAddr,
next_entry: usize,
}
impl GdtBuilder {
unsafe fn
|
(addr: VirtAddr) -> Self {
Self {
addr,
next_entry: 1, // first entry is the null descriptor, so it is not free
}
}
pub fn add_entry(&mut self, entry: Descriptor) -> SegmentSelector {
let base: *mut u64 = self.addr.as_mut_ptr();
let index = self.next_entry;
unsafe {
match entry {
Descriptor::UserSegment(value) => {
assert!(index + 1 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value);
self.next_entry += 1;
},
Descriptor::SystemSegment(value_low, value_high) => {
assert!(index + 2 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value_low);
ptr::write(base.add(self.next_entry + 1), value_high);
self.next_entry += 2;
},
};
}
SegmentSelector::new(index as u16, PrivilegeLevel::Ring0)
}
pub unsafe fn load(self) {
use core::mem::size_of;
use x86_64::instructions::tables::{lgdt, DescriptorTablePointer};
let ptr = DescriptorTablePointer {
base: self.addr,
limit: (self.next_entry * size_of::<u64>() - 1) as u16,
};
lgdt(&ptr);
}
}
static USED_GDTS: AtomicU8 = AtomicU8::new(0);
/// Adds to an array of immutable GDTs, one for each processor core
pub fn create_new() -> GdtBuilder {
let index = USED_GDTS.fetch_add(1, Ordering::SeqCst);
let new_gdt_base =
GDT_ADDR.as_u64() + (index as u64) * (GDT_MAX_SIZE * size_of::<u64>()) as u64;
unsafe { GdtBuilder::new(VirtAddr::new(new_gdt_base)) }
}
|
new
|
identifier_name
|
gdt.rs
|
use core::mem::size_of;
use core::ptr;
use core::sync::atomic::{AtomicU8, Ordering};
use x86_64::structures::gdt::SegmentSelector;
use x86_64::structures::tss::TaskStateSegment;
use x86_64::{PrivilegeLevel, VirtAddr};
pub use x86_64::structures::gdt::Descriptor;
use crate::memory::constants::GDT_ADDR;
pub const DOUBLE_FAULT_IST_INDEX: usize = 0;
/// Max size is fixed so we can have an array of these
const GDT_MAX_SIZE: usize = 8;
pub struct GdtBuilder {
addr: VirtAddr,
next_entry: usize,
}
impl GdtBuilder {
unsafe fn new(addr: VirtAddr) -> Self
|
pub fn add_entry(&mut self, entry: Descriptor) -> SegmentSelector {
let base: *mut u64 = self.addr.as_mut_ptr();
let index = self.next_entry;
unsafe {
match entry {
Descriptor::UserSegment(value) => {
assert!(index + 1 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value);
self.next_entry += 1;
},
Descriptor::SystemSegment(value_low, value_high) => {
assert!(index + 2 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value_low);
ptr::write(base.add(self.next_entry + 1), value_high);
self.next_entry += 2;
},
};
}
SegmentSelector::new(index as u16, PrivilegeLevel::Ring0)
}
pub unsafe fn load(self) {
use core::mem::size_of;
use x86_64::instructions::tables::{lgdt, DescriptorTablePointer};
let ptr = DescriptorTablePointer {
base: self.addr,
limit: (self.next_entry * size_of::<u64>() - 1) as u16,
};
lgdt(&ptr);
}
}
static USED_GDTS: AtomicU8 = AtomicU8::new(0);
/// Adds to an array of immutable GDTs, one for each processor core
pub fn create_new() -> GdtBuilder {
let index = USED_GDTS.fetch_add(1, Ordering::SeqCst);
let new_gdt_base =
GDT_ADDR.as_u64() + (index as u64) * (GDT_MAX_SIZE * size_of::<u64>()) as u64;
unsafe { GdtBuilder::new(VirtAddr::new(new_gdt_base)) }
}
|
{
Self {
addr,
next_entry: 1, // first entry is the null descriptor, so it is not free
}
}
|
identifier_body
|
gdt.rs
|
use core::mem::size_of;
use core::ptr;
|
use core::sync::atomic::{AtomicU8, Ordering};
use x86_64::structures::gdt::SegmentSelector;
use x86_64::structures::tss::TaskStateSegment;
use x86_64::{PrivilegeLevel, VirtAddr};
pub use x86_64::structures::gdt::Descriptor;
use crate::memory::constants::GDT_ADDR;
pub const DOUBLE_FAULT_IST_INDEX: usize = 0;
/// Max size is fixed so we can have an array of these
const GDT_MAX_SIZE: usize = 8;
pub struct GdtBuilder {
addr: VirtAddr,
next_entry: usize,
}
impl GdtBuilder {
unsafe fn new(addr: VirtAddr) -> Self {
Self {
addr,
next_entry: 1, // first entry is the null descriptor, so it is not free
}
}
pub fn add_entry(&mut self, entry: Descriptor) -> SegmentSelector {
let base: *mut u64 = self.addr.as_mut_ptr();
let index = self.next_entry;
unsafe {
match entry {
Descriptor::UserSegment(value) => {
assert!(index + 1 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value);
self.next_entry += 1;
},
Descriptor::SystemSegment(value_low, value_high) => {
assert!(index + 2 < GDT_MAX_SIZE, "GDT full");
ptr::write(base.add(self.next_entry), value_low);
ptr::write(base.add(self.next_entry + 1), value_high);
self.next_entry += 2;
},
};
}
SegmentSelector::new(index as u16, PrivilegeLevel::Ring0)
}
pub unsafe fn load(self) {
use core::mem::size_of;
use x86_64::instructions::tables::{lgdt, DescriptorTablePointer};
let ptr = DescriptorTablePointer {
base: self.addr,
limit: (self.next_entry * size_of::<u64>() - 1) as u16,
};
lgdt(&ptr);
}
}
static USED_GDTS: AtomicU8 = AtomicU8::new(0);
/// Adds to an array of immutable GDTs, one for each processor core
pub fn create_new() -> GdtBuilder {
let index = USED_GDTS.fetch_add(1, Ordering::SeqCst);
let new_gdt_base =
GDT_ADDR.as_u64() + (index as u64) * (GDT_MAX_SIZE * size_of::<u64>()) as u64;
unsafe { GdtBuilder::new(VirtAddr::new(new_gdt_base)) }
}
|
random_line_split
|
|
from_into.rs
|
// The From trait is used for value-to-value conversions.
// If From is implemented correctly for a type, the Into trait should work conversely.
// You can read more about it at https://doc.rust-lang.org/std/convert/trait.From.html
#[derive(Debug)]
struct Person {
name: String,
age: usize,
}
// We implement the Default trait to use it as a fallback
// when the provided string is not convertible into a Person object
impl Default for Person {
fn default() -> Person {
Person {
name: String::from("John"),
age: 30,
}
}
}
// I AM NOT DONE
// Your task is to complete this implementation
// in order for the line `let p = Person::from("Mark,20")` to compile
// Please note that you'll need to parse the age component into a `usize`
// with something like `"4".parse::<usize>()`. The outcome of this needs to
// be handled appropriately.
//
// Steps:
// 1. If the length of the provided string is 0, then return the default of Person
// 2. Split the given string on the commas present in it
// 3. Extract the first element from the split operation and use it as the name
// 4. If the name is empty, then return the default of Person
// 5. Extract the other element from the split operation and parse it into a `usize` as the age
// If while parsing the age, something goes wrong, then return the default of Person
// Otherwise, then return an instantiated Person object with the results
impl From<&str> for Person {
fn from(s: &str) -> Person {
}
}
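// Editor's addition (hedged sketch, not part of the original exercise file):
// one way to satisfy the numbered steps above, written as a free function so it
// does not collide with the unfinished `impl From<&str> for Person` block.
#[allow(dead_code)]
fn person_from_str_sketch(s: &str) -> Person {
    // Step 1: an empty input falls back to the default person.
    if s.is_empty() {
        return Person::default();
    }
    // Step 2: split on commas; steps 3-5: take the name and age, falling back
    // to the default whenever the name is empty or the age does not parse.
    let mut parts = s.split(',');
    match (parts.next(), parts.next()) {
        (Some(name), Some(age)) if !name.is_empty() => match age.parse::<usize>() {
            Ok(age) => Person {
                name: name.to_string(),
                age,
            },
            Err(_) => Person::default(),
        },
        _ => Person::default(),
    }
}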
fn main() {
// Use the `from` function
let p1 = Person::from("Mark,20");
// Since From is implemented for Person, we should be able to use Into
let p2: Person = "Gerald,70".into();
println!("{:?}", p1);
println!("{:?}", p2);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_default() {
// Test that the default person is 30 year old John
let dp = Person::default();
assert_eq!(dp.name, "John");
assert_eq!(dp.age, 30);
}
#[test]
fn test_bad_convert() {
// Test that John is returned when bad string is provided
let p = Person::from("");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_good_convert() {
// Test that "Mark,20" works
let p = Person::from("Mark,20");
assert_eq!(p.name, "Mark");
assert_eq!(p.age, 20);
}
#[test]
fn test_bad_age() {
// Test that "Mark.twenty" will return the default person due to an error in parsing age
let p = Person::from("Mark,twenty");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_comma_and_age()
|
#[test]
fn test_missing_age() {
let p: Person = Person::from("Mark,");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_name() {
let p: Person = Person::from(",1");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_name_and_age() {
let p: Person = Person::from(",");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_name_and_invalid_age() {
let p: Person = Person::from(",one");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
}
|
{
let p: Person = Person::from("Mark");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
|
identifier_body
|
from_into.rs
|
// The From trait is used for value-to-value conversions.
// If From is implemented correctly for a type, the Into trait should work conversely.
// You can read more about it at https://doc.rust-lang.org/std/convert/trait.From.html
#[derive(Debug)]
struct Person {
name: String,
age: usize,
}
// We implement the Default trait to use it as a fallback
// when the provided string is not convertible into a Person object
impl Default for Person {
fn default() -> Person {
Person {
name: String::from("John"),
age: 30,
}
}
}
// I AM NOT DONE
// Your task is to complete this implementation
// in order for the line `let p = Person::from("Mark,20")` to compile
// Please note that you'll need to parse the age component into a `usize`
// with something like `"4".parse::<usize>()`. The outcome of this needs to
// be handled appropriately.
//
// Steps:
// 1. If the length of the provided string is 0, then return the default of Person
// 2. Split the given string on the commas present in it
// 3. Extract the first element from the split operation and use it as the name
// 4. If the name is empty, then return the default of Person
// 5. Extract the other element from the split operation and parse it into a `usize` as the age
// If while parsing the age, something goes wrong, then return the default of Person
// Otherwise, then return an instantiated Person object with the results
impl From<&str> for Person {
fn from(s: &str) -> Person {
}
}
fn main() {
// Use the `from` function
let p1 = Person::from("Mark,20");
// Since From is implemented for Person, we should be able to use Into
let p2: Person = "Gerald,70".into();
println!("{:?}", p1);
println!("{:?}", p2);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_default() {
// Test that the default person is 30 year old John
let dp = Person::default();
assert_eq!(dp.name, "John");
assert_eq!(dp.age, 30);
}
#[test]
fn test_bad_convert() {
// Test that John is returned when bad string is provided
let p = Person::from("");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_good_convert() {
// Test that "Mark,20" works
let p = Person::from("Mark,20");
assert_eq!(p.name, "Mark");
assert_eq!(p.age, 20);
}
#[test]
fn test_bad_age() {
// Test that "Mark.twenty" will return the default person due to an error in parsing age
let p = Person::from("Mark,twenty");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_comma_and_age() {
let p: Person = Person::from("Mark");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_age() {
let p: Person = Person::from("Mark,");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
|
let p: Person = Person::from(",1");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_name_and_age() {
let p: Person = Person::from(",");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_name_and_invalid_age() {
let p: Person = Person::from(",one");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
}
|
}
#[test]
fn test_missing_name() {
|
random_line_split
|
from_into.rs
|
// The From trait is used for value-to-value conversions.
// If From is implemented correctly for a type, the Into trait should work conversely.
// You can read more about it at https://doc.rust-lang.org/std/convert/trait.From.html
#[derive(Debug)]
struct Person {
name: String,
age: usize,
}
// We implement the Default trait to use it as a fallback
// when the provided string is not convertible into a Person object
impl Default for Person {
fn default() -> Person {
Person {
name: String::from("John"),
age: 30,
}
}
}
// I AM NOT DONE
// Your task is to complete this implementation
// in order for the line `let p = Person::from("Mark,20")` to compile
// Please note that you'll need to parse the age component into a `usize`
// with something like `"4".parse::<usize>()`. The outcome of this needs to
// be handled appropriately.
//
// Steps:
// 1. If the length of the provided string is 0, then return the default of Person
// 2. Split the given string on the commas present in it
// 3. Extract the first element from the split operation and use it as the name
// 4. If the name is empty, then return the default of Person
// 5. Extract the other element from the split operation and parse it into a `usize` as the age
// If while parsing the age, something goes wrong, then return the default of Person
// Otherwise, then return an instantiated Person object with the results
impl From<&str> for Person {
fn from(s: &str) -> Person {
}
}
fn main() {
// Use the `from` function
let p1 = Person::from("Mark,20");
// Since From is implemented for Person, we should be able to use Into
let p2: Person = "Gerald,70".into();
println!("{:?}", p1);
println!("{:?}", p2);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_default() {
// Test that the default person is 30 year old John
let dp = Person::default();
assert_eq!(dp.name, "John");
assert_eq!(dp.age, 30);
}
#[test]
fn test_bad_convert() {
// Test that John is returned when bad string is provided
let p = Person::from("");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_good_convert() {
// Test that "Mark,20" works
let p = Person::from("Mark,20");
assert_eq!(p.name, "Mark");
assert_eq!(p.age, 20);
}
#[test]
fn test_bad_age() {
// Test that "Mark.twenty" will return the default person due to an error in parsing age
let p = Person::from("Mark,twenty");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_comma_and_age() {
let p: Person = Person::from("Mark");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_age() {
let p: Person = Person::from("Mark,");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn
|
() {
let p: Person = Person::from(",1");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_name_and_age() {
let p: Person = Person::from(",");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
#[test]
fn test_missing_name_and_invalid_age() {
let p: Person = Person::from(",one");
assert_eq!(p.name, "John");
assert_eq!(p.age, 30);
}
}
|
test_missing_name
|
identifier_name
|
variables.rs
|
use std::convert::TryFrom;
use vm::types::Value;
use vm::contexts::{LocalContext, Environment};
use vm::errors::{RuntimeErrorType, InterpreterResult as Result};
define_named_enum!(NativeVariables {
ContractCaller("contract-caller"), TxSender("tx-sender"), BlockHeight("block-height"),
|
NativeTrue("true"), NativeFalse("false"),
});
pub fn is_reserved_name(name: &str) -> bool {
NativeVariables::lookup_by_name(name).is_some()
}
pub fn lookup_reserved_variable(name: &str, _context: &LocalContext, env: &mut Environment) -> Result<Option<Value>> {
if let Some(variable) = NativeVariables::lookup_by_name(name) {
match variable {
NativeVariables::TxSender => {
let sender = env.sender.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::ContractCaller => {
let sender = env.caller.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::BlockHeight => {
let block_height = env.global_context.database.get_current_block_height();
Ok(Some(Value::UInt(block_height as u128)))
},
NativeVariables::BurnBlockHeight => {
Err(RuntimeErrorType::NotImplemented.into())
},
NativeVariables::NativeNone => {
Ok(Some(Value::none()))
},
NativeVariables::NativeTrue => {
Ok(Some(Value::Bool(true)))
},
NativeVariables::NativeFalse => {
Ok(Some(Value::Bool(false)))
},
}
} else {
Ok(None)
}
}
|
BurnBlockHeight("burn-block-height"), NativeNone("none"),
|
random_line_split
|
variables.rs
|
use std::convert::TryFrom;
use vm::types::Value;
use vm::contexts::{LocalContext, Environment};
use vm::errors::{RuntimeErrorType, InterpreterResult as Result};
define_named_enum!(NativeVariables {
ContractCaller("contract-caller"), TxSender("tx-sender"), BlockHeight("block-height"),
BurnBlockHeight("burn-block-height"), NativeNone("none"),
NativeTrue("true"), NativeFalse("false"),
});
pub fn is_reserved_name(name: &str) -> bool {
NativeVariables::lookup_by_name(name).is_some()
}
pub fn
|
(name: &str, _context: &LocalContext, env: &mut Environment) -> Result<Option<Value>> {
if let Some(variable) = NativeVariables::lookup_by_name(name) {
match variable {
NativeVariables::TxSender => {
let sender = env.sender.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::ContractCaller => {
let sender = env.caller.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::BlockHeight => {
let block_height = env.global_context.database.get_current_block_height();
Ok(Some(Value::UInt(block_height as u128)))
},
NativeVariables::BurnBlockHeight => {
Err(RuntimeErrorType::NotImplemented.into())
},
NativeVariables::NativeNone => {
Ok(Some(Value::none()))
},
NativeVariables::NativeTrue => {
Ok(Some(Value::Bool(true)))
},
NativeVariables::NativeFalse => {
Ok(Some(Value::Bool(false)))
},
}
} else {
Ok(None)
}
}
|
lookup_reserved_variable
|
identifier_name
|
variables.rs
|
use std::convert::TryFrom;
use vm::types::Value;
use vm::contexts::{LocalContext, Environment};
use vm::errors::{RuntimeErrorType, InterpreterResult as Result};
define_named_enum!(NativeVariables {
ContractCaller("contract-caller"), TxSender("tx-sender"), BlockHeight("block-height"),
BurnBlockHeight("burn-block-height"), NativeNone("none"),
NativeTrue("true"), NativeFalse("false"),
});
pub fn is_reserved_name(name: &str) -> bool {
NativeVariables::lookup_by_name(name).is_some()
}
pub fn lookup_reserved_variable(name: &str, _context: &LocalContext, env: &mut Environment) -> Result<Option<Value>>
|
NativeVariables::NativeNone => {
Ok(Some(Value::none()))
},
NativeVariables::NativeTrue => {
Ok(Some(Value::Bool(true)))
},
NativeVariables::NativeFalse => {
Ok(Some(Value::Bool(false)))
},
}
} else {
Ok(None)
}
}
|
{
if let Some(variable) = NativeVariables::lookup_by_name(name) {
match variable {
NativeVariables::TxSender => {
let sender = env.sender.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::ContractCaller => {
let sender = env.caller.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::BlockHeight => {
let block_height = env.global_context.database.get_current_block_height();
Ok(Some(Value::UInt(block_height as u128)))
},
NativeVariables::BurnBlockHeight => {
Err(RuntimeErrorType::NotImplemented.into())
},
|
identifier_body
|
variables.rs
|
use std::convert::TryFrom;
use vm::types::Value;
use vm::contexts::{LocalContext, Environment};
use vm::errors::{RuntimeErrorType, InterpreterResult as Result};
define_named_enum!(NativeVariables {
ContractCaller("contract-caller"), TxSender("tx-sender"), BlockHeight("block-height"),
BurnBlockHeight("burn-block-height"), NativeNone("none"),
NativeTrue("true"), NativeFalse("false"),
});
pub fn is_reserved_name(name: &str) -> bool {
NativeVariables::lookup_by_name(name).is_some()
}
pub fn lookup_reserved_variable(name: &str, _context: &LocalContext, env: &mut Environment) -> Result<Option<Value>> {
if let Some(variable) = NativeVariables::lookup_by_name(name) {
match variable {
NativeVariables::TxSender => {
let sender = env.sender.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::ContractCaller => {
let sender = env.caller.clone()
.ok_or(RuntimeErrorType::NoSenderInContext)?;
Ok(Some(sender))
},
NativeVariables::BlockHeight =>
|
,
NativeVariables::BurnBlockHeight => {
Err(RuntimeErrorType::NotImplemented.into())
},
NativeVariables::NativeNone => {
Ok(Some(Value::none()))
},
NativeVariables::NativeTrue => {
Ok(Some(Value::Bool(true)))
},
NativeVariables::NativeFalse => {
Ok(Some(Value::Bool(false)))
},
}
} else {
Ok(None)
}
}
|
{
let block_height = env.global_context.database.get_current_block_height();
Ok(Some(Value::UInt(block_height as u128)))
}
|
conditional_block
|
lib.rs
|
#[macro_use]
extern crate serde_derive;
extern crate chrono;
extern crate hyper;
extern crate hyper_native_tls;
extern crate serde_json;
pub mod types;
pub use types::*;
use std::io::Read;
use hyper::Client;
use hyper::client::RequestBuilder;
use hyper::net::HttpsConnector;
use hyper::header::{Authorization, Link, LinkValue, UserAgent};
use hyper::header::RelationType;
use hyper::method::Method;
use hyper_native_tls::NativeTlsClient;
pub fn print_branch_info(branches_info: &Vec<BranchInfo>, ctx: &Context) {
println!(
"{0:<50} | {1:<10} | {2:<10} | {3:<10} | {4:<10}",
"Branch",
"Ahead",
"Behind",
"Age",
"Will Delete"
);
for branch in branches_info {
let branch_name: String = branch.branch.name.chars().take(50).collect();
println!(
"{0:<50} | {1:<10} | {2:<10} | {3:<10} | {4:<10}",
branch_name,
branch.ahead,
branch.behind,
branch.age.num_days(),
will_delete(branch, &ctx)
);
}
}
pub fn will_delete(branch: &BranchInfo, ctx: &Context) -> bool {
branch.ahead == 0 && branch.age.num_days() >= ctx.days_ago.into()
}
pub fn delete_branch(ctx: &Context, branch: BranchInfo) -> bool {
assert!(will_delete(&branch, ctx));
let url = format!(
"https://api.github.com/repos/{}/{}/git/refs/heads/{}",
ctx.owner,
ctx.repo,
branch.branch.name
);
let client = get_client();
let request = get_request(&client, &ctx.token, &url, Method::Delete);
let res = match request.send() {
Ok(res) => {
            if !res.status.is_success() {
println!("{}", res.status);
}
res.status.is_success()
}
Err(e) => {
println!("{}", e);
false
}
};
res
}
pub fn get_request<'a>(
client: &'a Client,
token: &'a str,
url: &'a str,
method: Method,
) -> RequestBuilder<'a> {
let auth = format!("token {}", token.clone());
let req = client
.request(method, url)
.header(UserAgent("branch-destroyer 1.0".to_string()))
.header(Authorization(auth));
req
}
pub fn get_client() -> Client {
let ssl = NativeTlsClient::new().unwrap();
let connector = HttpsConnector::new(ssl);
Client::with_connector(connector)
}
pub fn get_repository(ctx: &mut Context) {
let url = format!("https://api.github.com/repos/{}/{}", ctx.owner, ctx.repo);
let client = get_client();
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let repo: Repository = serde_json::from_str(&content).unwrap();
ctx.default_branch = repo.default_branch;
ctx.repo_id = repo.id;
}
pub fn get_branches(ctx: &Context) -> Vec<Branch> {
let first_url = format!(
"https://api.github.com/repos/{}/{}/branches?per_page=100",
ctx.owner,
ctx.repo
);
let mut next_url = Some(first_url);
let mut all_branches: Vec<Branch> = vec![];
let mut i = 0;
while let Some(url) = next_url {
let results = get_branches_and_next(url, &ctx);
let mut results_branches = results.1;
all_branches.append(&mut results_branches);
next_url = results.0;
i = i + 1;
if i > 100 {
panic!("way too many branch iterations!");
}
}
println!(
"found {} branches, taking {} iterations",
all_branches.len(),
i
);
all_branches
}
fn get_branches_and_next(url: String, ctx: &Context) -> (Option<String>, Vec<Branch>)
|
/// Extract link={rel_type} values from the header collection
///
/// Returns GetLinkErr if there's no link header or no link header whose rel={rel_type}
fn get_link_value<'a>(
headers: &hyper::header::Headers,
rel_type: RelationType,
) -> Result<LinkValue, GetLinkErr> {
let link = match headers.get::<Link>() {
Some(x) => Ok(x),
None => Err(GetLinkErr::NoLinkHeader),
};
let next: Result<LinkValue, GetLinkErr> = link.and_then(|x| {
let a = x.values()
.into_iter()
.filter(|x| match x.rel() {
Some(x) => match x[0] {
ref r if (r == &rel_type) => true,
_ => false,
},
_ => false,
})
.next();
match a {
Some(l) => Ok(l.clone()),
None => Err(GetLinkErr::NoMatchingRel(rel_type)),
}
});
next
}
pub fn get_branch_compare_info(ctx: &Context, branch: Branch) -> BranchInfo {
let client = get_client();
let url = format!(
"https://api.github.com/repos/{}/{}/compare/{}...{}",
ctx.owner,
ctx.repo,
ctx.default_branch,
branch.name
);
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let compare_result: ComparisonResult = serde_json::from_str(&content).unwrap();
let mut latest_commit = &compare_result.merge_base_commit;
if compare_result.commits.len() > 0 {
let i = compare_result.commits.len();
latest_commit = &compare_result.commits[i - 1];
}
let age = chrono::Utc::now().signed_duration_since(latest_commit.commit.author.date);
BranchInfo {
branch,
ahead: compare_result.ahead_by,
behind: compare_result.behind_by,
age,
}
}
#[derive(Debug)]
pub enum GetLinkErr {
NoLinkHeader,
NoMatchingRel(RelationType),
}
#[cfg(test)]
mod tests {
use super::hyper::header::{Headers, Link, LinkValue, RelationType};
use super::{get_link_value, GetLinkErr};
#[test]
pub fn get_link_value_works() {
let next_link = LinkValue::new("https://google.com").push_rel(RelationType::Next);
let prev_link = LinkValue::new("https://reddit.com").push_rel(RelationType::Prev);
let mut headers = Headers::new();
headers.set(Link::new(vec![next_link, prev_link]));
let next_res = get_link_value(&headers, RelationType::Next);
let prev_res = get_link_value(&headers, RelationType::Prev);
let alt_res = get_link_value(&headers, RelationType::Alternate);
assert!(
next_res.is_ok(),
"we should be able to fetch rel=next because it is in the collection"
);
assert!(
prev_res.is_ok(),
"we should be able to fetch rel=prev because it is in the collection"
);
match alt_res {
Ok(_) => assert!(false, "we should not be able to fetch a missing rel"),
Err(GetLinkErr::NoLinkHeader) => assert!(
false,
"we should not see NoLinkHeader when the collection has a link"
),
Err(GetLinkErr::NoMatchingRel(_)) => assert!(true),
}
}
#[test]
pub fn get_link_value_returns_err_for_headers_without_link() {
let headers = Headers::new();
let res = get_link_value(&headers, RelationType::Next);
match res {
Ok(_) => assert!(
false,
"we should not be able to fetch any link header if there are no link headers"
),
Err(GetLinkErr::NoMatchingRel(_)) => assert!(
false,
"we should not get this error if there are no link headers"
),
Err(GetLinkErr::NoLinkHeader) => assert!(true),
}
}
}
/*
fn build_url(days_old: u32) {}
fn delete_branch() {}
fn format_branch_info() {}
fn get_comparison(head: String, base: String, context: Context) {}
*/
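// Editor's addition (hedged sketch, not in the original source): one plausible
// way the binary that drives this library wires the helpers above together.
#[allow(dead_code)]
fn example_destroy_branches(mut ctx: Context) {
    // Resolve the default branch and repo id, then compare every branch to it.
    get_repository(&mut ctx);
    let infos: Vec<BranchInfo> = get_branches(&ctx)
        .into_iter()
        .map(|branch| get_branch_compare_info(&ctx, branch))
        .collect();
    print_branch_info(&infos, &ctx);
    // Delete only the branches that will_delete() marks as stale and merged.
    for info in infos {
        if will_delete(&info, &ctx) {
            delete_branch(&ctx, info);
        }
    }
}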
|
{
let client = get_client();
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let link_value = get_link_value(&res.headers, RelationType::Next);
let data: Vec<Branch> = serde_json::from_str(&content).unwrap();
(
link_value.ok().and_then(|x| Some(x.link().to_string())),
data,
)
}
|
identifier_body
|
lib.rs
|
#[macro_use]
extern crate serde_derive;
extern crate chrono;
extern crate hyper;
extern crate hyper_native_tls;
extern crate serde_json;
pub mod types;
pub use types::*;
use std::io::Read;
use hyper::Client;
use hyper::client::RequestBuilder;
use hyper::net::HttpsConnector;
use hyper::header::{Authorization, Link, LinkValue, UserAgent};
use hyper::header::RelationType;
use hyper::method::Method;
use hyper_native_tls::NativeTlsClient;
pub fn print_branch_info(branches_info: &Vec<BranchInfo>, ctx: &Context) {
println!(
"{0:<50} | {1:<10} | {2:<10} | {3:<10} | {4:<10}",
"Branch",
"Ahead",
"Behind",
"Age",
"Will Delete"
);
for branch in branches_info {
let branch_name: String = branch.branch.name.chars().take(50).collect();
println!(
"{0:<50} | {1:<10} | {2:<10} | {3:<10} | {4:<10}",
branch_name,
branch.ahead,
branch.behind,
branch.age.num_days(),
will_delete(branch, &ctx)
);
}
}
pub fn will_delete(branch: &BranchInfo, ctx: &Context) -> bool {
branch.ahead == 0 && branch.age.num_days() >= ctx.days_ago.into()
}
pub fn delete_branch(ctx: &Context, branch: BranchInfo) -> bool {
assert!(will_delete(&branch, ctx));
let url = format!(
"https://api.github.com/repos/{}/{}/git/refs/heads/{}",
ctx.owner,
ctx.repo,
branch.branch.name
);
let client = get_client();
let request = get_request(&client, &ctx.token, &url, Method::Delete);
let res = match request.send() {
Ok(res) => {
            if !res.status.is_success() {
println!("{}", res.status);
}
res.status.is_success()
}
Err(e) => {
println!("{}", e);
false
}
};
res
}
pub fn get_request<'a>(
client: &'a Client,
token: &'a str,
url: &'a str,
method: Method,
) -> RequestBuilder<'a> {
let auth = format!("token {}", token.clone());
let req = client
.request(method, url)
.header(UserAgent("branch-destroyer 1.0".to_string()))
.header(Authorization(auth));
req
}
pub fn get_client() -> Client {
let ssl = NativeTlsClient::new().unwrap();
let connector = HttpsConnector::new(ssl);
Client::with_connector(connector)
}
pub fn get_repository(ctx: &mut Context) {
let url = format!("https://api.github.com/repos/{}/{}", ctx.owner, ctx.repo);
let client = get_client();
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let repo: Repository = serde_json::from_str(&content).unwrap();
ctx.default_branch = repo.default_branch;
ctx.repo_id = repo.id;
}
pub fn get_branches(ctx: &Context) -> Vec<Branch> {
let first_url = format!(
"https://api.github.com/repos/{}/{}/branches?per_page=100",
ctx.owner,
ctx.repo
);
let mut next_url = Some(first_url);
let mut all_branches: Vec<Branch> = vec![];
let mut i = 0;
while let Some(url) = next_url {
let results = get_branches_and_next(url, &ctx);
let mut results_branches = results.1;
all_branches.append(&mut results_branches);
next_url = results.0;
i = i + 1;
if i > 100 {
panic!("way too many branch iterations!");
}
}
println!(
"found {} branches, taking {} iterations",
all_branches.len(),
i
);
|
fn get_branches_and_next(url: String, ctx: &Context) -> (Option<String>, Vec<Branch>) {
let client = get_client();
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let link_value = get_link_value(&res.headers, RelationType::Next);
let data: Vec<Branch> = serde_json::from_str(&content).unwrap();
(
link_value.ok().and_then(|x| Some(x.link().to_string())),
data,
)
}
/// Extract link={rel_type} values from the header collection
///
/// Returns GetLinkErr if there's no link header or no link header whose rel={rel_type}
fn get_link_value<'a>(
headers: &hyper::header::Headers,
rel_type: RelationType,
) -> Result<LinkValue, GetLinkErr> {
let link = match headers.get::<Link>() {
Some(x) => Ok(x),
None => Err(GetLinkErr::NoLinkHeader),
};
let next: Result<LinkValue, GetLinkErr> = link.and_then(|x| {
let a = x.values()
.into_iter()
.filter(|x| match x.rel() {
Some(x) => match x[0] {
ref r if (r == &rel_type) => true,
_ => false,
},
_ => false,
})
.next();
match a {
Some(l) => Ok(l.clone()),
None => Err(GetLinkErr::NoMatchingRel(rel_type)),
}
});
next
}
pub fn get_branch_compare_info(ctx: &Context, branch: Branch) -> BranchInfo {
let client = get_client();
let url = format!(
"https://api.github.com/repos/{}/{}/compare/{}...{}",
ctx.owner,
ctx.repo,
ctx.default_branch,
branch.name
);
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let compare_result: ComparisonResult = serde_json::from_str(&content).unwrap();
let mut latest_commit = &compare_result.merge_base_commit;
if compare_result.commits.len() > 0 {
let i = compare_result.commits.len();
latest_commit = &compare_result.commits[i - 1];
}
let age = chrono::Utc::now().signed_duration_since(latest_commit.commit.author.date);
BranchInfo {
branch,
ahead: compare_result.ahead_by,
behind: compare_result.behind_by,
age,
}
}
#[derive(Debug)]
pub enum GetLinkErr {
NoLinkHeader,
NoMatchingRel(RelationType),
}
#[cfg(test)]
mod tests {
use super::hyper::header::{Headers, Link, LinkValue, RelationType};
use super::{get_link_value, GetLinkErr};
#[test]
pub fn get_link_value_works() {
let next_link = LinkValue::new("https://google.com").push_rel(RelationType::Next);
let prev_link = LinkValue::new("https://reddit.com").push_rel(RelationType::Prev);
let mut headers = Headers::new();
headers.set(Link::new(vec![next_link, prev_link]));
let next_res = get_link_value(&headers, RelationType::Next);
let prev_res = get_link_value(&headers, RelationType::Prev);
let alt_res = get_link_value(&headers, RelationType::Alternate);
assert!(
next_res.is_ok(),
"we should be able to fetch rel=next because it is in the collection"
);
assert!(
prev_res.is_ok(),
"we should be able to fetch rel=prev because it is in the collection"
);
match alt_res {
Ok(_) => assert!(false, "we should not be able to fetch a missing rel"),
Err(GetLinkErr::NoLinkHeader) => assert!(
false,
"we should not see NoLinkHeader when the collection has a link"
),
Err(GetLinkErr::NoMatchingRel(_)) => assert!(true),
}
}
#[test]
pub fn get_link_value_returns_err_for_headers_without_link() {
let headers = Headers::new();
let res = get_link_value(&headers, RelationType::Next);
match res {
Ok(_) => assert!(
false,
"we should not be able to fetch any link header if there are no link headers"
),
Err(GetLinkErr::NoMatchingRel(_)) => assert!(
false,
"we should not get this error if there are no link headers"
),
Err(GetLinkErr::NoLinkHeader) => assert!(true),
}
}
}
/*
fn build_url(days_old: u32) {}
fn delete_branch() {}
fn format_branch_info() {}
fn get_comparison(head: String, base: String, context: Context) {}
*/
|
all_branches
}
|
random_line_split
|
lib.rs
|
#[macro_use]
extern crate serde_derive;
extern crate chrono;
extern crate hyper;
extern crate hyper_native_tls;
extern crate serde_json;
pub mod types;
pub use types::*;
use std::io::Read;
use hyper::Client;
use hyper::client::RequestBuilder;
use hyper::net::HttpsConnector;
use hyper::header::{Authorization, Link, LinkValue, UserAgent};
use hyper::header::RelationType;
use hyper::method::Method;
use hyper_native_tls::NativeTlsClient;
pub fn print_branch_info(branches_info: &Vec<BranchInfo>, ctx: &Context) {
println!(
"{0:<50} | {1:<10} | {2:<10} | {3:<10} | {4:<10}",
"Branch",
"Ahead",
"Behind",
"Age",
"Will Delete"
);
for branch in branches_info {
let branch_name: String = branch.branch.name.chars().take(50).collect();
println!(
"{0:<50} | {1:<10} | {2:<10} | {3:<10} | {4:<10}",
branch_name,
branch.ahead,
branch.behind,
branch.age.num_days(),
will_delete(branch, &ctx)
);
}
}
pub fn will_delete(branch: &BranchInfo, ctx: &Context) -> bool {
branch.ahead == 0 && branch.age.num_days() >= ctx.days_ago.into()
}
pub fn delete_branch(ctx: &Context, branch: BranchInfo) -> bool {
assert!(will_delete(&branch, ctx));
let url = format!(
"https://api.github.com/repos/{}/{}/git/refs/heads/{}",
ctx.owner,
ctx.repo,
branch.branch.name
);
let client = get_client();
let request = get_request(&client, &ctx.token, &url, Method::Delete);
let res = match request.send() {
Ok(res) => {
            if !res.status.is_success() {
println!("{}", res.status);
}
res.status.is_success()
}
Err(e) => {
println!("{}", e);
false
}
};
res
}
pub fn get_request<'a>(
client: &'a Client,
token: &'a str,
url: &'a str,
method: Method,
) -> RequestBuilder<'a> {
let auth = format!("token {}", token.clone());
let req = client
.request(method, url)
.header(UserAgent("branch-destroyer 1.0".to_string()))
.header(Authorization(auth));
req
}
pub fn
|
() -> Client {
let ssl = NativeTlsClient::new().unwrap();
let connector = HttpsConnector::new(ssl);
Client::with_connector(connector)
}
pub fn get_repository(ctx: &mut Context) {
let url = format!("https://api.github.com/repos/{}/{}", ctx.owner, ctx.repo);
let client = get_client();
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let repo: Repository = serde_json::from_str(&content).unwrap();
ctx.default_branch = repo.default_branch;
ctx.repo_id = repo.id;
}
pub fn get_branches(ctx: &Context) -> Vec<Branch> {
let first_url = format!(
"https://api.github.com/repos/{}/{}/branches?per_page=100",
ctx.owner,
ctx.repo
);
let mut next_url = Some(first_url);
let mut all_branches: Vec<Branch> = vec![];
let mut i = 0;
while let Some(url) = next_url {
let results = get_branches_and_next(url, &ctx);
let mut results_branches = results.1;
all_branches.append(&mut results_branches);
next_url = results.0;
i = i + 1;
if i > 100 {
panic!("way too many branch iterations!");
}
}
println!(
"found {} branches, taking {} iterations",
all_branches.len(),
i
);
all_branches
}
fn get_branches_and_next(url: String, ctx: &Context) -> (Option<String>, Vec<Branch>) {
let client = get_client();
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let link_value = get_link_value(&res.headers, RelationType::Next);
let data: Vec<Branch> = serde_json::from_str(&content).unwrap();
(
link_value.ok().and_then(|x| Some(x.link().to_string())),
data,
)
}
/// Extract link={rel_type} values from the header collection
///
/// Returns GetLinkErr if there's no link header or no link header whose rel={rel_type}
fn get_link_value<'a>(
headers: &hyper::header::Headers,
rel_type: RelationType,
) -> Result<LinkValue, GetLinkErr> {
let link = match headers.get::<Link>() {
Some(x) => Ok(x),
None => Err(GetLinkErr::NoLinkHeader),
};
let next: Result<LinkValue, GetLinkErr> = link.and_then(|x| {
let a = x.values()
.into_iter()
.filter(|x| match x.rel() {
Some(x) => match x[0] {
ref r if (r == &rel_type) => true,
_ => false,
},
_ => false,
})
.next();
match a {
Some(l) => Ok(l.clone()),
None => Err(GetLinkErr::NoMatchingRel(rel_type)),
}
});
next
}
pub fn get_branch_compare_info(ctx: &Context, branch: Branch) -> BranchInfo {
let client = get_client();
let url = format!(
"https://api.github.com/repos/{}/{}/compare/{}...{}",
ctx.owner,
ctx.repo,
ctx.default_branch,
branch.name
);
let mut res = get_request(&client, &ctx.token, &url, Method::Get)
.send()
.unwrap();
let mut content = String::new();
res.read_to_string(&mut content).unwrap();
let compare_result: ComparisonResult = serde_json::from_str(&content).unwrap();
let mut latest_commit = &compare_result.merge_base_commit;
if compare_result.commits.len() > 0 {
let i = compare_result.commits.len();
latest_commit = &compare_result.commits[i - 1];
}
let age = chrono::Utc::now().signed_duration_since(latest_commit.commit.author.date);
BranchInfo {
branch,
ahead: compare_result.ahead_by,
behind: compare_result.behind_by,
age,
}
}
#[derive(Debug)]
pub enum GetLinkErr {
NoLinkHeader,
NoMatchingRel(RelationType),
}
#[cfg(test)]
mod tests {
use super::hyper::header::{Headers, Link, LinkValue, RelationType};
use super::{get_link_value, GetLinkErr};
#[test]
pub fn get_link_value_works() {
let next_link = LinkValue::new("https://google.com").push_rel(RelationType::Next);
let prev_link = LinkValue::new("https://reddit.com").push_rel(RelationType::Prev);
let mut headers = Headers::new();
headers.set(Link::new(vec![next_link, prev_link]));
let next_res = get_link_value(&headers, RelationType::Next);
let prev_res = get_link_value(&headers, RelationType::Prev);
let alt_res = get_link_value(&headers, RelationType::Alternate);
assert!(
next_res.is_ok(),
"we should be able to fetch rel=next because it is in the collection"
);
assert!(
prev_res.is_ok(),
"we should be able to fetch rel=prev because it is in the collection"
);
match alt_res {
Ok(_) => assert!(false, "we should not be able to fetch a missing rel"),
Err(GetLinkErr::NoLinkHeader) => assert!(
false,
"we should not see NoLinkHeader when the collection has a link"
),
Err(GetLinkErr::NoMatchingRel(_)) => assert!(true),
}
}
#[test]
pub fn get_link_value_returns_err_for_headers_without_link() {
let headers = Headers::new();
let res = get_link_value(&headers, RelationType::Next);
match res {
Ok(_) => assert!(
false,
"we should not be able to fetch any link header if there are no link headers"
),
Err(GetLinkErr::NoMatchingRel(_)) => assert!(
false,
"we should not get this error if there are no link headers"
),
Err(GetLinkErr::NoLinkHeader) => assert!(true),
}
}
}
/*
fn build_url(days_old: u32) {}
fn delete_branch() {}
fn format_branch_info() {}
fn get_comparison(head: String, base: String, context: Context) {}
*/
|
get_client
|
identifier_name
|
app_with_file_descriptor.rs
|
/*
Copyright 2017 Jinjing Wang
This file is part of mtcp.
mtcp is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
mtcp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mtcp. If not, see <http://www.gnu.org/licenses/>.
*/
use platform::tun_file_descriptor::init_file_descriptor;
use handler::http_proxy::app::app;
use handler::http_proxy::server::Server;
use std::fs::File;
pub fn start_app
(
tun_file: File,
server: Server,
)
|
{
debug!("start_app_with_file..");
let (tun_in_sender, tun_out_receiver) =
app::start_app(server);
init_file_descriptor(tun_in_sender, tun_out_receiver, tun_file);
}
|
identifier_body
|
|
app_with_file_descriptor.rs
|
/*
Copyright 2017 Jinjing Wang
This file is part of mtcp.
mtcp is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
mtcp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mtcp. If not, see <http://www.gnu.org/licenses/>.
*/
use platform::tun_file_descriptor::init_file_descriptor;
use handler::http_proxy::app::app;
use handler::http_proxy::server::Server;
use std::fs::File;
pub fn
|
(
tun_file: File,
server: Server,
)
{
debug!("start_app_with_file..");
let (tun_in_sender, tun_out_receiver) =
app::start_app(server);
init_file_descriptor(tun_in_sender, tun_out_receiver, tun_file);
}
|
start_app
|
identifier_name
|
app_with_file_descriptor.rs
|
/*
Copyright 2017 Jinjing Wang
|
mtcp is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
mtcp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mtcp. If not, see <http://www.gnu.org/licenses/>.
*/
use platform::tun_file_descriptor::init_file_descriptor;
use handler::http_proxy::app::app;
use handler::http_proxy::server::Server;
use std::fs::File;
pub fn start_app
(
tun_file: File,
server: Server,
)
{
debug!("start_app_with_file..");
let (tun_in_sender, tun_out_receiver) =
app::start_app(server);
init_file_descriptor(tun_in_sender, tun_out_receiver, tun_file);
}
|
This file is part of mtcp.
|
random_line_split
|
foreach-external-iterators-hashmap-break-restart.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
use collections::HashMap;
// This is a fancy one: it uses an external iterator established
// outside the loop, breaks, then _picks back up_ and continues
// iterating with it.
pub fn main()
|
}
assert_eq!(x, 6);
assert_eq!(y, 60);
}
|
{
let mut h = HashMap::new();
let kvs = [(1, 10), (2, 20), (3, 30)];
for &(k,v) in kvs.iter() {
h.insert(k,v);
}
let mut x = 0;
let mut y = 0;
let mut i = h.iter();
for (&k,&v) in i {
x += k;
y += v;
break;
}
for (&k,&v) in i {
x += k;
y += v;
|
identifier_body
|
foreach-external-iterators-hashmap-break-restart.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
use collections::HashMap;
// This is a fancy one: it uses an external iterator established
// outside the loop, breaks, then _picks back up_ and continues
// iterating with it.
pub fn
|
() {
let mut h = HashMap::new();
let kvs = [(1, 10), (2, 20), (3, 30)];
for &(k,v) in kvs.iter() {
h.insert(k,v);
}
let mut x = 0;
let mut y = 0;
let mut i = h.iter();
for (&k,&v) in i {
x += k;
y += v;
break;
}
for (&k,&v) in i {
x += k;
y += v;
}
assert_eq!(x, 6);
assert_eq!(y, 60);
}
|
main
|
identifier_name
|
foreach-external-iterators-hashmap-break-restart.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
|
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
use collections::HashMap;
// This is a fancy one: it uses an external iterator established
// outside the loop, breaks, then _picks back up_ and continues
// iterating with it.
pub fn main() {
let mut h = HashMap::new();
let kvs = [(1, 10), (2, 20), (3, 30)];
for &(k,v) in kvs.iter() {
h.insert(k,v);
}
let mut x = 0;
let mut y = 0;
let mut i = h.iter();
for (&k,&v) in i {
x += k;
y += v;
break;
}
for (&k,&v) in i {
x += k;
y += v;
}
assert_eq!(x, 6);
assert_eq!(y, 60);
}
|
// http://rust-lang.org/COPYRIGHT.
|
random_line_split
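A minimal sketch of the same resume-the-iterator idea in current Rust (illustration only, not part of the test above); it uses a Vec and `Iterator::by_ref`, since modern `for` loops consume the iterator by value rather than borrowing it as the pre-1.0 test does:

// Break out of a `for` loop, then keep consuming the same iterator afterwards.
fn main() {
    let data = vec![1, 2, 3, 4, 5];
    let mut iter = data.iter();
    let mut first = 0;
    for &n in iter.by_ref() {
        first = n;
        break; // stop after the first element
    }
    // The iterator picks back up right where the loop stopped.
    let rest: i32 = iter.sum();
    assert_eq!(first, 1);
    assert_eq!(rest, 2 + 3 + 4 + 5);
    println!("first = {}, rest = {}", first, rest);
}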
|
html.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unrooted_must_root)]
use crate::dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use crate::dom::bindings::inheritance::{Castable, CharacterDataTypeId, NodeTypeId};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::trace::JSTraceable;
use crate::dom::characterdata::CharacterData;
use crate::dom::document::Document;
use crate::dom::documenttype::DocumentType;
use crate::dom::element::Element;
use crate::dom::htmlscriptelement::HTMLScriptElement;
use crate::dom::htmltemplateelement::HTMLTemplateElement;
use crate::dom::node::Node;
use crate::dom::processinginstruction::ProcessingInstruction;
use crate::dom::servoparser::{ParsingAlgorithm, Sink};
use html5ever::buffer_queue::BufferQueue;
use html5ever::serialize::TraversalScope;
use html5ever::serialize::TraversalScope::IncludeNode;
use html5ever::serialize::{AttrRef, Serialize, Serializer};
use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult};
use html5ever::tree_builder::{Tracer as HtmlTracer, TreeBuilder, TreeBuilderOpts};
use html5ever::QualName;
use js::jsapi::JSTracer;
use servo_url::ServoUrl;
use std::io;
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
pub struct Tokenizer {
#[ignore_malloc_size_of = "Defined in html5ever"]
inner: HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>>,
}
impl Tokenizer {
pub fn new(
document: &Document,
url: ServoUrl,
fragment_context: Option<super::FragmentContext>,
parsing_algorithm: ParsingAlgorithm,
) -> Self {
let sink = Sink {
base_url: url,
document: Dom::from_ref(document),
current_line: 1,
script: Default::default(),
parsing_algorithm: parsing_algorithm,
};
let options = TreeBuilderOpts {
ignore_missing_rules: true,
..Default::default()
};
let inner = if let Some(fc) = fragment_context {
let tb = TreeBuilder::new_for_fragment(
sink,
Dom::from_ref(fc.context_elem),
fc.form_elem.map(|n| Dom::from_ref(n)),
options,
);
let tok_options = TokenizerOpts {
initial_state: Some(tb.tokenizer_state_for_context_elem()),
..Default::default()
};
HtmlTokenizer::new(tb, tok_options)
} else {
HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
};
Tokenizer { inner: inner }
}
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
match self.inner.feed(input) {
TokenizerResult::Done => Ok(()),
TokenizerResult::Script(script) => Err(DomRoot::from_ref(script.downcast().unwrap())),
}
}
pub fn end(&mut self) {
self.inner.end();
}
pub fn url(&self) -> &ServoUrl {
&self.inner.sink.sink.base_url
}
pub fn set_plaintext_state(&mut self) {
self.inner.set_plaintext_state();
}
}
#[allow(unsafe_code)]
unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>> {
unsafe fn trace(&self, trc: *mut JSTracer) {
struct
|
(*mut JSTracer);
let tracer = Tracer(trc);
impl HtmlTracer for Tracer {
type Handle = Dom<Node>;
#[allow(unrooted_must_root)]
fn trace_handle(&self, node: &Dom<Node>) {
unsafe {
node.trace(self.0);
}
}
}
let tree_builder = &self.sink;
tree_builder.trace_handles(&tracer);
tree_builder.sink.trace(trc);
}
}
fn start_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
let attrs = node
.attrs()
.iter()
.map(|attr| {
let qname = QualName::new(None, attr.namespace().clone(), attr.local_name().clone());
let value = attr.value().clone();
(qname, value)
})
.collect::<Vec<_>>();
let attr_refs = attrs.iter().map(|&(ref qname, ref value)| {
let ar: AttrRef = (&qname, &**value);
ar
});
serializer.start_elem(name, attr_refs)?;
Ok(())
}
fn end_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
serializer.end_elem(name)
}
enum SerializationCommand {
OpenElement(DomRoot<Element>),
CloseElement(DomRoot<Element>),
SerializeNonelement(DomRoot<Node>),
}
struct SerializationIterator {
stack: Vec<SerializationCommand>,
}
fn rev_children_iter(n: &Node) -> impl Iterator<Item = DomRoot<Node>> {
if n.downcast::<Element>().map_or(false, |e| e.is_void()) {
return Node::new_document_node().rev_children();
}
match n.downcast::<HTMLTemplateElement>() {
Some(t) => t.Content().upcast::<Node>().rev_children(),
None => n.rev_children(),
}
}
impl SerializationIterator {
fn new(node: &Node, skip_first: bool) -> SerializationIterator {
let mut ret = SerializationIterator { stack: vec![] };
if skip_first {
for c in rev_children_iter(node) {
ret.push_node(&*c);
}
} else {
ret.push_node(node);
}
ret
}
fn push_node(&mut self, n: &Node) {
match n.downcast::<Element>() {
Some(e) => self
.stack
.push(SerializationCommand::OpenElement(DomRoot::from_ref(e))),
None => self.stack.push(SerializationCommand::SerializeNonelement(
DomRoot::from_ref(n),
)),
}
}
}
impl Iterator for SerializationIterator {
type Item = SerializationCommand;
fn next(&mut self) -> Option<SerializationCommand> {
let res = self.stack.pop();
if let Some(SerializationCommand::OpenElement(ref e)) = res {
self.stack
.push(SerializationCommand::CloseElement(e.clone()));
for c in rev_children_iter(&*e.upcast::<Node>()) {
self.push_node(&c);
}
}
res
}
}
impl<'a> Serialize for &'a Node {
fn serialize<S: Serializer>(
&self,
serializer: &mut S,
traversal_scope: TraversalScope,
) -> io::Result<()> {
let node = *self;
let iter = SerializationIterator::new(node, traversal_scope != IncludeNode);
for cmd in iter {
match cmd {
SerializationCommand::OpenElement(n) => {
start_element(&n, serializer)?;
},
SerializationCommand::CloseElement(n) => {
end_element(&&n, serializer)?;
},
SerializationCommand::SerializeNonelement(n) => match n.type_id() {
NodeTypeId::DocumentType => {
let doctype = n.downcast::<DocumentType>().unwrap();
serializer.write_doctype(&doctype.name())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::Text(_)) => {
let cdata = n.downcast::<CharacterData>().unwrap();
serializer.write_text(&cdata.data())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::Comment) => {
let cdata = n.downcast::<CharacterData>().unwrap();
serializer.write_comment(&cdata.data())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) => {
let pi = n.downcast::<ProcessingInstruction>().unwrap();
let data = pi.upcast::<CharacterData>().data();
serializer.write_processing_instruction(&pi.target(), &data)?;
},
NodeTypeId::DocumentFragment => {},
NodeTypeId::Document(_) => panic!("Can't serialize Document node itself"),
NodeTypeId::Element(_) => panic!("Element shouldn't appear here"),
},
}
}
Ok(())
}
}
|
Tracer
|
identifier_name
|
html.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unrooted_must_root)]
use crate::dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use crate::dom::bindings::inheritance::{Castable, CharacterDataTypeId, NodeTypeId};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::trace::JSTraceable;
use crate::dom::characterdata::CharacterData;
use crate::dom::document::Document;
use crate::dom::documenttype::DocumentType;
use crate::dom::element::Element;
use crate::dom::htmlscriptelement::HTMLScriptElement;
use crate::dom::htmltemplateelement::HTMLTemplateElement;
use crate::dom::node::Node;
use crate::dom::processinginstruction::ProcessingInstruction;
use crate::dom::servoparser::{ParsingAlgorithm, Sink};
use html5ever::buffer_queue::BufferQueue;
use html5ever::serialize::TraversalScope;
use html5ever::serialize::TraversalScope::IncludeNode;
use html5ever::serialize::{AttrRef, Serialize, Serializer};
use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult};
use html5ever::tree_builder::{Tracer as HtmlTracer, TreeBuilder, TreeBuilderOpts};
use html5ever::QualName;
use js::jsapi::JSTracer;
use servo_url::ServoUrl;
use std::io;
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
pub struct Tokenizer {
#[ignore_malloc_size_of = "Defined in html5ever"]
inner: HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>>,
}
impl Tokenizer {
pub fn new(
document: &Document,
url: ServoUrl,
fragment_context: Option<super::FragmentContext>,
parsing_algorithm: ParsingAlgorithm,
) -> Self {
let sink = Sink {
base_url: url,
document: Dom::from_ref(document),
current_line: 1,
script: Default::default(),
parsing_algorithm: parsing_algorithm,
};
let options = TreeBuilderOpts {
ignore_missing_rules: true,
..Default::default()
};
let inner = if let Some(fc) = fragment_context {
let tb = TreeBuilder::new_for_fragment(
sink,
Dom::from_ref(fc.context_elem),
fc.form_elem.map(|n| Dom::from_ref(n)),
options,
);
let tok_options = TokenizerOpts {
initial_state: Some(tb.tokenizer_state_for_context_elem()),
..Default::default()
};
HtmlTokenizer::new(tb, tok_options)
} else {
HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
};
Tokenizer { inner: inner }
}
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
match self.inner.feed(input) {
TokenizerResult::Done => Ok(()),
TokenizerResult::Script(script) => Err(DomRoot::from_ref(script.downcast().unwrap())),
}
}
pub fn end(&mut self) {
self.inner.end();
}
pub fn url(&self) -> &ServoUrl {
&self.inner.sink.sink.base_url
}
pub fn set_plaintext_state(&mut self) {
self.inner.set_plaintext_state();
}
}
#[allow(unsafe_code)]
unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>> {
unsafe fn trace(&self, trc: *mut JSTracer) {
struct Tracer(*mut JSTracer);
let tracer = Tracer(trc);
impl HtmlTracer for Tracer {
type Handle = Dom<Node>;
#[allow(unrooted_must_root)]
fn trace_handle(&self, node: &Dom<Node>) {
unsafe {
node.trace(self.0);
}
}
}
let tree_builder = &self.sink;
tree_builder.trace_handles(&tracer);
tree_builder.sink.trace(trc);
}
}
fn start_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
let attrs = node
.attrs()
.iter()
.map(|attr| {
let qname = QualName::new(None, attr.namespace().clone(), attr.local_name().clone());
let value = attr.value().clone();
(qname, value)
})
.collect::<Vec<_>>();
let attr_refs = attrs.iter().map(|&(ref qname, ref value)| {
let ar: AttrRef = (&qname, &**value);
ar
});
serializer.start_elem(name, attr_refs)?;
Ok(())
}
fn end_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
serializer.end_elem(name)
}
enum SerializationCommand {
OpenElement(DomRoot<Element>),
CloseElement(DomRoot<Element>),
SerializeNonelement(DomRoot<Node>),
}
struct SerializationIterator {
stack: Vec<SerializationCommand>,
}
fn rev_children_iter(n: &Node) -> impl Iterator<Item = DomRoot<Node>> {
if n.downcast::<Element>().map_or(false, |e| e.is_void()) {
return Node::new_document_node().rev_children();
}
match n.downcast::<HTMLTemplateElement>() {
Some(t) => t.Content().upcast::<Node>().rev_children(),
None => n.rev_children(),
}
}
impl SerializationIterator {
fn new(node: &Node, skip_first: bool) -> SerializationIterator {
let mut ret = SerializationIterator { stack: vec![] };
if skip_first
|
else {
ret.push_node(node);
}
ret
}
fn push_node(&mut self, n: &Node) {
match n.downcast::<Element>() {
Some(e) => self
.stack
.push(SerializationCommand::OpenElement(DomRoot::from_ref(e))),
None => self.stack.push(SerializationCommand::SerializeNonelement(
DomRoot::from_ref(n),
)),
}
}
}
impl Iterator for SerializationIterator {
type Item = SerializationCommand;
fn next(&mut self) -> Option<SerializationCommand> {
let res = self.stack.pop();
if let Some(SerializationCommand::OpenElement(ref e)) = res {
self.stack
.push(SerializationCommand::CloseElement(e.clone()));
for c in rev_children_iter(&*e.upcast::<Node>()) {
self.push_node(&c);
}
}
res
}
}
impl<'a> Serialize for &'a Node {
fn serialize<S: Serializer>(
&self,
serializer: &mut S,
traversal_scope: TraversalScope,
) -> io::Result<()> {
let node = *self;
let iter = SerializationIterator::new(node, traversal_scope != IncludeNode);
for cmd in iter {
match cmd {
SerializationCommand::OpenElement(n) => {
start_element(&n, serializer)?;
},
SerializationCommand::CloseElement(n) => {
end_element(&&n, serializer)?;
},
SerializationCommand::SerializeNonelement(n) => match n.type_id() {
NodeTypeId::DocumentType => {
let doctype = n.downcast::<DocumentType>().unwrap();
serializer.write_doctype(&doctype.name())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::Text(_)) => {
let cdata = n.downcast::<CharacterData>().unwrap();
serializer.write_text(&cdata.data())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::Comment) => {
let cdata = n.downcast::<CharacterData>().unwrap();
serializer.write_comment(&cdata.data())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) => {
let pi = n.downcast::<ProcessingInstruction>().unwrap();
let data = pi.upcast::<CharacterData>().data();
serializer.write_processing_instruction(&pi.target(), &data)?;
},
NodeTypeId::DocumentFragment => {},
NodeTypeId::Document(_) => panic!("Can't serialize Document node itself"),
NodeTypeId::Element(_) => panic!("Element shouldn't appear here"),
},
}
}
Ok(())
}
}
|
{
for c in rev_children_iter(node) {
ret.push_node(&*c);
}
}
|
conditional_block
|
html.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unrooted_must_root)]
use crate::dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use crate::dom::bindings::inheritance::{Castable, CharacterDataTypeId, NodeTypeId};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::trace::JSTraceable;
use crate::dom::characterdata::CharacterData;
use crate::dom::document::Document;
use crate::dom::documenttype::DocumentType;
use crate::dom::element::Element;
use crate::dom::htmlscriptelement::HTMLScriptElement;
use crate::dom::htmltemplateelement::HTMLTemplateElement;
use crate::dom::node::Node;
use crate::dom::processinginstruction::ProcessingInstruction;
use crate::dom::servoparser::{ParsingAlgorithm, Sink};
use html5ever::buffer_queue::BufferQueue;
use html5ever::serialize::TraversalScope;
use html5ever::serialize::TraversalScope::IncludeNode;
use html5ever::serialize::{AttrRef, Serialize, Serializer};
use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult};
use html5ever::tree_builder::{Tracer as HtmlTracer, TreeBuilder, TreeBuilderOpts};
use html5ever::QualName;
use js::jsapi::JSTracer;
use servo_url::ServoUrl;
use std::io;
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
pub struct Tokenizer {
#[ignore_malloc_size_of = "Defined in html5ever"]
inner: HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>>,
}
impl Tokenizer {
pub fn new(
document: &Document,
url: ServoUrl,
fragment_context: Option<super::FragmentContext>,
parsing_algorithm: ParsingAlgorithm,
) -> Self {
let sink = Sink {
base_url: url,
document: Dom::from_ref(document),
current_line: 1,
script: Default::default(),
parsing_algorithm: parsing_algorithm,
};
let options = TreeBuilderOpts {
ignore_missing_rules: true,
..Default::default()
};
let inner = if let Some(fc) = fragment_context {
let tb = TreeBuilder::new_for_fragment(
sink,
Dom::from_ref(fc.context_elem),
fc.form_elem.map(|n| Dom::from_ref(n)),
options,
);
let tok_options = TokenizerOpts {
initial_state: Some(tb.tokenizer_state_for_context_elem()),
..Default::default()
};
HtmlTokenizer::new(tb, tok_options)
} else {
HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
};
Tokenizer { inner: inner }
}
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
match self.inner.feed(input) {
TokenizerResult::Done => Ok(()),
TokenizerResult::Script(script) => Err(DomRoot::from_ref(script.downcast().unwrap())),
}
}
pub fn end(&mut self) {
self.inner.end();
}
pub fn url(&self) -> &ServoUrl {
&self.inner.sink.sink.base_url
}
pub fn set_plaintext_state(&mut self) {
self.inner.set_plaintext_state();
}
}
#[allow(unsafe_code)]
unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>> {
unsafe fn trace(&self, trc: *mut JSTracer) {
struct Tracer(*mut JSTracer);
let tracer = Tracer(trc);
impl HtmlTracer for Tracer {
type Handle = Dom<Node>;
#[allow(unrooted_must_root)]
fn trace_handle(&self, node: &Dom<Node>) {
unsafe {
node.trace(self.0);
}
}
}
let tree_builder = &self.sink;
tree_builder.trace_handles(&tracer);
tree_builder.sink.trace(trc);
}
}
fn start_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
let attrs = node
.attrs()
.iter()
.map(|attr| {
let qname = QualName::new(None, attr.namespace().clone(), attr.local_name().clone());
let value = attr.value().clone();
(qname, value)
})
.collect::<Vec<_>>();
let attr_refs = attrs.iter().map(|&(ref qname, ref value)| {
let ar: AttrRef = (&qname, &**value);
ar
});
serializer.start_elem(name, attr_refs)?;
Ok(())
}
fn end_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
serializer.end_elem(name)
}
enum SerializationCommand {
OpenElement(DomRoot<Element>),
CloseElement(DomRoot<Element>),
SerializeNonelement(DomRoot<Node>),
}
struct SerializationIterator {
stack: Vec<SerializationCommand>,
}
fn rev_children_iter(n: &Node) -> impl Iterator<Item = DomRoot<Node>> {
if n.downcast::<Element>().map_or(false, |e| e.is_void()) {
return Node::new_document_node().rev_children();
}
match n.downcast::<HTMLTemplateElement>() {
Some(t) => t.Content().upcast::<Node>().rev_children(),
None => n.rev_children(),
}
}
impl SerializationIterator {
fn new(node: &Node, skip_first: bool) -> SerializationIterator {
let mut ret = SerializationIterator { stack: vec![] };
if skip_first {
for c in rev_children_iter(node) {
ret.push_node(&*c);
}
} else {
ret.push_node(node);
}
ret
}
fn push_node(&mut self, n: &Node) {
match n.downcast::<Element>() {
Some(e) => self
.stack
.push(SerializationCommand::OpenElement(DomRoot::from_ref(e))),
None => self.stack.push(SerializationCommand::SerializeNonelement(
DomRoot::from_ref(n),
)),
}
}
}
impl Iterator for SerializationIterator {
type Item = SerializationCommand;
fn next(&mut self) -> Option<SerializationCommand> {
let res = self.stack.pop();
if let Some(SerializationCommand::OpenElement(ref e)) = res {
self.stack
.push(SerializationCommand::CloseElement(e.clone()));
for c in rev_children_iter(&*e.upcast::<Node>()) {
self.push_node(&c);
}
}
res
}
}
impl<'a> Serialize for &'a Node {
fn serialize<S: Serializer>(
&self,
serializer: &mut S,
traversal_scope: TraversalScope,
) -> io::Result<()> {
let node = *self;
let iter = SerializationIterator::new(node, traversal_scope != IncludeNode);
for cmd in iter {
match cmd {
SerializationCommand::OpenElement(n) => {
start_element(&n, serializer)?;
},
SerializationCommand::CloseElement(n) => {
end_element(&&n, serializer)?;
},
SerializationCommand::SerializeNonelement(n) => match n.type_id() {
NodeTypeId::DocumentType => {
let doctype = n.downcast::<DocumentType>().unwrap();
serializer.write_doctype(&doctype.name())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::Text(_)) => {
let cdata = n.downcast::<CharacterData>().unwrap();
serializer.write_text(&cdata.data())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::Comment) => {
let cdata = n.downcast::<CharacterData>().unwrap();
serializer.write_comment(&cdata.data())?;
},
NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) => {
let pi = n.downcast::<ProcessingInstruction>().unwrap();
let data = pi.upcast::<CharacterData>().data();
serializer.write_processing_instruction(&pi.target(), &data)?;
},
NodeTypeId::DocumentFragment => {},
|
},
}
}
Ok(())
}
}
|
NodeTypeId::Document(_) => panic!("Can't serialize Document node itself"),
NodeTypeId::Element(_) => panic!("Element shouldn't appear here"),
|
random_line_split
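The `SerializationIterator` above avoids recursion by pushing Open/Close commands onto an explicit stack and closing each element only after its children. A dependency-free sketch of that pattern (the `Node` and `Cmd` types here are invented for illustration and are not Servo's DOM types):

struct Node {
    name: String,
    children: Vec<Node>,
}

enum Cmd<'a> {
    Open(&'a Node),
    Close(&'a Node),
}

fn serialize(root: &Node) -> String {
    let mut out = String::new();
    let mut stack = vec![Cmd::Open(root)];
    while let Some(cmd) = stack.pop() {
        match cmd {
            Cmd::Open(n) => {
                out.push_str(&format!("<{}>", n.name));
                // Close this element after all of its children.
                stack.push(Cmd::Close(n));
                // Push children in reverse so they pop in document order.
                for c in n.children.iter().rev() {
                    stack.push(Cmd::Open(c));
                }
            }
            Cmd::Close(n) => out.push_str(&format!("</{}>", n.name)),
        }
    }
    out
}

fn main() {
    let tree = Node {
        name: "html".into(),
        children: vec![
            Node { name: "head".into(), children: vec![] },
            Node { name: "body".into(), children: vec![] },
        ],
    };
    assert_eq!(serialize(&tree), "<html><head></head><body></body></html>");
    println!("{}", serialize(&tree));
}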
|
blockhash.rs
|
// Implementation adapted from Python version:
// https://github.com/commonsmachinery/blockhash-python/blob/e8b009d/blockhash.py
// Main site: http://blockhash.io
use image::{GenericImageView, Pixel};
use {BitSet, Image, HashBytes};
use std::cmp::Ordering;
use std::ops::AddAssign;
use std::mem;
const FLOAT_EQ_MARGIN: f32 = 0.001;
pub fn blockhash<I: Image, B: HashBytes>(img: &I, width: u32, height: u32) -> B {
assert_eq!(width % 4, 0, "width must be multiple of 4");
assert_eq!(height % 4, 0, "height must be multiple of 4");
let (iwidth, iheight) = img.dimensions();
// Skip the floating point math if it's unnecessary
if iwidth % width == 0 && iheight % height == 0 {
blockhash_fast(img, width, height)
} else {
blockhash_slow(img, width, height)
}
}
macro_rules! gen_hash {
($imgty:ty, $valty:ty, $blocks: expr, $width:expr, $block_width:expr, $block_height:expr, $eq_fn:expr) => ({
#[allow(deprecated)] // deprecated as of 0.22
let channel_count = <<$imgty as GenericImageView>::Pixel as Pixel>::channel_count() as u32;
let group_len = ($width * 4) as usize;
let block_area = $block_width * $block_height;
let cmp_factor = match channel_count {
3 | 4 => 255u32 as $valty * 3u32 as $valty,
2 | 1 => 255u32 as $valty,
_ => panic!("Unrecognized channel count from Image: {}", channel_count),
}
* block_area
/ (2u32 as $valty);
let medians: Vec<$valty> = $blocks.chunks(group_len).map(get_median).collect();
BitSet::from_bools(
$blocks.chunks(group_len).zip(medians)
.flat_map(|(blocks, median)|
blocks.iter().map(move |&block|
block > median ||
($eq_fn(block, median) && median > cmp_factor)
)
)
)
})
}
fn block_adder<'a, T: AddAssign + 'a>(blocks: &'a mut [T], width: u32) -> impl FnMut(u32, u32, T) + 'a {
move |x, y, add| (blocks[(y as usize) * (width as usize) + (x as usize)] += add)
}
fn blockhash_slow<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0f32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
// Block dimensions, in pixels
let (block_width, block_height) = (iwidth as f32 / hwidth as f32, iheight as f32 / hheight as f32);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px) as f32;
let (x, y) = (x as f32, y as f32);
let block_x = x / block_width;
let block_y = y / block_height;
let x_mod = (x + 1.) % block_width;
let y_mod = (y + 1.) % block_height;
// terminology is mostly arbitrary as long as we're consistent
// if `x` evenly divides `block_height`, this weight will be 0
// so we don't double the sum as `block_top` will equal `block_bottom`
let weight_left = x_mod.fract();
let weight_right = 1. - weight_left;
let weight_top = y_mod.fract();
let weight_bottom = 1. - weight_top;
let block_left = block_x.floor() as u32;
let block_top = block_y.floor() as u32;
let block_right = if x_mod.trunc() == 0. {
block_x.ceil() as u32
} else {
block_left
};
let block_bottom = if y_mod.trunc() == 0. {
block_y.ceil() as u32
} else {
block_top
};
add_to_block(block_left, block_top, px_sum * weight_left * weight_top);
add_to_block(block_left, block_bottom, px_sum * weight_left * weight_bottom);
add_to_block(block_right, block_top, px_sum * weight_right * weight_top);
add_to_block(block_right, block_bottom, px_sum * weight_right * weight_bottom);
});
gen_hash!(I, f32, blocks, hwidth, block_width, block_height,
|l: f32, r: f32| (l - r).abs() < FLOAT_EQ_MARGIN)
}
fn blockhash_fast<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0u32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
let (block_width, block_height) = (iwidth / hwidth, iheight / hheight);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px);
let block_x = x / block_width;
let block_y = y / block_height;
add_to_block(block_x, block_y, px_sum);
});
gen_hash!(I, u32, blocks, hwidth, block_width, block_height, |l, r| l == r)
}
#[inline(always)]
fn sum_px(chans: &[u8]) -> u32 {
// Branch prediction should eliminate the match after a few iterations
match chans.len() {
4 => if chans[3] == 0 { 255 * 3 } else { sum_px(&chans[..3]) },
3 => chans.iter().map(|&x| x as u32).sum(),
2 => if chans[1] == 0 { 255 } else { chans[0] as u32 },
1 => chans[0] as u32,
channels => panic!("Unsupported channel count in image: {}", channels),
}
}
fn get_median<T: PartialOrd + Copy>(data: &[T]) -> T {
let mut scratch = data.to_owned();
let median = scratch.len() / 2;
*qselect_inplace(&mut scratch, median)
}
const SORT_THRESH: usize = 8;
fn
|
<T: PartialOrd>(data: &mut [T], k: usize) -> &mut T {
let len = data.len();
assert!(k < len, "Called qselect_inplace with k = {} and data length: {}", k, len);
if len < SORT_THRESH {
data.sort_by(|left, right| left.partial_cmp(right).unwrap_or(Ordering::Less));
return &mut data[k];
}
let pivot_idx = partition(data);
if k == pivot_idx {
&mut data[pivot_idx]
} else if k < pivot_idx {
qselect_inplace(&mut data[..pivot_idx], k)
} else {
qselect_inplace(&mut data[pivot_idx + 1..], k - pivot_idx - 1)
}
}
fn partition<T: PartialOrd>(data: &mut [T]) -> usize {
let len = data.len();
let pivot_idx = {
let first = (&data[0], 0);
let mid = (&data[len / 2], len / 2);
let last = (&data[len - 1], len - 1);
median_of_3(&first, &mid, &last).1
};
data.swap(pivot_idx, len - 1);
let mut curr = 0;
for i in 0..len - 1 {
if &data[i] < &data[len - 1] {
data.swap(i, curr);
curr += 1;
}
}
data.swap(curr, len - 1);
curr
}
fn median_of_3<T: PartialOrd>(mut x: T, mut y: T, mut z: T) -> T {
if x > y {
mem::swap(&mut x, &mut y);
}
if x > z {
mem::swap(&mut x, &mut z);
}
if y > z {
mem::swap(&mut y, &mut z);
}
y
}
|
qselect_inplace
|
identifier_name
|
blockhash.rs
|
// Implementation adapted from Python version:
// https://github.com/commonsmachinery/blockhash-python/blob/e8b009d/blockhash.py
// Main site: http://blockhash.io
use image::{GenericImageView, Pixel};
use {BitSet, Image, HashBytes};
use std::cmp::Ordering;
use std::ops::AddAssign;
use std::mem;
const FLOAT_EQ_MARGIN: f32 = 0.001;
pub fn blockhash<I: Image, B: HashBytes>(img: &I, width: u32, height: u32) -> B {
assert_eq!(width % 4, 0, "width must be multiple of 4");
assert_eq!(height % 4, 0, "height must be multiple of 4");
let (iwidth, iheight) = img.dimensions();
// Skip the floating point math if it's unnecessary
if iwidth % width == 0 && iheight % height == 0 {
blockhash_fast(img, width, height)
} else {
blockhash_slow(img, width, height)
}
}
macro_rules! gen_hash {
($imgty:ty, $valty:ty, $blocks: expr, $width:expr, $block_width:expr, $block_height:expr, $eq_fn:expr) => ({
#[allow(deprecated)] // deprecated as of 0.22
let channel_count = <<$imgty as GenericImageView>::Pixel as Pixel>::channel_count() as u32;
let group_len = ($width * 4) as usize;
let block_area = $block_width * $block_height;
let cmp_factor = match channel_count {
3 | 4 => 255u32 as $valty * 3u32 as $valty,
2 | 1 => 255u32 as $valty,
_ => panic!("Unrecognized channel count from Image: {}", channel_count),
}
* block_area
|
BitSet::from_bools(
$blocks.chunks(group_len).zip(medians)
.flat_map(|(blocks, median)|
blocks.iter().map(move |&block|
block > median ||
($eq_fn(block, median) && median > cmp_factor)
)
)
)
})
}
fn block_adder<'a, T: AddAssign + 'a>(blocks: &'a mut [T], width: u32) -> impl FnMut(u32, u32, T) + 'a {
move |x, y, add| (blocks[(y as usize) * (width as usize) + (x as usize)] += add)
}
fn blockhash_slow<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0f32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
// Block dimensions, in pixels
let (block_width, block_height) = (iwidth as f32 / hwidth as f32, iheight as f32 / hheight as f32);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px) as f32;
let (x, y) = (x as f32, y as f32);
let block_x = x / block_width;
let block_y = y / block_height;
let x_mod = (x + 1.) % block_width;
let y_mod = (y + 1.) % block_height;
// terminology is mostly arbitrary as long as we're consistent
// if `x` evenly divides `block_height`, this weight will be 0
// so we don't double the sum as `block_top` will equal `block_bottom`
let weight_left = x_mod.fract();
let weight_right = 1. - weight_left;
let weight_top = y_mod.fract();
let weight_bottom = 1. - weight_top;
let block_left = block_x.floor() as u32;
let block_top = block_y.floor() as u32;
let block_right = if x_mod.trunc() == 0. {
block_x.ceil() as u32
} else {
block_left
};
let block_bottom = if y_mod.trunc() == 0. {
block_y.ceil() as u32
} else {
block_top
};
add_to_block(block_left, block_top, px_sum * weight_left * weight_top);
add_to_block(block_left, block_bottom, px_sum * weight_left * weight_bottom);
add_to_block(block_right, block_top, px_sum * weight_right * weight_top);
add_to_block(block_right, block_bottom, px_sum * weight_right * weight_bottom);
});
gen_hash!(I, f32, blocks, hwidth, block_width, block_height,
|l: f32, r: f32| (l - r).abs() < FLOAT_EQ_MARGIN)
}
fn blockhash_fast<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0u32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
let (block_width, block_height) = (iwidth / hwidth, iheight / hheight);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px);
let block_x = x / block_width;
let block_y = y / block_height;
add_to_block(block_x, block_y, px_sum);
});
gen_hash!(I, u32, blocks, hwidth, block_width, block_height, |l, r| l == r)
}
#[inline(always)]
fn sum_px(chans: &[u8]) -> u32 {
// Branch prediction should eliminate the match after a few iterations
match chans.len() {
4 => if chans[3] == 0 { 255 * 3 } else { sum_px(&chans[..3]) },
3 => chans.iter().map(|&x| x as u32).sum(),
2 => if chans[1] == 0 { 255 } else { chans[0] as u32 },
1 => chans[0] as u32,
channels => panic!("Unsupported channel count in image: {}", channels),
}
}
fn get_median<T: PartialOrd + Copy>(data: &[T]) -> T {
let mut scratch = data.to_owned();
let median = scratch.len() / 2;
*qselect_inplace(&mut scratch, median)
}
const SORT_THRESH: usize = 8;
fn qselect_inplace<T: PartialOrd>(data: &mut [T], k: usize) -> &mut T {
let len = data.len();
assert!(k < len, "Called qselect_inplace with k = {} and data length: {}", k, len);
if len < SORT_THRESH {
data.sort_by(|left, right| left.partial_cmp(right).unwrap_or(Ordering::Less));
return &mut data[k];
}
let pivot_idx = partition(data);
if k == pivot_idx {
&mut data[pivot_idx]
} else if k < pivot_idx {
qselect_inplace(&mut data[..pivot_idx], k)
} else {
qselect_inplace(&mut data[pivot_idx + 1..], k - pivot_idx - 1)
}
}
fn partition<T: PartialOrd>(data: &mut [T]) -> usize {
let len = data.len();
let pivot_idx = {
let first = (&data[0], 0);
let mid = (&data[len / 2], len / 2);
let last = (&data[len - 1], len - 1);
median_of_3(&first, &mid, &last).1
};
data.swap(pivot_idx, len - 1);
let mut curr = 0;
for i in 0..len - 1 {
if &data[i] < &data[len - 1] {
data.swap(i, curr);
curr += 1;
}
}
data.swap(curr, len - 1);
curr
}
fn median_of_3<T: PartialOrd>(mut x: T, mut y: T, mut z: T) -> T {
if x > y {
mem::swap(&mut x, &mut y);
}
if x > z {
mem::swap(&mut x, &mut z);
}
if y > z {
mem::swap(&mut y, &mut z);
}
y
}
|
/ (2u32 as $valty);
let medians: Vec<$valty> = $blocks.chunks(group_len).map(get_median).collect();
|
random_line_split
|
blockhash.rs
|
// Implementation adapted from Python version:
// https://github.com/commonsmachinery/blockhash-python/blob/e8b009d/blockhash.py
// Main site: http://blockhash.io
use image::{GenericImageView, Pixel};
use {BitSet, Image, HashBytes};
use std::cmp::Ordering;
use std::ops::AddAssign;
use std::mem;
const FLOAT_EQ_MARGIN: f32 = 0.001;
pub fn blockhash<I: Image, B: HashBytes>(img: &I, width: u32, height: u32) -> B {
assert_eq!(width % 4, 0, "width must be multiple of 4");
assert_eq!(height % 4, 0, "height must be multiple of 4");
let (iwidth, iheight) = img.dimensions();
// Skip the floating point math if it's unnecessary
if iwidth % width == 0 && iheight % height == 0 {
blockhash_fast(img, width, height)
} else {
blockhash_slow(img, width, height)
}
}
macro_rules! gen_hash {
($imgty:ty, $valty:ty, $blocks: expr, $width:expr, $block_width:expr, $block_height:expr, $eq_fn:expr) => ({
#[allow(deprecated)] // deprecated as of 0.22
let channel_count = <<$imgty as GenericImageView>::Pixel as Pixel>::channel_count() as u32;
let group_len = ($width * 4) as usize;
let block_area = $block_width * $block_height;
let cmp_factor = match channel_count {
3 | 4 => 255u32 as $valty * 3u32 as $valty,
2 | 1 => 255u32 as $valty,
_ => panic!("Unrecognized channel count from Image: {}", channel_count),
}
* block_area
/ (2u32 as $valty);
let medians: Vec<$valty> = $blocks.chunks(group_len).map(get_median).collect();
BitSet::from_bools(
$blocks.chunks(group_len).zip(medians)
.flat_map(|(blocks, median)|
blocks.iter().map(move |&block|
block > median ||
($eq_fn(block, median) && median > cmp_factor)
)
)
)
})
}
fn block_adder<'a, T: AddAssign + 'a>(blocks: &'a mut [T], width: u32) -> impl FnMut(u32, u32, T) + 'a {
move |x, y, add| (blocks[(y as usize) * (width as usize) + (x as usize)] += add)
}
fn blockhash_slow<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0f32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
// Block dimensions, in pixels
let (block_width, block_height) = (iwidth as f32 / hwidth as f32, iheight as f32 / hheight as f32);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px) as f32;
let (x, y) = (x as f32, y as f32);
let block_x = x / block_width;
let block_y = y / block_height;
let x_mod = (x + 1.) % block_width;
let y_mod = (y + 1.) % block_height;
// terminology is mostly arbitrary as long as we're consistent
// if `x` evenly divides `block_height`, this weight will be 0
// so we don't double the sum as `block_top` will equal `block_bottom`
let weight_left = x_mod.fract();
let weight_right = 1. - weight_left;
let weight_top = y_mod.fract();
let weight_bottom = 1. - weight_top;
let block_left = block_x.floor() as u32;
let block_top = block_y.floor() as u32;
let block_right = if x_mod.trunc() == 0. {
block_x.ceil() as u32
} else {
block_left
};
let block_bottom = if y_mod.trunc() == 0. {
block_y.ceil() as u32
} else {
block_top
};
add_to_block(block_left, block_top, px_sum * weight_left * weight_top);
add_to_block(block_left, block_bottom, px_sum * weight_left * weight_bottom);
add_to_block(block_right, block_top, px_sum * weight_right * weight_top);
add_to_block(block_right, block_bottom, px_sum * weight_right * weight_bottom);
});
gen_hash!(I, f32, blocks, hwidth, block_width, block_height,
|l: f32, r: f32| (l - r).abs() < FLOAT_EQ_MARGIN)
}
fn blockhash_fast<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0u32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
let (block_width, block_height) = (iwidth / hwidth, iheight / hheight);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px);
let block_x = x / block_width;
let block_y = y / block_height;
add_to_block(block_x, block_y, px_sum);
});
gen_hash!(I, u32, blocks, hwidth, block_width, block_height, |l, r| l == r)
}
#[inline(always)]
fn sum_px(chans: &[u8]) -> u32 {
// Branch prediction should eliminate the match after a few iterations
match chans.len() {
4 => if chans[3] == 0 { 255 * 3 } else { sum_px(&chans[..3]) },
3 => chans.iter().map(|&x| x as u32).sum(),
2 => if chans[1] == 0 { 255 } else { chans[0] as u32 },
1 => chans[0] as u32,
channels => panic!("Unsupported channel count in image: {}", channels),
}
}
fn get_median<T: PartialOrd + Copy>(data: &[T]) -> T {
let mut scratch = data.to_owned();
let median = scratch.len() / 2;
*qselect_inplace(&mut scratch, median)
}
const SORT_THRESH: usize = 8;
fn qselect_inplace<T: PartialOrd>(data: &mut [T], k: usize) -> &mut T {
let len = data.len();
assert!(k < len, "Called qselect_inplace with k = {} and data length: {}", k, len);
if len < SORT_THRESH {
data.sort_by(|left, right| left.partial_cmp(right).unwrap_or(Ordering::Less));
return &mut data[k];
}
let pivot_idx = partition(data);
if k == pivot_idx {
&mut data[pivot_idx]
} else if k < pivot_idx {
qselect_inplace(&mut data[..pivot_idx], k)
} else
|
}
fn partition<T: PartialOrd>(data: &mut [T]) -> usize {
let len = data.len();
let pivot_idx = {
let first = (&data[0], 0);
let mid = (&data[len / 2], len / 2);
let last = (&data[len - 1], len - 1);
median_of_3(&first, &mid, &last).1
};
data.swap(pivot_idx, len - 1);
let mut curr = 0;
for i in 0..len - 1 {
if &data[i] < &data[len - 1] {
data.swap(i, curr);
curr += 1;
}
}
data.swap(curr, len - 1);
curr
}
fn median_of_3<T: PartialOrd>(mut x: T, mut y: T, mut z: T) -> T {
if x > y {
mem::swap(&mut x, &mut y);
}
if x > z {
mem::swap(&mut x, &mut z);
}
if y > z {
mem::swap(&mut y, &mut z);
}
y
}
|
{
qselect_inplace(&mut data[pivot_idx + 1..], k - pivot_idx - 1)
}
|
conditional_block
|
blockhash.rs
|
// Implementation adapted from Python version:
// https://github.com/commonsmachinery/blockhash-python/blob/e8b009d/blockhash.py
// Main site: http://blockhash.io
use image::{GenericImageView, Pixel};
use {BitSet, Image, HashBytes};
use std::cmp::Ordering;
use std::ops::AddAssign;
use std::mem;
const FLOAT_EQ_MARGIN: f32 = 0.001;
pub fn blockhash<I: Image, B: HashBytes>(img: &I, width: u32, height: u32) -> B {
assert_eq!(width % 4, 0, "width must be multiple of 4");
assert_eq!(height % 4, 0, "height must be multiple of 4");
let (iwidth, iheight) = img.dimensions();
// Skip the floating point math if it's unnecessary
if iwidth % width == 0 && iheight % height == 0 {
blockhash_fast(img, width, height)
} else {
blockhash_slow(img, width, height)
}
}
macro_rules! gen_hash {
($imgty:ty, $valty:ty, $blocks: expr, $width:expr, $block_width:expr, $block_height:expr, $eq_fn:expr) => ({
#[allow(deprecated)] // deprecated as of 0.22
let channel_count = <<$imgty as GenericImageView>::Pixel as Pixel>::channel_count() as u32;
let group_len = ($width * 4) as usize;
let block_area = $block_width * $block_height;
let cmp_factor = match channel_count {
3 | 4 => 255u32 as $valty * 3u32 as $valty,
2 | 1 => 255u32 as $valty,
_ => panic!("Unrecognized channel count from Image: {}", channel_count),
}
* block_area
/ (2u32 as $valty);
let medians: Vec<$valty> = $blocks.chunks(group_len).map(get_median).collect();
BitSet::from_bools(
$blocks.chunks(group_len).zip(medians)
.flat_map(|(blocks, median)|
blocks.iter().map(move |&block|
block > median ||
($eq_fn(block, median) && median > cmp_factor)
)
)
)
})
}
fn block_adder<'a, T: AddAssign + 'a>(blocks: &'a mut [T], width: u32) -> impl FnMut(u32, u32, T) + 'a
|
fn blockhash_slow<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0f32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
// Block dimensions, in pixels
let (block_width, block_height) = (iwidth as f32 / hwidth as f32, iheight as f32 / hheight as f32);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px) as f32;
let (x, y) = (x as f32, y as f32);
let block_x = x / block_width;
let block_y = y / block_height;
let x_mod = (x + 1.) % block_width;
let y_mod = (y + 1.) % block_height;
// terminology is mostly arbitrary as long as we're consistent
// if `x` evenly divides `block_height`, this weight will be 0
// so we don't double the sum as `block_top` will equal `block_bottom`
let weight_left = x_mod.fract();
let weight_right = 1. - weight_left;
let weight_top = y_mod.fract();
let weight_bottom = 1. - weight_top;
let block_left = block_x.floor() as u32;
let block_top = block_y.floor() as u32;
let block_right = if x_mod.trunc() == 0. {
block_x.ceil() as u32
} else {
block_left
};
let block_bottom = if y_mod.trunc() == 0. {
block_y.ceil() as u32
} else {
block_top
};
add_to_block(block_left, block_top, px_sum * weight_left * weight_top);
add_to_block(block_left, block_bottom, px_sum * weight_left * weight_bottom);
add_to_block(block_right, block_top, px_sum * weight_right * weight_top);
add_to_block(block_right, block_bottom, px_sum * weight_right * weight_bottom);
});
gen_hash!(I, f32, blocks, hwidth, block_width, block_height,
|l: f32, r: f32| (l - r).abs() < FLOAT_EQ_MARGIN)
}
fn blockhash_fast<I: Image, B: HashBytes>(img: &I, hwidth: u32, hheight: u32) -> B {
let mut blocks = vec![0u32; (hwidth * hheight) as usize];
let (iwidth, iheight) = img.dimensions();
let (block_width, block_height) = (iwidth / hwidth, iheight / hheight);
img.foreach_pixel8(|x, y, px| {
let mut add_to_block = block_adder(&mut blocks, hwidth);
let px_sum = sum_px(px);
let block_x = x / block_width;
let block_y = y / block_height;
add_to_block(block_x, block_y, px_sum);
});
gen_hash!(I, u32, blocks, hwidth, block_width, block_height, |l, r| l == r)
}
#[inline(always)]
fn sum_px(chans: &[u8]) -> u32 {
// Branch prediction should eliminate the match after a few iterations
match chans.len() {
4 => if chans[3] == 0 { 255 * 3 } else { sum_px(&chans[..3]) },
3 => chans.iter().map(|&x| x as u32).sum(),
2 => if chans[1] == 0 { 255 } else { chans[0] as u32 },
1 => chans[0] as u32,
channels => panic!("Unsupported channel count in image: {}", channels),
}
}
fn get_median<T: PartialOrd + Copy>(data: &[T]) -> T {
let mut scratch = data.to_owned();
let median = scratch.len() / 2;
*qselect_inplace(&mut scratch, median)
}
const SORT_THRESH: usize = 8;
fn qselect_inplace<T: PartialOrd>(data: &mut [T], k: usize) -> &mut T {
let len = data.len();
assert!(k < len, "Called qselect_inplace with k = {} and data length: {}", k, len);
if len < SORT_THRESH {
data.sort_by(|left, right| left.partial_cmp(right).unwrap_or(Ordering::Less));
return &mut data[k];
}
let pivot_idx = partition(data);
if k == pivot_idx {
&mut data[pivot_idx]
} else if k < pivot_idx {
qselect_inplace(&mut data[..pivot_idx], k)
} else {
qselect_inplace(&mut data[pivot_idx + 1..], k - pivot_idx - 1)
}
}
fn partition<T: PartialOrd>(data: &mut [T]) -> usize {
let len = data.len();
let pivot_idx = {
let first = (&data[0], 0);
let mid = (&data[len / 2], len / 2);
let last = (&data[len - 1], len - 1);
median_of_3(&first, &mid, &last).1
};
data.swap(pivot_idx, len - 1);
let mut curr = 0;
for i in 0..len - 1 {
if &data[i] < &data[len - 1] {
data.swap(i, curr);
curr += 1;
}
}
data.swap(curr, len - 1);
curr
}
fn median_of_3<T: PartialOrd>(mut x: T, mut y: T, mut z: T) -> T {
if x > y {
mem::swap(&mut x, &mut y);
}
if x > z {
mem::swap(&mut x, &mut z);
}
if y > z {
mem::swap(&mut y, &mut z);
}
y
}
|
{
move |x, y, add| (blocks[(y as usize) * (width as usize) + (x as usize)] += add)
}
|
identifier_body
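The `qselect_inplace`/`partition` pair above is a standard in-place quickselect used to find the block median without a full sort. A smaller, allocation-based sketch of the same selection idea (simplified for illustration, not the crate's API):

// Return the k-th smallest element (0-based) without fully sorting the input.
fn quickselect(data: &[u32], k: usize) -> u32 {
    assert!(k < data.len());
    let pivot = data[data.len() / 2];
    // Partition into (< pivot), (== pivot), (> pivot) groups.
    let less: Vec<u32> = data.iter().copied().filter(|&x| x < pivot).collect();
    let equal = data.iter().filter(|&&x| x == pivot).count();
    let greater: Vec<u32> = data.iter().copied().filter(|&x| x > pivot).collect();
    if k < less.len() {
        quickselect(&less, k)
    } else if k < less.len() + equal {
        pivot
    } else {
        quickselect(&greater, k - less.len() - equal)
    }
}

fn main() {
    let v = [9, 1, 8, 2, 7, 3, 6, 4, 5];
    let median = quickselect(&v, v.len() / 2);
    assert_eq!(median, 5);
    println!("median = {}", median);
}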
|
borrowck-loan-rcvr-overloaded-op.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ops::Add;
#[derive(Copy)]
struct
|
{
x: isize,
y: isize,
}
impl Add<isize> for Point {
type Output = isize;
fn add(self, z: isize) -> isize {
self.x + self.y + z
}
}
impl Point {
pub fn times(&self, z: isize) -> isize {
self.x * self.y * z
}
}
fn a() {
let mut p = Point {x: 3, y: 4};
// ok (we can loan out rcvr)
p + 3;
p.times(3);
}
fn b() {
let mut p = Point {x: 3, y: 4};
// Here I create an outstanding loan and check that we get conflicts:
let q = &mut p;
p + 3; //~ ERROR cannot use `p`
p.times(3); //~ ERROR cannot borrow `p`
*q + 3; // OK to use the new alias `q`
q.x += 1; // and OK to mutate it
}
fn main() {
}
|
Point
|
identifier_name
|
borrowck-loan-rcvr-overloaded-op.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ops::Add;
#[derive(Copy)]
struct Point {
x: isize,
y: isize,
}
impl Add<isize> for Point {
type Output = isize;
fn add(self, z: isize) -> isize {
self.x + self.y + z
}
}
impl Point {
pub fn times(&self, z: isize) -> isize {
self.x * self.y * z
}
}
fn a() {
let mut p = Point {x: 3, y: 4};
// ok (we can loan out rcvr)
p + 3;
p.times(3);
}
|
// Here I create an outstanding loan and check that we get conflicts:
let q = &mut p;
p + 3; //~ ERROR cannot use `p`
p.times(3); //~ ERROR cannot borrow `p`
*q + 3; // OK to use the new alias `q`
q.x += 1; // and OK to mutate it
}
fn main() {
}
|
fn b() {
let mut p = Point {x: 3, y: 4};
|
random_line_split
|
borrowck-loan-rcvr-overloaded-op.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ops::Add;
#[derive(Copy)]
struct Point {
x: isize,
y: isize,
}
impl Add<isize> for Point {
type Output = isize;
fn add(self, z: isize) -> isize {
self.x + self.y + z
}
}
impl Point {
pub fn times(&self, z: isize) -> isize {
self.x * self.y * z
}
}
fn a()
|
fn b() {
let mut p = Point {x: 3, y: 4};
// Here I create an outstanding loan and check that we get conflicts:
let q = &mut p;
p + 3; //~ ERROR cannot use `p`
p.times(3); //~ ERROR cannot borrow `p`
*q + 3; // OK to use the new alias `q`
q.x += 1; // and OK to mutate it
}
fn main() {
}
|
{
let mut p = Point {x: 3, y: 4};
// ok (we can loan out rcvr)
p + 3;
p.times(3);
}
|
identifier_body
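For reference, a self-contained version of the `Add<isize>` receiver pattern this test exercises, compiling cleanly in current Rust when no conflicting mutable borrow is live (an illustrative sketch, not the test itself):

use std::ops::Add;

#[derive(Clone, Copy)]
struct Point { x: isize, y: isize }

impl Add<isize> for Point {
    type Output = isize;
    fn add(self, z: isize) -> isize { self.x + self.y + z }
}

fn main() {
    let p = Point { x: 3, y: 4 };
    assert_eq!(p + 3, 10);  // uses the Add<isize> impl
    let q = p;              // Point is Copy, so `p` stays usable
    assert_eq!(q + 0, 7);
    println!("p + 3 = {}", p + 3);
}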
|
command.rs
|
use std::fmt;
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DroneMode {
Normal = 0,
TakingOff = 1,
Landing = 2,
TookOff = 3, // Helper mode, doesn't exist on drone
Abort = 4,
}
|
pub mode: DroneMode,
pub as_array: [u8; 8],
}
impl Command {
pub fn new() -> Command {
Command { throttle: 0, yaw: 0, pitch: 0, roll: 0, mode: DroneMode::Normal, as_array: [0; 8] }
}
pub fn toggle_mode(&mut self, is_toggling: bool) {
match self.mode {
DroneMode::Normal => if is_toggling { self.mode = DroneMode::TakingOff },
DroneMode::TakingOff => if is_toggling { () } else { self.mode = DroneMode::TookOff },
DroneMode::TookOff => if is_toggling { self.mode = DroneMode::Landing },
DroneMode::Landing => if is_toggling { () } else { self.mode = DroneMode::Normal },
DroneMode::Abort => if is_toggling { self.mode = DroneMode::Normal },
}
}
/**
* as_array[0] = constant
* as_array[1] = roll
* as_array[2] = pitch
* as_array[3] = throttle
* as_array[4] = yaw
* as_array[5] = mode
* as_array[6] = checksum
* as_array[7] = constant
*/
pub fn update_array(&mut self) {
self.as_array[0] = 0x66;
let commands = [self.roll, self.pitch, self.throttle, self.yaw];
for i in 0..commands.len() {
if commands[i] >= 0 {
self.as_array[i + 1] = (commands[i] as u8) + 127;
} else {
self.as_array[i + 1] = (commands[i] + 127) as u8;
}
}
self.as_array[5] = if self.mode == DroneMode::TookOff { DroneMode::Normal as u8 } else { self.mode as u8 };
self.as_array[6] = (self.as_array[1] ^ self.as_array[2] ^ self.as_array[3] ^ self.as_array[4] ^ self.as_array[5]) & 0xFF;
self.as_array[7] = 0x99;
}
}
impl fmt::Debug for Command {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "throttle: {}, yaw: {}, pitch: {}, roll: {}, mode: {:?}", self.throttle, self.yaw, self.pitch, self.roll, self.mode)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_throttle_up() {
let mut cmd = Command::new();
cmd.throttle = 127;
cmd.update_array();
assert_eq!(cmd.as_array[3], 254);
}
#[test]
fn test_throttle_down() {
let mut cmd = Command::new();
cmd.throttle = -127;
cmd.update_array();
assert_eq!(cmd.as_array[3], 0);
}
}
|
pub struct Command {
pub pitch: i8,
pub yaw: i8,
pub roll: i8,
pub throttle: i8,
|
random_line_split
|
command.rs
|
use std::fmt;
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DroneMode {
Normal = 0,
TakingOff = 1,
Landing = 2,
TookOff = 3, // Helper mode, doesn't exist on drone
Abort = 4,
}
pub struct Command {
pub pitch: i8,
pub yaw: i8,
pub roll: i8,
pub throttle: i8,
pub mode: DroneMode,
pub as_array: [u8; 8],
}
impl Command {
pub fn new() -> Command {
Command { throttle: 0, yaw: 0, pitch: 0, roll: 0, mode: DroneMode::Normal, as_array: [0; 8] }
}
pub fn toggle_mode(&mut self, is_toggling: bool) {
match self.mode {
DroneMode::Normal => if is_toggling
|
,
DroneMode::TakingOff => if is_toggling { () } else { self.mode = DroneMode::TookOff },
DroneMode::TookOff => if is_toggling { self.mode = DroneMode::Landing },
DroneMode::Landing => if is_toggling { () } else { self.mode = DroneMode::Normal },
DroneMode::Abort => if is_toggling { self.mode = DroneMode::Normal },
}
}
/**
* as_array[0] = constant
* as_array[1] = roll
* as_array[2] = pitch
* as_array[3] = throttle
* as_array[4] = yaw
* as_array[5] = mode
* as_array[6] = checksum
* as_array[7] = constant
*/
pub fn update_array(&mut self) {
self.as_array[0] = 0x66;
let commands = [self.roll, self.pitch, self.throttle, self.yaw];
for i in 0..commands.len() {
if commands[i] >= 0 {
self.as_array[i + 1] = (commands[i] as u8) + 127;
} else {
self.as_array[i + 1] = (commands[i] + 127) as u8;
}
}
self.as_array[5] = if self.mode == DroneMode::TookOff { DroneMode::Normal as u8 } else { self.mode as u8 };
self.as_array[6] = (self.as_array[1] ^ self.as_array[2] ^ self.as_array[3] ^ self.as_array[4] ^ self.as_array[5]) & 0xFF;
self.as_array[7] = 0x99;
}
}
impl fmt::Debug for Command {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "throttle: {}, yaw: {}, pitch: {}, roll: {}, mode: {:?}", self.throttle, self.yaw, self.pitch, self.roll, self.mode)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_throttle_up() {
let mut cmd = Command::new();
cmd.throttle = 127;
cmd.update_array();
assert_eq!(cmd.as_array[3], 254);
}
#[test]
fn test_throttle_down() {
let mut cmd = Command::new();
cmd.throttle = -127;
cmd.update_array();
assert_eq!(cmd.as_array[3], 0);
}
}
|
{ self.mode = DroneMode::TakingOff }
|
conditional_block
|
command.rs
|
use std::fmt;
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DroneMode {
Normal = 0,
TakingOff = 1,
Landing = 2,
TookOff = 3, // Helper mode, doesn't exist on drone
Abort = 4,
}
pub struct Command {
pub pitch: i8,
pub yaw: i8,
pub roll: i8,
pub throttle: i8,
pub mode: DroneMode,
pub as_array: [u8; 8],
}
impl Command {
pub fn new() -> Command {
Command { throttle: 0, yaw: 0, pitch: 0, roll: 0, mode: DroneMode::Normal, as_array: [0; 8] }
}
pub fn toggle_mode(&mut self, is_toggling: bool) {
match self.mode {
DroneMode::Normal => if is_toggling { self.mode = DroneMode::TakingOff },
DroneMode::TakingOff => if is_toggling { () } else { self.mode = DroneMode::TookOff },
DroneMode::TookOff => if is_toggling { self.mode = DroneMode::Landing },
DroneMode::Landing => if is_toggling { () } else { self.mode = DroneMode::Normal },
DroneMode::Abort => if is_toggling { self.mode = DroneMode::Normal },
}
}
/**
* as_array[0] = constant
* as_array[1] = roll
* as_array[2] = pitch
* as_array[3] = throttle
* as_array[4] = yaw
* as_array[5] = mode
* as_array[6] = checksum
* as_array[7] = constant
*/
pub fn update_array(&mut self) {
self.as_array[0] = 0x66;
let commands = [self.roll, self.pitch, self.throttle, self.yaw];
for i in 0..commands.len() {
if commands[i] >= 0 {
self.as_array[i + 1] = (commands[i] as u8) + 127;
} else {
self.as_array[i + 1] = (commands[i] + 127) as u8;
}
}
self.as_array[5] = if self.mode == DroneMode::TookOff { DroneMode::Normal as u8 } else { self.mode as u8 };
self.as_array[6] = (self.as_array[1] ^ self.as_array[2] ^ self.as_array[3] ^ self.as_array[4] ^ self.as_array[5]) & 0xFF;
self.as_array[7] = 0x99;
}
}
impl fmt::Debug for Command {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "throttle: {}, yaw: {}, pitch: {}, roll: {}, mode: {:?}", self.throttle, self.yaw, self.pitch, self.roll, self.mode)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn
|
() {
let mut cmd = Command::new();
cmd.throttle = 127;
cmd.update_array();
assert_eq!(cmd.as_array[3], 254);
}
#[test]
fn test_throttle_down() {
let mut cmd = Command::new();
cmd.throttle = -127;
cmd.update_array();
assert_eq!(cmd.as_array[3], 0);
}
}
|
test_throttle_up
|
identifier_name
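A hypothetical usage sketch for the `Command` type above; the `command` module path and the throttle/yaw values are assumptions for illustration:

use command::Command; // assumed module path, adjust to where this file lives

fn main() {
    let mut cmd = Command::new();
    cmd.throttle = 64;      // climb at roughly half throttle
    cmd.yaw = -32;          // rotate left
    cmd.toggle_mode(true);  // button pressed: Normal -> TakingOff
    cmd.toggle_mode(false); // button released: TakingOff -> TookOff
    cmd.update_array();
    // as_array now holds the 8-byte frame:
    // 0x66, roll, pitch, throttle, yaw, mode, checksum, 0x99
    println!("{:?} -> {:?}", cmd, cmd.as_array);
}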
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::collections::HashMap;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::RwLock;
use futures::Future;
use once_cell::sync::Lazy;
use once_cell::sync::OnceCell;
pub struct Counter {
name: &'static str,
inner: AtomicUsize,
registered: OnceCell<()>,
}
impl Counter {
/// By convention metric name should be crate.metric_name
/// Metrics without '.' in the name are not allowed (they cause a compilation error)
pub const fn new(name: &'static str) -> Self {
// Unfortunately we can't check the name here because of const fn restrictions
let inner = AtomicUsize::new(0);
let registered = OnceCell::new();
Self {
name,
inner,
registered,
}
}
pub fn increment(&'static self) {
self.add(1);
}
pub fn add(&'static self, val: usize) {
self.inner().fetch_add(val, Ordering::Relaxed);
}
pub fn sub(&'static self, val: usize) {
self.inner().fetch_sub(val, Ordering::Relaxed);
}
pub fn value(&'static self) -> usize {
self.inner().load(Ordering::Relaxed)
}
/// Increment counter by v and decrement it back by v when returned guard is dropped
pub fn entrance_guard(&'static self, v: usize) -> EntranceGuard {
self.add(v);
EntranceGuard(self, v)
}
fn inner(&'static self) -> &AtomicUsize {
self.registered
.get_or_init(|| Registry::global().register_counter(self));
&self.inner
}
}
pub struct EntranceGuard(&'static Counter, usize);
impl Drop for EntranceGuard {
fn drop(&mut self) {
self.0.sub(self.1);
}
}
pub async fn wrap_future_keep_guards<F: Future>(
future: F,
_guards: Vec<EntranceGuard>,
) -> F::Output {
future.await
}
#[derive(Default)]
pub struct Registry {
counters: RwLock<HashMap<&'static str, &'static Counter>>,
}
impl Registry {
pub fn global() -> &'static Self {
static REGISTRY: Lazy<Registry> = Lazy::new(Registry::default);
&*REGISTRY
}
pub fn register_counter(&self, counter: &'static Counter) {
if self
.counters
.write()
.unwrap()
.insert(counter.name, counter)
.is_some()
|
}
pub fn counters(&self) -> HashMap<&'static str, &'static Counter> {
self.counters.read().unwrap().clone()
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn counters_test() {
static COUNTER1: Counter = Counter::new("COUNTER1");
static COUNTER2: Counter = Counter::new("COUNTER2");
COUNTER1.increment();
COUNTER2.add(5);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER1").unwrap().value());
assert_eq!(5, counters.get("COUNTER2").unwrap().value());
}
#[test]
fn entrance_test() {
static COUNTER3: Counter = Counter::new("COUNTER3");
let guard1 = COUNTER3.entrance_guard(1);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER3").unwrap().value());
std::mem::drop(guard1);
let counters = Registry::global().counters();
assert_eq!(0, counters.get("COUNTER3").unwrap().value());
}
}
|
{
panic!("Counter {} is duplicated", counter.name)
}
|
conditional_block
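A short usage sketch of the `Counter`/`Registry` API from the lib.rs in this row, mirroring what its unit tests do. It assumes those types are in scope (the crate name is not given here); the metric and function names are illustrative only and follow the documented `crate.metric_name` convention.

```rust
// Assumes Counter and Registry from the lib.rs above are in scope.
static REQUESTS: Counter = Counter::new("mycrate.requests"); // crate.metric_name convention

fn handle_request() {
    // The first use lazily registers the counter in the global registry.
    REQUESTS.increment();
}

fn dump_metrics() {
    for (name, counter) in Registry::global().counters() {
        println!("{} = {}", name, counter.value());
    }
}
```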
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::collections::HashMap;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::RwLock;
use futures::Future;
use once_cell::sync::Lazy;
use once_cell::sync::OnceCell;
pub struct Counter {
name: &'static str,
inner: AtomicUsize,
registered: OnceCell<()>,
}
impl Counter {
/// By convention metric name should be crate.metric_name
/// Metrics without '.' in the name are not allowed (they cause a compilation error)
pub const fn new(name: &'static str) -> Self {
// Unfortunately we can't check the name here because of the const restriction
let inner = AtomicUsize::new(0);
let registered = OnceCell::new();
Self {
name,
inner,
registered,
}
}
pub fn increment(&'static self) {
self.add(1);
}
pub fn add(&'static self, val: usize) {
self.inner().fetch_add(val, Ordering::Relaxed);
}
pub fn sub(&'static self, val: usize) {
self.inner().fetch_sub(val, Ordering::Relaxed);
}
pub fn value(&'static self) -> usize {
self.inner().load(Ordering::Relaxed)
}
/// Increment counter by v and decrement it back by v when returned guard is dropped
pub fn entrance_guard(&'static self, v: usize) -> EntranceGuard {
self.add(v);
EntranceGuard(self, v)
}
fn inner(&'static self) -> &AtomicUsize {
self.registered
.get_or_init(|| Registry::global().register_counter(self));
&self.inner
}
}
pub struct EntranceGuard(&'static Counter, usize);
impl Drop for EntranceGuard {
fn drop(&mut self) {
self.0.sub(self.1);
}
}
pub async fn wrap_future_keep_guards<F: Future>(
future: F,
_guards: Vec<EntranceGuard>,
) -> F::Output {
future.await
}
#[derive(Default)]
pub struct Registry {
counters: RwLock<HashMap<&'static str, &'static Counter>>,
}
impl Registry {
pub fn
|
() -> &'static Self {
static REGISTRY: Lazy<Registry> = Lazy::new(Registry::default);
&*REGISTRY
}
pub fn register_counter(&self, counter: &'static Counter) {
if self
.counters
.write()
.unwrap()
.insert(counter.name, counter)
.is_some()
{
panic!("Counter {} is duplicated", counter.name)
}
}
pub fn counters(&self) -> HashMap<&'static str, &'static Counter> {
self.counters.read().unwrap().clone()
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn counters_test() {
static COUNTER1: Counter = Counter::new("COUNTER1");
static COUNTER2: Counter = Counter::new("COUNTER2");
COUNTER1.increment();
COUNTER2.add(5);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER1").unwrap().value());
assert_eq!(5, counters.get("COUNTER2").unwrap().value());
}
#[test]
fn entrance_test() {
static COUNTER3: Counter = Counter::new("COUNTER3");
let guard1 = COUNTER3.entrance_guard(1);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER3").unwrap().value());
std::mem::drop(guard1);
let counters = Registry::global().counters();
assert_eq!(0, counters.get("COUNTER3").unwrap().value());
}
}
|
global
|
identifier_name
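The `inner()` method in this row relies on `OnceCell::get_or_init` so that a counter registers itself in the global registry on first use rather than at construction time (which a `const fn` cannot do). Below is a standalone, hedged illustration of that run-exactly-once behaviour; the type and names are invented for the example and are not part of the code above.

```rust
use once_cell::sync::OnceCell;

struct LazyRegistered {
    registered: OnceCell<()>,
}

impl LazyRegistered {
    fn touch(&self) {
        // The closure runs at most once, no matter how many times touch() is called.
        self.registered.get_or_init(|| println!("registered exactly once"));
    }
}

fn main() {
    let item = LazyRegistered { registered: OnceCell::new() };
    item.touch(); // prints
    item.touch(); // does not print again
}
```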
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::collections::HashMap;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::RwLock;
use futures::Future;
use once_cell::sync::Lazy;
use once_cell::sync::OnceCell;
pub struct Counter {
name: &'static str,
inner: AtomicUsize,
registered: OnceCell<()>,
}
impl Counter {
/// By convention metric name should be crate.metric_name
/// Metrics without '.' in the name are not allowed (they cause a compilation error)
pub const fn new(name: &'static str) -> Self {
// Unfortunately we can't check the name here because of the const restriction
let inner = AtomicUsize::new(0);
let registered = OnceCell::new();
Self {
name,
inner,
registered,
}
}
pub fn increment(&'static self) {
self.add(1);
}
pub fn add(&'static self, val: usize) {
self.inner().fetch_add(val, Ordering::Relaxed);
}
pub fn sub(&'static self, val: usize) {
self.inner().fetch_sub(val, Ordering::Relaxed);
}
pub fn value(&'static self) -> usize {
self.inner().load(Ordering::Relaxed)
}
/// Increment counter by v and decrement it back by v when returned guard is dropped
pub fn entrance_guard(&'static self, v: usize) -> EntranceGuard
|
fn inner(&'static self) -> &AtomicUsize {
self.registered
.get_or_init(|| Registry::global().register_counter(self));
&self.inner
}
}
pub struct EntranceGuard(&'static Counter, usize);
impl Drop for EntranceGuard {
fn drop(&mut self) {
self.0.sub(self.1);
}
}
pub async fn wrap_future_keep_guards<F: Future>(
future: F,
_guards: Vec<EntranceGuard>,
) -> F::Output {
future.await
}
#[derive(Default)]
pub struct Registry {
counters: RwLock<HashMap<&'static str, &'static Counter>>,
}
impl Registry {
pub fn global() -> &'static Self {
static REGISTRY: Lazy<Registry> = Lazy::new(Registry::default);
&*REGISTRY
}
pub fn register_counter(&self, counter: &'static Counter) {
if self
.counters
.write()
.unwrap()
.insert(counter.name, counter)
.is_some()
{
panic!("Counter {} is duplicated", counter.name)
}
}
pub fn counters(&self) -> HashMap<&'static str, &'static Counter> {
self.counters.read().unwrap().clone()
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn counters_test() {
static COUNTER1: Counter = Counter::new("COUNTER1");
static COUNTER2: Counter = Counter::new("COUNTER2");
COUNTER1.increment();
COUNTER2.add(5);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER1").unwrap().value());
assert_eq!(5, counters.get("COUNTER2").unwrap().value());
}
#[test]
fn entrance_test() {
static COUNTER3: Counter = Counter::new("COUNTER3");
let guard1 = COUNTER3.entrance_guard(1);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER3").unwrap().value());
std::mem::drop(guard1);
let counters = Registry::global().counters();
assert_eq!(0, counters.get("COUNTER3").unwrap().value());
}
}
|
{
self.add(v);
EntranceGuard(self, v)
}
|
identifier_body
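A hedged sketch of the entrance-guard pattern from this row: the counter is incremented when the guard is created and decremented when the guard drops, and `wrap_future_keep_guards` keeps guards alive for the whole future. It assumes `Counter` and `wrap_future_keep_guards` from the lib.rs above are in scope and that some async runtime drives `tracked_work`; the names are illustrative.

```rust
// Assumes Counter and wrap_future_keep_guards from the lib.rs above are in scope.
static INFLIGHT: Counter = Counter::new("mycrate.inflight_requests");

async fn do_work() {
    // ... real work would go here ...
}

async fn tracked_work() {
    // INFLIGHT goes up by 1 here and back down when the guard is dropped,
    // i.e. once do_work() has completed.
    let guard = INFLIGHT.entrance_guard(1);
    wrap_future_keep_guards(do_work(), vec![guard]).await;
}
```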
|
lib.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::collections::HashMap;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::RwLock;
use futures::Future;
use once_cell::sync::Lazy;
use once_cell::sync::OnceCell;
pub struct Counter {
name: &'static str,
inner: AtomicUsize,
registered: OnceCell<()>,
}
impl Counter {
/// By convention metric name should be crate.metric_name
/// Metrics without '.' in the name are not allowed (they cause a compilation error)
pub const fn new(name: &'static str) -> Self {
// Unfortunately we can't check the name here because of the const restriction
let inner = AtomicUsize::new(0);
let registered = OnceCell::new();
Self {
name,
inner,
registered,
}
}
pub fn increment(&'static self) {
self.add(1);
}
pub fn add(&'static self, val: usize) {
self.inner().fetch_add(val, Ordering::Relaxed);
}
pub fn sub(&'static self, val: usize) {
self.inner().fetch_sub(val, Ordering::Relaxed);
}
pub fn value(&'static self) -> usize {
self.inner().load(Ordering::Relaxed)
}
/// Increment counter by v and decrement it back by v when returned guard is dropped
pub fn entrance_guard(&'static self, v: usize) -> EntranceGuard {
self.add(v);
EntranceGuard(self, v)
}
fn inner(&'static self) -> &AtomicUsize {
self.registered
.get_or_init(|| Registry::global().register_counter(self));
&self.inner
}
}
pub struct EntranceGuard(&'static Counter, usize);
impl Drop for EntranceGuard {
fn drop(&mut self) {
self.0.sub(self.1);
}
}
pub async fn wrap_future_keep_guards<F: Future>(
future: F,
_guards: Vec<EntranceGuard>,
) -> F::Output {
future.await
}
#[derive(Default)]
pub struct Registry {
counters: RwLock<HashMap<&'static str, &'static Counter>>,
}
impl Registry {
pub fn global() -> &'static Self {
static REGISTRY: Lazy<Registry> = Lazy::new(Registry::default);
&*REGISTRY
}
pub fn register_counter(&self, counter: &'static Counter) {
if self
.counters
.write()
.unwrap()
.insert(counter.name, counter)
.is_some()
{
panic!("Counter {} is duplicated", counter.name)
}
}
pub fn counters(&self) -> HashMap<&'static str, &'static Counter> {
self.counters.read().unwrap().clone()
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
|
COUNTER1.increment();
COUNTER2.add(5);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER1").unwrap().value());
assert_eq!(5, counters.get("COUNTER2").unwrap().value());
}
#[test]
fn entrance_test() {
static COUNTER3: Counter = Counter::new("COUNTER3");
let guard1 = COUNTER3.entrance_guard(1);
let counters = Registry::global().counters();
assert_eq!(1, counters.get("COUNTER3").unwrap().value());
std::mem::drop(guard1);
let counters = Registry::global().counters();
assert_eq!(0, counters.get("COUNTER3").unwrap().value());
}
}
|
fn counters_test() {
static COUNTER1: Counter = Counter::new("COUNTER1");
static COUNTER2: Counter = Counter::new("COUNTER2");
|
random_line_split
|
lib.rs
|
use chrono::{DateTime, Utc};
use exonum::{
blockchain::{Block, CallInBlock, CallProof, Schema, TxLocation},
crypto::Hash,
helpers::Height,
merkledb::{ListProof, ObjectHash, Snapshot},
messages::{AnyTx, Precommit, Verified},
runtime::{ExecutionError, ExecutionStatus},
};
use serde::{Serialize, Serializer};
use serde_derive::Deserialize;
use std::{
cell::{Ref, RefCell},
collections::BTreeMap,
fmt,
ops::{Bound, Index, RangeBounds},
slice,
time::UNIX_EPOCH,
};
pub mod api;
/// Ending height of the range (exclusive), given the a priori max height.
fn end_height(bound: Bound<&Height>, max: Height) -> Height {
use std::cmp::min;
let inner_end = match bound {
Bound::Included(height) => height.next(),
Bound::Excluded(height) => *height,
Bound::Unbounded => max.next(),
};
min(inner_end, max.next())
}
/// Information about a block in the blockchain.
///
/// # JSON presentation
///
/// JSON object with the following fields:
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `block` | [`Block`] | Block header as recorded in the blockchain |
/// | `precommits` | `Vec<`[`Precommit`]`>` | Precommits authorizing the block |
/// | `txs` | `Vec<`[`Hash`]`>` | Hashes of transactions in the block |
///
/// [`Block`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.Block.html
/// [`Precommit`]: https://docs.rs/exonum/latest/exonum/messages/struct.Precommit.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
#[derive(Debug)]
pub struct BlockInfo<'a> {
header: Block,
explorer: &'a BlockchainExplorer<'a>,
precommits: RefCell<Option<Vec<Verified<Precommit>>>>,
txs: RefCell<Option<Vec<Hash>>>,
}
impl<'a> BlockInfo<'a> {
fn new(explorer: &'a BlockchainExplorer<'_>, height: Height) -> Self {
let schema = explorer.schema;
let hashes = schema.block_hashes_by_height();
let blocks = schema.blocks();
let block_hash = hashes
.get(height.0)
.unwrap_or_else(|| panic!("Block not found, height: {:?}", height));
let header = blocks
.get(&block_hash)
.unwrap_or_else(|| panic!("Block not found, hash: {:?}", block_hash));
BlockInfo {
explorer,
header,
precommits: RefCell::new(None),
txs: RefCell::new(None),
}
}
/// Returns block header as recorded in the blockchain.
pub fn header(&self) -> &Block {
&self.header
}
/// Extracts the header discarding all other information.
pub fn into_header(self) -> Block {
self.header
}
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header().height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.header.tx_count as usize
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns a list of precommits for this block.
pub fn precommits(&self) -> Ref<'_, [Verified<Precommit>]> {
if self.precommits.borrow().is_none() {
let precommits = self.explorer.precommits(&self.header);
*self.precommits.borrow_mut() = Some(precommits);
}
Ref::map(self.precommits.borrow(), |cache| {
cache.as_ref().unwrap().as_ref()
})
}
/// Lists hashes of transactions included in this block.
pub fn transaction_hashes(&self) -> Ref<'_, [Hash]> {
if self.txs.borrow().is_none() {
let txs = self.explorer.transaction_hashes(&self.header);
*self.txs.borrow_mut() = Some(txs);
}
Ref::map(self.txs.borrow(), |cache| cache.as_ref().unwrap().as_ref())
}
/// Returns a transaction with the specified index in the block.
pub fn transaction(&self, index: usize) -> Option<CommittedTransaction> {
self.transaction_hashes()
.get(index)
.map(|hash| self.explorer.committed_transaction(hash, None))
}
/// Returns the proof for the execution status of a call within this block.
///
/// Note that if the call did not result in an error or did not happen at all, the returned
/// proof will not contain entries. To distinguish between two cases, one can inspect
/// the number of transactions in the block or IDs of the active services when the block
/// was executed.
pub fn call_proof(&self, call_location: CallInBlock) -> CallProof {
self.explorer
.schema
.call_records(self.header.height)
.unwrap() // safe: we know that the block exists
.get_proof(call_location)
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> Transactions<'_, '_> {
Transactions {
block: self,
ptr: 0,
len: self.len(),
}
}
/// Loads transactions, errors and precommits for the block.
pub fn with_transactions(self) -> BlockWithTransactions {
let (explorer, header, precommits, transactions) =
(self.explorer, self.header, self.precommits, self.txs);
let precommits = precommits
.into_inner()
.unwrap_or_else(|| explorer.precommits(&header));
let transactions = transactions
.into_inner()
.unwrap_or_else(|| explorer.transaction_hashes(&header))
.iter()
.map(|tx_hash| explorer.committed_transaction(tx_hash, None))
.collect();
let errors = self
.explorer
.schema
.call_records(header.height)
.expect("No call record for a committed block");
let errors: Vec<_> = errors
.errors()
.map(|(location, error)| ErrorWithLocation { location, error })
.collect();
BlockWithTransactions {
header,
precommits,
transactions,
errors,
}
}
}
impl<'a> Serialize for BlockInfo<'a> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeStruct;
let mut s = serializer.serialize_struct("BlockInfo", 3)?;
s.serialize_field("block", &self.header)?;
s.serialize_field("precommits", &*self.precommits())?;
s.serialize_field("txs", &*self.transaction_hashes())?;
s.end()
}
}
/// Iterator over transactions in a block.
#[derive(Debug)]
pub struct Transactions<'r, 'a> {
block: &'r BlockInfo<'a>,
ptr: usize,
len: usize,
}
impl<'a, 'r> Iterator for Transactions<'a, 'r> {
type Item = CommittedTransaction;
fn next(&mut self) -> Option<CommittedTransaction> {
if self.ptr == self.len {
None
} else {
let transaction = self.block.transaction(self.ptr);
self.ptr += 1;
transaction
}
}
}
impl<'a, 'r: 'a> IntoIterator for &'r BlockInfo<'a> {
type Item = CommittedTransaction;
type IntoIter = Transactions<'a, 'r>;
fn into_iter(self) -> Transactions<'a, 'r> {
self.iter()
}
}
/// Information about a block in the blockchain with info on transactions eagerly loaded.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct BlockWithTransactions {
/// Block header as recorded in the blockchain.
#[serde(rename = "block")]
pub header: Block,
/// Precommits.
pub precommits: Vec<Verified<Precommit>>,
/// Transactions in the order they appear in the block.
pub transactions: Vec<CommittedTransaction>,
/// Errors that have occurred within the block.
pub errors: Vec<ErrorWithLocation>,
}
/// Execution error together with its location within the block.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ErrorWithLocation {
/// Location of the error.
pub location: CallInBlock,
/// Error data.
pub error: ExecutionError,
}
impl fmt::Display for ErrorWithLocation {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(formatter, "In {}: {}", self.location, self.error)
}
}
impl BlockWithTransactions {
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header.height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.transactions.len()
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.transactions.is_empty()
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> EagerTransactions<'_> {
self.transactions.iter()
}
/// Returns errors converted into a map. Note that this is potentially a costly operation.
pub fn error_map(&self) -> BTreeMap<CallInBlock, &ExecutionError> {
self.errors.iter().map(|e| (e.location, &e.error)).collect()
}
}
/// Iterator over transactions in [`BlockWithTransactions`].
///
/// [`BlockWithTransactions`]: struct.BlockWithTransactions.html
pub type EagerTransactions<'a> = slice::Iter<'a, CommittedTransaction>;
impl Index<usize> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: usize) -> &CommittedTransaction {
self.transactions.get(index).unwrap_or_else(|| {
panic!(
"Index exceeds number of transactions in block {}",
self.len()
);
})
}
}
/// Returns a transaction in the block by its hash. Beware that this is a slow operation
/// (linear w.r.t. the number of transactions in a block).
impl Index<Hash> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: Hash) -> &CommittedTransaction {
self.transactions
.iter()
.find(|&tx| tx.message.object_hash() == index)
.unwrap_or_else(|| {
panic!("No transaction with hash {} in the block", index);
})
}
}
impl<'a> IntoIterator for &'a BlockWithTransactions {
type Item = &'a CommittedTransaction;
type IntoIter = EagerTransactions<'a>;
fn into_iter(self) -> EagerTransactions<'a> {
self.iter()
}
}
/// Information about a particular transaction in the blockchain.
///
/// # JSON presentation
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `message` | `Verified<AnyTx>` | Transaction as recorded in the blockchain |
/// | `location` | [`TxLocation`] | Location of the transaction in the block |
/// | `location_proof` | [`ListProof`]`<`[`Hash`]`>` | Proof of transaction inclusion into a block |
/// | `status` | (custom; see below) | Execution status |
/// | `time` | [`DateTime`]`<`[`Utc`]`>` | Commitment time* |
///
/// \* By commitment time we mean an approximate commitment time of the block
/// which includes the transaction. This time is a median time of the precommit local times
/// of each validator.
///
/// ## `status` field
///
/// The `status` field is a more readable representation of the [`ExecutionStatus`] type.
///
/// For successfully executed transactions, `status` is equal to
///
/// ```json
/// { "type": "success" }
/// ```
///
/// For transactions that cause an [`ExecutionError`], `status` contains the error code
/// and an optional description, i.e., has the following type in the [TypeScript] notation:
///
/// ```typescript
/// type Error = {
/// type: 'service_error' | 'core_error' | 'common_error' | 'runtime_error' | 'unexpected_error',
/// code?: number,
/// description?: string,
/// runtime_id: number,
/// call_site?: CallSite,
/// };
///
/// type CallSite = MethodCallSite | HookCallSite;
///
/// type MethodCallSite = {
/// call_type: 'method',
/// instance_id: number,
/// interface?: string,
/// method_id: number,
/// };
///
/// type HookCallSite = {
/// call_type: 'constructor' | 'before_transactions' | 'after_transactions',
/// instance_id: number,
/// };
/// ```
///
/// Explanations:
///
/// - `Error.type` determines the component responsible for the error. Usually, errors
/// are generated by the service code, but they can also be caused by the dispatch logic,
/// runtime associated with the service, or come from another source (`unexpected_error`s).
/// - `Error.code` is the error code. For service errors, this code is specific
/// to the service instance (which can be obtained from `call_site`), and for runtime errors -
/// to the runtime. For core errors, the codes are fixed; their meaning can be found
/// in the [`CoreError`] docs. The code is present for all error types except
/// `unexpected_error`s, in which the code is always absent.
/// Besides types listed above, there is also a set of errors that can occur within any context,
/// which are organized in the [`CommonError`].
/// - `Error.description` is an optional human-readable description of the error.
/// - `Error.runtime_id` is the numeric ID of the runtime in which the error has occurred. Note
/// that the runtime is defined for all error types, not just `runtime_error`s, since
/// for any request it's possible to say which runtime is responsible for its processing.
/// - `Error.call_site` provides the most precise known location of the call in which the error
/// has occurred.
///
/// [`TxLocation`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.TxLocation.html
/// [`ListProof`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/indexes/proof_list/struct.ListProof.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
/// [`ExecutionStatus`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionStatus.html
/// [`ExecutionError`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionError.html
/// [`CoreError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CoreError.html
/// [`CommonError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CommonError.html
/// [TypeScript]: https://www.typescriptlang.org/
/// [`DateTime`]: https://docs.rs/chrono/0.4.10/chrono/struct.DateTime.html
/// [`Utc`]: https://docs.rs/chrono/0.4.10/chrono/offset/struct.Utc.html
#[derive(Debug, Serialize, Deserialize)]
pub struct CommittedTransaction {
message: Verified<AnyTx>,
location: TxLocation,
location_proof: ListProof<Hash>,
status: ExecutionStatus,
time: DateTime<Utc>,
}
impl CommittedTransaction {
/// Returns the content of the transaction.
pub fn message(&self) -> &Verified<AnyTx> {
&self.message
}
/// Returns the transaction location in block.
pub fn location(&self) -> &TxLocation {
&self.location
}
/// Returns a proof that transaction is recorded in the blockchain.
pub fn location_proof(&self) -> &ListProof<Hash> {
&self.location_proof
}
/// Returns the status of the transaction execution.
pub fn status(&self) -> Result<(), &ExecutionError> {
self.status.0.as_ref().map(drop)
}
/// Returns an approximate commit time of the block which includes this transaction.
pub fn time(&self) -> &DateTime<Utc> {
&self.time
}
}
/// Information about the transaction.
///
/// Values of this type are returned by the `transaction()` method of the `BlockchainExplorer`.
///
/// # JSON presentation
///
/// ## Committed transactions
///
/// Committed transactions are represented just like a `CommittedTransaction`,
/// with the additional `type` field equal to `"committed"`.
///
/// ## Transaction in pool
///
/// Transactions in pool are represented with a 2-field object:
///
/// - `type` field contains the transaction type (`"in_pool"`).
/// - `message` is the full transaction message serialized to the hexadecimal form.
///
/// # Examples
///
/// ```
/// use exonum_explorer::TransactionInfo;
/// use exonum::{crypto::KeyPair, runtime::InstanceId};
/// # use exonum_derive::*;
/// # use serde_derive::*;
/// # use serde_json::json;
///
/// /// Service interface.
/// #[exonum_interface]
/// trait ServiceInterface<Ctx> {
/// type Output;
/// #[interface_method(id = 0)]
/// fn create_wallet(&self, ctx: Ctx, username: String) -> Self::Output;
/// }
///
/// // Create a signed transaction.
/// let keypair = KeyPair::random();
/// const SERVICE_ID: InstanceId = 100;
/// let tx = keypair.create_wallet(SERVICE_ID, "Alice".to_owned());
/// // This transaction in pool will be represented as follows:
/// let json = json!({
/// "type": "in_pool",
/// "message": tx,
/// });
/// let parsed: TransactionInfo = serde_json::from_value(json).unwrap();
/// assert!(parsed.is_in_pool());
/// ```
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
#[non_exhaustive]
pub enum TransactionInfo {
/// Transaction is in the memory pool, but not yet committed to the blockchain.
InPool {
/// A content of the uncommitted transaction.
message: Verified<AnyTx>,
},
/// Transaction is already committed to the blockchain.
Committed(CommittedTransaction),
}
impl TransactionInfo {
/// Returns the content of this transaction.
pub fn message(&self) -> &Verified<AnyTx> {
match *self {
TransactionInfo::InPool { ref message } => message,
TransactionInfo::Committed(ref tx) => tx.message(),
}
}
/// Is this in-pool transaction?
pub fn is_in_pool(&self) -> bool {
matches!(*self, TransactionInfo::InPool { .. })
}
/// Is this a committed transaction?
pub fn is_committed(&self) -> bool {
matches!(*self, TransactionInfo::Committed(_))
}
/// Returns a reference to the inner committed transaction if this transaction is committed.
/// For transactions in pool, returns `None`.
pub fn as_committed(&self) -> Option<&CommittedTransaction> {
match *self {
TransactionInfo::Committed(ref tx) => Some(tx),
_ => None,
}
}
}
/// Blockchain explorer.
///
/// # Notes
///
/// The explorer wraps a specific [`Snapshot`] of the blockchain state; that is,
/// all calls to the methods of an explorer instance are guaranteed to be consistent.
///
/// [`Snapshot`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/trait.Snapshot.html
#[derive(Debug, Copy, Clone)]
pub struct BlockchainExplorer<'a> {
schema: Schema<&'a dyn Snapshot>,
}
impl<'a> BlockchainExplorer<'a> {
/// Creates a new `BlockchainExplorer` instance from the provided snapshot.
pub fn new(snapshot: &'a dyn Snapshot) -> Self {
BlockchainExplorer {
schema: Schema::new(snapshot),
}
}
/// Creates a new `BlockchainExplorer` instance from the core schema.
pub fn from_schema(schema: Schema<&'a dyn Snapshot>) -> Self {
BlockchainExplorer { schema }
}
/// Returns information about the transaction identified by the hash.
pub fn transaction(&self, tx_hash: &Hash) -> Option<TransactionInfo> {
let message = self.transaction_without_proof(tx_hash)?;
if self.schema.transactions_pool().contains(tx_hash) {
return Some(TransactionInfo::InPool { message });
}
let tx = self.committed_transaction(tx_hash, Some(message));
Some(TransactionInfo::Committed(tx))
}
/// Returns the status of a call in a block.
///
/// # Return value
///
/// This method will return `Ok(())` both if the call completed successfully and if it
/// was not performed at all. The caller is responsible for distinguishing these two outcomes.
pub fn call_status(
&self,
block_height: Height,
call_location: CallInBlock,
) -> Result<(), ExecutionError> {
match self.schema.call_records(block_height) {
Some(errors) => errors.get(call_location),
None => Ok(()),
}
}
/// Return transaction message without proof.
pub fn transaction_without_proof(&self, tx_hash: &Hash) -> Option<Verified<AnyTx>> {
self.schema.transactions().get(tx_hash)
}
fn precommits(&self, block: &Block) -> Vec<Verified<Precommit>> {
self.schema
.precommits(&block.object_hash())
.iter()
.collect()
}
fn transaction_hashes(&self, block: &Block) -> Vec<Hash> {
let tx_hashes_table = self.schema.block_transactions(block.height);
tx_hashes_table.iter().collect()
}
/// Retrieves a transaction that is known to be committed.
fn committed_transaction(
&self,
tx_hash: &Hash,
maybe_content: Option<Verified<AnyTx>>,
) -> CommittedTransaction {
let location = self
.schema
.transactions_locations()
.get(tx_hash)
.unwrap_or_else(|| panic!("Location not found for transaction hash {:?}", tx_hash));
let location_proof = self
.schema
.block_transactions(location.block_height())
.get_proof(u64::from(location.position_in_block()));
let block_precommits = self
.schema
.block_and_precommits(location.block_height())
.unwrap();
let time = median_precommits_time(&block_precommits.precommits);
// Unwrap is OK here, because we already know that transaction is committed.
let status = self.schema.transaction_result(location).unwrap();
CommittedTransaction {
message: maybe_content.unwrap_or_else(|| {
self.schema
.transactions()
.get(tx_hash)
.expect("BUG: Cannot find transaction in database")
}),
location,
location_proof,
status: ExecutionStatus(status),
time,
}
}
/// Return the height of the blockchain.
pub fn height(&self) -> Height
|
/// Returns block information for the specified height or `None` if there is no such block.
pub fn block(&self, height: Height) -> Option<BlockInfo<'_>> {
if self.height() >= height {
Some(BlockInfo::new(self, height))
} else {
None
}
}
/// Return a block together with its transactions at the specified height, or `None`
/// if there is no such block.
pub fn block_with_txs(&self, height: Height) -> Option<BlockWithTransactions> {
let txs_table = self.schema.block_transactions(height);
let block_proof = self.schema.block_and_precommits(height)?;
let errors = self.schema.call_records(height)?;
|
{
self.schema.height()
}
|
identifier_body
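A hedged usage sketch of the explorer API shown in this row: build a `BlockchainExplorer` from a snapshot, look up the latest block, and walk its committed transactions. The function name is made up, and the snapshot is assumed to come from an existing exonum database rather than being constructed here.

```rust
use exonum::merkledb::Snapshot;
use exonum_explorer::BlockchainExplorer; // crate path taken from the doc examples above

fn print_latest_block(snapshot: &dyn Snapshot) {
    let explorer = BlockchainExplorer::new(snapshot);
    let height = explorer.height();
    if let Some(block) = explorer.block(height) {
        println!("block at {:?} holds {} transaction(s)", block.height(), block.len());
        for tx in &block {
            // Each item is a CommittedTransaction carrying location, proof, status and time.
            println!("  committed around {}", tx.time());
        }
    }
}
```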
|
lib.rs
|
<Precommit>>>>,
txs: RefCell<Option<Vec<Hash>>>,
}
impl<'a> BlockInfo<'a> {
fn new(explorer: &'a BlockchainExplorer<'_>, height: Height) -> Self {
let schema = explorer.schema;
let hashes = schema.block_hashes_by_height();
let blocks = schema.blocks();
let block_hash = hashes
.get(height.0)
.unwrap_or_else(|| panic!("Block not found, height: {:?}", height));
let header = blocks
.get(&block_hash)
.unwrap_or_else(|| panic!("Block not found, hash: {:?}", block_hash));
BlockInfo {
explorer,
header,
precommits: RefCell::new(None),
txs: RefCell::new(None),
}
}
/// Returns block header as recorded in the blockchain.
pub fn header(&self) -> &Block {
&self.header
}
/// Extracts the header discarding all other information.
pub fn into_header(self) -> Block {
self.header
}
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header().height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.header.tx_count as usize
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns a list of precommits for this block.
pub fn precommits(&self) -> Ref<'_, [Verified<Precommit>]> {
if self.precommits.borrow().is_none() {
let precommits = self.explorer.precommits(&self.header);
*self.precommits.borrow_mut() = Some(precommits);
}
Ref::map(self.precommits.borrow(), |cache| {
cache.as_ref().unwrap().as_ref()
})
}
/// Lists hashes of transactions included in this block.
pub fn transaction_hashes(&self) -> Ref<'_, [Hash]> {
if self.txs.borrow().is_none() {
let txs = self.explorer.transaction_hashes(&self.header);
*self.txs.borrow_mut() = Some(txs);
}
Ref::map(self.txs.borrow(), |cache| cache.as_ref().unwrap().as_ref())
}
/// Returns a transaction with the specified index in the block.
pub fn transaction(&self, index: usize) -> Option<CommittedTransaction> {
self.transaction_hashes()
.get(index)
.map(|hash| self.explorer.committed_transaction(hash, None))
}
/// Returns the proof for the execution status of a call within this block.
///
/// Note that if the call did not result in an error or did not happen at all, the returned
/// proof will not contain entries. To distinguish between two cases, one can inspect
/// the number of transactions in the block or IDs of the active services when the block
/// was executed.
pub fn call_proof(&self, call_location: CallInBlock) -> CallProof {
self.explorer
.schema
.call_records(self.header.height)
.unwrap() // safe: we know that the block exists
.get_proof(call_location)
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> Transactions<'_, '_> {
Transactions {
block: self,
ptr: 0,
len: self.len(),
}
}
/// Loads transactions, errors and precommits for the block.
pub fn with_transactions(self) -> BlockWithTransactions {
let (explorer, header, precommits, transactions) =
(self.explorer, self.header, self.precommits, self.txs);
let precommits = precommits
.into_inner()
.unwrap_or_else(|| explorer.precommits(&header));
let transactions = transactions
.into_inner()
.unwrap_or_else(|| explorer.transaction_hashes(&header))
.iter()
.map(|tx_hash| explorer.committed_transaction(tx_hash, None))
.collect();
let errors = self
.explorer
.schema
.call_records(header.height)
.expect("No call record for a committed block");
let errors: Vec<_> = errors
.errors()
.map(|(location, error)| ErrorWithLocation { location, error })
.collect();
BlockWithTransactions {
header,
precommits,
transactions,
errors,
}
}
}
impl<'a> Serialize for BlockInfo<'a> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeStruct;
let mut s = serializer.serialize_struct("BlockInfo", 3)?;
s.serialize_field("block", &self.header)?;
s.serialize_field("precommits", &*self.precommits())?;
s.serialize_field("txs", &*self.transaction_hashes())?;
s.end()
}
}
/// Iterator over transactions in a block.
#[derive(Debug)]
pub struct Transactions<'r, 'a> {
block: &'r BlockInfo<'a>,
ptr: usize,
len: usize,
}
impl<'a, 'r> Iterator for Transactions<'a, 'r> {
type Item = CommittedTransaction;
fn next(&mut self) -> Option<CommittedTransaction> {
if self.ptr == self.len {
None
} else {
let transaction = self.block.transaction(self.ptr);
self.ptr += 1;
transaction
}
}
}
impl<'a, 'r: 'a> IntoIterator for &'r BlockInfo<'a> {
type Item = CommittedTransaction;
type IntoIter = Transactions<'a, 'r>;
fn into_iter(self) -> Transactions<'a, 'r> {
self.iter()
}
}
/// Information about a block in the blockchain with info on transactions eagerly loaded.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct BlockWithTransactions {
/// Block header as recorded in the blockchain.
#[serde(rename = "block")]
pub header: Block,
/// Precommits.
pub precommits: Vec<Verified<Precommit>>,
/// Transactions in the order they appear in the block.
pub transactions: Vec<CommittedTransaction>,
/// Errors that have occurred within the block.
pub errors: Vec<ErrorWithLocation>,
}
/// Execution error together with its location within the block.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ErrorWithLocation {
/// Location of the error.
pub location: CallInBlock,
/// Error data.
pub error: ExecutionError,
}
impl fmt::Display for ErrorWithLocation {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(formatter, "In {}: {}", self.location, self.error)
}
}
impl BlockWithTransactions {
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header.height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.transactions.len()
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.transactions.is_empty()
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> EagerTransactions<'_> {
self.transactions.iter()
}
/// Returns errors converted into a map. Note that this is potentially a costly operation.
pub fn error_map(&self) -> BTreeMap<CallInBlock, &ExecutionError> {
self.errors.iter().map(|e| (e.location, &e.error)).collect()
}
}
/// Iterator over transactions in [`BlockWithTransactions`].
///
/// [`BlockWithTransactions`]: struct.BlockWithTransactions.html
pub type EagerTransactions<'a> = slice::Iter<'a, CommittedTransaction>;
impl Index<usize> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: usize) -> &CommittedTransaction {
self.transactions.get(index).unwrap_or_else(|| {
panic!(
"Index exceeds number of transactions in block {}",
self.len()
);
})
}
}
/// Returns a transaction in the block by its hash. Beware that this is a slow operation
/// (linear w.r.t. the number of transactions in a block).
impl Index<Hash> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: Hash) -> &CommittedTransaction {
self.transactions
.iter()
.find(|&tx| tx.message.object_hash() == index)
.unwrap_or_else(|| {
panic!("No transaction with hash {} in the block", index);
})
}
}
impl<'a> IntoIterator for &'a BlockWithTransactions {
type Item = &'a CommittedTransaction;
type IntoIter = EagerTransactions<'a>;
fn into_iter(self) -> EagerTransactions<'a> {
self.iter()
}
}
/// Information about a particular transaction in the blockchain.
///
/// # JSON presentation
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `message` | `Verified<AnyTx>` | Transaction as recorded in the blockchain |
/// | `location` | [`TxLocation`] | Location of the transaction in the block |
/// | `location_proof` | [`ListProof`]`<`[`Hash`]`>` | Proof of transaction inclusion into a block |
/// | `status` | (custom; see below) | Execution status |
/// | `time` | [`DateTime`]`<`[`Utc`]`>` | Commitment time* |
///
/// \* By commitment time we mean an approximate commitment time of the block
/// which includes the transaction. This time is a median time of the precommit local times
/// of each validator.
///
/// ## `status` field
///
/// The `status` field is a more readable representation of the [`ExecutionStatus`] type.
///
/// For successfully executed transactions, `status` is equal to
///
/// ```json
/// { "type": "success" }
/// ```
///
/// For transactions that cause an [`ExecutionError`], `status` contains the error code
/// and an optional description, i.e., has the following type in the [TypeScript] notation:
///
/// ```typescript
/// type Error = {
/// type: 'service_error' | 'core_error' | 'common_error' | 'runtime_error' | 'unexpected_error',
/// code?: number,
/// description?: string,
/// runtime_id: number,
/// call_site?: CallSite,
/// };
///
/// type CallSite = MethodCallSite | HookCallSite;
///
/// type MethodCallSite = {
/// call_type: 'method',
/// instance_id: number,
/// interface?: string,
/// method_id: number,
/// };
///
/// type HookCallSite = {
/// call_type: 'constructor' | 'before_transactions' | 'after_transactions',
/// instance_id: number,
/// };
/// ```
///
/// Explanations:
///
/// - `Error.type` determines the component responsible for the error. Usually, errors
/// are generated by the service code, but they can also be caused by the dispatch logic,
/// runtime associated with the service, or come from another source (`unexpected_error`s).
/// - `Error.code` is the error code. For service errors, this code is specific
/// to the service instance (which can be obtained from `call_site`), and for runtime errors -
/// to the runtime. For core errors, the codes are fixed; their meaning can be found
/// in the [`CoreError`] docs. The code is present for all error types except
/// `unexpected_error`s, in which the code is always absent.
/// Besides types listed above, there is also a set of errors that can occur within any context,
/// which are organized in the [`CommonError`].
/// - `Error.description` is an optional human-readable description of the error.
/// - `Error.runtime_id` is the numeric ID of the runtime in which the error has occurred. Note
/// that the runtime is defined for all error types, not just `runtime_error`s, since
/// for any request it's possible to say which runtime is responsible for its processing.
/// - `Error.call_site` provides the most precise known location of the call in which the error
/// has occurred.
///
/// [`TxLocation`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.TxLocation.html
/// [`ListProof`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/indexes/proof_list/struct.ListProof.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
/// [`ExecutionStatus`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionStatus.html
/// [`ExecutionError`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionError.html
/// [`CoreError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CoreError.html
/// [`CommonError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CommonError.html
/// [TypeScript]: https://www.typescriptlang.org/
/// [`DateTime`]: https://docs.rs/chrono/0.4.10/chrono/struct.DateTime.html
/// [`Utc`]: https://docs.rs/chrono/0.4.10/chrono/offset/struct.Utc.html
#[derive(Debug, Serialize, Deserialize)]
pub struct CommittedTransaction {
message: Verified<AnyTx>,
location: TxLocation,
location_proof: ListProof<Hash>,
status: ExecutionStatus,
time: DateTime<Utc>,
}
impl CommittedTransaction {
/// Returns the content of the transaction.
pub fn message(&self) -> &Verified<AnyTx> {
&self.message
}
/// Returns the transaction location in block.
pub fn location(&self) -> &TxLocation {
&self.location
}
/// Returns a proof that transaction is recorded in the blockchain.
pub fn location_proof(&self) -> &ListProof<Hash> {
&self.location_proof
}
/// Returns the status of the transaction execution.
pub fn status(&self) -> Result<(), &ExecutionError> {
self.status.0.as_ref().map(drop)
}
/// Returns an approximate commit time of the block which includes this transaction.
pub fn time(&self) -> &DateTime<Utc> {
&self.time
}
}
/// Information about the transaction.
///
/// Values of this type are returned by the `transaction()` method of the `BlockchainExplorer`.
///
/// # JSON presentation
///
/// ## Committed transactions
///
/// Committed transactions are represented just like a `CommittedTransaction`,
/// with the additional `type` field equal to `"committed"`.
///
/// ## Transaction in pool
///
/// Transactions in pool are represented with a 2-field object:
///
/// - `type` field contains the transaction type (`"in_pool"`).
/// - `message` is the full transaction message serialized to the hexadecimal form.
///
/// # Examples
///
/// ```
/// use exonum_explorer::TransactionInfo;
/// use exonum::{crypto::KeyPair, runtime::InstanceId};
/// # use exonum_derive::*;
/// # use serde_derive::*;
/// # use serde_json::json;
///
/// /// Service interface.
/// #[exonum_interface]
/// trait ServiceInterface<Ctx> {
/// type Output;
/// #[interface_method(id = 0)]
/// fn create_wallet(&self, ctx: Ctx, username: String) -> Self::Output;
/// }
///
/// // Create a signed transaction.
/// let keypair = KeyPair::random();
/// const SERVICE_ID: InstanceId = 100;
/// let tx = keypair.create_wallet(SERVICE_ID, "Alice".to_owned());
/// // This transaction in pool will be represented as follows:
/// let json = json!({
/// "type": "in_pool",
/// "message": tx,
/// });
/// let parsed: TransactionInfo = serde_json::from_value(json).unwrap();
/// assert!(parsed.is_in_pool());
/// ```
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
#[non_exhaustive]
pub enum TransactionInfo {
/// Transaction is in the memory pool, but not yet committed to the blockchain.
InPool {
/// A content of the uncommitted transaction.
message: Verified<AnyTx>,
},
/// Transaction is already committed to the blockchain.
Committed(CommittedTransaction),
}
impl TransactionInfo {
/// Returns the content of this transaction.
pub fn message(&self) -> &Verified<AnyTx> {
match *self {
TransactionInfo::InPool { ref message } => message,
TransactionInfo::Committed(ref tx) => tx.message(),
}
}
/// Is this in-pool transaction?
pub fn is_in_pool(&self) -> bool {
matches!(*self, TransactionInfo::InPool { .. })
}
/// Is this a committed transaction?
pub fn is_committed(&self) -> bool {
matches!(*self, TransactionInfo::Committed(_))
}
/// Returns a reference to the inner committed transaction if this transaction is committed.
/// For transactions in pool, returns `None`.
pub fn as_committed(&self) -> Option<&CommittedTransaction> {
match *self {
TransactionInfo::Committed(ref tx) => Some(tx),
_ => None,
}
}
}
/// Blockchain explorer.
///
/// # Notes
///
/// The explorer wraps a specific [`Snapshot`] of the blockchain state; that is,
/// all calls to the methods of an explorer instance are guaranteed to be consistent.
///
/// [`Snapshot`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/trait.Snapshot.html
#[derive(Debug, Copy, Clone)]
pub struct BlockchainExplorer<'a> {
schema: Schema<&'a dyn Snapshot>,
}
impl<'a> BlockchainExplorer<'a> {
/// Creates a new `BlockchainExplorer` instance from the provided snapshot.
pub fn new(snapshot: &'a dyn Snapshot) -> Self {
BlockchainExplorer {
schema: Schema::new(snapshot),
}
}
/// Creates a new `BlockchainExplorer` instance from the core schema.
pub fn from_schema(schema: Schema<&'a dyn Snapshot>) -> Self {
BlockchainExplorer { schema }
}
/// Returns information about the transaction identified by the hash.
pub fn transaction(&self, tx_hash: &Hash) -> Option<TransactionInfo> {
let message = self.transaction_without_proof(tx_hash)?;
if self.schema.transactions_pool().contains(tx_hash) {
return Some(TransactionInfo::InPool { message });
}
let tx = self.committed_transaction(tx_hash, Some(message));
Some(TransactionInfo::Committed(tx))
}
/// Returns the status of a call in a block.
///
/// # Return value
///
/// This method will return `Ok(())` both if the call completed successfully and if it
/// was not performed at all. The caller is responsible for distinguishing these two outcomes.
pub fn call_status(
&self,
block_height: Height,
call_location: CallInBlock,
) -> Result<(), ExecutionError> {
match self.schema.call_records(block_height) {
Some(errors) => errors.get(call_location),
None => Ok(()),
}
}
/// Return transaction message without proof.
pub fn transaction_without_proof(&self, tx_hash: &Hash) -> Option<Verified<AnyTx>> {
self.schema.transactions().get(tx_hash)
}
fn precommits(&self, block: &Block) -> Vec<Verified<Precommit>> {
self.schema
.precommits(&block.object_hash())
.iter()
.collect()
}
fn transaction_hashes(&self, block: &Block) -> Vec<Hash> {
let tx_hashes_table = self.schema.block_transactions(block.height);
tx_hashes_table.iter().collect()
}
/// Retrieves a transaction that is known to be committed.
fn committed_transaction(
&self,
tx_hash: &Hash,
maybe_content: Option<Verified<AnyTx>>,
) -> CommittedTransaction {
let location = self
.schema
.transactions_locations()
.get(tx_hash)
.unwrap_or_else(|| panic!("Location not found for transaction hash {:?}", tx_hash));
let location_proof = self
.schema
.block_transactions(location.block_height())
.get_proof(u64::from(location.position_in_block()));
let block_precommits = self
.schema
.block_and_precommits(location.block_height())
.unwrap();
let time = median_precommits_time(&block_precommits.precommits);
// Unwrap is OK here, because we already know that transaction is committed.
let status = self.schema.transaction_result(location).unwrap();
CommittedTransaction {
message: maybe_content.unwrap_or_else(|| {
self.schema
.transactions()
.get(tx_hash)
.expect("BUG: Cannot find transaction in database")
}),
location,
location_proof,
status: ExecutionStatus(status),
time,
}
}
/// Return the height of the blockchain.
pub fn height(&self) -> Height {
self.schema.height()
}
/// Returns block information for the specified height or `None` if there is no such block.
pub fn block(&self, height: Height) -> Option<BlockInfo<'_>> {
if self.height() >= height {
Some(BlockInfo::new(self, height))
} else {
None
}
}
/// Return a block together with its transactions at the specified height, or `None`
/// if there is no such block.
pub fn block_with_txs(&self, height: Height) -> Option<BlockWithTransactions> {
let txs_table = self.schema.block_transactions(height);
let block_proof = self.schema.block_and_precommits(height)?;
let errors = self.schema.call_records(height)?;
Some(BlockWithTransactions {
header: block_proof.block,
precommits: block_proof.precommits,
transactions: txs_table
.iter()
.map(|tx_hash| self.committed_transaction(&tx_hash, None))
.collect(),
errors: errors
.errors()
.map(|(location, error)| ErrorWithLocation { location, error })
.collect(),
})
}
/// Iterates over blocks in the blockchain.
pub fn blocks<R: RangeBounds<Height>>(&self, heights: R) -> Blocks<'_> {
use std::cmp::max;
let max_height = self.schema.height();
let ptr = match heights.start_bound() {
Bound::Included(height) => *height,
Bound::Excluded(height) => height.next(),
Bound::Unbounded => Height(0),
};
Blocks {
explorer: self,
ptr,
back: max(ptr, end_height(heights.end_bound(), max_height)),
}
}
}
/// Iterator over blocks in the blockchain.
pub struct Blocks<'a> {
explorer: &'a BlockchainExplorer<'a>,
ptr: Height,
back: Height,
}
impl<'a> fmt::Debug for Blocks<'a> {
fn
|
fmt
|
identifier_name
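A hedged sketch that scans a range of heights with `block_with_txs` and `error_map`, both shown in this row; it deliberately avoids the `blocks()` iterator, whose `Iterator` impl is not part of this excerpt. The function name and the way the upper bound is chosen are illustrative only.

```rust
use exonum::helpers::Height;
use exonum_explorer::BlockchainExplorer;

fn report_block_errors(explorer: &BlockchainExplorer<'_>, up_to: Height) {
    for h in 0..=up_to.0 {
        if let Some(block) = explorer.block_with_txs(Height(h)) {
            // error_map() gathers the block's per-call errors into a BTreeMap.
            for (location, error) in block.error_map() {
                println!("height {}: error in {}: {}", h, location, error);
            }
        }
    }
}
```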
|
|
lib.rs
|
().as_ref()
})
}
/// Lists hashes of transactions included in this block.
pub fn transaction_hashes(&self) -> Ref<'_, [Hash]> {
if self.txs.borrow().is_none() {
let txs = self.explorer.transaction_hashes(&self.header);
*self.txs.borrow_mut() = Some(txs);
}
Ref::map(self.txs.borrow(), |cache| cache.as_ref().unwrap().as_ref())
}
/// Returns a transaction with the specified index in the block.
pub fn transaction(&self, index: usize) -> Option<CommittedTransaction> {
self.transaction_hashes()
.get(index)
.map(|hash| self.explorer.committed_transaction(hash, None))
}
/// Returns the proof for the execution status of a call within this block.
///
/// Note that if the call did not result in an error or did not happen at all, the returned
/// proof will not contain entries. To distinguish between two cases, one can inspect
/// the number of transactions in the block or IDs of the active services when the block
/// was executed.
pub fn call_proof(&self, call_location: CallInBlock) -> CallProof {
self.explorer
.schema
.call_records(self.header.height)
.unwrap() // safe: we know that the block exists
.get_proof(call_location)
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> Transactions<'_, '_> {
Transactions {
block: self,
ptr: 0,
len: self.len(),
}
}
/// Loads transactions, errors and precommits for the block.
pub fn with_transactions(self) -> BlockWithTransactions {
let (explorer, header, precommits, transactions) =
(self.explorer, self.header, self.precommits, self.txs);
let precommits = precommits
.into_inner()
.unwrap_or_else(|| explorer.precommits(&header));
let transactions = transactions
.into_inner()
.unwrap_or_else(|| explorer.transaction_hashes(&header))
.iter()
.map(|tx_hash| explorer.committed_transaction(tx_hash, None))
.collect();
let errors = self
.explorer
.schema
.call_records(header.height)
.expect("No call record for a committed block");
let errors: Vec<_> = errors
.errors()
.map(|(location, error)| ErrorWithLocation { location, error })
.collect();
BlockWithTransactions {
header,
precommits,
transactions,
errors,
}
}
}
impl<'a> Serialize for BlockInfo<'a> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeStruct;
let mut s = serializer.serialize_struct("BlockInfo", 3)?;
s.serialize_field("block", &self.header)?;
s.serialize_field("precommits", &*self.precommits())?;
s.serialize_field("txs", &*self.transaction_hashes())?;
s.end()
}
}
/// Iterator over transactions in a block.
#[derive(Debug)]
pub struct Transactions<'r, 'a> {
block: &'r BlockInfo<'a>,
ptr: usize,
len: usize,
}
impl<'a, 'r> Iterator for Transactions<'a, 'r> {
type Item = CommittedTransaction;
fn next(&mut self) -> Option<CommittedTransaction> {
if self.ptr == self.len {
None
} else {
let transaction = self.block.transaction(self.ptr);
self.ptr += 1;
transaction
}
}
}
impl<'a, 'r: 'a> IntoIterator for &'r BlockInfo<'a> {
type Item = CommittedTransaction;
type IntoIter = Transactions<'a, 'r>;
fn into_iter(self) -> Transactions<'a, 'r> {
self.iter()
}
}
/// Information about a block in the blockchain with info on transactions eagerly loaded.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct BlockWithTransactions {
/// Block header as recorded in the blockchain.
#[serde(rename = "block")]
pub header: Block,
/// Precommits.
pub precommits: Vec<Verified<Precommit>>,
/// Transactions in the order they appear in the block.
pub transactions: Vec<CommittedTransaction>,
/// Errors that have occurred within the block.
pub errors: Vec<ErrorWithLocation>,
}
/// Execution error together with its location within the block.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ErrorWithLocation {
/// Location of the error.
pub location: CallInBlock,
/// Error data.
pub error: ExecutionError,
}
impl fmt::Display for ErrorWithLocation {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(formatter, "In {}: {}", self.location, self.error)
}
}
impl BlockWithTransactions {
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header.height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.transactions.len()
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.transactions.is_empty()
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> EagerTransactions<'_> {
self.transactions.iter()
}
/// Returns errors converted into a map. Note that this is potentially a costly operation.
pub fn error_map(&self) -> BTreeMap<CallInBlock, &ExecutionError> {
self.errors.iter().map(|e| (e.location, &e.error)).collect()
}
}
/// Iterator over transactions in [`BlockWithTransactions`].
///
/// [`BlockWithTransactions`]: struct.BlockWithTransactions.html
pub type EagerTransactions<'a> = slice::Iter<'a, CommittedTransaction>;
impl Index<usize> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: usize) -> &CommittedTransaction {
self.transactions.get(index).unwrap_or_else(|| {
panic!(
"Index exceeds number of transactions in block {}",
self.len()
);
})
}
}
/// Returns a transaction in the block by its hash. Beware that this is a slow operation
/// (linear w.r.t. the number of transactions in a block).
impl Index<Hash> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: Hash) -> &CommittedTransaction {
self.transactions
.iter()
.find(|&tx| tx.message.object_hash() == index)
.unwrap_or_else(|| {
panic!("No transaction with hash {} in the block", index);
})
}
}
impl<'a> IntoIterator for &'a BlockWithTransactions {
type Item = &'a CommittedTransaction;
type IntoIter = EagerTransactions<'a>;
fn into_iter(self) -> EagerTransactions<'a> {
self.iter()
}
}
/// Information about a particular transaction in the blockchain.
///
/// # JSON presentation
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `message` | `Verified<AnyTx>` | Transaction as recorded in the blockchain |
/// | `location` | [`TxLocation`] | Location of the transaction in the block |
/// | `location_proof` | [`ListProof`]`<`[`Hash`]`>` | Proof of transaction inclusion into a block |
/// | `status` | (custom; see below) | Execution status |
/// | `time` | [`DateTime`]`<`[`Utc`]`>` | Commitment time* |
///
/// \* By commitment time we mean an approximate commitment time of the block
/// which includes the transaction. This time is a median time of the precommit local times
/// of each validator.
///
/// ## `status` field
///
/// The `status` field is a more readable representation of the [`ExecutionStatus`] type.
///
/// For successfully executed transactions, `status` is equal to
///
/// ```json
/// { "type": "success" }
/// ```
///
/// For transactions that cause an [`ExecutionError`], `status` contains the error code
/// and an optional description, i.e., has the following type in the [TypeScript] notation:
///
/// ```typescript
/// type Error = {
/// type: 'service_error' | 'core_error' | 'common_error' | 'runtime_error' | 'unexpected_error',
/// code?: number,
/// description?: string,
/// runtime_id: number,
/// call_site?: CallSite,
/// };
///
/// type CallSite = MethodCallSite | HookCallSite;
///
/// type MethodCallSite = {
/// call_type: 'method',
/// instance_id: number,
/// interface?: string,
/// method_id: number,
/// };
///
/// type HookCallSite = {
/// call_type: 'constructor' | 'before_transactions' | 'after_transactions',
/// instance_id: number,
/// };
/// ```
///
/// Explanations:
///
/// - `Error.type` determines the component responsible for the error. Usually, errors
/// are generated by the service code, but they can also be caused by the dispatch logic,
/// runtime associated with the service, or come from another source (`unexpected_error`s).
/// - `Error.code` is the error code. For service errors, this code is specific
/// to the service instance (which can be obtained from `call_site`), and for runtime errors -
/// to the runtime. For core errors, the codes are fixed; their meaning can be found
/// in the [`CoreError`] docs. The code is present for all error types except
/// `unexpected_error`s, in which the code is always absent.
/// Besides the types listed above, there is also a set of errors that can occur in any context;
/// these are organized in [`CommonError`].
/// - `Error.description` is an optional human-readable description of the error.
/// - `Error.runtime_id` is the numeric ID of the runtime in which the error has occurred. Note
/// that the runtime is defined for all error types, not just `runtime_error`s, since
/// for any request it's possible to say which runtime is responsible for its processing.
/// - `Error.call_site` provides the most precise known location of the call in which the error
/// has occurred.
///
/// [`TxLocation`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.TxLocation.html
/// [`ListProof`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/indexes/proof_list/struct.ListProof.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
/// [`ExecutionStatus`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionStatus.html
/// [`ExecutionError`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionError.html
/// [`CoreError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CoreError.html
/// [`CommonError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CommonError.html
/// [TypeScript]: https://www.typescriptlang.org/
/// [`DateTime`]: https://docs.rs/chrono/0.4.10/chrono/struct.DateTime.html
/// [`Utc`]: https://docs.rs/chrono/0.4.10/chrono/offset/struct.Utc.html
#[derive(Debug, Serialize, Deserialize)]
pub struct CommittedTransaction {
message: Verified<AnyTx>,
location: TxLocation,
location_proof: ListProof<Hash>,
status: ExecutionStatus,
time: DateTime<Utc>,
}
impl CommittedTransaction {
/// Returns the content of the transaction.
pub fn message(&self) -> &Verified<AnyTx> {
&self.message
}
/// Returns the transaction location in block.
pub fn location(&self) -> &TxLocation {
&self.location
}
/// Returns a proof that transaction is recorded in the blockchain.
pub fn location_proof(&self) -> &ListProof<Hash> {
&self.location_proof
}
/// Returns the status of the transaction execution.
pub fn status(&self) -> Result<(), &ExecutionError> {
self.status.0.as_ref().map(drop)
}
/// Returns an approximate commit time of the block which includes this transaction.
pub fn time(&self) -> &DateTime<Utc> {
&self.time
}
}
/// Information about the transaction.
///
/// Values of this type are returned by the `transaction()` method of the `BlockchainExplorer`.
///
/// # JSON presentation
///
/// ## Committed transactions
///
/// Committed transactions are represented just like a `CommittedTransaction`,
/// with the additional `type` field equal to `"committed"`.
///
/// ## Transaction in pool
///
/// Transactions in pool are represented with a 2-field object:
///
/// - The `type` field contains the transaction type (`"in_pool"`).
/// - `message` is the full transaction message serialized in hexadecimal form.
///
/// # Examples
///
/// ```
/// use exonum_explorer::TransactionInfo;
/// use exonum::{crypto::KeyPair, runtime::InstanceId};
/// # use exonum_derive::*;
/// # use serde_derive::*;
/// # use serde_json::json;
///
/// /// Service interface.
/// #[exonum_interface]
/// trait ServiceInterface<Ctx> {
/// type Output;
/// #[interface_method(id = 0)]
/// fn create_wallet(&self, ctx: Ctx, username: String) -> Self::Output;
/// }
///
/// // Create a signed transaction.
/// let keypair = KeyPair::random();
/// const SERVICE_ID: InstanceId = 100;
/// let tx = keypair.create_wallet(SERVICE_ID, "Alice".to_owned());
/// // This transaction in pool will be represented as follows:
/// let json = json!({
/// "type": "in_pool",
/// "message": tx,
/// });
/// let parsed: TransactionInfo = serde_json::from_value(json).unwrap();
/// assert!(parsed.is_in_pool());
/// ```
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
#[non_exhaustive]
pub enum TransactionInfo {
/// Transaction is in the memory pool, but not yet committed to the blockchain.
InPool {
/// Content of the uncommitted transaction.
message: Verified<AnyTx>,
},
/// Transaction is already committed to the blockchain.
Committed(CommittedTransaction),
}
impl TransactionInfo {
/// Returns the content of this transaction.
pub fn message(&self) -> &Verified<AnyTx> {
match *self {
TransactionInfo::InPool { ref message } => message,
TransactionInfo::Committed(ref tx) => tx.message(),
}
}
/// Is this an in-pool transaction?
pub fn is_in_pool(&self) -> bool {
matches!(*self, TransactionInfo::InPool { .. })
}
/// Is this a committed transaction?
pub fn is_committed(&self) -> bool {
matches!(*self, TransactionInfo::Committed(_))
}
/// Returns a reference to the inner committed transaction if this transaction is committed.
/// For transactions in pool, returns `None`.
pub fn as_committed(&self) -> Option<&CommittedTransaction> {
match *self {
TransactionInfo::Committed(ref tx) => Some(tx),
_ => None,
}
}
}
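// Editor's hedged sketch (not part of the upstream crate): a typical way to branch on
// the result of `BlockchainExplorer::transaction`, using the helpers defined above.
#[allow(dead_code)]
fn example_classify(info: &TransactionInfo) {
    if let Some(committed) = info.as_committed() {
        println!("committed at height {:?}", committed.location().block_height());
    } else {
        // Still in the pool; only the raw message is available.
        let _message = info.message();
    }
}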
/// Blockchain explorer.
///
/// # Notes
///
/// The explorer wraps a specific [`Snapshot`] of the blockchain state; that is,
/// all calls to the methods of an explorer instance are guaranteed to be consistent.
///
/// [`Snapshot`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/trait.Snapshot.html
#[derive(Debug, Copy, Clone)]
pub struct BlockchainExplorer<'a> {
schema: Schema<&'a dyn Snapshot>,
}
impl<'a> BlockchainExplorer<'a> {
/// Creates a new `BlockchainExplorer` instance from the provided snapshot.
pub fn new(snapshot: &'a dyn Snapshot) -> Self {
BlockchainExplorer {
schema: Schema::new(snapshot),
}
}
/// Creates a new `BlockchainExplorer` instance from the core schema.
pub fn from_schema(schema: Schema<&'a dyn Snapshot>) -> Self {
BlockchainExplorer { schema }
}
/// Returns information about the transaction identified by the hash.
pub fn transaction(&self, tx_hash: &Hash) -> Option<TransactionInfo> {
let message = self.transaction_without_proof(tx_hash)?;
if self.schema.transactions_pool().contains(tx_hash) {
return Some(TransactionInfo::InPool { message });
}
let tx = self.committed_transaction(tx_hash, Some(message));
Some(TransactionInfo::Committed(tx))
}
/// Returns the status of a call in a block.
///
/// # Return value
///
/// This method returns `Ok(())` both if the call completed successfully and if it
/// was not performed at all. The caller is responsible for distinguishing between these two outcomes.
pub fn call_status(
&self,
block_height: Height,
call_location: CallInBlock,
) -> Result<(), ExecutionError> {
match self.schema.call_records(block_height) {
Some(errors) => errors.get(call_location),
None => Ok(()),
}
}
/// Returns the transaction message without a proof.
pub fn transaction_without_proof(&self, tx_hash: &Hash) -> Option<Verified<AnyTx>> {
self.schema.transactions().get(tx_hash)
}
fn precommits(&self, block: &Block) -> Vec<Verified<Precommit>> {
self.schema
.precommits(&block.object_hash())
.iter()
.collect()
}
fn transaction_hashes(&self, block: &Block) -> Vec<Hash> {
let tx_hashes_table = self.schema.block_transactions(block.height);
tx_hashes_table.iter().collect()
}
/// Retrieves a transaction that is known to be committed.
fn committed_transaction(
&self,
tx_hash: &Hash,
maybe_content: Option<Verified<AnyTx>>,
) -> CommittedTransaction {
let location = self
.schema
.transactions_locations()
.get(tx_hash)
.unwrap_or_else(|| panic!("Location not found for transaction hash {:?}", tx_hash));
let location_proof = self
.schema
.block_transactions(location.block_height())
.get_proof(u64::from(location.position_in_block()));
let block_precommits = self
.schema
.block_and_precommits(location.block_height())
.unwrap();
let time = median_precommits_time(&block_precommits.precommits);
// Unwrap is OK here, because we already know that transaction is committed.
let status = self.schema.transaction_result(location).unwrap();
CommittedTransaction {
message: maybe_content.unwrap_or_else(|| {
self.schema
.transactions()
.get(tx_hash)
.expect("BUG: Cannot find transaction in database")
}),
location,
location_proof,
status: ExecutionStatus(status),
time,
}
}
/// Returns the height of the blockchain.
pub fn height(&self) -> Height {
self.schema.height()
}
/// Returns block information for the specified height or `None` if there is no such block.
pub fn block(&self, height: Height) -> Option<BlockInfo<'_>> {
if self.height() >= height {
Some(BlockInfo::new(self, height))
} else {
None
}
}
/// Returns a block together with its transactions at the specified height, or `None`
/// if there is no such block.
pub fn block_with_txs(&self, height: Height) -> Option<BlockWithTransactions> {
let txs_table = self.schema.block_transactions(height);
let block_proof = self.schema.block_and_precommits(height)?;
let errors = self.schema.call_records(height)?;
Some(BlockWithTransactions {
header: block_proof.block,
precommits: block_proof.precommits,
transactions: txs_table
.iter()
.map(|tx_hash| self.committed_transaction(&tx_hash, None))
.collect(),
errors: errors
.errors()
.map(|(location, error)| ErrorWithLocation { location, error })
.collect(),
})
}
/// Iterates over blocks in the blockchain.
pub fn blocks<R: RangeBounds<Height>>(&self, heights: R) -> Blocks<'_> {
use std::cmp::max;
let max_height = self.schema.height();
let ptr = match heights.start_bound() {
Bound::Included(height) => *height,
Bound::Excluded(height) => height.next(),
Bound::Unbounded => Height(0),
};
Blocks {
explorer: self,
ptr,
back: max(ptr, end_height(heights.end_bound(), max_height)),
}
}
}
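// Editor's hedged sketch (not part of the upstream crate): `blocks()` accepts any
// standard range of `Height`s, so a bounded slice of the chain can be walked lazily.
#[allow(dead_code)]
fn example_list_blocks(explorer: &BlockchainExplorer<'_>) {
    for block in explorer.blocks(Height(1)..Height(5)) {
        println!("height {:?}: {} transaction(s)", block.height(), block.len());
    }
}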
/// Iterator over blocks in the blockchain.
pub struct Blocks<'a> {
explorer: &'a BlockchainExplorer<'a>,
ptr: Height,
back: Height,
}
impl<'a> fmt::Debug for Blocks<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
formatter
.debug_struct("Blocks")
.field("ptr", &self.ptr)
.field("back", &self.back)
.finish()
}
}
impl<'a> Iterator for Blocks<'a> {
type Item = BlockInfo<'a>;
fn next(&mut self) -> Option<BlockInfo<'a>> {
if self.ptr == self.back {
return None;
}
let block = BlockInfo::new(self.explorer, self.ptr);
self.ptr = self.ptr.next();
Some(block)
}
fn size_hint(&self) -> (usize, Option<usize>) {
let exact = (self.back.0 - self.ptr.0) as usize;
(exact, Some(exact))
}
fn count(self) -> usize {
(self.back.0 - self.ptr.0) as usize
}
fn nth(&mut self, n: usize) -> Option<BlockInfo<'a>> {
if self.ptr.0 + n as u64 >= self.back.0 {
self.ptr = self.back;
None
} else {
self.ptr = Height(self.ptr.0 + n as u64);
let block = BlockInfo::new(self.explorer, self.ptr);
self.ptr = self.ptr.next();
Some(block)
}
}
}
impl<'a> DoubleEndedIterator for Blocks<'a> {
fn next_back(&mut self) -> Option<BlockInfo<'a>> {
if self.ptr == self.back {
return None;
}
self.back = self.back.previous();
Some(BlockInfo::new(self.explorer, self.back))
}
}
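// Editor's hedged sketch (not part of the upstream crate): since `Blocks` is a
// double-ended iterator, the most recent blocks can be visited first with `rev()`.
#[allow(dead_code)]
fn example_latest_blocks(explorer: &BlockchainExplorer<'_>) {
    for block in explorer.blocks(..).rev().take(10) {
        println!("height {:?}", block.height());
    }
}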
/// Calculates a median time from precommits.
pub fn median_precommits_time(precommits: &[Verified<Precommit>]) -> DateTime<Utc> {
if precommits.is_empty()
|
{
UNIX_EPOCH.into()
}
|
conditional_block
|
|
lib.rs
|
)]
use chrono::{DateTime, Utc};
use exonum::{
blockchain::{Block, CallInBlock, CallProof, Schema, TxLocation},
crypto::Hash,
helpers::Height,
merkledb::{ListProof, ObjectHash, Snapshot},
messages::{AnyTx, Precommit, Verified},
runtime::{ExecutionError, ExecutionStatus},
};
use serde::{Serialize, Serializer};
use serde_derive::Deserialize;
use std::{
cell::{Ref, RefCell},
collections::BTreeMap,
fmt,
ops::{Bound, Index, RangeBounds},
slice,
time::UNIX_EPOCH,
};
pub mod api;
/// Ending height of the range (exclusive), given the a priori max height.
fn end_height(bound: Bound<&Height>, max: Height) -> Height {
use std::cmp::min;
let inner_end = match bound {
Bound::Included(height) => height.next(),
Bound::Excluded(height) => *height,
Bound::Unbounded => max.next(),
};
min(inner_end, max.next())
}
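// Editor's hedged sketch (not part of the upstream crate): the expected clamping
// behaviour of `end_height` for a chain whose latest committed height is 5.
#[test]
fn end_height_is_clamped_to_the_chain_height() {
    assert_eq!(end_height(Bound::Unbounded, Height(5)), Height(6));
    assert_eq!(end_height(Bound::Included(&Height(3)), Height(5)), Height(4));
    assert_eq!(end_height(Bound::Excluded(&Height(10)), Height(5)), Height(6));
}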
/// Information about a block in the blockchain.
///
/// # JSON presentation
///
/// JSON object with the following fields:
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `block` | [`Block`] | Block header as recorded in the blockchain |
/// | `precommits` | `Vec<`[`Precommit`]`>` | Precommits authorizing the block |
/// | `txs` | `Vec<`[`Hash`]`>` | Hashes of transactions in the block |
///
/// [`Block`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.Block.html
/// [`Precommit`]: https://docs.rs/exonum/latest/exonum/messages/struct.Precommit.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
#[derive(Debug)]
pub struct BlockInfo<'a> {
header: Block,
explorer: &'a BlockchainExplorer<'a>,
precommits: RefCell<Option<Vec<Verified<Precommit>>>>,
txs: RefCell<Option<Vec<Hash>>>,
}
impl<'a> BlockInfo<'a> {
fn new(explorer: &'a BlockchainExplorer<'_>, height: Height) -> Self {
let schema = explorer.schema;
let hashes = schema.block_hashes_by_height();
let blocks = schema.blocks();
let block_hash = hashes
.get(height.0)
.unwrap_or_else(|| panic!("Block not found, height: {:?}", height));
let header = blocks
.get(&block_hash)
.unwrap_or_else(|| panic!("Block not found, hash: {:?}", block_hash));
BlockInfo {
explorer,
header,
precommits: RefCell::new(None),
txs: RefCell::new(None),
}
}
/// Returns block header as recorded in the blockchain.
pub fn header(&self) -> &Block {
&self.header
}
/// Extracts the header discarding all other information.
pub fn into_header(self) -> Block {
self.header
}
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header().height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.header.tx_count as usize
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns a list of precommits for this block.
pub fn precommits(&self) -> Ref<'_, [Verified<Precommit>]> {
if self.precommits.borrow().is_none() {
let precommits = self.explorer.precommits(&self.header);
*self.precommits.borrow_mut() = Some(precommits);
}
Ref::map(self.precommits.borrow(), |cache| {
cache.as_ref().unwrap().as_ref()
})
}
/// Lists hashes of transactions included in this block.
pub fn transaction_hashes(&self) -> Ref<'_, [Hash]> {
if self.txs.borrow().is_none() {
let txs = self.explorer.transaction_hashes(&self.header);
*self.txs.borrow_mut() = Some(txs);
}
Ref::map(self.txs.borrow(), |cache| cache.as_ref().unwrap().as_ref())
}
/// Returns a transaction with the specified index in the block.
pub fn transaction(&self, index: usize) -> Option<CommittedTransaction> {
self.transaction_hashes()
.get(index)
.map(|hash| self.explorer.committed_transaction(hash, None))
}
/// Returns the proof for the execution status of a call within this block.
///
/// Note that if the call did not result in an error or did not happen at all, the returned
/// proof will not contain entries. To distinguish between the two cases, one can inspect
/// the number of transactions in the block or the IDs of the active services when the block
/// was executed.
pub fn call_proof(&self, call_location: CallInBlock) -> CallProof {
self.explorer
.schema
.call_records(self.header.height)
.unwrap() // safe: we know that the block exists
.get_proof(call_location)
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> Transactions<'_, '_> {
Transactions {
block: self,
ptr: 0,
len: self.len(),
}
}
/// Loads transactions, errors and precommits for the block.
pub fn with_transactions(self) -> BlockWithTransactions {
let (explorer, header, precommits, transactions) =
(self.explorer, self.header, self.precommits, self.txs);
let precommits = precommits
.into_inner()
.unwrap_or_else(|| explorer.precommits(&header));
let transactions = transactions
.into_inner()
.unwrap_or_else(|| explorer.transaction_hashes(&header))
.iter()
.map(|tx_hash| explorer.committed_transaction(tx_hash, None))
.collect();
let errors = self
.explorer
.schema
.call_records(header.height)
.expect("No call record for a committed block");
let errors: Vec<_> = errors
.errors()
.map(|(location, error)| ErrorWithLocation { location, error })
.collect();
BlockWithTransactions {
header,
precommits,
transactions,
errors,
}
}
}
impl<'a> Serialize for BlockInfo<'a> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeStruct;
let mut s = serializer.serialize_struct("BlockInfo", 3)?;
s.serialize_field("block", &self.header)?;
s.serialize_field("precommits", &*self.precommits())?;
s.serialize_field("txs", &*self.transaction_hashes())?;
s.end()
}
}
/// Iterator over transactions in a block.
#[derive(Debug)]
pub struct Transactions<'r, 'a> {
block: &'r BlockInfo<'a>,
ptr: usize,
len: usize,
}
impl<'a, 'r> Iterator for Transactions<'a, 'r> {
type Item = CommittedTransaction;
fn next(&mut self) -> Option<CommittedTransaction> {
if self.ptr == self.len {
None
} else {
let transaction = self.block.transaction(self.ptr);
self.ptr += 1;
transaction
}
}
}
impl<'a, 'r: 'a> IntoIterator for &'r BlockInfo<'a> {
type Item = CommittedTransaction;
type IntoIter = Transactions<'a, 'r>;
fn into_iter(self) -> Transactions<'a, 'r> {
self.iter()
}
}
/// Information about a block in the blockchain with info on transactions eagerly loaded.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct BlockWithTransactions {
/// Block header as recorded in the blockchain.
#[serde(rename = "block")]
pub header: Block,
/// Precommits.
pub precommits: Vec<Verified<Precommit>>,
/// Transactions in the order they appear in the block.
pub transactions: Vec<CommittedTransaction>,
/// Errors that have occurred within the block.
pub errors: Vec<ErrorWithLocation>,
|
/// Execution error together with its location within the block.
#[derive(Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ErrorWithLocation {
/// Location of the error.
pub location: CallInBlock,
/// Error data.
pub error: ExecutionError,
}
impl fmt::Display for ErrorWithLocation {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(formatter, "In {}: {}", self.location, self.error)
}
}
impl BlockWithTransactions {
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header.height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.transactions.len()
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.transactions.is_empty()
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> EagerTransactions<'_> {
self.transactions.iter()
}
/// Returns errors converted into a map. Note that this is potentially a costly operation.
pub fn error_map(&self) -> BTreeMap<CallInBlock, &ExecutionError> {
self.errors.iter().map(|e| (e.location, &e.error)).collect()
}
}
/// Iterator over transactions in [`BlockWithTransactions`].
///
/// [`BlockWithTransactions`]: struct.BlockWithTransactions.html
pub type EagerTransactions<'a> = slice::Iter<'a, CommittedTransaction>;
impl Index<usize> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: usize) -> &CommittedTransaction {
self.transactions.get(index).unwrap_or_else(|| {
panic!(
"Index exceeds number of transactions in block {}",
self.len()
);
})
}
}
/// Returns a transaction in the block by its hash. Beware that this is a slow operation
/// (linear w.r.t. the number of transactions in a block).
impl Index<Hash> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: Hash) -> &CommittedTransaction {
self.transactions
.iter()
.find(|&tx| tx.message.object_hash() == index)
.unwrap_or_else(|| {
panic!("No transaction with hash {} in the block", index);
})
}
}
impl<'a> IntoIterator for &'a BlockWithTransactions {
type Item = &'a CommittedTransaction;
type IntoIter = EagerTransactions<'a>;
fn into_iter(self) -> EagerTransactions<'a> {
self.iter()
}
}
/// Information about a particular transaction in the blockchain.
///
/// # JSON presentation
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `message` | `Verified<AnyTx>` | Transaction as recorded in the blockchain |
/// | `location` | [`TxLocation`] | Location of the transaction in the block |
/// | `location_proof` | [`ListProof`]`<`[`Hash`]`>` | Proof of transaction inclusion into a block |
/// | `status` | (custom; see below) | Execution status |
/// | `time` | [`DateTime`]`<`[`Utc`]`>` | Commitment time* |
///
/// \* By commitment time we mean the approximate commitment time of the block
/// which includes the transaction. This time is the median of the local precommit
/// times reported by the validators.
///
/// ## `status` field
///
/// The `status` field is a more readable representation of the [`ExecutionStatus`] type.
///
/// For successfully executed transactions, `status` is equal to
///
/// ```json
/// { "type": "success" }
/// ```
///
/// For transactions that cause an [`ExecutionError`], `status` contains the error code
/// and an optional description, i.e., has the following type in the [TypeScript] notation:
///
/// ```typescript
/// type Error = {
/// type: 'service_error' | 'core_error' | 'common_error' | 'runtime_error' | 'unexpected_error',
/// code?: number,
/// description?: string,
/// runtime_id: number,
/// call_site?: CallSite,
/// };
///
/// type CallSite = MethodCallSite | HookCallSite;
///
/// type MethodCallSite = {
/// call_type: 'method',
/// instance_id: number,
/// interface?: string,
/// method_id: number,
/// };
///
/// type HookCallSite = {
/// call_type: 'constructor' | 'before_transactions' | 'after_transactions',
/// instance_id: number,
/// };
/// ```
///
/// Explanations:
///
/// - `Error.type` determines the component responsible for the error. Usually, errors
/// are generated by the service code, but they can also be caused by the dispatch logic,
/// runtime associated with the service, or come from another source (`unexpected_error`s).
/// - `Error.code` is the error code. For service errors, this code is specific
/// to the service instance (which can be obtained from `call_site`), and for runtime errors -
/// to the runtime. For core errors, the codes are fixed; their meaning can be found
/// in the [`CoreError`] docs. The code is present for all error types except
/// `unexpected_error`s, in which the code is always absent.
/// Besides the types listed above, there is also a set of errors that can occur in any context;
/// these are organized in [`CommonError`].
/// - `Error.description` is an optional human-readable description of the error.
/// - `Error.runtime_id` is the numeric ID of the runtime in which the error has occurred. Note
/// that the runtime is defined for all error types, not just `runtime_error`s, since
/// for any request it's possible to say which runtime is responsible for its processing.
/// - `Error.call_site` provides the most precise known location of the call in which the error
/// has occurred.
///
/// [`TxLocation`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.TxLocation.html
/// [`ListProof`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/indexes/proof_list/struct.ListProof.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
/// [`ExecutionStatus`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionStatus.html
/// [`ExecutionError`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionError.html
/// [`CoreError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CoreError.html
/// [`CommonError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CommonError.html
/// [TypeScript]: https://www.typescriptlang.org/
/// [`DateTime`]: https://docs.rs/chrono/0.4.10/chrono/struct.DateTime.html
/// [`Utc`]: https://docs.rs/chrono/0.4.10/chrono/offset/struct.Utc.html
#[derive(Debug, Serialize, Deserialize)]
pub struct CommittedTransaction {
message: Verified<AnyTx>,
location: TxLocation,
location_proof: ListProof<Hash>,
status: ExecutionStatus,
time: DateTime<Utc>,
}
impl CommittedTransaction {
/// Returns the content of the transaction.
pub fn message(&self) -> &Verified<AnyTx> {
&self.message
}
/// Returns the transaction location in block.
pub fn location(&self) -> &TxLocation {
&self.location
}
/// Returns a proof that transaction is recorded in the blockchain.
pub fn location_proof(&self) -> &ListProof<Hash> {
&self.location_proof
}
/// Returns the status of the transaction execution.
pub fn status(&self) -> Result<(), &ExecutionError> {
self.status.0.as_ref().map(drop)
}
/// Returns an approximate commit time of the block which includes this transaction.
pub fn time(&self) -> &DateTime<Utc> {
&self.time
}
}
/// Information about the transaction.
///
/// Values of this type are returned by the `transaction()` method of the `BlockchainExplorer`.
///
/// # JSON presentation
///
/// ## Committed transactions
///
/// Committed transactions are represented just like a `CommittedTransaction`,
/// with the additional `type` field equal to `"committed"`.
///
/// ## Transaction in pool
///
/// Transactions in pool are represented with a 2-field object:
///
/// - The `type` field contains the transaction type (`"in_pool"`).
/// - `message` is the full transaction message serialized in hexadecimal form.
///
/// # Examples
///
/// ```
/// use exonum_explorer::TransactionInfo;
/// use exonum::{crypto::KeyPair, runtime::InstanceId};
/// # use exonum_derive::*;
/// # use serde_derive::*;
/// # use serde_json::json;
///
/// /// Service interface.
/// #[exonum_interface]
/// trait ServiceInterface<Ctx> {
/// type Output;
/// #[interface_method(id = 0)]
/// fn create_wallet(&self, ctx: Ctx, username: String) -> Self::Output;
/// }
///
/// // Create a signed transaction.
/// let keypair = KeyPair::random();
/// const SERVICE_ID: InstanceId = 100;
/// let tx = keypair.create_wallet(SERVICE_ID, "Alice".to_owned());
/// // This transaction in pool will be represented as follows:
/// let json = json!({
/// "type": "in_pool",
/// "message": tx,
/// });
/// let parsed: TransactionInfo = serde_json::from_value(json).unwrap();
/// assert!(parsed.is_in_pool());
/// ```
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
#[non_exhaustive]
pub enum TransactionInfo {
/// Transaction is in the memory pool, but not yet committed to the blockchain.
InPool {
/// Content of the uncommitted transaction.
message: Verified<AnyTx>,
},
/// Transaction is already committed to the blockchain.
Committed(CommittedTransaction),
}
impl TransactionInfo {
/// Returns the content of this transaction.
pub fn message(&self) -> &Verified<AnyTx> {
match *self {
TransactionInfo::InPool { ref message } => message,
TransactionInfo::Committed(ref tx) => tx.message(),
}
}
/// Is this an in-pool transaction?
pub fn is_in_pool(&self) -> bool {
matches!(*self, TransactionInfo::InPool { .. })
}
/// Is this a committed transaction?
pub fn is_committed(&self) -> bool {
matches!(*self, TransactionInfo::Committed(_))
}
/// Returns a reference to the inner committed transaction if this transaction is committed.
/// For transactions in pool, returns `None`.
pub fn as_committed(&self) -> Option<&CommittedTransaction> {
match *self {
TransactionInfo::Committed(ref tx) => Some(tx),
_ => None,
}
}
}
/// Blockchain explorer.
///
/// # Notes
///
/// The explorer wraps a specific [`Snapshot`] of the blockchain state; that is,
/// all calls to the methods of an explorer instance are guaranteed to be consistent.
///
/// [`Snapshot`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/trait.Snapshot.html
#[derive(Debug, Copy, Clone)]
pub struct BlockchainExplorer<'a> {
schema: Schema<&'a dyn Snapshot>,
}
impl<'a> BlockchainExplorer<'a> {
/// Creates a new `BlockchainExplorer` instance from the provided snapshot.
pub fn new(snapshot: &'a dyn Snapshot) -> Self {
BlockchainExplorer {
schema: Schema::new(snapshot),
}
}
/// Creates a new `BlockchainExplorer` instance from the core schema.
pub fn from_schema(schema: Schema<&'a dyn Snapshot>) -> Self {
BlockchainExplorer { schema }
}
/// Returns information about the transaction identified by the hash.
pub fn transaction(&self, tx_hash: &Hash) -> Option<TransactionInfo> {
let message = self.transaction_without_proof(tx_hash)?;
if self.schema.transactions_pool().contains(tx_hash) {
return Some(TransactionInfo::InPool { message });
}
let tx = self.committed_transaction(tx_hash, Some(message));
Some(TransactionInfo::Committed(tx))
}
/// Returns the status of a call in a block.
///
/// # Return value
///
/// This method returns `Ok(())` both if the call completed successfully and if it
/// was not performed at all. The caller is responsible for distinguishing between these two outcomes.
pub fn call_status(
&self,
block_height: Height,
call_location: CallInBlock,
) -> Result<(), ExecutionError> {
match self.schema.call_records(block_height) {
Some(errors) => errors.get(call_location),
None => Ok(()),
}
}
/// Returns the transaction message without a proof.
pub fn transaction_without_proof(&self, tx_hash: &Hash) -> Option<Verified<AnyTx>> {
self.schema.transactions().get(tx_hash)
}
fn precommits(&self, block: &Block) -> Vec<Verified<Precommit>> {
self.schema
.precommits(&block.object_hash())
.iter()
.collect()
}
fn transaction_hashes(&self, block: &Block) -> Vec<Hash> {
let tx_hashes_table = self.schema.block_transactions(block.height);
tx_hashes_table.iter().collect()
}
/// Retrieves a transaction that is known to be committed.
fn committed_transaction(
&self,
tx_hash: &Hash,
maybe_content: Option<Verified<AnyTx>>,
) -> CommittedTransaction {
let location = self
.schema
.transactions_locations()
.get(tx_hash)
.unwrap_or_else(|| panic!("Location not found for transaction hash {:?}", tx_hash));
let location_proof = self
.schema
.block_transactions(location.block_height())
.get_proof(u64::from(location.position_in_block()));
let block_precommits = self
.schema
.block_and_precommits(location.block_height())
.unwrap();
let time = median_precommits_time(&block_precommits.precommits);
// Unwrap is OK here, because we already know that transaction is committed.
let status = self.schema.transaction_result(location).unwrap();
CommittedTransaction {
message: maybe_content.unwrap_or_else(|| {
self.schema
.transactions()
.get(tx_hash)
.expect("BUG: Cannot find transaction in database")
}),
location,
location_proof,
status: ExecutionStatus(status),
time,
}
}
/// Returns the height of the blockchain.
pub fn height(&self) -> Height {
self.schema.height()
}
/// Returns block information for the specified height or `None` if there is no such block.
pub fn block(&self, height: Height) -> Option<BlockInfo<'_>> {
if self.height() >= height {
Some(BlockInfo::new(self, height))
} else {
None
}
}
/// Returns a block together with its transactions at the specified height, or `None`
/// if there is no such block.
pub fn block_with_txs(&self, height: Height) -> Option<BlockWithTransactions> {
let txs_table = self.schema.block_transactions(height);
let block_proof = self.schema.block_and_precommits(height)?;
let errors = self.schema.call_records(height)?;
|
}
|
random_line_split
|
middleware.rs
|
/// Macro to reduce the boilerplate required for using unboxed
/// closures as `Middleware` due to current type inference behaviour.
///
/// In future, the macro should hopefully be able to be removed while
/// having minimal changes to the closure's code.
///
/// # Limitations
///
/// The body of the `middleware!` macro needs to return something
/// implementing `Responder`. Some older examples had bodies that
/// would return a `MiddlewareResult`, but this was exploiting an
/// unsoundness in the Rust compiler that has since been
/// tightened. See discussion at
/// https://github.com/nickel-org/nickel.rs/issues/399.
///
/// Due to the way the macro is expanded, exiting the body early with
/// a return statement will most likely fail with a cryptic error
/// message. See https://github.com/nickel-org/nickel.rs/issues/389.
///
/// # Examples
/// ```rust,no_run
/// # #[macro_use] extern crate nickel;
/// # #[tokio::main]
/// # async fn main() {
/// use nickel::{Nickel, HttpRouter};
/// use std::sync::atomic::{AtomicUsize, Ordering};
///
/// let mut server = Nickel::new();
///
/// // Some shared resource between requests, must be `Sync + Send`
/// let visits = AtomicUsize::new(0);
///
/// server.get("/", middleware! {
/// format!("{}", visits.fetch_add(1, Ordering::Relaxed))
/// });
///
/// server.listen("127.0.0.1:6767").await.unwrap();
/// # }
/// ```
///
/// # Type hinting
/// Sometimes type inference is unable to determine the datatype for the server,
/// which can lead to a lot of extra type annotations. The `middleware!` macro
/// supports an explicit type annotation that drives the inference, allowing the handler
/// code to remain with minimal annotations.
///
/// # ignoring this, since the middleware macro needs work
/// # TODO: migration cleanup - fix middleware macro, or remove it if closures
/// # can be made to take its place
/// ```rust,ignore
/// # #[macro_use] extern crate nickel;
/// # fn main() {
/// # struct MyServerData;
/// middleware! { |_request, _response| <MyServerData>
/// // _response is of type Response<MyServerData>
/// "Hello World"
/// }
/// # ; // This semicolon is required to satisfy returning `()`
/// # }
/// ```
#[macro_export]
macro_rules! middleware {
(|$req:tt, mut $res:ident| <$data:path> $($b:tt)+) => { _middleware_inner!($req, $res, mut $res, <$data> $($b)+) };
(|$req:tt, $res:ident| <$data:path> $($b:tt)+) => { _middleware_inner!($req, $res, $res, <$data> $($b)+) };
(|$req:tt| <$data:path> $($b:tt)+) => { middleware!(|$req, _res| <$data> $($b)+) };
(|$req:tt, mut $res:ident| $($b:tt)+) => { _middleware_inner!($req, $res, mut $res, $($b)+) };
(|$req:tt, $res:ident| $($b:tt)+) => { _middleware_inner!($req, $res, $res, $($b)+) };
(|$req:tt| $($b:tt)+) => { middleware!(|$req, _res| $($b)+) };
($($b:tt)+) => { middleware!(|_, _res| $($b)+) };
}
#[doc(hidden)]
#[macro_export]
macro_rules! _middleware_inner {
($req:tt, $res:ident, $res_binding:pat, <$data:path> $($b:tt)+) => {{
use $crate::{MiddlewareResult,Responder, Response, Request};
#[inline(always)]
fn restrict<R: Responder<$data>>(r: R, res: Response<$data>)
-> MiddlewareResult<$data> {
res.send(r)
}
// Inference fails due to thinking it's a (&Request, Response) with
// different mutability requirements
#[inline(always)]
fn restrict_closure<F>(f: F) -> F
where F: for<'r>
Fn(&'r mut Request<$data>, Response<$data>)
-> MiddlewareResult<$data> + Send + Sync { f }
restrict_closure(move |as_pat!($req), $res_binding| {
restrict(as_block!({$($b)+}), $res)
})
}};
($req:tt, $res:ident, $res_binding:pat, $($b:tt)+) => {{
use $crate::{MiddlewareResult,Responder, Response, Request};
#[inline(always)]
    fn restrict<D: Send + 'static + Sync, R: Responder<D>>(r: R, res: Response<D>)
-> MiddlewareResult<D> {
res.send(r)
}
// Inference fails due to thinking it's a (&Request, Response) with
// different mutability requirements
#[inline(always)]
|
restrict_closure(move |as_pat!($req), $res_binding| {
restrict(as_block!({$($b)+}), $res)
})
}};
}
#[doc(hidden)]
#[macro_export]
macro_rules! as_block { ($b:block) => ( $b ) }
#[doc(hidden)]
#[macro_export]
macro_rules! as_pat { ($p:pat) => ( $p ) }
|
fn restrict_closure<F, D: Send + 'static + Sync>(f: F) -> F
where F: for<'r>
Fn(&'r mut Request<D>, Response<D>)
-> MiddlewareResult<D> + Send + Sync { f }
|
random_line_split
|
tests.rs
|
extern crate bufstream;
extern crate cargo;
extern crate filetime;
extern crate flate2;
extern crate git2;
extern crate hamcrest;
extern crate libc;
extern crate rustc_serialize;
extern crate tar;
extern crate tempdir;
extern crate term;
extern crate url;
#[cfg(windows)] extern crate kernel32;
#[cfg(windows)] extern crate winapi;
#[macro_use]
extern crate log;
use cargo::util::Rustc;
mod support;
macro_rules! test {
($name:ident $expr:expr) => (
#[test]
fn $name() {
::support::paths::setup();
setup();
$expr;
}
)
}
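// Editor's hedged note (not part of the upstream test suite): `test!` wraps a test body
// so that the shared path fixtures and the per-module `setup()` run before the body
// expression. A hypothetical call site inside one of the suites below could look like:
//
//     test!(builds_hello_world {
//         assert!(true)
//     });
//
// which expands to `#[test] fn builds_hello_world() { ... }` with the setup calls first.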
mod test_bad_config;
mod test_bad_manifest_path;
mod test_cargo;
mod test_cargo_bench;
mod test_cargo_build_auth;
mod test_cargo_build_lib;
mod test_cargo_clean;
mod test_cargo_compile;
mod test_cargo_compile_custom_build;
mod test_cargo_compile_git_deps;
mod test_cargo_compile_path_deps;
mod test_cargo_compile_plugins;
mod test_cargo_cross_compile;
mod test_cargo_doc;
mod test_cargo_features;
mod test_cargo_fetch;
mod test_cargo_freshness;
mod test_cargo_generate_lockfile;
mod test_cargo_install;
mod test_cargo_new;
mod test_cargo_package;
mod test_cargo_profiles;
mod test_cargo_publish;
mod test_cargo_read_manifest;
|
mod test_cargo_registry;
mod test_cargo_run;
mod test_cargo_rustc;
mod test_cargo_rustdoc;
mod test_cargo_search;
mod test_cargo_test;
mod test_cargo_tool_paths;
mod test_cargo_verify_project;
mod test_cargo_version;
mod test_shell;
thread_local!(static RUSTC: Rustc = Rustc::new("rustc").unwrap());
fn rustc_host() -> String {
RUSTC.with(|r| r.host.clone())
}
fn is_nightly() -> bool {
RUSTC.with(|r| {
r.verbose_version.contains("-nightly") ||
r.verbose_version.contains("-dev")
})
}
fn can_panic() -> bool {
    RUSTC.with(|r| !(r.host.contains("msvc") && !r.host.contains("x86_64")))
}
|
random_line_split
|
|
tests.rs
|
extern crate bufstream;
extern crate cargo;
extern crate filetime;
extern crate flate2;
extern crate git2;
extern crate hamcrest;
extern crate libc;
extern crate rustc_serialize;
extern crate tar;
extern crate tempdir;
extern crate term;
extern crate url;
#[cfg(windows)] extern crate kernel32;
#[cfg(windows)] extern crate winapi;
#[macro_use]
extern crate log;
use cargo::util::Rustc;
mod support;
macro_rules! test {
($name:ident $expr:expr) => (
#[test]
fn $name() {
::support::paths::setup();
setup();
$expr;
}
)
}
mod test_bad_config;
mod test_bad_manifest_path;
mod test_cargo;
mod test_cargo_bench;
mod test_cargo_build_auth;
mod test_cargo_build_lib;
mod test_cargo_clean;
mod test_cargo_compile;
mod test_cargo_compile_custom_build;
mod test_cargo_compile_git_deps;
mod test_cargo_compile_path_deps;
mod test_cargo_compile_plugins;
mod test_cargo_cross_compile;
mod test_cargo_doc;
mod test_cargo_features;
mod test_cargo_fetch;
mod test_cargo_freshness;
mod test_cargo_generate_lockfile;
mod test_cargo_install;
mod test_cargo_new;
mod test_cargo_package;
mod test_cargo_profiles;
mod test_cargo_publish;
mod test_cargo_read_manifest;
mod test_cargo_registry;
mod test_cargo_run;
mod test_cargo_rustc;
mod test_cargo_rustdoc;
mod test_cargo_search;
mod test_cargo_test;
mod test_cargo_tool_paths;
mod test_cargo_verify_project;
mod test_cargo_version;
mod test_shell;
thread_local!(static RUSTC: Rustc = Rustc::new("rustc").unwrap());
fn rustc_host() -> String {
RUSTC.with(|r| r.host.clone())
}
fn is_nightly() -> bool {
RUSTC.with(|r| {
r.verbose_version.contains("-nightly") ||
r.verbose_version.contains("-dev")
})
}
fn
|
() -> bool {
    RUSTC.with(|r| !(r.host.contains("msvc") && !r.host.contains("x86_64")))
}
|
can_panic
|
identifier_name
|
tests.rs
|
extern crate bufstream;
extern crate cargo;
extern crate filetime;
extern crate flate2;
extern crate git2;
extern crate hamcrest;
extern crate libc;
extern crate rustc_serialize;
extern crate tar;
extern crate tempdir;
extern crate term;
extern crate url;
#[cfg(windows)] extern crate kernel32;
#[cfg(windows)] extern crate winapi;
#[macro_use]
extern crate log;
use cargo::util::Rustc;
mod support;
macro_rules! test {
($name:ident $expr:expr) => (
#[test]
fn $name() {
::support::paths::setup();
setup();
$expr;
}
)
}
mod test_bad_config;
mod test_bad_manifest_path;
mod test_cargo;
mod test_cargo_bench;
mod test_cargo_build_auth;
mod test_cargo_build_lib;
mod test_cargo_clean;
mod test_cargo_compile;
mod test_cargo_compile_custom_build;
mod test_cargo_compile_git_deps;
mod test_cargo_compile_path_deps;
mod test_cargo_compile_plugins;
mod test_cargo_cross_compile;
mod test_cargo_doc;
mod test_cargo_features;
mod test_cargo_fetch;
mod test_cargo_freshness;
mod test_cargo_generate_lockfile;
mod test_cargo_install;
mod test_cargo_new;
mod test_cargo_package;
mod test_cargo_profiles;
mod test_cargo_publish;
mod test_cargo_read_manifest;
mod test_cargo_registry;
mod test_cargo_run;
mod test_cargo_rustc;
mod test_cargo_rustdoc;
mod test_cargo_search;
mod test_cargo_test;
mod test_cargo_tool_paths;
mod test_cargo_verify_project;
mod test_cargo_version;
mod test_shell;
thread_local!(static RUSTC: Rustc = Rustc::new("rustc").unwrap());
fn rustc_host() -> String {
RUSTC.with(|r| r.host.clone())
}
fn is_nightly() -> bool {
RUSTC.with(|r| {
r.verbose_version.contains("-nightly") ||
r.verbose_version.contains("-dev")
})
}
fn can_panic() -> bool
|
{
RUSTC.with(|r| !(r.host.contains("msvc") && !r.host.contains("x86_64")))
}
|
identifier_body
|
|
lib.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Collection types.
//!
//! See [std::collections](../std/collections) for a detailed discussion of collections in Rust.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "collections"]
#![unstable(feature = "collections")]
#![staged_api]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(alloc)]
#![feature(box_syntax)]
#![feature(box_patterns)]
#![feature(core)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(unicode)]
#![feature(unsafe_destructor)]
#![feature(unique)]
#![feature(unsafe_no_drop_flag)]
#![cfg_attr(test, feature(rand, rustc_private, test))]
#![cfg_attr(test, allow(deprecated))] // rand
#![feature(no_std)]
#![no_std]
#[macro_use]
extern crate core;
extern crate unicode;
extern crate alloc;
#[cfg(test)] extern crate test;
#[cfg(test)] #[macro_use] extern crate std;
#[cfg(test)] #[macro_use] extern crate log;
pub use binary_heap::BinaryHeap;
pub use bit_vec::BitVec;
pub use bit_set::BitSet;
pub use btree_map::BTreeMap;
pub use btree_set::BTreeSet;
pub use linked_list::LinkedList;
pub use enum_set::EnumSet;
pub use vec_deque::VecDeque;
pub use string::String;
pub use vec::Vec;
pub use vec_map::VecMap;
#[deprecated(since = "1.0.0", reason = "renamed to vec_deque")]
#[unstable(feature = "collections")]
pub use vec_deque as ring_buf;
#[deprecated(since = "1.0.0", reason = "renamed to linked_list")]
#[unstable(feature = "collections")]
pub use linked_list as dlist;
#[deprecated(since = "1.0.0", reason = "renamed to bit_vec")]
#[unstable(feature = "collections")]
pub use bit_vec as bitv;
#[deprecated(since = "1.0.0", reason = "renamed to bit_set")]
#[unstable(feature = "collections")]
pub use bit_set as bitv_set;
// Needed for the vec! macro
pub use alloc::boxed;
#[macro_use]
mod macros;
#[cfg(test)] #[macro_use] mod bench;
pub mod binary_heap;
mod bit;
mod btree;
pub mod borrow;
pub mod enum_set;
pub mod fmt;
pub mod linked_list;
pub mod slice;
pub mod str;
pub mod string;
pub mod vec;
pub mod vec_deque;
pub mod vec_map;
#[unstable(feature = "collections",
reason = "RFC 509")]
pub mod bit_vec {
pub use bit::{BitVec, Iter};
#[deprecated(since = "1.0.0", reason = "renamed to BitVec")]
#[unstable(feature = "collections")]
pub use bit::BitVec as Bitv;
}
#[unstable(feature = "collections",
reason = "RFC 509")]
pub mod bit_set {
pub use bit::{BitSet, Union, Intersection, Difference, SymmetricDifference};
pub use bit::SetIter as Iter;
#[deprecated(since = "1.0.0", reason = "renamed to BitSet")]
#[unstable(feature = "collections")]
pub use bit::BitSet as BitvSet;
}
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_map {
pub use btree::map::*;
}
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_set {
pub use btree::set::*;
}
// FIXME(#14344) this shouldn't be necessary
#[doc(hidden)]
pub fn fixme_14344_be_sure_to_link_to_collections()
|
#[cfg(not(test))]
mod std {
pub use core::ops; // RangeFull
}
#[cfg(test)]
mod prelude {
// from core.
pub use core::clone::Clone;
pub use core::cmp::{PartialEq, Eq, PartialOrd, Ord};
pub use core::cmp::Ordering::{Less, Equal, Greater};
pub use core::iter::range;
pub use core::iter::{FromIterator, Extend, IteratorExt};
pub use core::iter::{Iterator, DoubleEndedIterator, RandomAccessIterator};
pub use core::iter::{ExactSizeIterator};
pub use core::marker::{Copy, Send, Sized, Sync};
pub use core::mem::drop;
pub use core::ops::{Drop, Fn, FnMut, FnOnce};
pub use core::option::Option;
pub use core::option::Option::{Some, None};
pub use core::ptr::PtrExt;
pub use core::result::Result;
pub use core::result::Result::{Ok, Err};
// in core and collections (may differ).
pub use slice::{AsSlice, SliceExt};
pub use str::{Str, StrExt};
// from other crates.
pub use alloc::boxed::Box;
pub use unicode::char::CharExt;
// from collections.
pub use borrow::IntoCow;
pub use slice::SliceConcatExt;
pub use string::{String, ToString};
pub use vec::Vec;
}
/// An endpoint of a range of keys.
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Bound<T> {
/// An inclusive bound.
Included(T),
/// An exclusive bound.
Excluded(T),
/// An infinite endpoint. Indicates that there is no bound in this direction.
Unbounded,
}
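// Editor's hedged sketch (not part of the original crate): `Bound` values describe the
// two endpoints of a key range independently, e.g. a range over keys 10..20 that
// includes the start but excludes the end.
#[allow(dead_code)]
fn example_key_range() -> (Bound<u32>, Bound<u32>) {
    (Bound::Included(10), Bound::Excluded(20))
}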
|
{}
|
identifier_body
|
lib.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Collection types.
//!
//! See [std::collections](../std/collections) for a detailed discussion of collections in Rust.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "collections"]
#![unstable(feature = "collections")]
#![staged_api]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(alloc)]
#![feature(box_syntax)]
#![feature(box_patterns)]
#![feature(core)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(unicode)]
#![feature(unsafe_destructor)]
#![feature(unique)]
#![feature(unsafe_no_drop_flag)]
#![cfg_attr(test, feature(rand, rustc_private, test))]
#![cfg_attr(test, allow(deprecated))] // rand
#![feature(no_std)]
#![no_std]
#[macro_use]
extern crate core;
extern crate unicode;
extern crate alloc;
#[cfg(test)] extern crate test;
#[cfg(test)] #[macro_use] extern crate std;
#[cfg(test)] #[macro_use] extern crate log;
pub use binary_heap::BinaryHeap;
pub use bit_vec::BitVec;
pub use bit_set::BitSet;
pub use btree_map::BTreeMap;
pub use btree_set::BTreeSet;
pub use linked_list::LinkedList;
pub use enum_set::EnumSet;
pub use vec_deque::VecDeque;
pub use string::String;
pub use vec::Vec;
pub use vec_map::VecMap;
#[deprecated(since = "1.0.0", reason = "renamed to vec_deque")]
#[unstable(feature = "collections")]
pub use vec_deque as ring_buf;
#[deprecated(since = "1.0.0", reason = "renamed to linked_list")]
#[unstable(feature = "collections")]
pub use linked_list as dlist;
#[deprecated(since = "1.0.0", reason = "renamed to bit_vec")]
#[unstable(feature = "collections")]
pub use bit_vec as bitv;
#[deprecated(since = "1.0.0", reason = "renamed to bit_set")]
#[unstable(feature = "collections")]
pub use bit_set as bitv_set;
// Needed for the vec! macro
pub use alloc::boxed;
#[macro_use]
mod macros;
#[cfg(test)] #[macro_use] mod bench;
pub mod binary_heap;
mod bit;
mod btree;
pub mod borrow;
pub mod enum_set;
pub mod fmt;
pub mod linked_list;
pub mod slice;
pub mod str;
pub mod string;
pub mod vec;
pub mod vec_deque;
pub mod vec_map;
#[unstable(feature = "collections",
reason = "RFC 509")]
pub mod bit_vec {
pub use bit::{BitVec, Iter};
#[deprecated(since = "1.0.0", reason = "renamed to BitVec")]
#[unstable(feature = "collections")]
pub use bit::BitVec as Bitv;
}
#[unstable(feature = "collections",
reason = "RFC 509")]
pub mod bit_set {
pub use bit::{BitSet, Union, Intersection, Difference, SymmetricDifference};
pub use bit::SetIter as Iter;
#[deprecated(since = "1.0.0", reason = "renamed to BitSet")]
#[unstable(feature = "collections")]
pub use bit::BitSet as BitvSet;
}
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_map {
pub use btree::map::*;
}
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_set {
pub use btree::set::*;
}
// FIXME(#14344) this shouldn't be necessary
#[doc(hidden)]
pub fn fixme_14344_be_sure_to_link_to_collections() {}
#[cfg(not(test))]
mod std {
pub use core::ops; // RangeFull
}
#[cfg(test)]
mod prelude {
// from core.
pub use core::clone::Clone;
pub use core::cmp::{PartialEq, Eq, PartialOrd, Ord};
pub use core::cmp::Ordering::{Less, Equal, Greater};
pub use core::iter::range;
pub use core::iter::{FromIterator, Extend, IteratorExt};
pub use core::iter::{Iterator, DoubleEndedIterator, RandomAccessIterator};
|
pub use core::iter::{ExactSizeIterator};
pub use core::marker::{Copy, Send, Sized, Sync};
pub use core::mem::drop;
pub use core::ops::{Drop, Fn, FnMut, FnOnce};
pub use core::option::Option;
pub use core::option::Option::{Some, None};
pub use core::ptr::PtrExt;
pub use core::result::Result;
pub use core::result::Result::{Ok, Err};
// in core and collections (may differ).
pub use slice::{AsSlice, SliceExt};
pub use str::{Str, StrExt};
// from other crates.
pub use alloc::boxed::Box;
pub use unicode::char::CharExt;
// from collections.
pub use borrow::IntoCow;
pub use slice::SliceConcatExt;
pub use string::{String, ToString};
pub use vec::Vec;
}
/// An endpoint of a range of keys.
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Bound<T> {
/// An inclusive bound.
Included(T),
/// An exclusive bound.
Excluded(T),
/// An infinite endpoint. Indicates that there is no bound in this direction.
Unbounded,
}
|
random_line_split
|
|
lib.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Collection types.
//!
//! See [std::collections](../std/collections) for a detailed discussion of collections in Rust.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "collections"]
#![unstable(feature = "collections")]
#![staged_api]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(alloc)]
#![feature(box_syntax)]
#![feature(box_patterns)]
#![feature(core)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(unicode)]
#![feature(unsafe_destructor)]
#![feature(unique)]
#![feature(unsafe_no_drop_flag)]
#![cfg_attr(test, feature(rand, rustc_private, test))]
#![cfg_attr(test, allow(deprecated))] // rand
#![feature(no_std)]
#![no_std]
#[macro_use]
extern crate core;
extern crate unicode;
extern crate alloc;
#[cfg(test)] extern crate test;
#[cfg(test)] #[macro_use] extern crate std;
#[cfg(test)] #[macro_use] extern crate log;
pub use binary_heap::BinaryHeap;
pub use bit_vec::BitVec;
pub use bit_set::BitSet;
pub use btree_map::BTreeMap;
pub use btree_set::BTreeSet;
pub use linked_list::LinkedList;
pub use enum_set::EnumSet;
pub use vec_deque::VecDeque;
pub use string::String;
pub use vec::Vec;
pub use vec_map::VecMap;
#[deprecated(since = "1.0.0", reason = "renamed to vec_deque")]
#[unstable(feature = "collections")]
pub use vec_deque as ring_buf;
#[deprecated(since = "1.0.0", reason = "renamed to linked_list")]
#[unstable(feature = "collections")]
pub use linked_list as dlist;
#[deprecated(since = "1.0.0", reason = "renamed to bit_vec")]
#[unstable(feature = "collections")]
pub use bit_vec as bitv;
#[deprecated(since = "1.0.0", reason = "renamed to bit_set")]
#[unstable(feature = "collections")]
pub use bit_set as bitv_set;
// Needed for the vec! macro
pub use alloc::boxed;
#[macro_use]
mod macros;
#[cfg(test)] #[macro_use] mod bench;
pub mod binary_heap;
mod bit;
mod btree;
pub mod borrow;
pub mod enum_set;
pub mod fmt;
pub mod linked_list;
pub mod slice;
pub mod str;
pub mod string;
pub mod vec;
pub mod vec_deque;
pub mod vec_map;
#[unstable(feature = "collections",
reason = "RFC 509")]
pub mod bit_vec {
pub use bit::{BitVec, Iter};
#[deprecated(since = "1.0.0", reason = "renamed to BitVec")]
#[unstable(feature = "collections")]
pub use bit::BitVec as Bitv;
}
#[unstable(feature = "collections",
reason = "RFC 509")]
pub mod bit_set {
pub use bit::{BitSet, Union, Intersection, Difference, SymmetricDifference};
pub use bit::SetIter as Iter;
#[deprecated(since = "1.0.0", reason = "renamed to BitSet")]
#[unstable(feature = "collections")]
pub use bit::BitSet as BitvSet;
}
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_map {
pub use btree::map::*;
}
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_set {
pub use btree::set::*;
}
// FIXME(#14344) this shouldn't be necessary
#[doc(hidden)]
pub fn
|
() {}
#[cfg(not(test))]
mod std {
pub use core::ops; // RangeFull
}
#[cfg(test)]
mod prelude {
// from core.
pub use core::clone::Clone;
pub use core::cmp::{PartialEq, Eq, PartialOrd, Ord};
pub use core::cmp::Ordering::{Less, Equal, Greater};
pub use core::iter::range;
pub use core::iter::{FromIterator, Extend, IteratorExt};
pub use core::iter::{Iterator, DoubleEndedIterator, RandomAccessIterator};
pub use core::iter::{ExactSizeIterator};
pub use core::marker::{Copy, Send, Sized, Sync};
pub use core::mem::drop;
pub use core::ops::{Drop, Fn, FnMut, FnOnce};
pub use core::option::Option;
pub use core::option::Option::{Some, None};
pub use core::ptr::PtrExt;
pub use core::result::Result;
pub use core::result::Result::{Ok, Err};
// in core and collections (may differ).
pub use slice::{AsSlice, SliceExt};
pub use str::{Str, StrExt};
// from other crates.
pub use alloc::boxed::Box;
pub use unicode::char::CharExt;
// from collections.
pub use borrow::IntoCow;
pub use slice::SliceConcatExt;
pub use string::{String, ToString};
pub use vec::Vec;
}
/// An endpoint of a range of keys.
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Bound<T> {
/// An inclusive bound.
Included(T),
/// An exclusive bound.
Excluded(T),
/// An infinite endpoint. Indicates that there is no bound in this direction.
Unbounded,
}
|
fixme_14344_be_sure_to_link_to_collections
|
identifier_name
|
build_gecko.rs
|
path
};
static ref SEARCH_PATHS: Vec<PathBuf> = vec![
DISTDIR_PATH.join("include"),
DISTDIR_PATH.join("include/nspr"),
];
static ref ADDED_PATHS: Mutex<HashSet<PathBuf>> = Mutex::new(HashSet::new());
static ref LAST_MODIFIED: Mutex<SystemTime> =
Mutex::new(get_modified_time(&env::current_exe().unwrap())
.expect("Failed to get modified time of executable"));
}
fn get_modified_time(file: &Path) -> Option<SystemTime> {
file.metadata().and_then(|m| m.modified()).ok()
}
fn update_last_modified(file: &Path) {
let modified = get_modified_time(file).expect("Couldn't get file modification time");
let mut last_modified = LAST_MODIFIED.lock().unwrap();
*last_modified = cmp::max(modified, *last_modified);
}
fn search_include(name: &str) -> Option<PathBuf> {
for path in SEARCH_PATHS.iter() {
let file = path.join(name);
if file.is_file() {
update_last_modified(&file);
return Some(file);
}
}
None
}
fn add_headers_recursively(path: PathBuf, added_paths: &mut HashSet<PathBuf>) {
if added_paths.contains(&path) {
return;
}
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
added_paths.insert(path);
// Find all includes and add them recursively
for cap in INCLUDE_RE.captures_iter(&content) {
if let Some(path) = search_include(cap.get(1).unwrap().as_str()) {
add_headers_recursively(path, added_paths);
}
}
}
fn add_include(name: &str) -> String {
let mut added_paths = ADDED_PATHS.lock().unwrap();
let file = search_include(name).expect("Include not found!");
let result = String::from(file.to_str().unwrap());
add_headers_recursively(file, &mut *added_paths);
result
}
trait BuilderExt {
fn get_initial_builder() -> Builder;
fn include<T: Into<String>>(self, file: T) -> Builder;
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder;
fn borrowed_type(self, ty: &str) -> Builder;
fn mutable_borrowed_type(self, ty: &str) -> Builder;
}
fn add_clang_args(mut builder: Builder, config: &Table, matched_os: &mut bool) -> Builder {
fn add_args(mut builder: Builder, values: &[toml::Value]) -> Builder {
for item in values.iter() {
builder = builder.clang_arg(item.as_str().expect("Expect string in list"));
}
builder
}
for (k, v) in config.iter() {
if k == "args" {
builder = add_args(builder, v.as_array().unwrap().as_slice());
continue;
}
let equal_idx = k.find('=').expect(&format!("Invalid key: {}", k));
let (target_type, target_value) = k.split_at(equal_idx);
if TARGET_INFO[target_type] != target_value[1..] {
continue;
}
if target_type == "os" {
*matched_os = true;
}
builder = match *v {
toml::Value::Table(ref table) => add_clang_args(builder, table, matched_os),
toml::Value::Array(ref array) => add_args(builder, array),
_ => panic!("Unknown type"),
};
}
builder
}
impl BuilderExt for Builder {
fn get_initial_builder() -> Builder {
use bindgen::RustTarget;
// Disable rust unions, because we replace some types inside of
// them.
let mut builder = Builder::default().rust_target(RustTarget::Stable_1_0);
let rustfmt_path = env::var_os("RUSTFMT")
// This can be replaced with
// >.filter(|p| !p.is_empty()).map(PathBuf::from)
// once we can use 1.27+.
.and_then(|p| {
if p.is_empty() {
None
} else {
Some(PathBuf::from(p))
}
});
if let Some(path) = rustfmt_path {
builder = builder.with_rustfmt(path);
}
for dir in SEARCH_PATHS.iter() {
builder = builder.clang_arg("-I").clang_arg(dir.to_str().unwrap());
}
builder = builder.include(add_include("mozilla-config.h"));
if env::var("CARGO_FEATURE_GECKO_DEBUG").is_ok() {
builder = builder.clang_arg("-DDEBUG=1").clang_arg("-DJS_DEBUG=1");
}
let mut matched_os = false;
let build_config = CONFIG["build"].as_table().expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
let build_config = BUILD_CONFIG["build"]
.as_table()
.expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
if !matched_os {
panic!("Unknown platform");
}
builder
}
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if !structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
);
},
};
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String>
|
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if !self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE, &fixups);
}
fn setup_logging() -> bool {
use log;
struct BuildLogger {
file: Option<Mutex<fs::File>>,
filter: String,
}
impl log::Log for BuildLogger {
fn enabled(&self, meta: &log::Metadata) -> bool {
self.file.is_some() && meta.target().contains(&self.filter)
}
fn log(&self, record: &log::Record) {
if !self.enabled(record.metadata()) {
return;
}
let mut file = self.file.as_ref().unwrap().lock().unwrap();
let _ = writeln!(
file,
"{} - {} - {} @ {}:{}",
record.level(),
record.target(),
record.args(),
record.file().unwrap_or("<unknown>"),
record.line().unwrap_or(0)
);
}
fn flush(&self) {
if let Some(ref file) = self.file {
file.lock().unwrap().flush().unwrap();
}
}
}
if let Some(path) = env::var_os("STYLO_BUILD_LOG") {
log::set_max_level(log::LevelFilter::Debug);
log::set_boxed_logger(Box::new(BuildLogger {
file: fs::File::create(path).ok().map(Mutex::new),
filter: env::var("STYLO_BUILD_FILTER")
.ok()
.unwrap_or_else(|| "bindgen".to_owned()),
})).expect("Failed to set logger.");
true
} else {
false
}
}
fn generate_bindings
|
{
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line| !line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
|
identifier_body
|
build_gecko.rs
|
path
};
static ref SEARCH_PATHS: Vec<PathBuf> = vec![
DISTDIR_PATH.join("include"),
DISTDIR_PATH.join("include/nspr"),
];
static ref ADDED_PATHS: Mutex<HashSet<PathBuf>> = Mutex::new(HashSet::new());
static ref LAST_MODIFIED: Mutex<SystemTime> =
Mutex::new(get_modified_time(&env::current_exe().unwrap())
.expect("Failed to get modified time of executable"));
}
fn get_modified_time(file: &Path) -> Option<SystemTime> {
file.metadata().and_then(|m| m.modified()).ok()
}
fn update_last_modified(file: &Path) {
let modified = get_modified_time(file).expect("Couldn't get file modification time");
let mut last_modified = LAST_MODIFIED.lock().unwrap();
*last_modified = cmp::max(modified, *last_modified);
}
fn search_include(name: &str) -> Option<PathBuf> {
for path in SEARCH_PATHS.iter() {
let file = path.join(name);
if file.is_file() {
update_last_modified(&file);
return Some(file);
}
}
None
}
fn add_headers_recursively(path: PathBuf, added_paths: &mut HashSet<PathBuf>) {
if added_paths.contains(&path) {
return;
}
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
added_paths.insert(path);
// Find all includes and add them recursively
for cap in INCLUDE_RE.captures_iter(&content) {
if let Some(path) = search_include(cap.get(1).unwrap().as_str()) {
add_headers_recursively(path, added_paths);
}
}
}
fn add_include(name: &str) -> String {
let mut added_paths = ADDED_PATHS.lock().unwrap();
let file = search_include(name).expect("Include not found!");
let result = String::from(file.to_str().unwrap());
add_headers_recursively(file, &mut *added_paths);
result
}
trait BuilderExt {
fn get_initial_builder() -> Builder;
fn include<T: Into<String>>(self, file: T) -> Builder;
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder;
fn borrowed_type(self, ty: &str) -> Builder;
fn mutable_borrowed_type(self, ty: &str) -> Builder;
}
fn add_clang_args(mut builder: Builder, config: &Table, matched_os: &mut bool) -> Builder {
fn add_args(mut builder: Builder, values: &[toml::Value]) -> Builder {
for item in values.iter() {
builder = builder.clang_arg(item.as_str().expect("Expect string in list"));
}
builder
}
for (k, v) in config.iter() {
if k == "args" {
builder = add_args(builder, v.as_array().unwrap().as_slice());
continue;
}
let equal_idx = k.find('=').expect(&format!("Invalid key: {}", k));
let (target_type, target_value) = k.split_at(equal_idx);
if TARGET_INFO[target_type] != target_value[1..] {
continue;
}
if target_type == "os" {
*matched_os = true;
}
builder = match *v {
toml::Value::Table(ref table) => add_clang_args(builder, table, matched_os),
toml::Value::Array(ref array) => add_args(builder, array),
_ => panic!("Unknown type"),
};
}
builder
}
impl BuilderExt for Builder {
fn get_initial_builder() -> Builder {
use bindgen::RustTarget;
// Disable rust unions, because we replace some types inside of
// them.
let mut builder = Builder::default().rust_target(RustTarget::Stable_1_0);
let rustfmt_path = env::var_os("RUSTFMT")
// This can be replaced with
// >.filter(|p| !p.is_empty()).map(PathBuf::from)
// once we can use 1.27+.
.and_then(|p| {
if p.is_empty() {
None
} else {
Some(PathBuf::from(p))
}
});
if let Some(path) = rustfmt_path {
builder = builder.with_rustfmt(path);
}
for dir in SEARCH_PATHS.iter() {
builder = builder.clang_arg("-I").clang_arg(dir.to_str().unwrap());
}
builder = builder.include(add_include("mozilla-config.h"));
if env::var("CARGO_FEATURE_GECKO_DEBUG").is_ok() {
builder = builder.clang_arg("-DDEBUG=1").clang_arg("-DJS_DEBUG=1");
}
let mut matched_os = false;
let build_config = CONFIG["build"].as_table().expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
let build_config = BUILD_CONFIG["build"]
.as_table()
.expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
if !matched_os {
panic!("Unknown platform");
}
builder
}
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn
|
(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if !structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
);
},
};
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String> {
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line| !line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if !self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE, &fixups);
}
fn setup_logging() -> bool {
use log;
struct BuildLogger {
file: Option<Mutex<fs::File>>,
filter: String,
}
impl log::Log for BuildLogger {
fn enabled(&self, meta: &log::Metadata) -> bool {
self.file.is_some() && meta.target().contains(&self.filter)
}
fn log(&self, record: &log::Record) {
if !self.enabled(record.metadata()) {
return;
}
let mut file = self.file.as_ref().unwrap().lock().unwrap();
let _ = writeln!(
file,
"{} - {} - {} @ {}:{}",
record.level(),
record.target(),
record.args(),
record.file().unwrap_or("<unknown>"),
record.line().unwrap_or(0)
);
}
fn flush(&self) {
if let Some(ref file) = self.file {
file.lock().unwrap().flush().unwrap();
}
}
}
if let Some(path) = env::var_os("STYLO_BUILD_LOG") {
log::set_max_level(log::LevelFilter::Debug);
log::set_boxed_logger(Box::new(BuildLogger {
file: fs::File::create(path).ok().map(Mutex::new),
filter: env::var("STYLO_BUILD_FILTER")
.ok()
.unwrap_or_else(|| "bindgen".to_owned()),
})).expect("Failed to set logger.");
true
} else {
false
}
}
fn generate_bindings
|
zero_size_type
|
identifier_name
|