file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
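Each row below is a fill-in-the-middle (FIM) example: a Rust source file is split into a `prefix`, a held-out `middle` span, and a `suffix`, and `fim_type` labels how the span was chosen (the four classes appearing in the rows are `random_line_split`, `conditional_block`, `identifier_body`, and `identifier_name`). The sketch below shows one way such a row could be consumed; it assumes, from the column names only, that concatenating `prefix + middle + suffix` reproduces the original file, and the `<PRE>/<SUF>/<MID>` sentinel strings are placeholders for this sketch rather than tokens mandated by the dataset.

```rust
/// Illustrative row layout mirroring the columns above; the field semantics
/// are an assumption based on the column names, not a published spec.
struct FimExample {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String, // e.g. "random_line_split", "identifier_body", ...
}

impl FimExample {
    /// Rebuild the full source file from the three spans.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }

    /// Render a prefix-suffix-middle style infilling prompt; the markers
    /// here are placeholders chosen for the sketch.
    fn to_prompt(&self) -> String {
        format!("<PRE>{}<SUF>{}<MID>", self.prefix, self.suffix)
    }
}
```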
modify.rs | use std::collections::HashSet;
use std::convert::AsRef;
use std::hash::Hash;
use std::io;
use lber::structures::{Tag, Enumerated, Sequence, Set, OctetString};
use lber::common::TagClass;
use futures::{future, Future};
use tokio_service::Service;
use ldap::{Ldap, LdapOp, next_req_controls};
use result::LdapResult;
/// Possible sub-operations for the Modify operation.
#[derive(Clone, Debug, PartialEq)]
pub enum Mod<S: AsRef<[u8]> + Eq + Hash> {
/// Add an attribute, with at least one value.
Add(S, HashSet<S>),
/// Delete the entire attribute, or the given values of an attribute.
Delete(S, HashSet<S>),
/// Replace an existing attribute, setting its values to those in the set, or delete it if no values are given.
Replace(S, HashSet<S>),
}
impl Ldap {
/// See [`LdapConn::modify()`](struct.LdapConn.html#method.modify).
pub fn modify<S: AsRef<[u8]> + Eq + Hash>(&self, dn: &str, mods: Vec<Mod<S>>) ->
Box<Future<Item=LdapResult, Error=io::Error>> {
let mut any_add_empty = false;
let req = Tag::Sequence(Sequence {
id: 6,
class: TagClass::Application,
inner: vec![
Tag::OctetString(OctetString {
inner: Vec::from(dn.as_bytes()),
.. Default::default()
}),
Tag::Sequence(Sequence {
inner: mods.into_iter().map(|m| {
let mut is_add = false; | Mod::Delete(attr, set) => (1, attr, set),
Mod::Replace(attr, set) => (2, attr, set),
};
if set.is_empty() && is_add {
any_add_empty = true;
}
let op = Tag::Enumerated(Enumerated {
inner: num,
.. Default::default()
});
let part_attr = Tag::Sequence(Sequence {
inner: vec![
Tag::OctetString(OctetString {
inner: Vec::from(attr.as_ref()),
.. Default::default()
}),
Tag::Set(Set {
inner: set.into_iter().map(|val| {
Tag::OctetString(OctetString {
inner: Vec::from(val.as_ref()),
.. Default::default()
})
}).collect(),
.. Default::default()
})
],
.. Default::default()
});
Tag::Sequence(Sequence {
inner: vec![op, part_attr],
.. Default::default()
})
}).collect(),
.. Default::default()
})
]
});
if any_add_empty {
return Box::new(future::err(io::Error::new(io::ErrorKind::Other, "empty value set for Add")));
}
let fut = self.call(LdapOp::Single(req, next_req_controls(self)))
.and_then(|response| {
let (mut result, controls) = (LdapResult::from(response.0), response.1);
result.ctrls = controls;
Ok(result)
});
Box::new(fut)
}
} | let (num, attr, set) = match m {
Mod::Add(attr, set) => { is_add = true; (0, attr, set) }, | random_line_split |
modify.rs | use std::collections::HashSet;
use std::convert::AsRef;
use std::hash::Hash;
use std::io;
use lber::structures::{Tag, Enumerated, Sequence, Set, OctetString};
use lber::common::TagClass;
use futures::{future, Future};
use tokio_service::Service;
use ldap::{Ldap, LdapOp, next_req_controls};
use result::LdapResult;
/// Possible sub-operations for the Modify operation.
#[derive(Clone, Debug, PartialEq)]
pub enum Mod<S: AsRef<[u8]> + Eq + Hash> {
/// Add an attribute, with at least one value.
Add(S, HashSet<S>),
/// Delete the entire attribute, or the given values of an attribute.
Delete(S, HashSet<S>),
/// Replace an existing attribute, setting its values to those in the set, or delete it if no values are given.
Replace(S, HashSet<S>),
}
impl Ldap {
/// See [`LdapConn::modify()`](struct.LdapConn.html#method.modify).
pub fn modify<S: AsRef<[u8]> + Eq + Hash>(&self, dn: &str, mods: Vec<Mod<S>>) ->
Box<Future<Item=LdapResult, Error=io::Error>> {
let mut any_add_empty = false;
let req = Tag::Sequence(Sequence {
id: 6,
class: TagClass::Application,
inner: vec![
Tag::OctetString(OctetString {
inner: Vec::from(dn.as_bytes()),
.. Default::default()
}),
Tag::Sequence(Sequence {
inner: mods.into_iter().map(|m| {
let mut is_add = false;
let (num, attr, set) = match m {
Mod::Add(attr, set) => { is_add = true; (0, attr, set) },
Mod::Delete(attr, set) => (1, attr, set),
Mod::Replace(attr, set) => (2, attr, set),
};
if set.is_empty() && is_add {
any_add_empty = true;
}
let op = Tag::Enumerated(Enumerated {
inner: num,
.. Default::default()
});
let part_attr = Tag::Sequence(Sequence {
inner: vec![
Tag::OctetString(OctetString {
inner: Vec::from(attr.as_ref()),
.. Default::default()
}),
Tag::Set(Set {
inner: set.into_iter().map(|val| {
Tag::OctetString(OctetString {
inner: Vec::from(val.as_ref()),
.. Default::default()
})
}).collect(),
.. Default::default()
})
],
.. Default::default()
});
Tag::Sequence(Sequence {
inner: vec![op, part_attr],
.. Default::default()
})
}).collect(),
.. Default::default()
})
]
});
if any_add_empty |
let fut = self.call(LdapOp::Single(req, next_req_controls(self)))
.and_then(|response| {
let (mut result, controls) = (LdapResult::from(response.0), response.1);
result.ctrls = controls;
Ok(result)
});
Box::new(fut)
}
}
| {
return Box::new(future::err(io::Error::new(io::ErrorKind::Other, "empty value set for Add")));
} | conditional_block |
modify.rs | use std::collections::HashSet;
use std::convert::AsRef;
use std::hash::Hash;
use std::io;
use lber::structures::{Tag, Enumerated, Sequence, Set, OctetString};
use lber::common::TagClass;
use futures::{future, Future};
use tokio_service::Service;
use ldap::{Ldap, LdapOp, next_req_controls};
use result::LdapResult;
/// Possible sub-operations for the Modify operation.
#[derive(Clone, Debug, PartialEq)]
pub enum Mod<S: AsRef<[u8]> + Eq + Hash> {
/// Add an attribute, with at least one value.
Add(S, HashSet<S>),
/// Delete the entire attribute, or the given values of an attribute.
Delete(S, HashSet<S>),
/// Replace an existing attribute, setting its values to those in the set, or delete it if no values are given.
Replace(S, HashSet<S>),
}
impl Ldap {
/// See [`LdapConn::modify()`](struct.LdapConn.html#method.modify).
pub fn modify<S: AsRef<[u8]> + Eq + Hash>(&self, dn: &str, mods: Vec<Mod<S>>) ->
Box<Future<Item=LdapResult, Error=io::Error>> | }
let op = Tag::Enumerated(Enumerated {
inner: num,
.. Default::default()
});
let part_attr = Tag::Sequence(Sequence {
inner: vec![
Tag::OctetString(OctetString {
inner: Vec::from(attr.as_ref()),
.. Default::default()
}),
Tag::Set(Set {
inner: set.into_iter().map(|val| {
Tag::OctetString(OctetString {
inner: Vec::from(val.as_ref()),
.. Default::default()
})
}).collect(),
.. Default::default()
})
],
.. Default::default()
});
Tag::Sequence(Sequence {
inner: vec![op, part_attr],
.. Default::default()
})
}).collect(),
.. Default::default()
})
]
});
if any_add_empty {
return Box::new(future::err(io::Error::new(io::ErrorKind::Other, "empty value set for Add")));
}
let fut = self.call(LdapOp::Single(req, next_req_controls(self)))
.and_then(|response| {
let (mut result, controls) = (LdapResult::from(response.0), response.1);
result.ctrls = controls;
Ok(result)
});
Box::new(fut)
}
}
| {
let mut any_add_empty = false;
let req = Tag::Sequence(Sequence {
id: 6,
class: TagClass::Application,
inner: vec![
Tag::OctetString(OctetString {
inner: Vec::from(dn.as_bytes()),
.. Default::default()
}),
Tag::Sequence(Sequence {
inner: mods.into_iter().map(|m| {
let mut is_add = false;
let (num, attr, set) = match m {
Mod::Add(attr, set) => { is_add = true; (0, attr, set) },
Mod::Delete(attr, set) => (1, attr, set),
Mod::Replace(attr, set) => (2, attr, set),
};
if set.is_empty() && is_add {
any_add_empty = true; | identifier_body |
error.rs | use std::{fmt, error};
#[derive(Debug)]
pub enum ServerError {
UnableToSavePost,
UnspecifiedDatabaseUrl,
UnableToConnectWithDatabase(String),
UnableToLoadPosts,
UnableToDeletePosts,
UnableToPublishPost(i32),
UnableToGetPost(i32)
}
#[allow(unused_variables)]
impl ServerError {
fn desc(&self) -> &str |
}
impl error::Error for ServerError {
fn description(&self) -> &str {
self.desc()
}
}
impl fmt::Display for ServerError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.desc().fmt(formatter)
}
}
| {
match *self {
ServerError::UnableToSavePost => "Error saving post",
ServerError::UnspecifiedDatabaseUrl => "Database URL is unspecified",
ServerError::UnableToConnectWithDatabase(ref url) => "Error connecting to database",
ServerError::UnableToLoadPosts => "Unable to load posts",
ServerError::UnableToDeletePosts => "Unable to delete posts",
ServerError::UnableToPublishPost(ref id) => "Unable to publish post",
ServerError::UnableToGetPost(ref id) => "Unable to get post"
}
} | identifier_body |
error.rs | use std::{fmt, error};
#[derive(Debug)]
pub enum ServerError {
UnableToSavePost,
UnspecifiedDatabaseUrl,
UnableToConnectWithDatabase(String),
UnableToLoadPosts,
UnableToDeletePosts,
UnableToPublishPost(i32),
UnableToGetPost(i32)
}
#[allow(unused_variables)]
impl ServerError {
fn desc(&self) -> &str {
match *self {
ServerError::UnableToSavePost => "Error saving post",
ServerError::UnspecifiedDatabaseUrl => "Database URL is unspecified",
ServerError::UnableToConnectWithDatabase(ref url) => "Error connecting to database",
ServerError::UnableToLoadPosts => "Unable to load posts",
ServerError::UnableToDeletePosts => "Unable to delete posts",
ServerError::UnableToPublishPost(ref id) => "Unable to publish post",
ServerError::UnableToGetPost(ref id) => "Unable to get post"
}
} | fn description(&self) -> &str {
self.desc()
}
}
impl fmt::Display for ServerError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.desc().fmt(formatter)
}
} | }
impl error::Error for ServerError { | random_line_split |
error.rs | use std::{fmt, error};
#[derive(Debug)]
pub enum ServerError {
UnableToSavePost,
UnspecifiedDatabaseUrl,
UnableToConnectWithDatabase(String),
UnableToLoadPosts,
UnableToDeletePosts,
UnableToPublishPost(i32),
UnableToGetPost(i32)
}
#[allow(unused_variables)]
impl ServerError {
fn desc(&self) -> &str {
match *self {
ServerError::UnableToSavePost => "Error saving post",
ServerError::UnspecifiedDatabaseUrl => "Database URL is unspecified",
ServerError::UnableToConnectWithDatabase(ref url) => "Error connecting to database",
ServerError::UnableToLoadPosts => "Unable to load posts",
ServerError::UnableToDeletePosts => "Unable to delete posts",
ServerError::UnableToPublishPost(ref id) => "Unable to publish post",
ServerError::UnableToGetPost(ref id) => "Unable to get post"
}
}
}
impl error::Error for ServerError {
fn description(&self) -> &str {
self.desc()
}
}
impl fmt::Display for ServerError {
fn | (&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.desc().fmt(formatter)
}
}
| fmt | identifier_name |
lint-dead-code-1.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(unused_variable)];
#[deny(dead_code)];
#[crate_type="lib"];
pub use foo2::Bar2;
mod foo {
pub struct Bar; //~ ERROR: code is never used
}
mod foo2 {
pub struct Bar2;
}
pub static pub_static: int = 0;
static priv_static: int = 0; //~ ERROR: code is never used
static used_static: int = 0;
pub static used_static2: int = used_static;
pub fn pub_fn() {
used_fn();
let used_struct1 = UsedStruct1 { x: 1 };
let used_struct2 = UsedStruct2(1);
let used_struct3 = UsedStruct3;
let e = foo3;
SemiUsedStruct::la_la_la();
}
fn priv_fn() { //~ ERROR: code is never used
let unused_struct = PrivStruct;
}
fn used_fn() {}
pub type typ = ~UsedStruct4;
pub struct PubStruct();
struct PrivStruct; //~ ERROR: code is never used
struct UsedStruct1 { x: int }
struct UsedStruct2(int);
struct UsedStruct3;
struct UsedStruct4;
// this struct is never used directly, but its method is, so we don't want
// to warn it
struct SemiUsedStruct;
impl SemiUsedStruct {
fn la_la_la() {}
}
pub enum pub_enum { foo1, bar1 }
enum priv_enum { foo2, bar2 } //~ ERROR: code is never used
enum used_enum { foo3, bar3 }
fn foo() |
fn bar() { //~ ERROR: code is never used
foo();
}
| { //~ ERROR: code is never used
bar();
let unused_enum = foo2;
} | identifier_body |
lint-dead-code-1.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(unused_variable)];
#[deny(dead_code)];
#[crate_type="lib"]; | }
mod foo2 {
pub struct Bar2;
}
pub static pub_static: int = 0;
static priv_static: int = 0; //~ ERROR: code is never used
static used_static: int = 0;
pub static used_static2: int = used_static;
pub fn pub_fn() {
used_fn();
let used_struct1 = UsedStruct1 { x: 1 };
let used_struct2 = UsedStruct2(1);
let used_struct3 = UsedStruct3;
let e = foo3;
SemiUsedStruct::la_la_la();
}
fn priv_fn() { //~ ERROR: code is never used
let unused_struct = PrivStruct;
}
fn used_fn() {}
pub type typ = ~UsedStruct4;
pub struct PubStruct();
struct PrivStruct; //~ ERROR: code is never used
struct UsedStruct1 { x: int }
struct UsedStruct2(int);
struct UsedStruct3;
struct UsedStruct4;
// this struct is never used directly, but its method is, so we don't want
// to warn it
struct SemiUsedStruct;
impl SemiUsedStruct {
fn la_la_la() {}
}
pub enum pub_enum { foo1, bar1 }
enum priv_enum { foo2, bar2 } //~ ERROR: code is never used
enum used_enum { foo3, bar3 }
fn foo() { //~ ERROR: code is never used
bar();
let unused_enum = foo2;
}
fn bar() { //~ ERROR: code is never used
foo();
} |
pub use foo2::Bar2;
mod foo {
pub struct Bar; //~ ERROR: code is never used | random_line_split |
lint-dead-code-1.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(unused_variable)];
#[deny(dead_code)];
#[crate_type="lib"];
pub use foo2::Bar2;
mod foo {
pub struct Bar; //~ ERROR: code is never used
}
mod foo2 {
pub struct Bar2;
}
pub static pub_static: int = 0;
static priv_static: int = 0; //~ ERROR: code is never used
static used_static: int = 0;
pub static used_static2: int = used_static;
pub fn pub_fn() {
used_fn();
let used_struct1 = UsedStruct1 { x: 1 };
let used_struct2 = UsedStruct2(1);
let used_struct3 = UsedStruct3;
let e = foo3;
SemiUsedStruct::la_la_la();
}
fn priv_fn() { //~ ERROR: code is never used
let unused_struct = PrivStruct;
}
fn used_fn() {}
pub type typ = ~UsedStruct4;
pub struct PubStruct();
struct PrivStruct; //~ ERROR: code is never used
struct | { x: int }
struct UsedStruct2(int);
struct UsedStruct3;
struct UsedStruct4;
// this struct is never used directly, but its method is, so we don't want
// to warn it
struct SemiUsedStruct;
impl SemiUsedStruct {
fn la_la_la() {}
}
pub enum pub_enum { foo1, bar1 }
enum priv_enum { foo2, bar2 } //~ ERROR: code is never used
enum used_enum { foo3, bar3 }
fn foo() { //~ ERROR: code is never used
bar();
let unused_enum = foo2;
}
fn bar() { //~ ERROR: code is never used
foo();
}
| UsedStruct1 | identifier_name |
posting.rs | use amount::MixedAmount; | use std::fmt::Error;
use std::result::Result;
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum ClearedStatus {
Uncleared,
Pending,
Cleared
}
impl Display for ClearedStatus {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match self {
&ClearedStatus::Uncleared => write!(f, ""),
&ClearedStatus::Pending => write!(f, "!"),
&ClearedStatus::Cleared => write!(f, "*"),
}
}
}
#[derive(Clone, PartialEq, Eq)]
pub struct Tag(String, String);
#[derive(Clone, PartialEq, Eq)]
pub enum PostingType {
Regular,
Virtual,
BalancedVirtual
}
#[derive(Clone, PartialEq, Eq)]
pub struct Posting {
pub status: ClearedStatus,
pub amount: MixedAmount,
pub posting_type: PostingType,
pub tags: Vec<Tag>,
pub balance_assertion: Option<MixedAmount>,
pub transaction: Option<Transaction>
}
impl Posting {
pub fn is_real(&self) -> bool {
self.posting_type == PostingType::Regular
}
pub fn is_virtual(&self) -> bool {
self.posting_type == PostingType::Virtual
}
pub fn related_postings(&self) -> Vec<Posting> {
match self.transaction.clone() {
Some(t) => t.postings.iter().filter(|&x| x!= self).map(|x| x.clone()).collect(),
_ => vec!()
}
}
pub fn sum_postings(postings: Vec<Posting>) -> MixedAmount {
postings.iter().map(|x| x.clone().amount).sum()
}
pub fn status(&self) -> ClearedStatus {
match self.status.clone() {
ClearedStatus::Uncleared => match self.transaction.clone() {
Some(t) => t.status,
_ => ClearedStatus::Uncleared
},
s => s
}
}
pub fn all_tags(&self) -> Vec<Tag> {
self.tags.iter().chain(match self.transaction.clone() {
Some(t) => t.tags,
_ => vec!()
}.iter()).map(|x| x.clone()).collect()
}
} | use transaction::Transaction;
use std::fmt::Display;
use std::fmt::Formatter; | random_line_split |
posting.rs | use amount::MixedAmount;
use transaction::Transaction;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::Error;
use std::result::Result;
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum ClearedStatus {
Uncleared,
Pending,
Cleared
}
impl Display for ClearedStatus {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match self {
&ClearedStatus::Uncleared => write!(f, ""),
&ClearedStatus::Pending => write!(f, "!"),
&ClearedStatus::Cleared => write!(f, "*"),
}
}
}
#[derive(Clone, PartialEq, Eq)]
pub struct Tag(String, String);
#[derive(Clone, PartialEq, Eq)]
pub enum PostingType {
Regular,
Virtual,
BalancedVirtual
}
#[derive(Clone, PartialEq, Eq)]
pub struct Posting {
pub status: ClearedStatus,
pub amount: MixedAmount,
pub posting_type: PostingType,
pub tags: Vec<Tag>,
pub balance_assertion: Option<MixedAmount>,
pub transaction: Option<Transaction>
}
impl Posting {
pub fn is_real(&self) -> bool {
self.posting_type == PostingType::Regular
}
pub fn is_virtual(&self) -> bool {
self.posting_type == PostingType::Virtual
}
pub fn related_postings(&self) -> Vec<Posting> {
match self.transaction.clone() {
Some(t) => t.postings.iter().filter(|&x| x!= self).map(|x| x.clone()).collect(),
_ => vec!()
}
}
pub fn sum_postings(postings: Vec<Posting>) -> MixedAmount |
pub fn status(&self) -> ClearedStatus {
match self.status.clone() {
ClearedStatus::Uncleared => match self.transaction.clone() {
Some(t) => t.status,
_ => ClearedStatus::Uncleared
},
s => s
}
}
pub fn all_tags(&self) -> Vec<Tag> {
self.tags.iter().chain(match self.transaction.clone() {
Some(t) => t.tags,
_ => vec!()
}.iter()).map(|x| x.clone()).collect()
}
}
| {
postings.iter().map(|x| x.clone().amount).sum()
} | identifier_body |
posting.rs | use amount::MixedAmount;
use transaction::Transaction;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::Error;
use std::result::Result;
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum ClearedStatus {
Uncleared,
Pending,
Cleared
}
impl Display for ClearedStatus {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match self {
&ClearedStatus::Uncleared => write!(f, ""),
&ClearedStatus::Pending => write!(f, "!"),
&ClearedStatus::Cleared => write!(f, "*"),
}
}
}
#[derive(Clone, PartialEq, Eq)]
pub struct | (String, String);
#[derive(Clone, PartialEq, Eq)]
pub enum PostingType {
Regular,
Virtual,
BalancedVirtual
}
#[derive(Clone, PartialEq, Eq)]
pub struct Posting {
pub status: ClearedStatus,
pub amount: MixedAmount,
pub posting_type: PostingType,
pub tags: Vec<Tag>,
pub balance_assertion: Option<MixedAmount>,
pub transaction: Option<Transaction>
}
impl Posting {
pub fn is_real(&self) -> bool {
self.posting_type == PostingType::Regular
}
pub fn is_virtual(&self) -> bool {
self.posting_type == PostingType::Virtual
}
pub fn related_postings(&self) -> Vec<Posting> {
match self.transaction.clone() {
Some(t) => t.postings.iter().filter(|&x| x!= self).map(|x| x.clone()).collect(),
_ => vec!()
}
}
pub fn sum_postings(postings: Vec<Posting>) -> MixedAmount {
postings.iter().map(|x| x.clone().amount).sum()
}
pub fn status(&self) -> ClearedStatus {
match self.status.clone() {
ClearedStatus::Uncleared => match self.transaction.clone() {
Some(t) => t.status,
_ => ClearedStatus::Uncleared
},
s => s
}
}
pub fn all_tags(&self) -> Vec<Tag> {
self.tags.iter().chain(match self.transaction.clone() {
Some(t) => t.tags,
_ => vec!()
}.iter()).map(|x| x.clone()).collect()
}
}
| Tag | identifier_name |
mp4.rs | #![no_main]
use libfuzzer_sys::fuzz_target;
use mp4parse_capi::*;
use std::convert::TryInto;
use std::io::Read;
type CursorType<'a> = std::io::Cursor<&'a [u8]>;
extern "C" fn vec_read(buf: *mut u8, size: usize, userdata: *mut std::os::raw::c_void) -> isize {
let input: &mut CursorType = unsafe { &mut *(userdata as *mut _) };
let buf = unsafe { std::slice::from_raw_parts_mut(buf, size) };
match input.read(buf) { | Ok(n) => n.try_into().expect("invalid conversion"),
Err(_) => -1,
}
}
fuzz_target!(|data: &[u8]| {
let mut cursor: CursorType = std::io::Cursor::new(data);
let io = Mp4parseIo {
read: Some(vec_read),
userdata: &mut cursor as *mut _ as *mut std::os::raw::c_void,
};
unsafe {
let mut context = std::ptr::null_mut();
if mp4parse_new(&io, &mut context)!= Mp4parseStatus::Ok {
return;
}
let mut frag_info = Default::default();
mp4parse_get_fragment_info(context, &mut frag_info);
let mut pssh_info = Default::default();
mp4parse_get_pssh_info(context, &mut pssh_info);
let mut count = 0;
mp4parse_get_track_count(context, &mut count);
for track in 0..count {
let mut fragmented = 0;
mp4parse_is_fragmented(context, track, &mut fragmented);
let mut info = Default::default();
mp4parse_get_track_info(context, track, &mut info);
match info.track_type {
Mp4parseTrackType::Video => {
let mut video = Mp4parseTrackVideoInfo::default();
mp4parse_get_track_video_info(context, track, &mut video);
}
Mp4parseTrackType::Audio => {
let mut audio = Default::default();
mp4parse_get_track_audio_info(context, track, &mut audio);
}
// No C API for metadata tracks yet.
Mp4parseTrackType::Metadata => {}
}
let mut indices = Default::default();
mp4parse_get_indice_table(context, track, &mut indices);
}
mp4parse_free(context);
}
}); | random_line_split |
|
mp4.rs | #![no_main]
use libfuzzer_sys::fuzz_target;
use mp4parse_capi::*;
use std::convert::TryInto;
use std::io::Read;
type CursorType<'a> = std::io::Cursor<&'a [u8]>;
extern "C" fn | (buf: *mut u8, size: usize, userdata: *mut std::os::raw::c_void) -> isize {
let input: &mut CursorType = unsafe { &mut *(userdata as *mut _) };
let buf = unsafe { std::slice::from_raw_parts_mut(buf, size) };
match input.read(buf) {
Ok(n) => n.try_into().expect("invalid conversion"),
Err(_) => -1,
}
}
fuzz_target!(|data: &[u8]| {
let mut cursor: CursorType = std::io::Cursor::new(data);
let io = Mp4parseIo {
read: Some(vec_read),
userdata: &mut cursor as *mut _ as *mut std::os::raw::c_void,
};
unsafe {
let mut context = std::ptr::null_mut();
if mp4parse_new(&io, &mut context)!= Mp4parseStatus::Ok {
return;
}
let mut frag_info = Default::default();
mp4parse_get_fragment_info(context, &mut frag_info);
let mut pssh_info = Default::default();
mp4parse_get_pssh_info(context, &mut pssh_info);
let mut count = 0;
mp4parse_get_track_count(context, &mut count);
for track in 0..count {
let mut fragmented = 0;
mp4parse_is_fragmented(context, track, &mut fragmented);
let mut info = Default::default();
mp4parse_get_track_info(context, track, &mut info);
match info.track_type {
Mp4parseTrackType::Video => {
let mut video = Mp4parseTrackVideoInfo::default();
mp4parse_get_track_video_info(context, track, &mut video);
}
Mp4parseTrackType::Audio => {
let mut audio = Default::default();
mp4parse_get_track_audio_info(context, track, &mut audio);
}
// No C API for metadata tracks yet.
Mp4parseTrackType::Metadata => {}
}
let mut indices = Default::default();
mp4parse_get_indice_table(context, track, &mut indices);
}
mp4parse_free(context);
}
});
| vec_read | identifier_name |
mp4.rs | #![no_main]
use libfuzzer_sys::fuzz_target;
use mp4parse_capi::*;
use std::convert::TryInto;
use std::io::Read;
type CursorType<'a> = std::io::Cursor<&'a [u8]>;
extern "C" fn vec_read(buf: *mut u8, size: usize, userdata: *mut std::os::raw::c_void) -> isize |
fuzz_target!(|data: &[u8]| {
let mut cursor: CursorType = std::io::Cursor::new(data);
let io = Mp4parseIo {
read: Some(vec_read),
userdata: &mut cursor as *mut _ as *mut std::os::raw::c_void,
};
unsafe {
let mut context = std::ptr::null_mut();
if mp4parse_new(&io, &mut context)!= Mp4parseStatus::Ok {
return;
}
let mut frag_info = Default::default();
mp4parse_get_fragment_info(context, &mut frag_info);
let mut pssh_info = Default::default();
mp4parse_get_pssh_info(context, &mut pssh_info);
let mut count = 0;
mp4parse_get_track_count(context, &mut count);
for track in 0..count {
let mut fragmented = 0;
mp4parse_is_fragmented(context, track, &mut fragmented);
let mut info = Default::default();
mp4parse_get_track_info(context, track, &mut info);
match info.track_type {
Mp4parseTrackType::Video => {
let mut video = Mp4parseTrackVideoInfo::default();
mp4parse_get_track_video_info(context, track, &mut video);
}
Mp4parseTrackType::Audio => {
let mut audio = Default::default();
mp4parse_get_track_audio_info(context, track, &mut audio);
}
// No C API for metadata tracks yet.
Mp4parseTrackType::Metadata => {}
}
let mut indices = Default::default();
mp4parse_get_indice_table(context, track, &mut indices);
}
mp4parse_free(context);
}
});
| {
let input: &mut CursorType = unsafe { &mut *(userdata as *mut _) };
let buf = unsafe { std::slice::from_raw_parts_mut(buf, size) };
match input.read(buf) {
Ok(n) => n.try_into().expect("invalid conversion"),
Err(_) => -1,
}
} | identifier_body |
ui.rs | extern crate rustbox;
use self::rustbox::{Color, Style, RustBox};
use super::board::{Board, HEIGHT, WIDTH};
use super::tetromino::{Tetromino, TetrominoType};
use super::window::Window;
// Default scaling factor for the board
const SCALE: usize = 2;
// Default values for styling terminal output
const DEFAULT_STYLE: Style = rustbox::RB_NORMAL;
const DEFAULT_FG: Color = Color::White;
const DEFAULT_BG: Color = Color::Black;
/// A collection of Window structs representing the user interface
pub struct Ui<'a> {
board: Window<'a>,
score: Window<'a>,
level: Window<'a>,
lines: Window<'a>,
next: Window<'a>,
hold: Window<'a>,
}
impl<'a> Ui<'a> {
/// Initializes a new Ui struct
pub fn new(rb: &'a RustBox) -> Self {
Ui {
board: Window::new(0, 5, (11 * SCALE) - 1, 21, rb),
score: Window::new(12 * SCALE, 6, 11, 1, rb),
level: Window::new(12 * SCALE, 10, 11, 1, rb),
lines: Window::new(12 * SCALE, 14, 11, 1, rb),
next: Window::new(5, 1, (5 * SCALE) + 1, 4, rb),
hold: Window::new(12 * SCALE, 18, (5 * SCALE) + 1, 5, rb),
}
}
/// Setup the default elements of the user interface
pub fn setup(&self) {
self.board.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.next.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.hold.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.print_score(0);
self.print_level(0);
self.print_lines(0);
}
/// Resets some of the user interface elements
pub fn reset(&self) {
self.score.clear();
self.level.clear();
self.lines.clear();
self.print_score(0);
self.print_level(0);
self.print_lines(0);
}
/// Print the state of the board
pub fn | (&self, board: &Board) {
// Start at 2 because only 20 of the board's rows should be displayed
for y in 2..HEIGHT {
for x in 0..WIDTH {
match board.field()[y][x] {
// When printing the board, offset x and y to compensate
// for the Window's borders and showing only 20 rows
Some(ref mino) => {
let color = self.get_tetromino_color(mino);
let rune = self.get_tetromino_rune(mino);
self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune);
self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune);
}
None => {
self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG,'');
self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, '.');
},
}
}
}
}
/// Gets the character associated with a TetrominoType
fn get_tetromino_rune(&self, tetromino_type: &TetrominoType) -> char {
match tetromino_type {
&TetrominoType::Ghost => '□',
_ => '■',
}
}
/// Gets the color associated with a TetrominoType
fn get_tetromino_color(&self, tetromino_type: &TetrominoType) -> Color {
match tetromino_type {
&TetrominoType::I => Color::Cyan,
&TetrominoType::J => Color::Blue,
&TetrominoType::L | &TetrominoType::Ghost => Color::White,
&TetrominoType::O => Color::Yellow,
&TetrominoType::S => Color::Green,
&TetrominoType::T => Color::Magenta,
&TetrominoType::Z => Color::Red,
}
}
/// Prints the next Tetromino
pub fn print_next(&self, tetromino: Tetromino) {
self.print_tetromino(tetromino, &self.next);
}
/// Prints the hold Tetromino
pub fn print_hold(&self, hold: Option<Tetromino>) {
if let Some(tetromino) = hold {
self.print_tetromino(tetromino, &self.hold);
}
}
// Prints a Tetromino to a specified Window
fn print_tetromino(&self, tetromino: Tetromino, window: &Window) {
window.clear();
window.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
for &mino in tetromino.minos().iter() {
let color = self.get_tetromino_color(&tetromino.tetromino_type());
window.print_char(((mino.x as usize) * SCALE + 2), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■');
window.print_char(((mino.x as usize) * SCALE + 3), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■');
}
}
/// Prints the player's score
pub fn print_score(&self, score: usize) {
self.score.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", score));
}
/// Prints the difficulty level
pub fn print_level(&self, level: usize) {
self.level.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", level));
}
/// Prints the number of lines cleared
pub fn print_lines(&self, lines: usize) {
self.lines.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", lines));
}
} | print_board | identifier_name |
ui.rs | use self::rustbox::{Color, Style, RustBox};
use super::board::{Board, HEIGHT, WIDTH};
use super::tetromino::{Tetromino, TetrominoType};
use super::window::Window;
// Default scaling factor for the board
const SCALE: usize = 2;
// Default values for styling terminal output
const DEFAULT_STYLE: Style = rustbox::RB_NORMAL;
const DEFAULT_FG: Color = Color::White;
const DEFAULT_BG: Color = Color::Black;
/// A collection of Window structs representing the user interface
pub struct Ui<'a> {
board: Window<'a>,
score: Window<'a>,
level: Window<'a>,
lines: Window<'a>,
next: Window<'a>,
hold: Window<'a>,
}
impl<'a> Ui<'a> {
/// Initializes a new Ui struct
pub fn new(rb: &'a RustBox) -> Self {
Ui {
board: Window::new(0, 5, (11 * SCALE) - 1, 21, rb),
score: Window::new(12 * SCALE, 6, 11, 1, rb),
level: Window::new(12 * SCALE, 10, 11, 1, rb),
lines: Window::new(12 * SCALE, 14, 11, 1, rb),
next: Window::new(5, 1, (5 * SCALE) + 1, 4, rb),
hold: Window::new(12 * SCALE, 18, (5 * SCALE) + 1, 5, rb),
}
}
/// Setup the default elements of the user interface
pub fn setup(&self) {
self.board.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.next.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.hold.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.print_score(0);
self.print_level(0);
self.print_lines(0);
}
/// Resets some of the user interface elements
pub fn reset(&self) {
self.score.clear();
self.level.clear();
self.lines.clear();
self.print_score(0);
self.print_level(0);
self.print_lines(0);
}
/// Print the state of the board
pub fn print_board(&self, board: &Board) {
// Start at 2 because only 20 of the board's rows should be displayed
for y in 2..HEIGHT {
for x in 0..WIDTH {
match board.field()[y][x] {
// When printing the board, offset x and y to compensate
// for the Window's borders and showing only 20 rows
Some(ref mino) => {
let color = self.get_tetromino_color(mino);
let rune = self.get_tetromino_rune(mino);
self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune);
self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune);
}
None => {
self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG,'');
self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, '.');
},
}
}
}
}
/// Gets the character associated with a TetrominoType
fn get_tetromino_rune(&self, tetromino_type: &TetrominoType) -> char {
match tetromino_type {
&TetrominoType::Ghost => '□',
_ => '■',
}
}
/// Gets the color associated with a TetrominoType
fn get_tetromino_color(&self, tetromino_type: &TetrominoType) -> Color {
match tetromino_type {
&TetrominoType::I => Color::Cyan,
&TetrominoType::J => Color::Blue,
&TetrominoType::L | &TetrominoType::Ghost => Color::White,
&TetrominoType::O => Color::Yellow,
&TetrominoType::S => Color::Green,
&TetrominoType::T => Color::Magenta,
&TetrominoType::Z => Color::Red,
}
}
/// Prints the next Tetromino
pub fn print_next(&self, tetromino: Tetromino) {
self.print_tetromino(tetromino, &self.next);
}
/// Prints the hold Tetromino
pub fn print_hold(&self, hold: Option<Tetromino>) {
if let Some(tetromino) = hold {
self.print_tetromino(tetromino, &self.hold);
}
}
// Prints a Tetromino to a specified Window
fn print_tetromino(&self, tetromino: Tetromino, window: &Window) {
window.clear();
window.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
for &mino in tetromino.minos().iter() {
let color = self.get_tetromino_color(&tetromino.tetromino_type());
window.print_char(((mino.x as usize) * SCALE + 2), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■');
window.print_char(((mino.x as usize) * SCALE + 3), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■');
}
}
/// Prints the player's score
pub fn print_score(&self, score: usize) {
self.score.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", score));
}
/// Prints the difficulty level
pub fn print_level(&self, level: usize) {
self.level.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", level));
}
/// Prints the number of lines cleared
pub fn print_lines(&self, lines: usize) {
self.lines.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", lines));
}
} | extern crate rustbox;
| random_line_split |
|
ui.rs | extern crate rustbox;
use self::rustbox::{Color, Style, RustBox};
use super::board::{Board, HEIGHT, WIDTH};
use super::tetromino::{Tetromino, TetrominoType};
use super::window::Window;
// Default scaling factor for the board
const SCALE: usize = 2;
// Default values for styling terminal output
const DEFAULT_STYLE: Style = rustbox::RB_NORMAL;
const DEFAULT_FG: Color = Color::White;
const DEFAULT_BG: Color = Color::Black;
/// A collection of Window structs representing the user interface
pub struct Ui<'a> {
board: Window<'a>,
score: Window<'a>,
level: Window<'a>,
lines: Window<'a>,
next: Window<'a>,
hold: Window<'a>,
}
impl<'a> Ui<'a> {
/// Initializes a new Ui struct
pub fn new(rb: &'a RustBox) -> Self {
Ui {
board: Window::new(0, 5, (11 * SCALE) - 1, 21, rb),
score: Window::new(12 * SCALE, 6, 11, 1, rb),
level: Window::new(12 * SCALE, 10, 11, 1, rb),
lines: Window::new(12 * SCALE, 14, 11, 1, rb),
next: Window::new(5, 1, (5 * SCALE) + 1, 4, rb),
hold: Window::new(12 * SCALE, 18, (5 * SCALE) + 1, 5, rb),
}
}
/// Setup the default elements of the user interface
pub fn setup(&self) {
self.board.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.next.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.hold.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
self.print_score(0);
self.print_level(0);
self.print_lines(0);
}
/// Resets some of the user interface elements
pub fn reset(&self) {
self.score.clear();
self.level.clear();
self.lines.clear();
self.print_score(0);
self.print_level(0);
self.print_lines(0);
}
/// Print the state of the board
pub fn print_board(&self, board: &Board) {
// Start at 2 because only 20 of the board's rows should be displayed
for y in 2..HEIGHT {
for x in 0..WIDTH {
match board.field()[y][x] {
// When printing the board, offset x and y to compensate
// for the Window's borders and showing only 20 rows
Some(ref mino) => {
let color = self.get_tetromino_color(mino);
let rune = self.get_tetromino_rune(mino);
self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune);
self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune);
}
None => {
self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG,'');
self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, '.');
},
}
}
}
}
/// Gets the character associated with a TetrominoType
fn get_tetromino_rune(&self, tetromino_type: &TetrominoType) -> char {
match tetromino_type {
&TetrominoType::Ghost => '□',
_ => '■',
}
}
/// Gets the color associated with a TetrominoType
fn get_tetromino_color(&self, tetromino_type: &TetrominoType) -> Color {
match tetromino_type {
&TetrominoType::I => Color::Cyan,
&TetrominoType::J => Color::Blue,
&TetrominoType::L | &TetrominoType::Ghost => Color::White,
&TetrominoType::O => Color::Yellow,
&TetrominoType::S => Color::Green,
&TetrominoType::T => Color::Magenta,
&TetrominoType::Z => Color::Red,
}
}
/// Prints the next Tetromino
pub fn print_next(&self, tetromino: Tetromino) {
self.print_tetromino(tetromino, &self.next);
}
/// Prints the hold Tetromino
pub fn print_hold(&self, hold: Option<Tetromino>) {
if let Some(tetromino) = hold {
self.print_tetromino(tetromino, &self.hold);
}
}
// Prints a Tetromino to a specified Window
fn print_tetromino(&self, tetromino: Tetromino, window: &Window) {
| / Prints the player's score
pub fn print_score(&self, score: usize) {
self.score.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", score));
}
/// Prints the difficulty level
pub fn print_level(&self, level: usize) {
self.level.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", level));
}
/// Prints the number of lines cleared
pub fn print_lines(&self, lines: usize) {
self.lines.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", lines));
}
} | window.clear();
window.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG);
for &mino in tetromino.minos().iter() {
let color = self.get_tetromino_color(&tetromino.tetromino_type());
window.print_char(((mino.x as usize) * SCALE + 2), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■');
window.print_char(((mino.x as usize) * SCALE + 3), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■');
}
}
// | identifier_body |
pull_request.rs |
#[derive(Clone, Debug)]
pub struct PullRequest {
action: String,
number: u64,
repository: String,
sha: String,
url: String,
author: String,
}
impl PullRequest {
pub fn action(&self) -> String {
self.action.clone()
}
pub fn number(&self) -> u64 {
self.number
}
pub fn repo(&self) -> String {
self.repository.clone()
}
pub fn sha(&self) -> String {
self.sha.clone()
}
pub fn url(&self) -> String {
self.url.clone()
}
pub fn author(&self) -> String {
self.author.clone()
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParseError {
_priv: (),
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
"provided string is not a valid Push payload".fmt(f)
}
}
impl FromStr for PullRequest {
type Err = ParseError;
fn from_str(payload: &str) -> Result<PullRequest, ParseError> {
debug!("parse pull_request");
if let Ok(parsed) = json::parse(payload) {
let action = parsed["action"].as_str();
let number = parsed["number"].as_u64();
let repository = parsed["repository"]["full_name"].as_str();
let sha = parsed["pull_request"]["head"]["sha"].as_str();
let url = parsed["pull_request"]["head"]["repo"]["clone_url"].as_str();
let author = parsed["pull_request"]["user"]["login"].as_str();
if action.is_none() || number.is_none() || repository.is_none() {
return Err(ParseError { _priv: () });
}
let event = PullRequest {
action: action.unwrap().to_owned(),
number: number.unwrap(),
repository: repository.unwrap().to_owned(),
sha: sha.unwrap().to_owned(),
url: url.unwrap().to_owned(),
author: author.unwrap().to_owned(),
};
debug!("{:?}", event);
Ok(event)
} else {
Err(ParseError { _priv: () })
}
}
}
mod test {
#[test]
fn test_parse() {
use super::PullRequest;
use std::str::FromStr;
let payload = include_str!("pull_request.json");
let event = PullRequest::from_str(payload).unwrap();
assert_eq!(event.action, "opened");
assert_eq!(event.number, 1);
assert_eq!(event.repository, "baxterthehacker/public-repo");
assert_eq!(event.sha, "0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c");
assert_eq!(
event.url,
"https://github.com/baxterthehacker/public-repo.git"
);
assert_eq!(event.author, "baxterthehacker");
}
} | extern crate json;
use std::fmt;
use std::str::FromStr; | random_line_split |
|
pull_request.rs | extern crate json;
use std::fmt;
use std::str::FromStr;
#[derive(Clone, Debug)]
pub struct PullRequest {
action: String,
number: u64,
repository: String,
sha: String,
url: String,
author: String,
}
impl PullRequest {
pub fn action(&self) -> String {
self.action.clone()
}
pub fn number(&self) -> u64 {
self.number
}
pub fn repo(&self) -> String {
self.repository.clone()
}
pub fn sha(&self) -> String {
self.sha.clone()
}
pub fn url(&self) -> String {
self.url.clone()
}
pub fn author(&self) -> String {
self.author.clone()
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParseError {
_priv: (),
}
impl fmt::Display for ParseError {
fn | (&self, f: &mut fmt::Formatter) -> fmt::Result {
"provided string is not a valid Push payload".fmt(f)
}
}
impl FromStr for PullRequest {
type Err = ParseError;
fn from_str(payload: &str) -> Result<PullRequest, ParseError> {
debug!("parse pull_request");
if let Ok(parsed) = json::parse(payload) {
let action = parsed["action"].as_str();
let number = parsed["number"].as_u64();
let repository = parsed["repository"]["full_name"].as_str();
let sha = parsed["pull_request"]["head"]["sha"].as_str();
let url = parsed["pull_request"]["head"]["repo"]["clone_url"].as_str();
let author = parsed["pull_request"]["user"]["login"].as_str();
if action.is_none() || number.is_none() || repository.is_none() {
return Err(ParseError { _priv: () });
}
let event = PullRequest {
action: action.unwrap().to_owned(),
number: number.unwrap(),
repository: repository.unwrap().to_owned(),
sha: sha.unwrap().to_owned(),
url: url.unwrap().to_owned(),
author: author.unwrap().to_owned(),
};
debug!("{:?}", event);
Ok(event)
} else {
Err(ParseError { _priv: () })
}
}
}
mod test {
#[test]
fn test_parse() {
use super::PullRequest;
use std::str::FromStr;
let payload = include_str!("pull_request.json");
let event = PullRequest::from_str(payload).unwrap();
assert_eq!(event.action, "opened");
assert_eq!(event.number, 1);
assert_eq!(event.repository, "baxterthehacker/public-repo");
assert_eq!(event.sha, "0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c");
assert_eq!(
event.url,
"https://github.com/baxterthehacker/public-repo.git"
);
assert_eq!(event.author, "baxterthehacker");
}
}
| fmt | identifier_name |
pull_request.rs | extern crate json;
use std::fmt;
use std::str::FromStr;
#[derive(Clone, Debug)]
pub struct PullRequest {
action: String,
number: u64,
repository: String,
sha: String,
url: String,
author: String,
}
impl PullRequest {
pub fn action(&self) -> String {
self.action.clone()
}
pub fn number(&self) -> u64 {
self.number
}
pub fn repo(&self) -> String {
self.repository.clone()
}
pub fn sha(&self) -> String {
self.sha.clone()
}
pub fn url(&self) -> String {
self.url.clone()
}
pub fn author(&self) -> String {
self.author.clone()
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParseError {
_priv: (),
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
"provided string is not a valid Push payload".fmt(f)
}
}
impl FromStr for PullRequest {
type Err = ParseError;
fn from_str(payload: &str) -> Result<PullRequest, ParseError> {
debug!("parse pull_request");
if let Ok(parsed) = json::parse(payload) |
debug!("{:?}", event);
Ok(event)
}
else {
Err(ParseError { _priv: () })
}
}
}
mod test {
#[test]
fn test_parse() {
use super::PullRequest;
use std::str::FromStr;
let payload = include_str!("pull_request.json");
let event = PullRequest::from_str(payload).unwrap();
assert_eq!(event.action, "opened");
assert_eq!(event.number, 1);
assert_eq!(event.repository, "baxterthehacker/public-repo");
assert_eq!(event.sha, "0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c");
assert_eq!(
event.url,
"https://github.com/baxterthehacker/public-repo.git"
);
assert_eq!(event.author, "baxterthehacker");
}
}
| {
let action = parsed["action"].as_str();
let number = parsed["number"].as_u64();
let repository = parsed["repository"]["full_name"].as_str();
let sha = parsed["pull_request"]["head"]["sha"].as_str();
let url = parsed["pull_request"]["head"]["repo"]["clone_url"].as_str();
let author = parsed["pull_request"]["user"]["login"].as_str();
if action.is_none() || number.is_none() || repository.is_none() {
return Err(ParseError { _priv: () });
}
let event = PullRequest {
action: action.unwrap().to_owned(),
number: number.unwrap(),
repository: repository.unwrap().to_owned(),
sha: sha.unwrap().to_owned(),
url: url.unwrap().to_owned(),
author: author.unwrap().to_owned(),
}; | conditional_block |
pull_request.rs | extern crate json;
use std::fmt;
use std::str::FromStr;
#[derive(Clone, Debug)]
pub struct PullRequest {
action: String,
number: u64,
repository: String,
sha: String,
url: String,
author: String,
}
impl PullRequest {
pub fn action(&self) -> String {
self.action.clone()
}
pub fn number(&self) -> u64 {
self.number
}
pub fn repo(&self) -> String {
self.repository.clone()
}
pub fn sha(&self) -> String {
self.sha.clone()
}
pub fn url(&self) -> String {
self.url.clone()
}
pub fn author(&self) -> String {
self.author.clone()
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParseError {
_priv: (),
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
"provided string is not a valid Push payload".fmt(f)
}
}
impl FromStr for PullRequest {
type Err = ParseError;
fn from_str(payload: &str) -> Result<PullRequest, ParseError> | author: author.unwrap().to_owned(),
};
debug!("{:?}", event);
Ok(event)
} else {
Err(ParseError { _priv: () })
}
}
}
mod test {
#[test]
fn test_parse() {
use super::PullRequest;
use std::str::FromStr;
let payload = include_str!("pull_request.json");
let event = PullRequest::from_str(payload).unwrap();
assert_eq!(event.action, "opened");
assert_eq!(event.number, 1);
assert_eq!(event.repository, "baxterthehacker/public-repo");
assert_eq!(event.sha, "0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c");
assert_eq!(
event.url,
"https://github.com/baxterthehacker/public-repo.git"
);
assert_eq!(event.author, "baxterthehacker");
}
}
| {
debug!("parse pull_request");
if let Ok(parsed) = json::parse(payload) {
let action = parsed["action"].as_str();
let number = parsed["number"].as_u64();
let repository = parsed["repository"]["full_name"].as_str();
let sha = parsed["pull_request"]["head"]["sha"].as_str();
let url = parsed["pull_request"]["head"]["repo"]["clone_url"].as_str();
let author = parsed["pull_request"]["user"]["login"].as_str();
if action.is_none() || number.is_none() || repository.is_none() {
return Err(ParseError { _priv: () });
}
let event = PullRequest {
action: action.unwrap().to_owned(),
number: number.unwrap(),
repository: repository.unwrap().to_owned(),
sha: sha.unwrap().to_owned(),
url: url.unwrap().to_owned(), | identifier_body |
merkletree.rs | use errors::prelude::*;
use services::ledger::merkletree::proof::{Lemma, Proof};
use services::ledger::merkletree::tree::{LeavesIntoIterator, LeavesIterator, Tree, TreeLeafData};
use utils::crypto::hash::{Hash, HASHBYTES};
/// A Merkle tree is a binary tree, with values of type `T` at the leafs,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns `None` if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> IndyResult<Self> {
if values.is_empty() {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
}
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
while cur.len() > 1 {
let mut next = Vec::new();
while!cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
|
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
nodes_count+=1;
}
}
height += 1;
cur = next;
}
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
root: root,
height: height,
count: count,
nodes_count: nodes_count
})
}
/// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> {
self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
let mut ret:String = String::with_capacity(HASHBYTES *2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
return ret;
}
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize {
self.count
}
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> IndyResult<Option<Proof>> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn into_iter(self) -> Self::IntoIter {
self.root.iter()
}
} | let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?; | random_line_split |
merkletree.rs | use errors::prelude::*;
use services::ledger::merkletree::proof::{Lemma, Proof};
use services::ledger::merkletree::tree::{LeavesIntoIterator, LeavesIterator, Tree, TreeLeafData};
use utils::crypto::hash::{Hash, HASHBYTES};
/// A Merkle tree is a binary tree, with values of type `T` at the leafs,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns `None` if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> IndyResult<Self> {
if values.is_empty() |
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
while cur.len() > 1 {
let mut next = Vec::new();
while!cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?;
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
nodes_count+=1;
}
}
height += 1;
cur = next;
}
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
root: root,
height: height,
count: count,
nodes_count: nodes_count
})
}
/// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> {
self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
let mut ret:String = String::with_capacity(HASHBYTES *2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
return ret;
}
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize {
self.count
}
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> IndyResult<Option<Proof>> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn into_iter(self) -> Self::IntoIter {
self.root.iter()
}
}
| {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
} | conditional_block |
merkletree.rs | use errors::prelude::*;
use services::ledger::merkletree::proof::{Lemma, Proof};
use services::ledger::merkletree::tree::{LeavesIntoIterator, LeavesIterator, Tree, TreeLeafData};
use utils::crypto::hash::{Hash, HASHBYTES};
/// A Merkle tree is a binary tree, with values of type `T` at the leafs,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns `None` if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> IndyResult<Self> {
if values.is_empty() {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
}
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
while cur.len() > 1 {
let mut next = Vec::new();
while!cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?;
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
nodes_count+=1;
}
}
height += 1;
cur = next;
}
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
root: root,
height: height,
count: count,
nodes_count: nodes_count
})
}
/// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> {
self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
let mut ret: String = String::with_capacity(HASHBYTES * 2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
return ret;
}
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize {
self.count
}
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> IndyResult<Option<Proof>> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
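// Illustrative usage sketch (not part of the original file): building a tree and
// requesting an inclusion proof. The concrete `TreeLeafData` type and the proof
// verification call are assumptions here, not the crate's documented API.
//
//     let leaves: Vec<TreeLeafData> = vec![b"tx1".to_vec(), b"tx2".to_vec()];
//     let tree = MerkleTree::from_vec(leaves)?;
//     assert_eq!(tree.count(), 2);
//     if let Some(proof) = tree.gen_proof(b"tx1".to_vec())? {
//         // something like proof.validate(tree.root_hash()) would confirm inclusion
//     }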
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn | (self) -> Self::IntoIter {
self.root.iter()
}
}
| into_iter | identifier_name |
class-method-cross-crate.rs | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_2.rs
extern crate cci_class_2;
use cci_class_2::kitties::cat;
pub fn main() {
let nyan : cat = cat(52, 99);
let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak();
} | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | random_line_split |
|
class-method-cross-crate.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_2.rs
extern crate cci_class_2;
use cci_class_2::kitties::cat;
pub fn | () {
let nyan : cat = cat(52, 99);
let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak();
}
| main | identifier_name |
class-method-cross-crate.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_2.rs
extern crate cci_class_2;
use cci_class_2::kitties::cat;
pub fn main() | {
let nyan : cat = cat(52, 99);
let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak();
} | identifier_body |
|
prelude.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The core prelude
//!
//! This module is intended for users of libcore which do not link to libstd as
//! well. This module is not imported by default, but using the entire contents
//! of this module will provide all of the useful traits and types in libcore
//! that one would expect from the standard library as well.
//!
//! There is no method to automatically inject this prelude, and this prelude is
//! a subset of the standard library's prelude.
//!
//! # Example
//!
//! ```ignore
//! # fn main() {
//! #![feature(globs)]
//!
//! use core::prelude::*;
//! # }
//! ```
// Reexported core operators
pub use kinds::{Copy, Send, Sized, Sync};
pub use ops::{Add, Sub, Mul, Div, Rem, Neg, Not};
pub use ops::{BitAnd, BitOr, BitXor};
pub use ops::{Drop, Deref, DerefMut};
pub use ops::{Shl, Shr};
pub use ops::{Index, IndexMut};
pub use ops::{Slice, SliceMut};
pub use ops::{Fn, FnMut, FnOnce};
// Reexported functions
pub use iter::range;
pub use mem::drop;
pub use str::from_str;
// Reexported types and traits
pub use char::Char;
pub use clone::Clone;
pub use cmp::{PartialEq, PartialOrd, Eq, Ord};
pub use cmp::{Ordering, Equiv};
pub use cmp::Ordering::{Less, Equal, Greater};
pub use iter::{FromIterator, Extend, IteratorExt};
pub use iter::{Iterator, DoubleEndedIterator, DoubleEndedIteratorExt, RandomAccessIterator};
pub use iter::{IteratorCloneExt, CloneIteratorExt};
pub use iter::{IteratorOrdExt, MutableDoubleEndedIterator, ExactSizeIterator};
pub use num::{ToPrimitive, FromPrimitive};
pub use option::Option;
pub use option::Option::{Some, None};
pub use ptr::RawPtr;
pub use result::Result;
pub use result::Result::{Ok, Err};
pub use str::{Str, StrPrelude}; | pub use slice::{PartialEqSlicePrelude, OrdSlicePrelude};
pub use slice::{AsSlice, SlicePrelude}; | pub use tuple::{Tuple1, Tuple2, Tuple3, Tuple4};
pub use tuple::{Tuple5, Tuple6, Tuple7, Tuple8};
pub use tuple::{Tuple9, Tuple10, Tuple11, Tuple12}; | random_line_split |
theme_config.rs | use std::collections::HashMap;
use ron::{de::from_str, Value};
use serde_derive::{Deserialize, Serialize};
use crate::theming::config::StyleConfig;
pub static BASE_STYLE: &str = "base";
pub static RESOURCE_KEY: &str = "$";
/// Used to store and read properties that could be requested by a | #[serde(default)]
pub styles: HashMap<String, StyleConfig>,
#[serde(default)]
pub resources: HashMap<String, Value>,
}
impl<'a> ThemeConfig {
/// Extends the given theme with another theme. Replaces the
/// current name with the new chosen name `other`. If `other`
/// contains a style with the same key entry, this key will be
/// replaced in the current theme.
pub fn extend(mut self, other: ThemeConfig) -> Self {
let mut other = other;
for style in other.styles.drain() {
self.styles.insert(style.0, style.1);
}
for resource in other.resources.drain() {
self.resources.insert(resource.0, resource.1);
}
self
}
}
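// Illustrative sketch (not in the original source): merging two theme configs parsed
// from RON. The RON payloads below are placeholders, not real OrbTk theme files.
//
//     let base = ThemeConfig::from("Theme ( styles: {}, resources: {} )");
//     let dark = ThemeConfig::from("Theme ( styles: {}, resources: {} )");
//     let merged = base.extend(dark); // entries from `dark` overwrite duplicates in `base`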
impl From<&str> for ThemeConfig {
fn from(s: &str) -> Self {
from_str(s).unwrap()
}
} | /// given property name and a selector.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
#[serde(rename = "Theme")]
pub struct ThemeConfig { | random_line_split |
theme_config.rs | use std::collections::HashMap;
use ron::{de::from_str, Value};
use serde_derive::{Deserialize, Serialize};
use crate::theming::config::StyleConfig;
pub static BASE_STYLE: &str = "base";
pub static RESOURCE_KEY: &str = "$";
/// Used to store and read properties that could be requested by a
/// given property name and a selector.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
#[serde(rename = "Theme")]
pub struct ThemeConfig {
#[serde(default)]
pub styles: HashMap<String, StyleConfig>,
#[serde(default)]
pub resources: HashMap<String, Value>,
}
impl<'a> ThemeConfig {
/// Extends the given theme with another theme. Replaces the
/// current name with the new chosen name `other`. If `other`
/// contains a style with the same key entry, this key will be
/// replaced in the current theme.
pub fn extend(mut self, other: ThemeConfig) -> Self {
let mut other = other;
for style in other.styles.drain() {
self.styles.insert(style.0, style.1);
}
for resource in other.resources.drain() {
self.resources.insert(resource.0, resource.1);
}
self
}
}
impl From<&str> for ThemeConfig {
fn | (s: &str) -> Self {
from_str(s).unwrap()
}
}
| from | identifier_name |
theme_config.rs | use std::collections::HashMap;
use ron::{de::from_str, Value};
use serde_derive::{Deserialize, Serialize};
use crate::theming::config::StyleConfig;
pub static BASE_STYLE: &str = "base";
pub static RESOURCE_KEY: &str = "$";
/// Used to store and read properties that could be requested by a
/// given property name and a selector.
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
#[serde(rename = "Theme")]
pub struct ThemeConfig {
#[serde(default)]
pub styles: HashMap<String, StyleConfig>,
#[serde(default)]
pub resources: HashMap<String, Value>,
}
impl<'a> ThemeConfig {
/// Extends the given theme with another theme. Replaces the
/// current name with the new chosen name `other`. If `other`
/// contains a style with the same key entry, this key will be
/// replaced in the current theme.
pub fn extend(mut self, other: ThemeConfig) -> Self {
let mut other = other;
for style in other.styles.drain() {
self.styles.insert(style.0, style.1);
}
for resource in other.resources.drain() {
self.resources.insert(resource.0, resource.1);
}
self
}
}
impl From<&str> for ThemeConfig {
fn from(s: &str) -> Self |
}
| {
from_str(s).unwrap()
} | identifier_body |
cssgroupingrule.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::CSSGroupingRuleBinding::CSSGroupingRuleMethods;
use crate::dom::bindings::error::{ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::cssrule::CSSRule;
use crate::dom::cssrulelist::{CSSRuleList, RulesSource};
use crate::dom::cssstylesheet::CSSStyleSheet;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::shared_lock::{Locked, SharedRwLock};
use style::stylesheets::CssRules as StyleCssRules;
#[dom_struct]
pub struct CSSGroupingRule {
cssrule: CSSRule,
#[ignore_malloc_size_of = "Arc"]
rules: Arc<Locked<StyleCssRules>>,
rulelist: MutNullableDom<CSSRuleList>,
}
impl CSSGroupingRule {
pub fn new_inherited(
parent_stylesheet: &CSSStyleSheet,
rules: Arc<Locked<StyleCssRules>>,
) -> CSSGroupingRule {
CSSGroupingRule {
cssrule: CSSRule::new_inherited(parent_stylesheet),
rules: rules,
rulelist: MutNullableDom::new(None),
}
}
fn rulelist(&self) -> DomRoot<CSSRuleList> |
pub fn parent_stylesheet(&self) -> &CSSStyleSheet {
self.cssrule.parent_stylesheet()
}
pub fn shared_lock(&self) -> &SharedRwLock {
self.cssrule.shared_lock()
}
}
impl CSSGroupingRuleMethods for CSSGroupingRule {
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-cssrules
fn CssRules(&self) -> DomRoot<CSSRuleList> {
// XXXManishearth check origin clean flag
self.rulelist()
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-insertrule
fn InsertRule(&self, rule: DOMString, index: u32) -> Fallible<u32> {
self.rulelist().insert_rule(&rule, index, /* nested */ true)
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-deleterule
fn DeleteRule(&self, index: u32) -> ErrorResult {
self.rulelist().remove_rule(index)
}
}
| {
let parent_stylesheet = self.upcast::<CSSRule>().parent_stylesheet();
self.rulelist.or_init(|| {
CSSRuleList::new(
self.global().as_window(),
parent_stylesheet,
RulesSource::Rules(self.rules.clone()),
)
})
} | identifier_body |
cssgroupingrule.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::CSSGroupingRuleBinding::CSSGroupingRuleMethods;
use crate::dom::bindings::error::{ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString; | use crate::dom::cssrulelist::{CSSRuleList, RulesSource};
use crate::dom::cssstylesheet::CSSStyleSheet;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::shared_lock::{Locked, SharedRwLock};
use style::stylesheets::CssRules as StyleCssRules;
#[dom_struct]
pub struct CSSGroupingRule {
cssrule: CSSRule,
#[ignore_malloc_size_of = "Arc"]
rules: Arc<Locked<StyleCssRules>>,
rulelist: MutNullableDom<CSSRuleList>,
}
impl CSSGroupingRule {
pub fn new_inherited(
parent_stylesheet: &CSSStyleSheet,
rules: Arc<Locked<StyleCssRules>>,
) -> CSSGroupingRule {
CSSGroupingRule {
cssrule: CSSRule::new_inherited(parent_stylesheet),
rules: rules,
rulelist: MutNullableDom::new(None),
}
}
fn rulelist(&self) -> DomRoot<CSSRuleList> {
let parent_stylesheet = self.upcast::<CSSRule>().parent_stylesheet();
self.rulelist.or_init(|| {
CSSRuleList::new(
self.global().as_window(),
parent_stylesheet,
RulesSource::Rules(self.rules.clone()),
)
})
}
pub fn parent_stylesheet(&self) -> &CSSStyleSheet {
self.cssrule.parent_stylesheet()
}
pub fn shared_lock(&self) -> &SharedRwLock {
self.cssrule.shared_lock()
}
}
impl CSSGroupingRuleMethods for CSSGroupingRule {
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-cssrules
fn CssRules(&self) -> DomRoot<CSSRuleList> {
// XXXManishearth check origin clean flag
self.rulelist()
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-insertrule
fn InsertRule(&self, rule: DOMString, index: u32) -> Fallible<u32> {
self.rulelist().insert_rule(&rule, index, /* nested */ true)
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-deleterule
fn DeleteRule(&self, index: u32) -> ErrorResult {
self.rulelist().remove_rule(index)
}
} | use crate::dom::cssrule::CSSRule; | random_line_split |
cssgroupingrule.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::CSSGroupingRuleBinding::CSSGroupingRuleMethods;
use crate::dom::bindings::error::{ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::cssrule::CSSRule;
use crate::dom::cssrulelist::{CSSRuleList, RulesSource};
use crate::dom::cssstylesheet::CSSStyleSheet;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::shared_lock::{Locked, SharedRwLock};
use style::stylesheets::CssRules as StyleCssRules;
#[dom_struct]
pub struct CSSGroupingRule {
cssrule: CSSRule,
#[ignore_malloc_size_of = "Arc"]
rules: Arc<Locked<StyleCssRules>>,
rulelist: MutNullableDom<CSSRuleList>,
}
impl CSSGroupingRule {
pub fn new_inherited(
parent_stylesheet: &CSSStyleSheet,
rules: Arc<Locked<StyleCssRules>>,
) -> CSSGroupingRule {
CSSGroupingRule {
cssrule: CSSRule::new_inherited(parent_stylesheet),
rules: rules,
rulelist: MutNullableDom::new(None),
}
}
fn rulelist(&self) -> DomRoot<CSSRuleList> {
let parent_stylesheet = self.upcast::<CSSRule>().parent_stylesheet();
self.rulelist.or_init(|| {
CSSRuleList::new(
self.global().as_window(),
parent_stylesheet,
RulesSource::Rules(self.rules.clone()),
)
})
}
pub fn parent_stylesheet(&self) -> &CSSStyleSheet {
self.cssrule.parent_stylesheet()
}
pub fn shared_lock(&self) -> &SharedRwLock {
self.cssrule.shared_lock()
}
}
impl CSSGroupingRuleMethods for CSSGroupingRule {
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-cssrules
fn | (&self) -> DomRoot<CSSRuleList> {
// XXXManishearth check origin clean flag
self.rulelist()
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-insertrule
fn InsertRule(&self, rule: DOMString, index: u32) -> Fallible<u32> {
self.rulelist().insert_rule(&rule, index, /* nested */ true)
}
// https://drafts.csswg.org/cssom/#dom-cssgroupingrule-deleterule
fn DeleteRule(&self, index: u32) -> ErrorResult {
self.rulelist().remove_rule(index)
}
}
| CssRules | identifier_name |
build.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "bindgen")]
extern crate bindgen;
#[cfg(feature = "bindgen")]
extern crate log;
#[cfg(feature = "bindgen")]
extern crate regex;
#[cfg(feature = "bindgen")]
extern crate toml;
extern crate walkdir;
use std::env;
use std::path::Path;
use std::process::{Command, exit};
use walkdir::WalkDir;
#[cfg(feature = "gecko")]
mod build_gecko;
#[cfg(not(feature = "gecko"))]
mod build_gecko {
pub fn generate() {}
}
#[cfg(windows)]
fn find_python() -> String {
if Command::new("python2.7.exe").arg("--version").output().is_ok() {
return "python2.7.exe".to_owned();
}
if Command::new("python27.exe").arg("--version").output().is_ok() {
return "python27.exe".to_owned();
}
if Command::new("python.exe").arg("--version").output().is_ok() {
return "python.exe".to_owned();
}
panic!(concat!("Can't find python (tried python2.7.exe, python27.exe, and python.exe)! ",
"Try fixing PATH or setting the PYTHON env var"));
}
#[cfg(not(windows))]
fn find_python() -> String {
if Command::new("python2.7").arg("--version").output().unwrap().status.success() {
"python2.7"
} else {
"python"
}.to_owned()
}
lazy_static! {
pub static ref PYTHON: String = env::var("PYTHON").ok().unwrap_or_else(find_python);
}
fn generate_properties() {
for entry in WalkDir::new("properties") {
let entry = entry.unwrap();
match entry.path().extension().and_then(|e| e.to_str()) {
Some("mako") | Some("rs") | Some("py") | Some("zip") => {
println!("cargo:rerun-if-changed={}", entry.path().display());
}
_ => {}
}
} | let script = Path::new(&env::var_os("CARGO_MANIFEST_DIR").unwrap())
.join("properties").join("build.py");
let product = if cfg!(feature = "gecko") { "gecko" } else { "servo" };
let status = Command::new(&*PYTHON)
.arg(&script)
.arg(product)
.arg("style-crate")
.status()
.unwrap();
if !status.success() {
exit(1)
}
}
fn main() {
let gecko = cfg!(feature = "gecko");
let servo = cfg!(feature = "servo");
if !(gecko || servo) {
panic!("The style crate requires enabling one of its 'servo' or 'gecko' feature flags");
}
if gecko && servo {
panic!("The style crate does not support enabling both its'servo' or 'gecko' \
feature flags at the same time.");
}
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:out_dir={}", env::var("OUT_DIR").unwrap());
generate_properties();
build_gecko::generate();
} | random_line_split |
|
build.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "bindgen")]
extern crate bindgen;
#[cfg(feature = "bindgen")]
extern crate log;
#[cfg(feature = "bindgen")]
extern crate regex;
#[cfg(feature = "bindgen")]
extern crate toml;
extern crate walkdir;
use std::env;
use std::path::Path;
use std::process::{Command, exit};
use walkdir::WalkDir;
#[cfg(feature = "gecko")]
mod build_gecko;
#[cfg(not(feature = "gecko"))]
mod build_gecko {
pub fn generate() {}
}
#[cfg(windows)]
fn find_python() -> String {
if Command::new("python2.7.exe").arg("--version").output().is_ok() {
return "python2.7.exe".to_owned();
}
if Command::new("python27.exe").arg("--version").output().is_ok() |
if Command::new("python.exe").arg("--version").output().is_ok() {
return "python.exe".to_owned();
}
panic!(concat!("Can't find python (tried python2.7.exe, python27.exe, and python.exe)! ",
"Try fixing PATH or setting the PYTHON env var"));
}
#[cfg(not(windows))]
fn find_python() -> String {
if Command::new("python2.7").arg("--version").output().unwrap().status.success() {
"python2.7"
} else {
"python"
}.to_owned()
}
lazy_static! {
pub static ref PYTHON: String = env::var("PYTHON").ok().unwrap_or_else(find_python);
}
fn generate_properties() {
for entry in WalkDir::new("properties") {
let entry = entry.unwrap();
match entry.path().extension().and_then(|e| e.to_str()) {
Some("mako") | Some("rs") | Some("py") | Some("zip") => {
println!("cargo:rerun-if-changed={}", entry.path().display());
}
_ => {}
}
}
let script = Path::new(&env::var_os("CARGO_MANIFEST_DIR").unwrap())
.join("properties").join("build.py");
let product = if cfg!(feature = "gecko") { "gecko" } else { "servo" };
let status = Command::new(&*PYTHON)
.arg(&script)
.arg(product)
.arg("style-crate")
.status()
.unwrap();
if !status.success() {
exit(1)
}
}
fn main() {
let gecko = cfg!(feature = "gecko");
let servo = cfg!(feature = "servo");
if !(gecko || servo) {
panic!("The style crate requires enabling one of its 'servo' or 'gecko' feature flags");
}
if gecko && servo {
panic!("The style crate does not support enabling both its'servo' or 'gecko' \
feature flags at the same time.");
}
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:out_dir={}", env::var("OUT_DIR").unwrap());
generate_properties();
build_gecko::generate();
}
| {
return "python27.exe".to_owned();
} | conditional_block |
build.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "bindgen")]
extern crate bindgen;
#[cfg(feature = "bindgen")]
extern crate log;
#[cfg(feature = "bindgen")]
extern crate regex;
#[cfg(feature = "bindgen")]
extern crate toml;
extern crate walkdir;
use std::env;
use std::path::Path;
use std::process::{Command, exit};
use walkdir::WalkDir;
#[cfg(feature = "gecko")]
mod build_gecko;
#[cfg(not(feature = "gecko"))]
mod build_gecko {
pub fn generate() {}
}
#[cfg(windows)]
fn find_python() -> String {
if Command::new("python2.7.exe").arg("--version").output().is_ok() {
return "python2.7.exe".to_owned();
}
if Command::new("python27.exe").arg("--version").output().is_ok() {
return "python27.exe".to_owned();
}
if Command::new("python.exe").arg("--version").output().is_ok() {
return "python.exe".to_owned();
}
panic!(concat!("Can't find python (tried python2.7.exe, python27.exe, and python.exe)! ",
"Try fixing PATH or setting the PYTHON env var"));
}
#[cfg(not(windows))]
fn find_python() -> String {
if Command::new("python2.7").arg("--version").output().unwrap().status.success() {
"python2.7"
} else {
"python"
}.to_owned()
}
lazy_static! {
pub static ref PYTHON: String = env::var("PYTHON").ok().unwrap_or_else(find_python);
}
fn generate_properties() | if !status.success() {
exit(1)
}
}
fn main() {
let gecko = cfg!(feature = "gecko");
let servo = cfg!(feature = "servo");
if !(gecko || servo) {
panic!("The style crate requires enabling one of its 'servo' or 'gecko' feature flags");
}
if gecko && servo {
panic!("The style crate does not support enabling both its'servo' or 'gecko' \
feature flags at the same time.");
}
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:out_dir={}", env::var("OUT_DIR").unwrap());
generate_properties();
build_gecko::generate();
}
| {
for entry in WalkDir::new("properties") {
let entry = entry.unwrap();
match entry.path().extension().and_then(|e| e.to_str()) {
Some("mako") | Some("rs") | Some("py") | Some("zip") => {
println!("cargo:rerun-if-changed={}", entry.path().display());
}
_ => {}
}
}
let script = Path::new(&env::var_os("CARGO_MANIFEST_DIR").unwrap())
.join("properties").join("build.py");
let product = if cfg!(feature = "gecko") { "gecko" } else { "servo" };
let status = Command::new(&*PYTHON)
.arg(&script)
.arg(product)
.arg("style-crate")
.status()
.unwrap(); | identifier_body |
build.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "bindgen")]
extern crate bindgen;
#[cfg(feature = "bindgen")]
extern crate log;
#[cfg(feature = "bindgen")]
extern crate regex;
#[cfg(feature = "bindgen")]
extern crate toml;
extern crate walkdir;
use std::env;
use std::path::Path;
use std::process::{Command, exit};
use walkdir::WalkDir;
#[cfg(feature = "gecko")]
mod build_gecko;
#[cfg(not(feature = "gecko"))]
mod build_gecko {
pub fn generate() {}
}
#[cfg(windows)]
fn find_python() -> String {
if Command::new("python2.7.exe").arg("--version").output().is_ok() {
return "python2.7.exe".to_owned();
}
if Command::new("python27.exe").arg("--version").output().is_ok() {
return "python27.exe".to_owned();
}
if Command::new("python.exe").arg("--version").output().is_ok() {
return "python.exe".to_owned();
}
panic!(concat!("Can't find python (tried python2.7.exe, python27.exe, and python.exe)! ",
"Try fixing PATH or setting the PYTHON env var"));
}
#[cfg(not(windows))]
fn find_python() -> String {
if Command::new("python2.7").arg("--version").output().unwrap().status.success() {
"python2.7"
} else {
"python"
}.to_owned()
}
lazy_static! {
pub static ref PYTHON: String = env::var("PYTHON").ok().unwrap_or_else(find_python);
}
fn | () {
for entry in WalkDir::new("properties") {
let entry = entry.unwrap();
match entry.path().extension().and_then(|e| e.to_str()) {
Some("mako") | Some("rs") | Some("py") | Some("zip") => {
println!("cargo:rerun-if-changed={}", entry.path().display());
}
_ => {}
}
}
let script = Path::new(&env::var_os("CARGO_MANIFEST_DIR").unwrap())
.join("properties").join("build.py");
let product = if cfg!(feature = "gecko") { "gecko" } else { "servo" };
let status = Command::new(&*PYTHON)
.arg(&script)
.arg(product)
.arg("style-crate")
.status()
.unwrap();
if !status.success() {
exit(1)
}
}
fn main() {
let gecko = cfg!(feature = "gecko");
let servo = cfg!(feature = "servo");
if !(gecko || servo) {
panic!("The style crate requires enabling one of its 'servo' or 'gecko' feature flags");
}
if gecko && servo {
panic!("The style crate does not support enabling both its'servo' or 'gecko' \
feature flags at the same time.");
}
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:out_dir={}", env::var("OUT_DIR").unwrap());
generate_properties();
build_gecko::generate();
}
| generate_properties | identifier_name |
template-fun-ty.rs | #![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Foo {
pub _address: u8,
}
pub type Foo_FunctionPtr<T> =
::std::option::Option<unsafe extern "C" fn() -> T>;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RefPtr {
pub _address: u8,
}
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct | {
pub _address: u8,
}
pub type RefPtr_Proxy_member_function<R, Args> =
::std::option::Option<unsafe extern "C" fn(arg1: Args) -> R>;
pub type Returner<T> = ::std::option::Option<unsafe extern "C" fn() -> T>;
| RefPtr_Proxy | identifier_name |
template-fun-ty.rs | #![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Foo {
pub _address: u8,
}
pub type Foo_FunctionPtr<T> =
::std::option::Option<unsafe extern "C" fn() -> T>;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)] | }
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RefPtr_Proxy {
pub _address: u8,
}
pub type RefPtr_Proxy_member_function<R, Args> =
::std::option::Option<unsafe extern "C" fn(arg1: Args) -> R>;
pub type Returner<T> = ::std::option::Option<unsafe extern "C" fn() -> T>; | pub struct RefPtr {
pub _address: u8, | random_line_split |
upload.rs | extern crate mig;
#[macro_use]
extern crate log;
extern crate env_logger;
use mig::quic::threaded::{QuicConnection};
| let args = ::std::env::args;
if args().len()!= 2 || args().nth(1) == Some("--help".to_string()) {
println!("Usage: upload clientip:port < file");
return;
}
let address = args().nth(1).unwrap();
let connection = match QuicConnection::new(&*address) {
Ok(connection) => {
connection
},
Err(e) => {
error!("Cannot create a connection: {}", e);
return;
},
};
info!("Running client connected to {}", address);
let mut stream = connection.get_stream(2);
info!("Got stream 2. Copying from the stream to stdout.");
//std::io::copy(&mut stream, &mut std::io::stdout());
std::io::copy(&mut std::io::stdin(), &mut stream).unwrap();
info!("Finished uploading.");
} |
fn main() {
env_logger::init().unwrap();
| random_line_split |
upload.rs | extern crate mig;
#[macro_use]
extern crate log;
extern crate env_logger;
use mig::quic::threaded::{QuicConnection};
fn | () {
env_logger::init().unwrap();
let args = ::std::env::args;
if args().len() != 2 || args().nth(1) == Some("--help".to_string()) {
println!("Usage: upload clientip:port < file");
return;
}
let address = args().nth(1).unwrap();
let connection = match QuicConnection::new(&*address) {
Ok(connection) => {
connection
},
Err(e) => {
error!("Cannot create a connection: {}", e);
return;
},
};
info!("Running client connected to {}", address);
let mut stream = connection.get_stream(2);
info!("Got stream 2. Copying from the stream to stdout.");
//std::io::copy(&mut stream, &mut std::io::stdout());
std::io::copy(&mut std::io::stdin(), &mut stream).unwrap();
info!("Finished uploading.");
}
| main | identifier_name |
upload.rs | extern crate mig;
#[macro_use]
extern crate log;
extern crate env_logger;
use mig::quic::threaded::{QuicConnection};
fn main() |
info!("Running client connected to {}", address);
let mut stream = connection.get_stream(2);
info!("Got stream 2. Copying from the stream to stdout.");
//std::io::copy(&mut stream, &mut std::io::stdout());
std::io::copy(&mut std::io::stdin(), &mut stream).unwrap();
info!("Finished uploading.");
}
| {
env_logger::init().unwrap();
let args = ::std::env::args;
if args().len() != 2 || args().nth(1) == Some("--help".to_string()) {
println!("Usage: upload clientip:port < file");
return;
}
let address = args().nth(1).unwrap();
let connection = match QuicConnection::new(&*address) {
Ok(connection) => {
connection
},
Err(e) => {
error!("Cannot create a connection: {}", e);
return;
},
}; | identifier_body |
main.rs | extern crate regex;
use regex::Regex;
const MIN_BOUND: u32 = 0;
const MAX_BOUND: u32 = 10;//((1u64 << 32) - 1) as u32;
#[derive(Debug)]
struct Range {
lo: u32,
hi: u32,
}
impl Range {
fn default() -> Range {
Range {
lo: MIN_BOUND,
hi: MAX_BOUND,
}
}
fn new(lo: u32, hi: u32) -> Range {
assert!(lo <= hi);
Range { lo: lo, hi: hi }
}
fn parse(s: &str) -> Range {
let re: Regex = Regex::new(r"^(\d+)-(\d+)$").unwrap();
match re.captures(s) {
None => Range::default(),
Some(caps) => {
match (caps.at(1), caps.at(2)) {
(Some(a), Some(b)) => {
Range::new(a.parse().unwrap_or(MIN_BOUND),
b.parse().unwrap_or(MAX_BOUND))
}
_ => Range::default(),
}
}
}
}
}
impl Iterator for Range {
type Item = u32;
fn next(&mut self) -> Option<u32> {
let lo = self.lo;
let hi = self.hi;
self.lo += 1;
if lo > hi {
None
} else |
}
}
fn mark_off(a: &mut[bool; MAX_BOUND as usize], r: Range) {
for i in r {
a[i as usize] = false;
}
}
fn main() {
let test_input = "5-8\n0-2\n4-7";
let ranges = test_input.lines().map(Range::parse);
let mut a = [true; MAX_BOUND as usize];
for r in ranges {
println!("{:?}", r);
mark_off(&mut a, r);
}
for i in MIN_BOUND.. MAX_BOUND {
if a[i as usize] {
println!("{}", i);
break;
}
}
}
| {
Some(lo)
} | conditional_block |
main.rs | extern crate regex;
use regex::Regex;
const MIN_BOUND: u32 = 0;
const MAX_BOUND: u32 = 10;//((1u64 << 32) - 1) as u32;
#[derive(Debug)]
struct Range {
lo: u32,
hi: u32,
}
impl Range {
fn default() -> Range {
Range {
lo: MIN_BOUND,
hi: MAX_BOUND,
}
}
fn new(lo: u32, hi: u32) -> Range {
assert!(lo <= hi);
Range { lo: lo, hi: hi }
}
fn parse(s: &str) -> Range {
let re: Regex = Regex::new(r"^(\d+)-(\d+)$").unwrap();
match re.captures(s) {
None => Range::default(),
Some(caps) => {
match (caps.at(1), caps.at(2)) {
(Some(a), Some(b)) => {
Range::new(a.parse().unwrap_or(MIN_BOUND),
b.parse().unwrap_or(MAX_BOUND))
}
_ => Range::default(),
}
}
}
}
}
impl Iterator for Range {
type Item = u32;
fn next(&mut self) -> Option<u32> {
let lo = self.lo;
let hi = self.hi;
self.lo += 1;
if lo > hi {
None
} else {
Some(lo)
}
}
} | fn mark_off(a: &mut[bool; MAX_BOUND as usize], r: Range) {
for i in r {
a[i as usize] = false;
}
}
fn main() {
let test_input = "5-8\n0-2\n4-7";
let ranges = test_input.lines().map(Range::parse);
let mut a = [true; MAX_BOUND as usize];
for r in ranges {
println!("{:?}", r);
mark_off(&mut a, r);
}
for i in MIN_BOUND.. MAX_BOUND {
if a[i as usize] {
println!("{}", i);
break;
}
}
} | random_line_split |
|
main.rs | extern crate regex;
use regex::Regex;
const MIN_BOUND: u32 = 0;
const MAX_BOUND: u32 = 10;//((1u64 << 32) - 1) as u32;
#[derive(Debug)]
struct Range {
lo: u32,
hi: u32,
}
impl Range {
fn | () -> Range {
Range {
lo: MIN_BOUND,
hi: MAX_BOUND,
}
}
fn new(lo: u32, hi: u32) -> Range {
assert!(lo <= hi);
Range { lo: lo, hi: hi }
}
fn parse(s: &str) -> Range {
let re: Regex = Regex::new(r"^(\d+)-(\d+)$").unwrap();
match re.captures(s) {
None => Range::default(),
Some(caps) => {
match (caps.at(1), caps.at(2)) {
(Some(a), Some(b)) => {
Range::new(a.parse().unwrap_or(MIN_BOUND),
b.parse().unwrap_or(MAX_BOUND))
}
_ => Range::default(),
}
}
}
}
}
impl Iterator for Range {
type Item = u32;
fn next(&mut self) -> Option<u32> {
let lo = self.lo;
let hi = self.hi;
self.lo += 1;
if lo > hi {
None
} else {
Some(lo)
}
}
}
fn mark_off(a: &mut[bool; MAX_BOUND as usize], r: Range) {
for i in r {
a[i as usize] = false;
}
}
fn main() {
let test_input = "5-8\n0-2\n4-7";
let ranges = test_input.lines().map(Range::parse);
let mut a = [true; MAX_BOUND as usize];
for r in ranges {
println!("{:?}", r);
mark_off(&mut a, r);
}
for i in MIN_BOUND.. MAX_BOUND {
if a[i as usize] {
println!("{}", i);
break;
}
}
}
| default | identifier_name |
main.rs | extern crate regex;
use regex::Regex;
const MIN_BOUND: u32 = 0;
const MAX_BOUND: u32 = 10;//((1u64 << 32) - 1) as u32;
#[derive(Debug)]
struct Range {
lo: u32,
hi: u32,
}
impl Range {
fn default() -> Range {
Range {
lo: MIN_BOUND,
hi: MAX_BOUND,
}
}
fn new(lo: u32, hi: u32) -> Range |
fn parse(s: &str) -> Range {
let re: Regex = Regex::new(r"^(\d+)-(\d+)$").unwrap();
match re.captures(s) {
None => Range::default(),
Some(caps) => {
match (caps.at(1), caps.at(2)) {
(Some(a), Some(b)) => {
Range::new(a.parse().unwrap_or(MIN_BOUND),
b.parse().unwrap_or(MAX_BOUND))
}
_ => Range::default(),
}
}
}
}
}
impl Iterator for Range {
type Item = u32;
fn next(&mut self) -> Option<u32> {
let lo = self.lo;
let hi = self.hi;
self.lo += 1;
if lo > hi {
None
} else {
Some(lo)
}
}
}
fn mark_off(a: &mut[bool; MAX_BOUND as usize], r: Range) {
for i in r {
a[i as usize] = false;
}
}
fn main() {
let test_input = "5-8\n0-2\n4-7";
let ranges = test_input.lines().map(Range::parse);
let mut a = [true; MAX_BOUND as usize];
for r in ranges {
println!("{:?}", r);
mark_off(&mut a, r);
}
for i in MIN_BOUND.. MAX_BOUND {
if a[i as usize] {
println!("{}", i);
break;
}
}
}
| {
assert!(lo <= hi);
Range { lo: lo, hi: hi }
} | identifier_body |
mod.rs | extern mod std;
use std::io::fs::{File, mkdir_recursive};
use std::{os, ptr, str};
use std::io::{Reader, io_error};
use std::path::Path;
use std::libc::{c_void};
use std::unstable::atomics::{AtomicBool, Acquire, Release, INIT_ATOMIC_BOOL};
use nspr::raw::nspr::*;
use super::nss::raw::*;
static mut NSS_INIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_INIT_END: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_END: AtomicBool = INIT_ATOMIC_BOOL;
pub mod raw;
pub struct NSS {
priv nss_ctx: Option<*c_void>,
priv nss_cert_mod: Option<SECMODModule>,
cfg_dir: Option<~str>,
}
impl NSS {
pub fn | () -> NSS {
NSS { nss_ctx: None, nss_cert_mod: None, cfg_dir: None, }
}
pub fn set_cfg_dir(&mut self, cfg_dir: &str)
{
self.cfg_dir = Some(cfg_dir.to_owned());
}
pub fn nodb_init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) { return SECSuccess; }
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
self.finish_init()
}
}
fn start_init(&mut self) -> SECStatus {
unsafe {
if(NSS_IsInitialized() == PRTrue ) { return SECSuccess; }
if NSS_INIT_START.swap(true, Acquire) { while !NSS_INIT_END.load(Release) { std::task::deschedule(); } }
SECFailure //Not really...
}
}
pub fn init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) { return SECSuccess; }
self.cfg_dir = match self.cfg_dir {
None => Some(os::getenv("SSL_DIR").unwrap_or(format!("{}/.pki/nssdb", os::getenv("HOME").unwrap_or(~".")).to_owned())),
Some(ref s) => Some(s.to_owned()), };
let cfg_dir = match self.cfg_dir {
Some(ref s) => s.to_owned(),
None => ~"", };
let mut cfg_path = Path::init(cfg_dir.clone());
let mut nss_path = format!("sql:{}", cfg_dir);
if(!cfg_path.exists()) {
let system_path = &Path::init("/etc/pki/nssdb");
if(!system_path.exists()) {
io_error::cond.trap(|_|{}).inside(|| mkdir_recursive(&cfg_path, 0b111_111_111));
}
else {
cfg_path = Path::init("/etc/pki/nssdb");
nss_path = format!("sql:{}", system_path.as_str().unwrap());
}
}
if(cfg_path.exists()) {
nss_path.with_c_str(|nssdb| self.nss_ctx = Some(NSS_InitContext(nssdb, ptr::null(), ptr::null(), ptr::null(), ptr::null(), NSS_INIT_PK11RELOAD)));
}
if(NSS_IsInitialized() == PRFalse) {
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
}
self.finish_init()
}
}
fn finish_init(&mut self) -> SECStatus {
unsafe {
nss_cmd(|| NSS_SetDomesticPolicy());
self.nss_cert_mod = Some(*SECMOD_LoadUserModule("library=libnssckbi.so name=\"Root Certs\"".to_c_str().unwrap(), ptr::null(), PRFalse));
if(self.nss_cert_mod.unwrap().loaded != PRTrue) {
return SECFailure;
}
NSS_INIT_END.store(true, Release);
if(NSS_IsInitialized() == PRTrue) {
SECSuccess
}
else {
SECFailure
}
}
}
pub fn uninit(&mut self) -> SECStatus {
unsafe {
if(NSS_IsInitialized() == PRFalse) { return SECSuccess; }
if NSS_UNINIT_START.swap(true, Acquire) { while !NSS_UNINIT_END.load(Release) { std::task::deschedule(); } }
SECMOD_DestroyModule(&self.nss_cert_mod.unwrap());
if(!self.nss_ctx.is_none()) { NSS_ShutdownContext(self.nss_ctx.unwrap()) };
self.nss_ctx = None;
NSS_UNINIT_END.store(true, Release);
}
SECSuccess
}
pub fn trust_cert(file: ~str) -> SECStatus
{
let path = &Path::init(file);
let mut retStatus = SECFailure;
if(!path.exists()){ return retStatus; }
io_error::cond.trap(|_| { retStatus = SECFailure; }).inside(
||
unsafe
{
let pemdata = str::from_utf8_owned(File::open(path).read_to_end());
let cert = CERT_DecodeCertFromPackage(pemdata.to_c_str().unwrap(), pemdata.to_c_str().len() as i32);
let trust = CERTCertTrust { sslFlags: 0, emailFlags: 0, objectSigningFlags: 0 };
CERT_DecodeTrustString(&trust, "TCu,Cu,Tu".to_c_str().unwrap());
retStatus = CERT_ChangeCertTrust(CERT_GetDefaultCertDB(), cert, &trust);
}
);
retStatus
}
}
pub fn nss_cmd(blk: || -> SECStatus) {
let result = blk();
if(result == SECFailure) {
fail!("NSS Failed with {}", get_nss_error());
}
}
pub fn get_nss_error() -> ~str {
unsafe {
let err = PR_GetError();
let name = PR_ErrorToName(err);
if(name != ptr::null()) {
std::str::raw::from_c_str(name)
} else {
~"Unknown Error"
}
}
}
| new | identifier_name |
mod.rs | extern mod std;
use std::io::fs::{File, mkdir_recursive};
use std::{os, ptr, str};
use std::io::{Reader, io_error};
use std::path::Path;
use std::libc::{c_void};
use std::unstable::atomics::{AtomicBool, Acquire, Release, INIT_ATOMIC_BOOL};
use nspr::raw::nspr::*;
use super::nss::raw::*;
static mut NSS_INIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_INIT_END: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_END: AtomicBool = INIT_ATOMIC_BOOL;
pub mod raw;
pub struct NSS {
priv nss_ctx: Option<*c_void>,
priv nss_cert_mod: Option<SECMODModule>,
cfg_dir: Option<~str>,
}
impl NSS {
pub fn new() -> NSS {
NSS { nss_ctx: None, nss_cert_mod: None, cfg_dir: None, }
}
pub fn set_cfg_dir(&mut self, cfg_dir: &str)
{
self.cfg_dir = Some(cfg_dir.to_owned());
}
pub fn nodb_init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) |
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
self.finish_init()
}
}
fn start_init(&mut self) -> SECStatus {
unsafe {
if(NSS_IsInitialized() == PRTrue ) { return SECSuccess; }
if NSS_INIT_START.swap(true, Acquire) { while !NSS_INIT_END.load(Release) { std::task::deschedule(); } }
SECFailure //Not really...
}
}
pub fn init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) { return SECSuccess; }
self.cfg_dir = match self.cfg_dir {
None => Some(os::getenv("SSL_DIR").unwrap_or(format!("{}/.pki/nssdb", os::getenv("HOME").unwrap_or(~".")).to_owned())),
Some(ref s) => Some(s.to_owned()), };
let cfg_dir = match self.cfg_dir {
Some(ref s) => s.to_owned(),
None => ~"", };
let mut cfg_path = Path::init(cfg_dir.clone());
let mut nss_path = format!("sql:{}", cfg_dir);
if(!cfg_path.exists()) {
let system_path = &Path::init("/etc/pki/nssdb");
if(!system_path.exists()) {
io_error::cond.trap(|_|{}).inside(|| mkdir_recursive(&cfg_path, 0b111_111_111));
}
else {
cfg_path = Path::init("/etc/pki/nssdb");
nss_path = format!("sql:{}", system_path.as_str().unwrap());
}
}
if(cfg_path.exists()) {
nss_path.with_c_str(|nssdb| self.nss_ctx = Some(NSS_InitContext(nssdb, ptr::null(), ptr::null(), ptr::null(), ptr::null(), NSS_INIT_PK11RELOAD)));
}
if(NSS_IsInitialized() == PRFalse) {
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
}
self.finish_init()
}
}
fn finish_init(&mut self) -> SECStatus {
unsafe {
nss_cmd(|| NSS_SetDomesticPolicy());
self.nss_cert_mod = Some(*SECMOD_LoadUserModule("library=libnssckbi.so name=\"Root Certs\"".to_c_str().unwrap(), ptr::null(), PRFalse));
if(self.nss_cert_mod.unwrap().loaded != PRTrue) {
return SECFailure;
}
NSS_INIT_END.store(true, Release);
if(NSS_IsInitialized() == PRTrue) {
SECSuccess
}
else {
SECFailure
}
}
}
pub fn uninit(&mut self) -> SECStatus {
unsafe {
if(NSS_IsInitialized() == PRFalse) { return SECSuccess; }
if NSS_UNINIT_START.swap(true, Acquire) { while !NSS_UNINIT_END.load(Release) { std::task::deschedule(); } }
SECMOD_DestroyModule(&self.nss_cert_mod.unwrap());
if(!self.nss_ctx.is_none()) { NSS_ShutdownContext(self.nss_ctx.unwrap()) };
self.nss_ctx = None;
NSS_UNINIT_END.store(true, Release);
}
SECSuccess
}
pub fn trust_cert(file: ~str) -> SECStatus
{
let path = &Path::init(file);
let mut retStatus = SECFailure;
if(!path.exists()){ return retStatus; }
io_error::cond.trap(|_| { retStatus = SECFailure; }).inside(
||
unsafe
{
let pemdata = str::from_utf8_owned(File::open(path).read_to_end());
let cert = CERT_DecodeCertFromPackage(pemdata.to_c_str().unwrap(), pemdata.to_c_str().len() as i32);
let trust = CERTCertTrust { sslFlags: 0, emailFlags: 0, objectSigningFlags: 0 };
CERT_DecodeTrustString(&trust, "TCu,Cu,Tu".to_c_str().unwrap());
retStatus = CERT_ChangeCertTrust(CERT_GetDefaultCertDB(), cert, &trust);
}
);
retStatus
}
}
pub fn nss_cmd(blk: || -> SECStatus) {
let result = blk();
if(result == SECFailure) {
fail!("NSS Failed with {}", get_nss_error());
}
}
pub fn get_nss_error() -> ~str {
unsafe {
let err = PR_GetError();
let name = PR_ErrorToName(err);
if(name != ptr::null()) {
std::str::raw::from_c_str(name)
} else {
~"Unknown Error"
}
}
}
| { return SECSuccess; } | conditional_block |
mod.rs | extern mod std;
use std::io::fs::{File, mkdir_recursive};
use std::{os, ptr, str};
use std::io::{Reader, io_error};
use std::path::Path;
use std::libc::{c_void};
use std::unstable::atomics::{AtomicBool, Acquire, Release, INIT_ATOMIC_BOOL};
use nspr::raw::nspr::*;
use super::nss::raw::*;
static mut NSS_INIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_INIT_END: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_END: AtomicBool = INIT_ATOMIC_BOOL;
pub mod raw;
pub struct NSS {
priv nss_ctx: Option<*c_void>,
priv nss_cert_mod: Option<SECMODModule>,
cfg_dir: Option<~str>,
}
impl NSS {
pub fn new() -> NSS {
NSS { nss_ctx: None, nss_cert_mod: None, cfg_dir: None, }
}
pub fn set_cfg_dir(&mut self, cfg_dir: &str)
{
self.cfg_dir = Some(cfg_dir.to_owned());
}
pub fn nodb_init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) { return SECSuccess; }
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
self.finish_init()
}
}
fn start_init(&mut self) -> SECStatus {
unsafe {
if(NSS_IsInitialized() == PRTrue ) { return SECSuccess; }
if NSS_INIT_START.swap(true, Acquire) { while !NSS_INIT_END.load(Release) { std::task::deschedule(); } }
SECFailure //Not really...
}
}
pub fn init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) { return SECSuccess; }
self.cfg_dir = match self.cfg_dir {
None => Some(os::getenv("SSL_DIR").unwrap_or(format!("{}/.pki/nssdb", os::getenv("HOME").unwrap_or(~".")).to_owned())),
Some(ref s) => Some(s.to_owned()), };
let cfg_dir = match self.cfg_dir {
Some(ref s) => s.to_owned(),
None => ~"", };
let mut cfg_path = Path::init(cfg_dir.clone());
let mut nss_path = format!("sql:{}", cfg_dir);
if(!cfg_path.exists()) {
let system_path = &Path::init("/etc/pki/nssdb");
if(!system_path.exists()) {
io_error::cond.trap(|_|{}).inside(|| mkdir_recursive(&cfg_path, 0b111_111_111));
} |
if(cfg_path.exists()) {
nss_path.with_c_str(|nssdb| self.nss_ctx = Some(NSS_InitContext(nssdb, ptr::null(), ptr::null(), ptr::null(), ptr::null(), NSS_INIT_PK11RELOAD)));
}
if(NSS_IsInitialized() == PRFalse) {
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
}
self.finish_init()
}
}
fn finish_init(&mut self) -> SECStatus {
unsafe {
nss_cmd(|| NSS_SetDomesticPolicy());
self.nss_cert_mod = Some(*SECMOD_LoadUserModule("library=libnssckbi.so name=\"Root Certs\"".to_c_str().unwrap(), ptr::null(), PRFalse));
if(self.nss_cert_mod.unwrap().loaded != PRTrue) {
return SECFailure;
}
NSS_INIT_END.store(true, Release);
if(NSS_IsInitialized() == PRTrue) {
SECSuccess
}
else {
SECFailure
}
}
}
pub fn uninit(&mut self) -> SECStatus {
unsafe {
if(NSS_IsInitialized() == PRFalse) { return SECSuccess; }
if NSS_UNINIT_START.swap(true, Acquire) { while !NSS_UNINIT_END.load(Release) { std::task::deschedule(); } }
SECMOD_DestroyModule(&self.nss_cert_mod.unwrap());
if(!self.nss_ctx.is_none()) { NSS_ShutdownContext(self.nss_ctx.unwrap()) };
self.nss_ctx = None;
NSS_UNINIT_END.store(true, Release);
}
SECSuccess
}
pub fn trust_cert(file: ~str) -> SECStatus
{
let path = &Path::init(file);
let mut retStatus = SECFailure;
if(!path.exists()){ return retStatus; }
io_error::cond.trap(|_| { retStatus = SECFailure; }).inside(
||
unsafe
{
let pemdata = str::from_utf8_owned(File::open(path).read_to_end());
let cert = CERT_DecodeCertFromPackage(pemdata.to_c_str().unwrap(), pemdata.to_c_str().len() as i32);
let trust = CERTCertTrust { sslFlags: 0, emailFlags: 0, objectSigningFlags: 0 };
CERT_DecodeTrustString(&trust, "TCu,Cu,Tu".to_c_str().unwrap());
retStatus = CERT_ChangeCertTrust(CERT_GetDefaultCertDB(), cert, &trust);
}
);
retStatus
}
}
pub fn nss_cmd(blk: || -> SECStatus) {
let result = blk();
if(result == SECFailure) {
fail!("NSS Failed with {}", get_nss_error());
}
}
pub fn get_nss_error() -> ~str {
unsafe {
let err = PR_GetError();
let name = PR_ErrorToName(err);
if(name != ptr::null()) {
std::str::raw::from_c_str(name)
} else {
~"Unknown Error"
}
}
} | else {
cfg_path = Path::init("/etc/pki/nssdb");
nss_path = format!("sql:{}", system_path.as_str().unwrap());
}
} | random_line_split |
mod.rs | extern mod std;
use std::io::fs::{File, mkdir_recursive};
use std::{os, ptr, str};
use std::io::{Reader, io_error};
use std::path::Path;
use std::libc::{c_void};
use std::unstable::atomics::{AtomicBool, Acquire, Release, INIT_ATOMIC_BOOL};
use nspr::raw::nspr::*;
use super::nss::raw::*;
static mut NSS_INIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_INIT_END: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_START: AtomicBool = INIT_ATOMIC_BOOL;
static mut NSS_UNINIT_END: AtomicBool = INIT_ATOMIC_BOOL;
pub mod raw;
pub struct NSS {
priv nss_ctx: Option<*c_void>,
priv nss_cert_mod: Option<SECMODModule>,
cfg_dir: Option<~str>,
}
impl NSS {
pub fn new() -> NSS {
NSS { nss_ctx: None, nss_cert_mod: None, cfg_dir: None, }
}
pub fn set_cfg_dir(&mut self, cfg_dir: &str)
{
self.cfg_dir = Some(cfg_dir.to_owned());
}
pub fn nodb_init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) { return SECSuccess; }
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
self.finish_init()
}
}
fn start_init(&mut self) -> SECStatus |
pub fn init(&mut self) -> SECStatus {
unsafe {
if(self.start_init() == SECSuccess) { return SECSuccess; }
self.cfg_dir = match self.cfg_dir {
None => Some(os::getenv("SSL_DIR").unwrap_or(format!("{}/.pki/nssdb", os::getenv("HOME").unwrap_or(~".")).to_owned())),
Some(ref s) => Some(s.to_owned()), };
let cfg_dir = match self.cfg_dir {
Some(ref s) => s.to_owned(),
None => ~"", };
let mut cfg_path = Path::init(cfg_dir.clone());
let mut nss_path = format!("sql:{}", cfg_dir);
if(!cfg_path.exists()) {
let system_path = &Path::init("/etc/pki/nssdb");
if(!system_path.exists()) {
io_error::cond.trap(|_|{}).inside(|| mkdir_recursive(&cfg_path, 0b111_111_111));
}
else {
cfg_path = Path::init("/etc/pki/nssdb");
nss_path = format!("sql:{}", system_path.as_str().unwrap());
}
}
if(cfg_path.exists()) {
nss_path.with_c_str(|nssdb| self.nss_ctx = Some(NSS_InitContext(nssdb, ptr::null(), ptr::null(), ptr::null(), ptr::null(), NSS_INIT_PK11RELOAD)));
}
if(NSS_IsInitialized() == PRFalse) {
if(NSS_NoDB_Init(ptr::null()) == SECFailure){
fail!("NSS is borked!");
}
}
self.finish_init()
}
}
fn finish_init(&mut self) -> SECStatus {
unsafe {
nss_cmd(|| NSS_SetDomesticPolicy());
self.nss_cert_mod = Some(*SECMOD_LoadUserModule("library=libnssckbi.so name=\"Root Certs\"".to_c_str().unwrap(), ptr::null(), PRFalse));
if(self.nss_cert_mod.unwrap().loaded != PRTrue) {
return SECFailure;
}
NSS_INIT_END.store(true, Release);
if(NSS_IsInitialized() == PRTrue) {
SECSuccess
}
else {
SECFailure
}
}
}
pub fn uninit(&mut self) -> SECStatus {
unsafe {
if(NSS_IsInitialized() == PRFalse) { return SECSuccess; }
if NSS_UNINIT_START.swap(true, Acquire) { while !NSS_UNINIT_END.load(Release) { std::task::deschedule(); } }
SECMOD_DestroyModule(&self.nss_cert_mod.unwrap());
if(!self.nss_ctx.is_none()) { NSS_ShutdownContext(self.nss_ctx.unwrap()) };
self.nss_ctx = None;
NSS_UNINIT_END.store(true, Release);
}
SECSuccess
}
pub fn trust_cert(file: ~str) -> SECStatus
{
let path = &Path::init(file);
let mut retStatus = SECFailure;
if(!path.exists()){ return retStatus; }
io_error::cond.trap(|_| { retStatus = SECFailure; }).inside(
||
unsafe
{
let pemdata = str::from_utf8_owned(File::open(path).read_to_end());
let cert = CERT_DecodeCertFromPackage(pemdata.to_c_str().unwrap(), pemdata.to_c_str().len() as i32);
let trust = CERTCertTrust { sslFlags: 0, emailFlags: 0, objectSigningFlags: 0 };
CERT_DecodeTrustString(&trust, "TCu,Cu,Tu".to_c_str().unwrap());
retStatus = CERT_ChangeCertTrust(CERT_GetDefaultCertDB(), cert, &trust);
}
);
retStatus
}
}
pub fn nss_cmd(blk: || -> SECStatus) {
let result = blk();
if(result == SECFailure) {
fail!("NSS Failed with {}", get_nss_error());
}
}
pub fn get_nss_error() -> ~str {
unsafe {
let err = PR_GetError();
let name = PR_ErrorToName(err);
if(name != ptr::null()) {
std::str::raw::from_c_str(name)
} else {
~"Unknown Error"
}
}
}
| {
unsafe {
if(NSS_IsInitialized() == PRTrue ) { return SECSuccess; }
if NSS_INIT_START.swap(true, Acquire) { while !NSS_INIT_END.load(Release) { std::task::deschedule(); } }
SECFailure //Not really...
}
} | identifier_body |
gradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// FIXME(rust-lang/rust#26264): Remove GenericEndingShape and GenericGradientItem.
use app_units::Au;
use crate::display_list::ToLayout;
use euclid::{Point2D, Size2D, Vector2D};
use style::properties::ComputedValues;
use style::values::computed::image::{EndingShape, LineDirection};
use style::values::computed::{Angle, GradientItem, LengthOrPercentage, Percentage, Position};
use style::values::generics::image::EndingShape as GenericEndingShape;
use style::values::generics::image::GradientItem as GenericGradientItem;
use style::values::generics::image::{Circle, Ellipse, ShapeExtent};
use style::values::specified::position::{X, Y};
use webrender_api::{ExtendMode, Gradient, GradientBuilder, GradientStop, RadialGradient};
/// A helper data structure for gradients.
#[derive(Clone, Copy)]
struct StopRun {
start_offset: f32,
end_offset: f32,
start_index: usize,
stop_count: usize,
}
/// Determines the radius of a circle if it was not explicitly provided.
/// <https://drafts.csswg.org/css-images-3/#typedef-size>
fn circle_size_keyword(
keyword: ShapeExtent,
size: &Size2D<Au>,
center: &Point2D<Au>,
) -> Size2D<Au> {
let radius = match keyword {
ShapeExtent::ClosestSide | ShapeExtent::Contain => {
let dist = distance_to_sides(size, center, ::std::cmp::min);
::std::cmp::min(dist.width, dist.height)
},
ShapeExtent::FarthestSide => {
let dist = distance_to_sides(size, center, ::std::cmp::max);
::std::cmp::max(dist.width, dist.height)
},
ShapeExtent::ClosestCorner => distance_to_corner(size, center, ::std::cmp::min),
ShapeExtent::FarthestCorner | ShapeExtent::Cover => {
distance_to_corner(size, center, ::std::cmp::max)
},
};
Size2D::new(radius, radius)
}
/// Returns the radius for an ellipse with the same ratio as if it was matched to the sides.
fn ellipse_radius<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Size2D<Au>
where
F: Fn(Au, Au) -> Au,
{
let dist = distance_to_sides(size, center, cmp);
Size2D::new(
dist.width.scale_by(::std::f32::consts::FRAC_1_SQRT_2 * 2.0),
dist.height
.scale_by(::std::f32::consts::FRAC_1_SQRT_2 * 2.0),
)
}
/// Determines the radius of an ellipse if it was not explicitly provided.
/// <https://drafts.csswg.org/css-images-3/#typedef-size>
fn ellipse_size_keyword(
keyword: ShapeExtent,
size: &Size2D<Au>,
center: &Point2D<Au>,
) -> Size2D<Au> {
match keyword {
ShapeExtent::ClosestSide | ShapeExtent::Contain => {
distance_to_sides(size, center, ::std::cmp::min)
},
ShapeExtent::FarthestSide => distance_to_sides(size, center, ::std::cmp::max),
ShapeExtent::ClosestCorner => ellipse_radius(size, center, ::std::cmp::min),
ShapeExtent::FarthestCorner | ShapeExtent::Cover => {
ellipse_radius(size, center, ::std::cmp::max)
},
}
}
fn convert_gradient_stops(
style: &ComputedValues,
gradient_items: &[GradientItem],
total_length: Au,
) -> GradientBuilder {
// Determine the position of each stop per CSS-IMAGES § 3.4.
// Only keep the color stops, discard the color interpolation hints.
let mut stop_items = gradient_items
.iter()
.filter_map(|item| match *item {
GenericGradientItem::ColorStop(ref stop) => Some(*stop),
_ => None,
})
.collect::<Vec<_>>();
assert!(stop_items.len() >= 2);
// Run the algorithm from
// https://drafts.csswg.org/css-images-3/#color-stop-syntax
// Step 1:
// If the first color stop does not have a position, set its position to 0%.
{
let first = stop_items.first_mut().unwrap();
if first.position.is_none() {
first.position = Some(LengthOrPercentage::Percentage(Percentage(0.0)));
}
}
// If the last color stop does not have a position, set its position to 100%.
{
let last = stop_items.last_mut().unwrap();
if last.position.is_none() {
last.position = Some(LengthOrPercentage::Percentage(Percentage(1.0)));
}
}
// Step 2: Move any stops placed before earlier stops to the
// same position as the preceding stop.
let mut last_stop_position = stop_items.first().unwrap().position.unwrap();
for stop in stop_items.iter_mut().skip(1) {
if let Some(pos) = stop.position {
if position_to_offset(last_stop_position, total_length) >
position_to_offset(pos, total_length)
{
stop.position = Some(last_stop_position);
}
last_stop_position = stop.position.unwrap();
}
}
// Step 3: Evenly space stops without position.
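// For a run of n consecutive unpositioned stops bounded by positioned stops, the j-th of them
// (j = 1..n) is placed at start_offset + j / (n + 1) of the run length; `stop_count` holds
// n - 1, which is why the denominator used below is (2 + stop_run.stop_count).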
let mut stops = GradientBuilder::new();
let mut stop_run = None;
for (i, stop) in stop_items.iter().enumerate() {
let offset = match stop.position {
None => {
if stop_run.is_none() {
// Initialize a new stop run.
// `unwrap()` here should never fail because this is the beginning of
// a stop run, which is always bounded by a length or percentage.
let start_offset =
position_to_offset(stop_items[i - 1].position.unwrap(), total_length);
// `unwrap()` here should never fail because this is the end of
// a stop run, which is always bounded by a length or percentage.
let (end_index, end_stop) = stop_items[(i + 1)..]
.iter()
.enumerate()
.find(|&(_, ref stop)| stop.position.is_some())
.unwrap();
let end_offset = position_to_offset(end_stop.position.unwrap(), total_length);
stop_run = Some(StopRun {
start_offset,
end_offset,
start_index: i - 1,
stop_count: end_index,
})
}
let stop_run = stop_run.unwrap();
let stop_run_length = stop_run.end_offset - stop_run.start_offset;
stop_run.start_offset +
stop_run_length * (i - stop_run.start_index) as f32 /
((2 + stop_run.stop_count) as f32)
},
Some(position) => {
stop_run = None;
position_to_offset(position, total_length)
},
};
assert!(offset.is_finite());
stops.push(GradientStop {
offset: offset,
color: style.resolve_color(stop.color).to_layout(),
})
}
stops
}
fn extend_mode(repeating: bool) -> ExtendMode {
if repeating {
ExtendMode::Repeat
} else {
ExtendMode::Clamp
}
}
/// Returns the distance to the nearest or farthest corner depending on the comparator.
fn distance_to_corner<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Au
where
F: Fn(Au, Au) -> Au,
{ |
/// Returns the distance to the nearest or farthest sides depending on the comparator.
///
/// The first return value is the horizontal distance, the second the vertical distance.
fn distance_to_sides<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Size2D<Au>
where
F: Fn(Au, Au) -> Au,
{
let top_side = center.y;
let right_side = size.width - center.x;
let bottom_side = size.height - center.y;
let left_side = center.x;
Size2D::new(cmp(left_side, right_side), cmp(top_side, bottom_side))
}
fn position_to_offset(position: LengthOrPercentage, total_length: Au) -> f32 {
if total_length == Au(0) {
return 0.0;
}
match position {
LengthOrPercentage::Length(l) => l.to_i32_au() as f32 / total_length.0 as f32,
LengthOrPercentage::Percentage(percentage) => percentage.0 as f32,
LengthOrPercentage::Calc(calc) => {
calc.to_used_value(Some(total_length)).unwrap().0 as f32 / total_length.0 as f32
},
}
}
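// For example (illustrative numbers): with a 200px total length, a stop written as `50%`
// maps to 0.5, and a stop written as `100px` also maps to 0.5, since both the stop length
// and `total_length` are expressed in Au (1/60th of a px) before dividing.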
pub fn linear(
style: &ComputedValues,
size: Size2D<Au>,
stops: &[GradientItem],
direction: LineDirection,
repeating: bool,
) -> (Gradient, Vec<GradientStop>) {
let angle = match direction {
LineDirection::Angle(angle) => angle.radians(),
LineDirection::Horizontal(x) => match x {
X::Left => Angle::from_degrees(270.).radians(),
X::Right => Angle::from_degrees(90.).radians(),
},
LineDirection::Vertical(y) => match y {
Y::Top => Angle::from_degrees(0.).radians(),
Y::Bottom => Angle::from_degrees(180.).radians(),
},
LineDirection::Corner(horizontal, vertical) => {
// This is the angle for one of the diagonals of the box. Our angle
// will either be this one, this one + PI, or one of the other
// two perpendicular angles.
let atan = (size.height.to_f32_px() / size.width.to_f32_px()).atan();
match (horizontal, vertical) {
(X::Right, Y::Bottom) => ::std::f32::consts::PI - atan,
(X::Left, Y::Bottom) => ::std::f32::consts::PI + atan,
(X::Right, Y::Top) => atan,
(X::Left, Y::Top) => -atan,
}
},
};
// Get correct gradient line length, based on:
// https://drafts.csswg.org/css-images-3/#linear-gradients
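// With A measured as in CSS (0deg pointing up, increasing clockwise) and y growing downward,
// `dir` is the unit vector (sin A, -cos A) and `line_length` is the spec's
// abs(W * sin A) + abs(H * cos A): the box projected onto the gradient direction.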
let dir = Point2D::new(angle.sin(), -angle.cos());
let line_length =
(dir.x * size.width.to_f32_px()).abs() + (dir.y * size.height.to_f32_px()).abs();
let inv_dir_length = 1.0 / (dir.x * dir.x + dir.y * dir.y).sqrt();
// This is the vector between the center and the ending point; i.e. half
// of the distance between the starting point and the ending point.
let delta = Vector2D::new(
Au::from_f32_px(dir.x * inv_dir_length * line_length / 2.0),
Au::from_f32_px(dir.y * inv_dir_length * line_length / 2.0),
);
// This is the length of the gradient line.
let length = Au::from_f32_px((delta.x.to_f32_px() * 2.0).hypot(delta.y.to_f32_px() * 2.0));
let mut builder = convert_gradient_stops(style, stops, length);
let center = Point2D::new(size.width / 2, size.height / 2);
(
builder.gradient(
(center - delta).to_layout(),
(center + delta).to_layout(),
extend_mode(repeating),
),
builder.stops().to_vec(),
)
}
pub fn radial(
style: &ComputedValues,
size: Size2D<Au>,
stops: &[GradientItem],
shape: EndingShape,
center: Position,
repeating: bool,
) -> (RadialGradient, Vec<GradientStop>) {
let center = Point2D::new(
center.horizontal.to_used_value(size.width),
center.vertical.to_used_value(size.height),
);
let radius = match shape {
GenericEndingShape::Circle(Circle::Radius(length)) => {
let length = Au::from(length);
Size2D::new(length, length)
},
GenericEndingShape::Circle(Circle::Extent(extent)) => {
circle_size_keyword(extent, &size, &center)
},
GenericEndingShape::Ellipse(Ellipse::Radii(x, y)) => {
Size2D::new(x.to_used_value(size.width), y.to_used_value(size.height))
},
GenericEndingShape::Ellipse(Ellipse::Extent(extent)) => {
ellipse_size_keyword(extent, &size, &center)
},
};
let mut builder = convert_gradient_stops(style, stops, radius.width);
(
builder.radial_gradient(
center.to_layout(),
radius.to_layout(),
extend_mode(repeating),
),
builder.stops().to_vec(),
)
}
|
let dist = distance_to_sides(size, center, cmp);
Au::from_f32_px(dist.width.to_f32_px().hypot(dist.height.to_f32_px()))
}
| identifier_body |
gradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// FIXME(rust-lang/rust#26264): Remove GenericEndingShape and GenericGradientItem.
use app_units::Au;
use crate::display_list::ToLayout;
use euclid::{Point2D, Size2D, Vector2D};
use style::properties::ComputedValues;
use style::values::computed::image::{EndingShape, LineDirection};
use style::values::computed::{Angle, GradientItem, LengthOrPercentage, Percentage, Position};
use style::values::generics::image::EndingShape as GenericEndingShape;
use style::values::generics::image::GradientItem as GenericGradientItem;
use style::values::generics::image::{Circle, Ellipse, ShapeExtent};
use style::values::specified::position::{X, Y};
use webrender_api::{ExtendMode, Gradient, GradientBuilder, GradientStop, RadialGradient};
/// A helper data structure for gradients.
#[derive(Clone, Copy)]
struct StopRun {
start_offset: f32,
end_offset: f32,
start_index: usize,
stop_count: usize,
}
/// Determines the radius of a circle if it was not explicitly provided.
/// <https://drafts.csswg.org/css-images-3/#typedef-size>
fn circle_size_keyword(
keyword: ShapeExtent,
size: &Size2D<Au>,
center: &Point2D<Au>,
) -> Size2D<Au> {
let radius = match keyword {
ShapeExtent::ClosestSide | ShapeExtent::Contain => {
let dist = distance_to_sides(size, center, ::std::cmp::min);
::std::cmp::min(dist.width, dist.height)
},
ShapeExtent::FarthestSide => {
let dist = distance_to_sides(size, center, ::std::cmp::max);
::std::cmp::max(dist.width, dist.height)
},
ShapeExtent::ClosestCorner => distance_to_corner(size, center, ::std::cmp::min),
ShapeExtent::FarthestCorner | ShapeExtent::Cover => {
distance_to_corner(size, center, ::std::cmp::max)
},
};
Size2D::new(radius, radius)
}
/// Returns the radius for an ellipse with the same ratio as if it was matched to the sides.
fn ellipse_radius<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Size2D<Au>
where
F: Fn(Au, Au) -> Au,
{
let dist = distance_to_sides(size, center, cmp);
Size2D::new(
dist.width.scale_by(::std::f32::consts::FRAC_1_SQRT_2 * 2.0),
dist.height
.scale_by(::std::f32::consts::FRAC_1_SQRT_2 * 2.0),
)
}
/// Determines the radius of an ellipse if it was not explicitly provided.
/// <https://drafts.csswg.org/css-images-3/#typedef-size>
fn ellipse_size_keyword(
keyword: ShapeExtent,
size: &Size2D<Au>,
center: &Point2D<Au>,
) -> Size2D<Au> {
match keyword {
ShapeExtent::ClosestSide | ShapeExtent::Contain => {
distance_to_sides(size, center, ::std::cmp::min)
},
ShapeExtent::FarthestSide => distance_to_sides(size, center, ::std::cmp::max),
ShapeExtent::ClosestCorner => ellipse_radius(size, center, ::std::cmp::min),
ShapeExtent::FarthestCorner | ShapeExtent::Cover => {
ellipse_radius(size, center, ::std::cmp::max)
},
}
}
fn convert_gradient_stops(
style: &ComputedValues,
gradient_items: &[GradientItem],
total_length: Au,
) -> GradientBuilder {
// Determine the position of each stop per CSS-IMAGES § 3.4.
// Only keep the color stops, discard the color interpolation hints.
let mut stop_items = gradient_items
.iter()
.filter_map(|item| match *item {
GenericGradientItem::ColorStop(ref stop) => Some(*stop),
_ => None,
})
.collect::<Vec<_>>();
assert!(stop_items.len() >= 2);
// Run the algorithm from
// https://drafts.csswg.org/css-images-3/#color-stop-syntax
// Step 1:
// If the first color stop does not have a position, set its position to 0%.
{
let first = stop_items.first_mut().unwrap();
if first.position.is_none() {
first.position = Some(LengthOrPercentage::Percentage(Percentage(0.0)));
}
}
// If the last color stop does not have a position, set its position to 100%.
{
let last = stop_items.last_mut().unwrap();
if last.position.is_none() {
last.position = Some(LengthOrPercentage::Percentage(Percentage(1.0)));
}
}
// Step 2: Move any stops placed before earlier stops to the
// same position as the preceding stop.
let mut last_stop_position = stop_items.first().unwrap().position.unwrap();
for stop in stop_items.iter_mut().skip(1) {
if let Some(pos) = stop.position {
if position_to_offset(last_stop_position, total_length) >
position_to_offset(pos, total_length)
{
stop.position = Some(last_stop_position);
}
last_stop_position = stop.position.unwrap();
}
}
// Step 3: Evenly space stops without position.
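// For a run of n consecutive unpositioned stops bounded by positioned stops, the j-th of them
// (j = 1..n) is placed at start_offset + j / (n + 1) of the run length; `stop_count` holds
// n - 1, which is why the denominator used below is (2 + stop_run.stop_count).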
let mut stops = GradientBuilder::new();
let mut stop_run = None;
for (i, stop) in stop_items.iter().enumerate() {
let offset = match stop.position {
None => {
if stop_run.is_none() {
// Initialize a new stop run.
// `unwrap()` here should never fail because this is the beginning of
// a stop run, which is always bounded by a length or percentage.
let start_offset =
position_to_offset(stop_items[i - 1].position.unwrap(), total_length);
// `unwrap()` here should never fail because this is the end of
// a stop run, which is always bounded by a length or percentage.
let (end_index, end_stop) = stop_items[(i + 1)..]
.iter()
.enumerate()
.find(|&(_, ref stop)| stop.position.is_some())
.unwrap();
let end_offset = position_to_offset(end_stop.position.unwrap(), total_length);
stop_run = Some(StopRun {
start_offset,
end_offset,
start_index: i - 1,
stop_count: end_index,
})
}
let stop_run = stop_run.unwrap();
let stop_run_length = stop_run.end_offset - stop_run.start_offset;
stop_run.start_offset +
stop_run_length * (i - stop_run.start_index) as f32 /
((2 + stop_run.stop_count) as f32)
},
Some(position) => {
stop_run = None;
position_to_offset(position, total_length)
},
};
assert!(offset.is_finite());
stops.push(GradientStop {
offset: offset,
color: style.resolve_color(stop.color).to_layout(),
})
}
stops
}
fn extend_mode(repeating: bool) -> ExtendMode {
if repeating {
ExtendMode::Repeat
} else {
ExtendMode::Clamp
}
}
/// Returns the distance to the nearest or farthest corner depending on the comparator.
fn d | F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Au
where
F: Fn(Au, Au) -> Au,
{
let dist = distance_to_sides(size, center, cmp);
Au::from_f32_px(dist.width.to_f32_px().hypot(dist.height.to_f32_px()))
}
/// Returns the distance to the nearest or farthest sides depending on the comparator.
///
/// The first return value is the horizontal distance, the second the vertical distance.
fn distance_to_sides<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Size2D<Au>
where
F: Fn(Au, Au) -> Au,
{
let top_side = center.y;
let right_side = size.width - center.x;
let bottom_side = size.height - center.y;
let left_side = center.x;
Size2D::new(cmp(left_side, right_side), cmp(top_side, bottom_side))
}
fn position_to_offset(position: LengthOrPercentage, total_length: Au) -> f32 {
if total_length == Au(0) {
return 0.0;
}
match position {
LengthOrPercentage::Length(l) => l.to_i32_au() as f32 / total_length.0 as f32,
LengthOrPercentage::Percentage(percentage) => percentage.0 as f32,
LengthOrPercentage::Calc(calc) => {
calc.to_used_value(Some(total_length)).unwrap().0 as f32 / total_length.0 as f32
},
}
}
pub fn linear(
style: &ComputedValues,
size: Size2D<Au>,
stops: &[GradientItem],
direction: LineDirection,
repeating: bool,
) -> (Gradient, Vec<GradientStop>) {
let angle = match direction {
LineDirection::Angle(angle) => angle.radians(),
LineDirection::Horizontal(x) => match x {
X::Left => Angle::from_degrees(270.).radians(),
X::Right => Angle::from_degrees(90.).radians(),
},
LineDirection::Vertical(y) => match y {
Y::Top => Angle::from_degrees(0.).radians(),
Y::Bottom => Angle::from_degrees(180.).radians(),
},
LineDirection::Corner(horizontal, vertical) => {
// This is the angle for one of the diagonals of the box. Our angle
// will either be this one, this one + PI, or one of the other
// two perpendicular angles.
let atan = (size.height.to_f32_px() / size.width.to_f32_px()).atan();
match (horizontal, vertical) {
(X::Right, Y::Bottom) => ::std::f32::consts::PI - atan,
(X::Left, Y::Bottom) => ::std::f32::consts::PI + atan,
(X::Right, Y::Top) => atan,
(X::Left, Y::Top) => -atan,
}
},
};
// Get correct gradient line length, based on:
// https://drafts.csswg.org/css-images-3/#linear-gradients
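// With A measured as in CSS (0deg pointing up, increasing clockwise) and y growing downward,
// `dir` is the unit vector (sin A, -cos A) and `line_length` is the spec's
// abs(W * sin A) + abs(H * cos A): the box projected onto the gradient direction.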
let dir = Point2D::new(angle.sin(), -angle.cos());
let line_length =
(dir.x * size.width.to_f32_px()).abs() + (dir.y * size.height.to_f32_px()).abs();
let inv_dir_length = 1.0 / (dir.x * dir.x + dir.y * dir.y).sqrt();
// This is the vector between the center and the ending point; i.e. half
// of the distance between the starting point and the ending point.
let delta = Vector2D::new(
Au::from_f32_px(dir.x * inv_dir_length * line_length / 2.0),
Au::from_f32_px(dir.y * inv_dir_length * line_length / 2.0),
);
// This is the length of the gradient line.
let length = Au::from_f32_px((delta.x.to_f32_px() * 2.0).hypot(delta.y.to_f32_px() * 2.0));
let mut builder = convert_gradient_stops(style, stops, length);
let center = Point2D::new(size.width / 2, size.height / 2);
(
builder.gradient(
(center - delta).to_layout(),
(center + delta).to_layout(),
extend_mode(repeating),
),
builder.stops().to_vec(),
)
}
pub fn radial(
style: &ComputedValues,
size: Size2D<Au>,
stops: &[GradientItem],
shape: EndingShape,
center: Position,
repeating: bool,
) -> (RadialGradient, Vec<GradientStop>) {
let center = Point2D::new(
center.horizontal.to_used_value(size.width),
center.vertical.to_used_value(size.height),
);
let radius = match shape {
GenericEndingShape::Circle(Circle::Radius(length)) => {
let length = Au::from(length);
Size2D::new(length, length)
},
GenericEndingShape::Circle(Circle::Extent(extent)) => {
circle_size_keyword(extent, &size, &center)
},
GenericEndingShape::Ellipse(Ellipse::Radii(x, y)) => {
Size2D::new(x.to_used_value(size.width), y.to_used_value(size.height))
},
GenericEndingShape::Ellipse(Ellipse::Extent(extent)) => {
ellipse_size_keyword(extent, &size, &center)
},
};
let mut builder = convert_gradient_stops(style, stops, radius.width);
(
builder.radial_gradient(
center.to_layout(),
radius.to_layout(),
extend_mode(repeating),
),
builder.stops().to_vec(),
)
}
| istance_to_corner< | identifier_name |
gradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// FIXME(rust-lang/rust#26264): Remove GenericEndingShape and GenericGradientItem.
use app_units::Au;
use crate::display_list::ToLayout;
use euclid::{Point2D, Size2D, Vector2D};
use style::properties::ComputedValues;
use style::values::computed::image::{EndingShape, LineDirection};
use style::values::computed::{Angle, GradientItem, LengthOrPercentage, Percentage, Position};
use style::values::generics::image::EndingShape as GenericEndingShape;
use style::values::generics::image::GradientItem as GenericGradientItem;
use style::values::generics::image::{Circle, Ellipse, ShapeExtent};
use style::values::specified::position::{X, Y};
use webrender_api::{ExtendMode, Gradient, GradientBuilder, GradientStop, RadialGradient};
/// A helper data structure for gradients.
#[derive(Clone, Copy)]
struct StopRun {
start_offset: f32,
end_offset: f32,
start_index: usize,
stop_count: usize,
}
/// Determines the radius of a circle if it was not explicitly provided.
/// <https://drafts.csswg.org/css-images-3/#typedef-size>
fn circle_size_keyword(
keyword: ShapeExtent,
size: &Size2D<Au>,
center: &Point2D<Au>,
) -> Size2D<Au> {
let radius = match keyword {
ShapeExtent::ClosestSide | ShapeExtent::Contain => {
let dist = distance_to_sides(size, center, ::std::cmp::min);
::std::cmp::min(dist.width, dist.height)
},
ShapeExtent::FarthestSide => {
let dist = distance_to_sides(size, center, ::std::cmp::max);
::std::cmp::max(dist.width, dist.height)
},
ShapeExtent::ClosestCorner => distance_to_corner(size, center, ::std::cmp::min),
ShapeExtent::FarthestCorner | ShapeExtent::Cover => {
distance_to_corner(size, center, ::std::cmp::max)
},
};
Size2D::new(radius, radius)
}
/// Returns the radius for an ellipse with the same ratio as if it was matched to the sides.
fn ellipse_radius<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Size2D<Au>
where
F: Fn(Au, Au) -> Au,
{
let dist = distance_to_sides(size, center, cmp);
Size2D::new(
dist.width.scale_by(::std::f32::consts::FRAC_1_SQRT_2 * 2.0),
dist.height
.scale_by(::std::f32::consts::FRAC_1_SQRT_2 * 2.0),
)
}
/// Determines the radius of an ellipse if it was not explicitly provided.
/// <https://drafts.csswg.org/css-images-3/#typedef-size>
fn ellipse_size_keyword(
keyword: ShapeExtent,
size: &Size2D<Au>,
center: &Point2D<Au>,
) -> Size2D<Au> {
match keyword {
ShapeExtent::ClosestSide | ShapeExtent::Contain => {
distance_to_sides(size, center, ::std::cmp::min)
},
ShapeExtent::FarthestSide => distance_to_sides(size, center, ::std::cmp::max),
ShapeExtent::ClosestCorner => ellipse_radius(size, center, ::std::cmp::min),
ShapeExtent::FarthestCorner | ShapeExtent::Cover => {
ellipse_radius(size, center, ::std::cmp::max)
},
}
}
fn convert_gradient_stops(
style: &ComputedValues,
gradient_items: &[GradientItem],
total_length: Au,
) -> GradientBuilder {
// Determine the position of each stop per CSS-IMAGES § 3.4.
// Only keep the color stops, discard the color interpolation hints.
let mut stop_items = gradient_items
.iter()
.filter_map(|item| match *item {
GenericGradientItem::ColorStop(ref stop) => Some(*stop),
_ => None,
})
.collect::<Vec<_>>();
assert!(stop_items.len() >= 2);
// Run the algorithm from
// https://drafts.csswg.org/css-images-3/#color-stop-syntax
// Step 1:
// If the first color stop does not have a position, set its position to 0%.
{
let first = stop_items.first_mut().unwrap();
if first.position.is_none() {
first.position = Some(LengthOrPercentage::Percentage(Percentage(0.0)));
}
}
// If the last color stop does not have a position, set its position to 100%.
{
let last = stop_items.last_mut().unwrap();
if last.position.is_none() {
last.position = Some(LengthOrPercentage::Percentage(Percentage(1.0)));
}
}
// Step 2: Move any stops placed before earlier stops to the
// same position as the preceding stop.
let mut last_stop_position = stop_items.first().unwrap().position.unwrap();
for stop in stop_items.iter_mut().skip(1) {
if let Some(pos) = stop.position {
if position_to_offset(last_stop_position, total_length) >
position_to_offset(pos, total_length)
{
stop.position = Some(last_stop_position);
}
last_stop_position = stop.position.unwrap();
}
}
// Step 3: Evenly space stops without position.
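// For a run of n consecutive unpositioned stops bounded by positioned stops, the j-th of them
// (j = 1..n) is placed at start_offset + j / (n + 1) of the run length; `stop_count` holds
// n - 1, which is why the denominator used below is (2 + stop_run.stop_count).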
let mut stops = GradientBuilder::new();
let mut stop_run = None;
for (i, stop) in stop_items.iter().enumerate() {
let offset = match stop.position {
None => {
if stop_run.is_none() {
// Initialize a new stop run.
// `unwrap()` here should never fail because this is the beginning of
// a stop run, which is always bounded by a length or percentage.
let start_offset =
position_to_offset(stop_items[i - 1].position.unwrap(), total_length);
// `unwrap()` here should never fail because this is the end of
// a stop run, which is always bounded by a length or percentage.
let (end_index, end_stop) = stop_items[(i + 1)..]
.iter()
.enumerate()
.find(|&(_, ref stop)| stop.position.is_some())
.unwrap();
let end_offset = position_to_offset(end_stop.position.unwrap(), total_length); | stop_run = Some(StopRun {
start_offset,
end_offset,
start_index: i - 1,
stop_count: end_index,
})
}
let stop_run = stop_run.unwrap();
let stop_run_length = stop_run.end_offset - stop_run.start_offset;
stop_run.start_offset +
stop_run_length * (i - stop_run.start_index) as f32 /
((2 + stop_run.stop_count) as f32)
},
Some(position) => {
stop_run = None;
position_to_offset(position, total_length)
},
};
assert!(offset.is_finite());
stops.push(GradientStop {
offset: offset,
color: style.resolve_color(stop.color).to_layout(),
})
}
stops
}
fn extend_mode(repeating: bool) -> ExtendMode {
if repeating {
ExtendMode::Repeat
} else {
ExtendMode::Clamp
}
}
/// Returns the distance to the nearest or farthest corner depending on the comparator.
fn distance_to_corner<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Au
where
F: Fn(Au, Au) -> Au,
{
let dist = distance_to_sides(size, center, cmp);
Au::from_f32_px(dist.width.to_f32_px().hypot(dist.height.to_f32_px()))
}
/// Returns the distance to the nearest or farthest sides depending on the comparator.
///
/// The first return value is the horizontal distance, the second the vertical distance.
fn distance_to_sides<F>(size: &Size2D<Au>, center: &Point2D<Au>, cmp: F) -> Size2D<Au>
where
F: Fn(Au, Au) -> Au,
{
let top_side = center.y;
let right_side = size.width - center.x;
let bottom_side = size.height - center.y;
let left_side = center.x;
Size2D::new(cmp(left_side, right_side), cmp(top_side, bottom_side))
}
fn position_to_offset(position: LengthOrPercentage, total_length: Au) -> f32 {
if total_length == Au(0) {
return 0.0;
}
match position {
LengthOrPercentage::Length(l) => l.to_i32_au() as f32 / total_length.0 as f32,
LengthOrPercentage::Percentage(percentage) => percentage.0 as f32,
LengthOrPercentage::Calc(calc) => {
calc.to_used_value(Some(total_length)).unwrap().0 as f32 / total_length.0 as f32
},
}
}
pub fn linear(
style: &ComputedValues,
size: Size2D<Au>,
stops: &[GradientItem],
direction: LineDirection,
repeating: bool,
) -> (Gradient, Vec<GradientStop>) {
let angle = match direction {
LineDirection::Angle(angle) => angle.radians(),
LineDirection::Horizontal(x) => match x {
X::Left => Angle::from_degrees(270.).radians(),
X::Right => Angle::from_degrees(90.).radians(),
},
LineDirection::Vertical(y) => match y {
Y::Top => Angle::from_degrees(0.).radians(),
Y::Bottom => Angle::from_degrees(180.).radians(),
},
LineDirection::Corner(horizontal, vertical) => {
// This is the angle for one of the diagonals of the box. Our angle
// will either be this one, this one + PI, or one of the other
// two perpendicular angles.
let atan = (size.height.to_f32_px() / size.width.to_f32_px()).atan();
match (horizontal, vertical) {
(X::Right, Y::Bottom) => ::std::f32::consts::PI - atan,
(X::Left, Y::Bottom) => ::std::f32::consts::PI + atan,
(X::Right, Y::Top) => atan,
(X::Left, Y::Top) => -atan,
}
},
};
// Get correct gradient line length, based on:
// https://drafts.csswg.org/css-images-3/#linear-gradients
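// With A measured as in CSS (0deg pointing up, increasing clockwise) and y growing downward,
// `dir` is the unit vector (sin A, -cos A) and `line_length` is the spec's
// abs(W * sin A) + abs(H * cos A): the box projected onto the gradient direction.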
let dir = Point2D::new(angle.sin(), -angle.cos());
let line_length =
(dir.x * size.width.to_f32_px()).abs() + (dir.y * size.height.to_f32_px()).abs();
let inv_dir_length = 1.0 / (dir.x * dir.x + dir.y * dir.y).sqrt();
// This is the vector between the center and the ending point; i.e. half
// of the distance between the starting point and the ending point.
let delta = Vector2D::new(
Au::from_f32_px(dir.x * inv_dir_length * line_length / 2.0),
Au::from_f32_px(dir.y * inv_dir_length * line_length / 2.0),
);
// This is the length of the gradient line.
let length = Au::from_f32_px((delta.x.to_f32_px() * 2.0).hypot(delta.y.to_f32_px() * 2.0));
let mut builder = convert_gradient_stops(style, stops, length);
let center = Point2D::new(size.width / 2, size.height / 2);
(
builder.gradient(
(center - delta).to_layout(),
(center + delta).to_layout(),
extend_mode(repeating),
),
builder.stops().to_vec(),
)
}
pub fn radial(
style: &ComputedValues,
size: Size2D<Au>,
stops: &[GradientItem],
shape: EndingShape,
center: Position,
repeating: bool,
) -> (RadialGradient, Vec<GradientStop>) {
let center = Point2D::new(
center.horizontal.to_used_value(size.width),
center.vertical.to_used_value(size.height),
);
let radius = match shape {
GenericEndingShape::Circle(Circle::Radius(length)) => {
let length = Au::from(length);
Size2D::new(length, length)
},
GenericEndingShape::Circle(Circle::Extent(extent)) => {
circle_size_keyword(extent, &size, &center)
},
GenericEndingShape::Ellipse(Ellipse::Radii(x, y)) => {
Size2D::new(x.to_used_value(size.width), y.to_used_value(size.height))
},
GenericEndingShape::Ellipse(Ellipse::Extent(extent)) => {
ellipse_size_keyword(extent, &size, &center)
},
};
let mut builder = convert_gradient_stops(style, stops, radius.width);
(
builder.radial_gradient(
center.to_layout(),
radius.to_layout(),
extend_mode(repeating),
),
builder.stops().to_vec(),
)
} | random_line_split |
|
performance.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use actor::{Actor, ActorMessageStatus, ActorRegistry};
use protocol::JsonPacketStream;
use rustc_serialize::json;
use std::net::TcpStream;
pub struct PerformanceActor {
name: String,
}
#[derive(RustcEncodable)]
struct PerformanceFeatures {
withMarkers: bool,
withMemory: bool,
withTicks: bool,
withAllocations: bool,
withJITOptimizations: bool,
}
#[derive(RustcEncodable)]
struct PerformanceTraits {
features: PerformanceFeatures,
}
#[derive(RustcEncodable)]
struct ConnectReply {
from: String,
traits: PerformanceTraits,
}
impl Actor for PerformanceActor {
fn name(&self) -> String {
self.name.clone()
}
fn handle_message(&self,
_registry: &ActorRegistry,
msg_type: &str,
_msg: &json::Object,
stream: &mut TcpStream) -> Result<ActorMessageStatus, ()> {
Ok(match msg_type {
"connect" => {
let msg = ConnectReply {
from: self.name(),
traits: PerformanceTraits {
features: PerformanceFeatures {
withMarkers: true,
withMemory: true,
withTicks: true,
withAllocations: true,
withJITOptimizations: true,
},
},
};
stream.write_json_packet(&msg); | _ => ActorMessageStatus::Ignored,
})
}
}
impl PerformanceActor {
pub fn new(name: String) -> PerformanceActor {
PerformanceActor {
name: name,
}
}
} | ActorMessageStatus::Processed
}, | random_line_split |
performance.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use actor::{Actor, ActorMessageStatus, ActorRegistry};
use protocol::JsonPacketStream;
use rustc_serialize::json;
use std::net::TcpStream;
pub struct PerformanceActor {
name: String,
}
#[derive(RustcEncodable)]
struct PerformanceFeatures {
withMarkers: bool,
withMemory: bool,
withTicks: bool,
withAllocations: bool,
withJITOptimizations: bool,
}
#[derive(RustcEncodable)]
struct PerformanceTraits {
features: PerformanceFeatures,
}
#[derive(RustcEncodable)]
struct ConnectReply {
from: String,
traits: PerformanceTraits,
}
impl Actor for PerformanceActor {
fn name(&self) -> String {
self.name.clone()
}
fn handle_message(&self,
_registry: &ActorRegistry,
msg_type: &str,
_msg: &json::Object,
stream: &mut TcpStream) -> Result<ActorMessageStatus, ()> {
Ok(match msg_type {
"connect" => {
let msg = ConnectReply {
from: self.name(),
traits: PerformanceTraits {
features: PerformanceFeatures {
withMarkers: true,
withMemory: true,
withTicks: true,
withAllocations: true,
withJITOptimizations: true,
},
},
};
stream.write_json_packet(&msg);
ActorMessageStatus::Processed
},
_ => ActorMessageStatus::Ignored,
})
}
}
impl PerformanceActor {
pub fn | (name: String) -> PerformanceActor {
PerformanceActor {
name: name,
}
}
}
| new | identifier_name |
tcp-stress.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-linux see joyent/libuv#1189
// ignore-android needs extra network permissions
// ignore-openbsd system ulimit (Too many open files)
// ignore-bitrig system ulimit (Too many open files)
// exec-env:RUST_LOG=debug
#![feature(rustc_private, libc, old_io, io, std_misc)]
#![allow(deprecated, unused_must_use)]
#[macro_use]
extern crate log;
extern crate libc;
use std::sync::mpsc::channel;
use std::old_io::net::tcp::{TcpListener, TcpStream};
use std::old_io::{Acceptor, Listener, Reader, Writer};
use std::thread::{self, Builder};
use std::time::Duration;
fn main() {
// This test has a chance to time out; try not to let it time out
thread::spawn(move|| -> () {
use std::old_io::timer;
timer::sleep(Duration::milliseconds(30 * 1000));
println!("timed out!");
unsafe { libc::exit(1) }
});
let (tx, rx) = channel();
thread::spawn(move || -> () {
let mut listener = TcpListener::bind("127.0.0.1:0").unwrap();
tx.send(listener.socket_name().unwrap()).unwrap();
let mut acceptor = listener.listen();
loop {
let mut stream = match acceptor.accept() {
Ok(stream) => stream,
Err(error) => {
debug!("accept panicked: {}", error);
continue;
}
};
stream.read_byte();
stream.write(&[2]);
}
});
let addr = rx.recv().unwrap();
let (tx, rx) = channel();
for _ in 0..1000 {
let tx = tx.clone();
Builder::new().stack_size(64 * 1024).spawn(move|| {
match TcpStream::connect(addr) {
Ok(stream) => | ,
Err(e) => debug!("{}", e)
}
tx.send(()).unwrap();
});
}
// Wait for all clients to exit, but don't wait for the server to exit. The
// server just runs infinitely.
drop(tx);
for _ in 0..1000 {
rx.recv().unwrap();
}
unsafe { libc::exit(0) }
}
| {
let mut stream = stream;
stream.write(&[1]);
let mut buf = [0];
stream.read(&mut buf);
} | conditional_block |
tcp-stress.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-linux see joyent/libuv#1189
// ignore-android needs extra network permissions
// ignore-openbsd system ulimit (Too many open files)
// ignore-bitrig system ulimit (Too many open files)
// exec-env:RUST_LOG=debug
#![feature(rustc_private, libc, old_io, io, std_misc)]
#![allow(deprecated, unused_must_use)]
#[macro_use]
extern crate log;
extern crate libc;
use std::sync::mpsc::channel;
use std::old_io::net::tcp::{TcpListener, TcpStream};
use std::old_io::{Acceptor, Listener, Reader, Writer};
use std::thread::{self, Builder};
use std::time::Duration;
fn main() | }
};
stream.read_byte();
stream.write(&[2]);
}
});
let addr = rx.recv().unwrap();
let (tx, rx) = channel();
for _ in 0..1000 {
let tx = tx.clone();
Builder::new().stack_size(64 * 1024).spawn(move|| {
match TcpStream::connect(addr) {
Ok(stream) => {
let mut stream = stream;
stream.write(&[1]);
let mut buf = [0];
stream.read(&mut buf);
},
Err(e) => debug!("{}", e)
}
tx.send(()).unwrap();
});
}
// Wait for all clients to exit, but don't wait for the server to exit. The
// server just runs infinitely.
drop(tx);
for _ in 0..1000 {
rx.recv().unwrap();
}
unsafe { libc::exit(0) }
}
| {
// This test has a chance to time out; try not to let it time out
thread::spawn(move|| -> () {
use std::old_io::timer;
timer::sleep(Duration::milliseconds(30 * 1000));
println!("timed out!");
unsafe { libc::exit(1) }
});
let (tx, rx) = channel();
thread::spawn(move || -> () {
let mut listener = TcpListener::bind("127.0.0.1:0").unwrap();
tx.send(listener.socket_name().unwrap()).unwrap();
let mut acceptor = listener.listen();
loop {
let mut stream = match acceptor.accept() {
Ok(stream) => stream,
Err(error) => {
debug!("accept panicked: {}", error);
continue; | identifier_body |
tcp-stress.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-linux see joyent/libuv#1189
// ignore-android needs extra network permissions
// ignore-openbsd system ulimit (Too many open files)
// ignore-bitrig system ulimit (Too many open files)
// exec-env:RUST_LOG=debug
#![feature(rustc_private, libc, old_io, io, std_misc)]
#![allow(deprecated, unused_must_use)]
#[macro_use]
extern crate log;
extern crate libc;
use std::sync::mpsc::channel;
use std::old_io::net::tcp::{TcpListener, TcpStream};
use std::old_io::{Acceptor, Listener, Reader, Writer};
use std::thread::{self, Builder};
use std::time::Duration;
fn main() {
// This test has a chance to time out; try not to let it time out
thread::spawn(move|| -> () {
use std::old_io::timer;
timer::sleep(Duration::milliseconds(30 * 1000));
println!("timed out!");
unsafe { libc::exit(1) }
});
let (tx, rx) = channel();
thread::spawn(move || -> () {
let mut listener = TcpListener::bind("127.0.0.1:0").unwrap();
tx.send(listener.socket_name().unwrap()).unwrap();
let mut acceptor = listener.listen();
loop {
let mut stream = match acceptor.accept() {
Ok(stream) => stream,
Err(error) => {
debug!("accept panicked: {}", error); | };
stream.read_byte();
stream.write(&[2]);
}
});
let addr = rx.recv().unwrap();
let (tx, rx) = channel();
for _ in 0..1000 {
let tx = tx.clone();
Builder::new().stack_size(64 * 1024).spawn(move|| {
match TcpStream::connect(addr) {
Ok(stream) => {
let mut stream = stream;
stream.write(&[1]);
let mut buf = [0];
stream.read(&mut buf);
},
Err(e) => debug!("{}", e)
}
tx.send(()).unwrap();
});
}
// Wait for all clients to exit, but don't wait for the server to exit. The
// server just runs infinitely.
drop(tx);
for _ in 0..1000 {
rx.recv().unwrap();
}
unsafe { libc::exit(0) }
} | continue;
} | random_line_split |
tcp-stress.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-linux see joyent/libuv#1189
// ignore-android needs extra network permissions
// ignore-openbsd system ulimit (Too many open files)
// ignore-bitrig system ulimit (Too many open files)
// exec-env:RUST_LOG=debug
#![feature(rustc_private, libc, old_io, io, std_misc)]
#![allow(deprecated, unused_must_use)]
#[macro_use]
extern crate log;
extern crate libc;
use std::sync::mpsc::channel;
use std::old_io::net::tcp::{TcpListener, TcpStream};
use std::old_io::{Acceptor, Listener, Reader, Writer};
use std::thread::{self, Builder};
use std::time::Duration;
fn | () {
// This test has a chance to time out; try not to let it time out
thread::spawn(move|| -> () {
use std::old_io::timer;
timer::sleep(Duration::milliseconds(30 * 1000));
println!("timed out!");
unsafe { libc::exit(1) }
});
let (tx, rx) = channel();
thread::spawn(move || -> () {
let mut listener = TcpListener::bind("127.0.0.1:0").unwrap();
tx.send(listener.socket_name().unwrap()).unwrap();
let mut acceptor = listener.listen();
loop {
let mut stream = match acceptor.accept() {
Ok(stream) => stream,
Err(error) => {
debug!("accept panicked: {}", error);
continue;
}
};
stream.read_byte();
stream.write(&[2]);
}
});
let addr = rx.recv().unwrap();
let (tx, rx) = channel();
for _ in 0..1000 {
let tx = tx.clone();
Builder::new().stack_size(64 * 1024).spawn(move|| {
match TcpStream::connect(addr) {
Ok(stream) => {
let mut stream = stream;
stream.write(&[1]);
let mut buf = [0];
stream.read(&mut buf);
},
Err(e) => debug!("{}", e)
}
tx.send(()).unwrap();
});
}
// Wait for all clients to exit, but don't wait for the server to exit. The
// server just runs infinitely.
drop(tx);
for _ in 0..1000 {
rx.recv().unwrap();
}
unsafe { libc::exit(0) }
}
| main | identifier_name |
stateful_eval.rs | use std::sync::{Mutex, Arc};
use std::result::Result;
use std::error::Error;
use serde_json::*;
use gossyp_base::*;
use gossyp_base::basic::*;
use super::binding_environment::*;
use super::script_interpreter::*;
use super::bind_statement::*;
use super::evaluate_statement::*;
use super::script::*;
use super::bound_script::*;
///
/// Represents a tool that can be used to evaluate scripts and maintains
/// state (useful for evaluation in a REPL environment)
///
#[derive(Clone)]
pub struct StatefulEvalTool {
binding: Arc<Mutex<Box<VariableBindingEnvironment>>>,
execution: Arc<Mutex<ScriptExecutionEnvironment>>
}
impl StatefulEvalTool {
pub fn new() -> StatefulEvalTool {
StatefulEvalTool {
binding: Arc::new(Mutex::new(BindingEnvironment::new())),
execution: Arc::new(Mutex::new(ScriptExecutionEnvironment::new()))
}
}
///
/// Evaluates an unbound statement using this tool
///
pub fn evaluate_unbound_statement(&self, script: &Script, environment: &Environment) -> Result<Value, Value> {
self.evaluate_statement(&self.bind_statement(script, environment)?, environment)
}
///
/// Binds a statement to this tool
///
pub fn bind_statement(&self, script: &Script, environment: &Environment) -> Result<BoundScript, Value> {
// Merge the stuff in the external environment with the stored environment
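// so identifiers resolve against both this tool's persistent bindings and any tools the
// caller defined (see `can_bind_tool_from_passed_in_environment` below).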
let our_environment = &mut **self.binding.lock().unwrap();
let their_environment = BindingEnvironment::from_environment(environment);
let mut combined_environment = BindingEnvironment::combine(our_environment, &*their_environment);
// Bind to the combined environments
bind_statement(script, &mut *combined_environment)
}
///
/// Evaluates a statement in the environment represented by this tool
///
pub fn evaluate_statement(&self, script: &BoundScript, environment: &Environment) -> Result<Value, Value> {
evaluate_statement(script, environment, &mut *self.execution.lock().unwrap())
}
}
impl Tool for StatefulEvalTool {
fn invoke_json(&self, input: Value, environment: &Environment) -> Result<Value, Value> {
let script = from_value::<Vec<Script>>(input)
.map(|statements| Script::Sequence(statements));
match script {
Ok(script) => self.evaluate_unbound_statement(&script, environment),
Err(script_error) => Err(json![{
"error": "JSON input decode failed",
"description": script_error.description(),
}])
}
}
}
///
/// Tool function that creates an eval state in an environment
///
pub fn create_evaluator_with_state_tool(eval_name: String, environment: &Environment) -> Result<(), Value> {
define_new_tool(environment, &eval_name, Box::new(StatefulEvalTool::new()))?;
Ok(())
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn | () {
let eval = StatefulEvalTool::new();
let env = EmptyEnvironment::new();
// var x = 1
let assign_x = eval.evaluate_unbound_statement(&Script::Var(ScriptToken::identifier("x"), Expression::Number(ScriptToken::number("1"))), &env);
assert!(assign_x.is_ok());
// x
let val_of_x = eval.evaluate_unbound_statement(&Script::RunCommand(Expression::Identifier(ScriptToken::identifier("x"))), &env);
assert!(val_of_x == Ok(json![ 1 ]));
}
#[test]
fn can_bind_tool_from_passed_in_environment() {
let eval = StatefulEvalTool::new();
let env = DynamicEnvironment::new();
env.define("test-tool", Box::new(make_pure_tool(|_: ()| 42)));
// test-tool
let val_of_test_tool = eval.evaluate_unbound_statement(&Script::RunCommand(Expression::Identifier(ScriptToken::identifier("test-tool"))), &env);
assert!(val_of_test_tool == Ok(json![ 42 ]));
}
}
| can_bind_variable_using_stateful_tool | identifier_name |
stateful_eval.rs | use std::sync::{Mutex, Arc};
use std::result::Result;
use std::error::Error;
use serde_json::*;
use gossyp_base::*;
use gossyp_base::basic::*;
use super::binding_environment::*;
use super::script_interpreter::*;
use super::bind_statement::*;
use super::evaluate_statement::*;
use super::script::*;
use super::bound_script::*;
///
/// Represents a tool that can be used to evaluate scripts and maintains
/// state (useful for evaluation in a REPL environment)
///
#[derive(Clone)]
pub struct StatefulEvalTool {
binding: Arc<Mutex<Box<VariableBindingEnvironment>>>,
execution: Arc<Mutex<ScriptExecutionEnvironment>>
}
impl StatefulEvalTool {
pub fn new() -> StatefulEvalTool {
StatefulEvalTool {
binding: Arc::new(Mutex::new(BindingEnvironment::new())),
execution: Arc::new(Mutex::new(ScriptExecutionEnvironment::new()))
}
}
///
/// Evaluates an unbound statement using this tool
///
pub fn evaluate_unbound_statement(&self, script: &Script, environment: &Environment) -> Result<Value, Value> {
self.evaluate_statement(&self.bind_statement(script, environment)?, environment)
}
///
/// Binds a statement to this tool
///
pub fn bind_statement(&self, script: &Script, environment: &Environment) -> Result<BoundScript, Value> {
// Merge the stuff in the external environment with the stored environment
let our_environment = &mut **self.binding.lock().unwrap();
let their_environment = BindingEnvironment::from_environment(environment);
let mut combined_environment = BindingEnvironment::combine(our_environment, &*their_environment);
// Bind to the combined environments
bind_statement(script, &mut *combined_environment)
}
///
/// Evaluates a statement in the environment represented by this tool
///
pub fn evaluate_statement(&self, script: &BoundScript, environment: &Environment) -> Result<Value, Value> {
evaluate_statement(script, environment, &mut *self.execution.lock().unwrap())
}
}
impl Tool for StatefulEvalTool {
fn invoke_json(&self, input: Value, environment: &Environment) -> Result<Value, Value> {
let script = from_value::<Vec<Script>>(input)
.map(|statements| Script::Sequence(statements));
match script {
Ok(script) => self.evaluate_unbound_statement(&script, environment),
Err(script_error) => Err(json![{
"error": "JSON input decode failed",
"description": script_error.description(),
}])
}
}
}
///
/// Tool function that creates an eval state in an environment
///
pub fn create_evaluator_with_state_tool(eval_name: String, environment: &Environment) -> Result<(), Value> |
#[cfg(test)]
mod test {
use super::*;
#[test]
fn can_bind_variable_using_stateful_tool() {
let eval = StatefulEvalTool::new();
let env = EmptyEnvironment::new();
// var x = 1
let assign_x = eval.evaluate_unbound_statement(&Script::Var(ScriptToken::identifier("x"), Expression::Number(ScriptToken::number("1"))), &env);
assert!(assign_x.is_ok());
// x
let val_of_x = eval.evaluate_unbound_statement(&Script::RunCommand(Expression::Identifier(ScriptToken::identifier("x"))), &env);
assert!(val_of_x == Ok(json![ 1 ]));
}
#[test]
fn can_bind_tool_from_passed_in_environment() {
let eval = StatefulEvalTool::new();
let env = DynamicEnvironment::new();
env.define("test-tool", Box::new(make_pure_tool(|_: ()| 42)));
// test-tool
let val_of_test_tool = eval.evaluate_unbound_statement(&Script::RunCommand(Expression::Identifier(ScriptToken::identifier("test-tool"))), &env);
assert!(val_of_test_tool == Ok(json![ 42 ]));
}
}
| {
define_new_tool(environment, &eval_name, Box::new(StatefulEvalTool::new()))?;
Ok(())
} | identifier_body |
stateful_eval.rs | use std::sync::{Mutex, Arc};
use std::result::Result;
use std::error::Error;
use serde_json::*;
use gossyp_base::*;
use gossyp_base::basic::*;
use super::binding_environment::*;
use super::script_interpreter::*;
use super::bind_statement::*;
use super::evaluate_statement::*;
use super::script::*;
use super::bound_script::*;
///
/// Represents a tool that can be used to evaluate scripts and maintains
/// state (useful for evaluation in a REPL environment)
///
#[derive(Clone)]
pub struct StatefulEvalTool {
binding: Arc<Mutex<Box<VariableBindingEnvironment>>>,
execution: Arc<Mutex<ScriptExecutionEnvironment>>
}
impl StatefulEvalTool {
pub fn new() -> StatefulEvalTool {
StatefulEvalTool {
binding: Arc::new(Mutex::new(BindingEnvironment::new())),
execution: Arc::new(Mutex::new(ScriptExecutionEnvironment::new()))
}
}
///
/// Evaluates an unbound statement using this tool
///
pub fn evaluate_unbound_statement(&self, script: &Script, environment: &Environment) -> Result<Value, Value> {
self.evaluate_statement(&self.bind_statement(script, environment)?, environment)
}
///
/// Binds a statement to this tool
///
pub fn bind_statement(&self, script: &Script, environment: &Environment) -> Result<BoundScript, Value> {
// Merge the stuff in the external environment with the stored environment
let our_environment = &mut **self.binding.lock().unwrap();
let their_environment = BindingEnvironment::from_environment(environment);
let mut combined_environment = BindingEnvironment::combine(our_environment, &*their_environment);
// Bind to the combined environments
bind_statement(script, &mut *combined_environment)
}
///
/// Evaluates a statement in the environment represented by this tool
///
pub fn evaluate_statement(&self, script: &BoundScript, environment: &Environment) -> Result<Value, Value> {
evaluate_statement(script, environment, &mut *self.execution.lock().unwrap())
}
}
impl Tool for StatefulEvalTool {
fn invoke_json(&self, input: Value, environment: &Environment) -> Result<Value, Value> {
let script = from_value::<Vec<Script>>(input) | Ok(script) => self.evaluate_unbound_statement(&script, environment),
Err(script_error) => Err(json![{
"error": "JSON input decode failed",
"description": script_error.description(),
}])
}
}
}
///
/// Tool function that creates an eval state in an environment
///
pub fn create_evaluator_with_state_tool(eval_name: String, environment: &Environment) -> Result<(), Value> {
define_new_tool(environment, &eval_name, Box::new(StatefulEvalTool::new()))?;
Ok(())
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn can_bind_variable_using_stateful_tool() {
let eval = StatefulEvalTool::new();
let env = EmptyEnvironment::new();
// var x = 1
let assign_x = eval.evaluate_unbound_statement(&Script::Var(ScriptToken::identifier("x"), Expression::Number(ScriptToken::number("1"))), &env);
assert!(assign_x.is_ok());
// x
let val_of_x = eval.evaluate_unbound_statement(&Script::RunCommand(Expression::Identifier(ScriptToken::identifier("x"))), &env);
assert!(val_of_x == Ok(json![ 1 ]));
}
#[test]
fn can_bind_tool_from_passed_in_environment() {
let eval = StatefulEvalTool::new();
let env = DynamicEnvironment::new();
env.define("test-tool", Box::new(make_pure_tool(|_: ()| 42)));
// test-tool
let val_of_test_tool = eval.evaluate_unbound_statement(&Script::RunCommand(Expression::Identifier(ScriptToken::identifier("test-tool"))), &env);
assert!(val_of_test_tool == Ok(json![ 42 ]));
}
} | .map(|statements| Script::Sequence(statements));
match script { | random_line_split |
utils.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use rustc::front::map as ast_map;
use rustc::lint::LateContext;
use rustc::middle::def;
use rustc::middle::def_id::DefId;
use rustc_front::hir;
use syntax::ast;
use syntax::attr::mark_used;
use syntax::ptr::P;
/// Matches a type with a provided string, and returns its type parameters if successful
///
/// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes)
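///
/// For example, `match_ty_unwrap(ty, &["core", "option", "Option"])` returns `Some(&[T])` for a
/// type written as `Option<T>`; only the path segments that were actually written are compared.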
pub fn match_ty_unwrap<'a>(ty: &'a ast::Ty, segments: &[&str]) -> Option<&'a [P<ast::Ty>]> {
match ty.node {
ast::TyPath(_, ast::Path { segments: ref seg,.. }) => {
// So hir::Path isn't the full path, just the tokens that were provided.
// I could muck around with the maps and find the full path;
// however, the more efficient way is to simply reverse the iterators and zip them,
// which will compare them in reverse until one of them runs out of segments
if seg.iter().rev().zip(segments.iter().rev()).all(|(a, b)| a.identifier.name.as_str() == *b) {
match seg.last() {
Some(&ast::PathSegment { parameters: ast::AngleBracketedParameters(ref a),.. }) => {
Some(&a.types)
}
_ => None
}
} else {
None
}
},
_ => None
}
}
/// Checks if a type has a #[servo_lang = "str"] attribute
pub fn match_lang_ty(cx: &LateContext, ty: &hir::Ty, value: &str) -> bool {
match ty.node {
hir::TyPath(..) => {},
_ => return false,
}
let def_id = match cx.tcx.def_map.borrow().get(&ty.id) {
Some(&def::PathResolution { base_def: def::DefTy(def_id, _),.. }) => def_id,
_ => return false,
};
match_lang_did(cx, def_id, value)
}
pub fn match_lang_did(cx: &LateContext, did: DefId, value: &str) -> bool {
cx.tcx.get_attrs(did).iter().any(|attr| {
match attr.node.value.node {
ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => {
match val.node {
ast::LitStr(ref v, _) if &**v == value => {
mark_used(attr);
true
},
_ => false,
}
}
_ => false,
}
})
}
// Determines if a block is in an unsafe context so that an unhelpful
// lint can be aborted.
pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool | }
/// check if a DefId's path matches the given absolute type path
/// usage e.g. with
/// `match_def_path(cx, id, &["core", "option", "Option"])`
pub fn match_def_path(cx: &LateContext, def_id: DefId, path: &[&str]) -> bool {
cx.tcx.with_path(def_id, |iter| iter.map(|elem| elem.name())
.zip(path.iter()).all(|(nm, p)| &nm.as_str() == p))
}
| {
match map.find(map.get_parent(id)) {
Some(ast_map::NodeImplItem(itm)) => {
match itm.node {
hir::MethodImplItem(ref sig, _) => sig.unsafety == hir::Unsafety::Unsafe,
_ => false
}
},
Some(ast_map::NodeItem(itm)) => {
match itm.node {
hir::ItemFn(_, style, _, _, _, _) => match style {
hir::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
}
}
_ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need
// to be added.
} | identifier_body |
utils.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ | use rustc::lint::LateContext;
use rustc::middle::def;
use rustc::middle::def_id::DefId;
use rustc_front::hir;
use syntax::ast;
use syntax::attr::mark_used;
use syntax::ptr::P;
/// Matches a type with a provided string, and returns its type parameters if successful
///
/// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes)
pub fn match_ty_unwrap<'a>(ty: &'a ast::Ty, segments: &[&str]) -> Option<&'a [P<ast::Ty>]> {
match ty.node {
ast::TyPath(_, ast::Path { segments: ref seg,.. }) => {
// So hir::Path isn't the full path, just the tokens that were provided.
// I could muck around with the maps and find the full path;
// however, the more efficient way is to simply reverse the iterators and zip them,
// which will compare them in reverse until one of them runs out of segments
if seg.iter().rev().zip(segments.iter().rev()).all(|(a, b)| a.identifier.name.as_str() == *b) {
match seg.last() {
Some(&ast::PathSegment { parameters: ast::AngleBracketedParameters(ref a),.. }) => {
Some(&a.types)
}
_ => None
}
} else {
None
}
},
_ => None
}
}
/// Checks if a type has a #[servo_lang = "str"] attribute
pub fn match_lang_ty(cx: &LateContext, ty: &hir::Ty, value: &str) -> bool {
match ty.node {
hir::TyPath(..) => {},
_ => return false,
}
let def_id = match cx.tcx.def_map.borrow().get(&ty.id) {
Some(&def::PathResolution { base_def: def::DefTy(def_id, _),.. }) => def_id,
_ => return false,
};
match_lang_did(cx, def_id, value)
}
pub fn match_lang_did(cx: &LateContext, did: DefId, value: &str) -> bool {
cx.tcx.get_attrs(did).iter().any(|attr| {
match attr.node.value.node {
ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => {
match val.node {
ast::LitStr(ref v, _) if &**v == value => {
mark_used(attr);
true
},
_ => false,
}
}
_ => false,
}
})
}
// Determines if a block is in an unsafe context so that an unhelpful
// lint can be aborted.
pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool {
match map.find(map.get_parent(id)) {
Some(ast_map::NodeImplItem(itm)) => {
match itm.node {
hir::MethodImplItem(ref sig, _) => sig.unsafety == hir::Unsafety::Unsafe,
_ => false
}
},
Some(ast_map::NodeItem(itm)) => {
match itm.node {
hir::ItemFn(_, style, _, _, _, _) => match style {
hir::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
}
}
_ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need
// to be added.
}
}
/// check if a DefId's path matches the given absolute type path
/// usage e.g. with
/// `match_def_path(cx, id, &["core", "option", "Option"])`
pub fn match_def_path(cx: &LateContext, def_id: DefId, path: &[&str]) -> bool {
cx.tcx.with_path(def_id, |iter| iter.map(|elem| elem.name())
.zip(path.iter()).all(|(nm, p)| &nm.as_str() == p))
} |
use rustc::front::map as ast_map; | random_line_split |
utils.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use rustc::front::map as ast_map;
use rustc::lint::LateContext;
use rustc::middle::def;
use rustc::middle::def_id::DefId;
use rustc_front::hir;
use syntax::ast;
use syntax::attr::mark_used;
use syntax::ptr::P;
/// Matches a type with a provided string, and returns its type parameters if successful
///
/// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes)
pub fn match_ty_unwrap<'a>(ty: &'a ast::Ty, segments: &[&str]) -> Option<&'a [P<ast::Ty>]> {
match ty.node {
ast::TyPath(_, ast::Path { segments: ref seg,.. }) => {
// So hir::Path isn't the full path, just the tokens that were provided.
// I could muck around with the maps and find the full path;
// however, the more efficient way is to simply reverse the iterators and zip them,
// which will compare them in reverse until one of them runs out of segments
if seg.iter().rev().zip(segments.iter().rev()).all(|(a, b)| a.identifier.name.as_str() == *b) {
match seg.last() {
Some(&ast::PathSegment { parameters: ast::AngleBracketedParameters(ref a),.. }) => {
Some(&a.types)
}
_ => None
}
} else {
None
}
},
_ => None
}
}
/// Checks if a type has a #[servo_lang = "str"] attribute
pub fn match_lang_ty(cx: &LateContext, ty: &hir::Ty, value: &str) -> bool {
match ty.node {
hir::TyPath(..) => {},
_ => return false,
}
let def_id = match cx.tcx.def_map.borrow().get(&ty.id) {
Some(&def::PathResolution { base_def: def::DefTy(def_id, _),.. }) => def_id,
_ => return false,
};
match_lang_did(cx, def_id, value)
}
pub fn | (cx: &LateContext, did: DefId, value: &str) -> bool {
cx.tcx.get_attrs(did).iter().any(|attr| {
match attr.node.value.node {
ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => {
match val.node {
ast::LitStr(ref v, _) if &**v == value => {
mark_used(attr);
true
},
_ => false,
}
}
_ => false,
}
})
}
// Determines if a block is in an unsafe context so that an unhelpful
// lint can be aborted.
pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool {
match map.find(map.get_parent(id)) {
Some(ast_map::NodeImplItem(itm)) => {
match itm.node {
hir::MethodImplItem(ref sig, _) => sig.unsafety == hir::Unsafety::Unsafe,
_ => false
}
},
Some(ast_map::NodeItem(itm)) => {
match itm.node {
hir::ItemFn(_, style, _, _, _, _) => match style {
hir::Unsafety::Unsafe => true,
_ => false,
},
_ => false,
}
}
_ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need
// to be added.
}
}
/// check if a DefId's path matches the given absolute type path
/// usage e.g. with
/// `match_def_path(cx, id, &["core", "option", "Option"])`
pub fn match_def_path(cx: &LateContext, def_id: DefId, path: &[&str]) -> bool {
cx.tcx.with_path(def_id, |iter| iter.map(|elem| elem.name())
.zip(path.iter()).all(|(nm, p)| &nm.as_str() == p))
}
| match_lang_did | identifier_name |
imagedata.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::ImageDataBinding;
use crate::dom::bindings::codegen::Bindings::ImageDataBinding::ImageDataMethods;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use euclid::{Rect, Size2D};
use ipc_channel::ipc::IpcSharedMemory;
use js::jsapi::{Heap, JSContext, JSObject};
use js::rust::Runtime;
use js::typedarray::{CreateWith, Uint8ClampedArray};
use std::borrow::Cow;
use std::default::Default;
use std::ptr;
use std::ptr::NonNull;
use std::vec::Vec;
#[dom_struct]
pub struct ImageData {
reflector_: Reflector,
width: u32,
height: u32,
data: Heap<*mut JSObject>,
}
impl ImageData {
#[allow(unsafe_code)]
pub fn new(
global: &GlobalScope,
width: u32,
height: u32,
mut data: Option<Vec<u8>>,
) -> Fallible<DomRoot<ImageData>> {
let len = width * height * 4;
unsafe {
let cx = global.get_cx();
rooted!(in (cx) let mut js_object = ptr::null_mut::<JSObject>());
let data = match data {
Some(ref mut d) => {
d.resize(len as usize, 0);
CreateWith::Slice(&d[..])
},
None => CreateWith::Length(len),
};
Uint8ClampedArray::create(cx, data, js_object.handle_mut()).unwrap();
Self::new_with_jsobject(global, width, Some(height), Some(js_object.get()))
}
}
#[allow(unsafe_code)]
unsafe fn new_with_jsobject(
global: &GlobalScope,
width: u32,
mut opt_height: Option<u32>,
opt_jsobject: Option<*mut JSObject>,
) -> Fallible<DomRoot<ImageData>> {
assert!(opt_jsobject.is_some() || opt_height.is_some());
if width == 0 {
return Err(Error::IndexSize);
}
// checking jsobject type and verifying (height * width * 4 == jsobject.byte_len())
if let Some(jsobject) = opt_jsobject {
let cx = global.get_cx();
typedarray!(in(cx) let array_res: Uint8ClampedArray = jsobject);
let array = array_res.map_err(|_| {
Error::Type("Argument to Image data is not an Uint8ClampedArray".to_owned())
})?;
let byte_len = array.as_slice().len() as u32;
if byte_len % 4 != 0 {
return Err(Error::InvalidState);
}
let len = byte_len / 4;
if width == 0 || len % width != 0 {
return Err(Error::IndexSize);
}
let height = len / width;
if opt_height.map_or(false, |x| height != x) {
return Err(Error::IndexSize);
} else {
opt_height = Some(height);
}
}
let height = opt_height.unwrap();
if height == 0 {
return Err(Error::IndexSize); | width: width,
height: height,
data: Heap::default(),
});
if let Some(jsobject) = opt_jsobject {
(*imagedata).data.set(jsobject);
} else {
let len = width * height * 4;
let cx = global.get_cx();
rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());
Uint8ClampedArray::create(cx, CreateWith::Length(len), array.handle_mut()).unwrap();
(*imagedata).data.set(array.get());
}
Ok(reflect_dom_object(
imagedata,
global,
ImageDataBinding::Wrap,
))
}
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-3
#[allow(unsafe_code)]
pub fn Constructor(global: &GlobalScope, width: u32, height: u32) -> Fallible<DomRoot<Self>> {
unsafe { Self::new_with_jsobject(global, width, Some(height), None) }
}
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-4
#[allow(unsafe_code)]
#[allow(unused_variables)]
pub unsafe fn Constructor_(
cx: *mut JSContext,
global: &GlobalScope,
jsobject: *mut JSObject,
width: u32,
opt_height: Option<u32>,
) -> Fallible<DomRoot<Self>> {
Self::new_with_jsobject(global, width, opt_height, Some(jsobject))
}
/// Nothing must change the array on the JS side while the slice is live.
#[allow(unsafe_code)]
pub unsafe fn as_slice(&self) -> &[u8] {
assert!(!self.data.get().is_null());
let cx = Runtime::get();
assert!(!cx.is_null());
typedarray!(in(cx) let array: Uint8ClampedArray = self.data.get());
let array = array.as_ref().unwrap();
// NOTE(nox): This is just as unsafe as `as_slice` itself even though we
// are extending the lifetime of the slice, because the data in
// this ImageData instance will never change. The method is thus unsafe
// because the array may be manipulated from JS while the reference
// is live.
let ptr = array.as_slice() as *const _;
&*ptr
}
#[allow(unsafe_code)]
pub fn to_shared_memory(&self) -> IpcSharedMemory {
IpcSharedMemory::from_bytes(unsafe { self.as_slice() })
}
#[allow(unsafe_code)]
pub unsafe fn get_rect(&self, rect: Rect<u32>) -> Cow<[u8]> {
pixels::rgba8_get_rect(self.as_slice(), self.get_size(), rect)
}
pub fn get_size(&self) -> Size2D<u32> {
Size2D::new(self.Width(), self.Height())
}
}
impl ImageDataMethods for ImageData {
// https://html.spec.whatwg.org/multipage/#dom-imagedata-width
fn Width(&self) -> u32 {
self.width
}
// https://html.spec.whatwg.org/multipage/#dom-imagedata-height
fn Height(&self) -> u32 {
self.height
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-imagedata-data
unsafe fn Data(&self, _: *mut JSContext) -> NonNull<JSObject> {
NonNull::new(self.data.get()).expect("got a null pointer")
}
} | }
let imagedata = Box::new(ImageData {
reflector_: Reflector::new(), | random_line_split |
imagedata.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::ImageDataBinding;
use crate::dom::bindings::codegen::Bindings::ImageDataBinding::ImageDataMethods;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use euclid::{Rect, Size2D};
use ipc_channel::ipc::IpcSharedMemory;
use js::jsapi::{Heap, JSContext, JSObject};
use js::rust::Runtime;
use js::typedarray::{CreateWith, Uint8ClampedArray};
use std::borrow::Cow;
use std::default::Default;
use std::ptr;
use std::ptr::NonNull;
use std::vec::Vec;
#[dom_struct]
pub struct ImageData {
reflector_: Reflector,
width: u32,
height: u32,
data: Heap<*mut JSObject>,
}
impl ImageData {
#[allow(unsafe_code)]
pub fn new(
global: &GlobalScope,
width: u32,
height: u32,
mut data: Option<Vec<u8>>,
) -> Fallible<DomRoot<ImageData>> {
let len = width * height * 4;
unsafe {
let cx = global.get_cx();
rooted!(in (cx) let mut js_object = ptr::null_mut::<JSObject>());
let data = match data {
Some(ref mut d) => {
d.resize(len as usize, 0);
CreateWith::Slice(&d[..])
},
None => CreateWith::Length(len),
};
Uint8ClampedArray::create(cx, data, js_object.handle_mut()).unwrap();
Self::new_with_jsobject(global, width, Some(height), Some(js_object.get()))
}
}
#[allow(unsafe_code)]
unsafe fn new_with_jsobject(
global: &GlobalScope,
width: u32,
mut opt_height: Option<u32>,
opt_jsobject: Option<*mut JSObject>,
) -> Fallible<DomRoot<ImageData>> {
assert!(opt_jsobject.is_some() || opt_height.is_some());
if width == 0 {
return Err(Error::IndexSize);
}
// checking jsobject type and verifying (height * width * 4 == jsobject.byte_len())
if let Some(jsobject) = opt_jsobject {
let cx = global.get_cx();
typedarray!(in(cx) let array_res: Uint8ClampedArray = jsobject);
let array = array_res.map_err(|_| {
Error::Type("Argument to Image data is not an Uint8ClampedArray".to_owned())
})?;
let byte_len = array.as_slice().len() as u32;
if byte_len % 4 != 0 {
return Err(Error::InvalidState);
}
let len = byte_len / 4;
if width == 0 || len % width != 0 {
return Err(Error::IndexSize);
}
let height = len / width;
if opt_height.map_or(false, |x| height != x) {
return Err(Error::IndexSize);
} else |
}
let height = opt_height.unwrap();
if height == 0 {
return Err(Error::IndexSize);
}
let imagedata = Box::new(ImageData {
reflector_: Reflector::new(),
width: width,
height: height,
data: Heap::default(),
});
if let Some(jsobject) = opt_jsobject {
(*imagedata).data.set(jsobject);
} else {
let len = width * height * 4;
let cx = global.get_cx();
rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());
Uint8ClampedArray::create(cx, CreateWith::Length(len), array.handle_mut()).unwrap();
(*imagedata).data.set(array.get());
}
Ok(reflect_dom_object(
imagedata,
global,
ImageDataBinding::Wrap,
))
}
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-3
#[allow(unsafe_code)]
pub fn Constructor(global: &GlobalScope, width: u32, height: u32) -> Fallible<DomRoot<Self>> {
unsafe { Self::new_with_jsobject(global, width, Some(height), None) }
}
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-4
#[allow(unsafe_code)]
#[allow(unused_variables)]
pub unsafe fn Constructor_(
cx: *mut JSContext,
global: &GlobalScope,
jsobject: *mut JSObject,
width: u32,
opt_height: Option<u32>,
) -> Fallible<DomRoot<Self>> {
Self::new_with_jsobject(global, width, opt_height, Some(jsobject))
}
/// Nothing must change the array on the JS side while the slice is live.
#[allow(unsafe_code)]
pub unsafe fn as_slice(&self) -> &[u8] {
assert!(!self.data.get().is_null());
let cx = Runtime::get();
assert!(!cx.is_null());
typedarray!(in(cx) let array: Uint8ClampedArray = self.data.get());
let array = array.as_ref().unwrap();
// NOTE(nox): This is just as unsafe as `as_slice` itself even though we
// are extending the lifetime of the slice, because the data in
// this ImageData instance will never change. The method is thus unsafe
// because the array may be manipulated from JS while the reference
// is live.
let ptr = array.as_slice() as *const _;
&*ptr
}
#[allow(unsafe_code)]
pub fn to_shared_memory(&self) -> IpcSharedMemory {
IpcSharedMemory::from_bytes(unsafe { self.as_slice() })
}
#[allow(unsafe_code)]
pub unsafe fn get_rect(&self, rect: Rect<u32>) -> Cow<[u8]> {
pixels::rgba8_get_rect(self.as_slice(), self.get_size(), rect)
}
pub fn get_size(&self) -> Size2D<u32> {
Size2D::new(self.Width(), self.Height())
}
}
impl ImageDataMethods for ImageData {
// https://html.spec.whatwg.org/multipage/#dom-imagedata-width
fn Width(&self) -> u32 {
self.width
}
// https://html.spec.whatwg.org/multipage/#dom-imagedata-height
fn Height(&self) -> u32 {
self.height
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-imagedata-data
unsafe fn Data(&self, _: *mut JSContext) -> NonNull<JSObject> {
NonNull::new(self.data.get()).expect("got a null pointer")
}
}
| {
opt_height = Some(height);
} | conditional_block |
imagedata.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::ImageDataBinding;
use crate::dom::bindings::codegen::Bindings::ImageDataBinding::ImageDataMethods;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use euclid::{Rect, Size2D};
use ipc_channel::ipc::IpcSharedMemory;
use js::jsapi::{Heap, JSContext, JSObject};
use js::rust::Runtime;
use js::typedarray::{CreateWith, Uint8ClampedArray};
use std::borrow::Cow;
use std::default::Default;
use std::ptr;
use std::ptr::NonNull;
use std::vec::Vec;
#[dom_struct]
pub struct ImageData {
reflector_: Reflector,
width: u32,
height: u32,
data: Heap<*mut JSObject>,
}
impl ImageData {
#[allow(unsafe_code)]
pub fn new(
global: &GlobalScope,
width: u32,
height: u32,
mut data: Option<Vec<u8>>,
) -> Fallible<DomRoot<ImageData>> {
let len = width * height * 4;
unsafe {
let cx = global.get_cx();
rooted!(in (cx) let mut js_object = ptr::null_mut::<JSObject>());
let data = match data {
Some(ref mut d) => {
d.resize(len as usize, 0);
CreateWith::Slice(&d[..])
},
None => CreateWith::Length(len),
};
Uint8ClampedArray::create(cx, data, js_object.handle_mut()).unwrap();
Self::new_with_jsobject(global, width, Some(height), Some(js_object.get()))
}
}
#[allow(unsafe_code)]
unsafe fn new_with_jsobject(
global: &GlobalScope,
width: u32,
mut opt_height: Option<u32>,
opt_jsobject: Option<*mut JSObject>,
) -> Fallible<DomRoot<ImageData>> {
assert!(opt_jsobject.is_some() || opt_height.is_some());
if width == 0 {
return Err(Error::IndexSize);
}
// checking jsobject type and verifying (height * width * 4 == jsobject.byte_len())
if let Some(jsobject) = opt_jsobject {
let cx = global.get_cx();
typedarray!(in(cx) let array_res: Uint8ClampedArray = jsobject);
let array = array_res.map_err(|_| {
Error::Type("Argument to Image data is not an Uint8ClampedArray".to_owned())
})?;
let byte_len = array.as_slice().len() as u32;
if byte_len % 4 != 0 {
return Err(Error::InvalidState);
}
let len = byte_len / 4;
if width == 0 || len % width != 0 {
return Err(Error::IndexSize);
}
let height = len / width;
if opt_height.map_or(false, |x| height != x) {
return Err(Error::IndexSize);
} else {
opt_height = Some(height);
}
}
let height = opt_height.unwrap();
if height == 0 {
return Err(Error::IndexSize);
}
let imagedata = Box::new(ImageData {
reflector_: Reflector::new(),
width: width,
height: height,
data: Heap::default(),
});
if let Some(jsobject) = opt_jsobject {
(*imagedata).data.set(jsobject);
} else {
let len = width * height * 4;
let cx = global.get_cx();
rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());
Uint8ClampedArray::create(cx, CreateWith::Length(len), array.handle_mut()).unwrap();
(*imagedata).data.set(array.get());
}
Ok(reflect_dom_object(
imagedata,
global,
ImageDataBinding::Wrap,
))
}
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-3
#[allow(unsafe_code)]
pub fn Constructor(global: &GlobalScope, width: u32, height: u32) -> Fallible<DomRoot<Self>> {
unsafe { Self::new_with_jsobject(global, width, Some(height), None) }
}
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-4
#[allow(unsafe_code)]
#[allow(unused_variables)]
pub unsafe fn Constructor_(
cx: *mut JSContext,
global: &GlobalScope,
jsobject: *mut JSObject,
width: u32,
opt_height: Option<u32>,
) -> Fallible<DomRoot<Self>> {
Self::new_with_jsobject(global, width, opt_height, Some(jsobject))
}
/// Nothing must change the array on the JS side while the slice is live.
#[allow(unsafe_code)]
pub unsafe fn as_slice(&self) -> &[u8] {
assert!(!self.data.get().is_null());
let cx = Runtime::get();
assert!(!cx.is_null());
typedarray!(in(cx) let array: Uint8ClampedArray = self.data.get());
let array = array.as_ref().unwrap();
// NOTE(nox): This is just as unsafe as `as_slice` itself even though we
// are extending the lifetime of the slice, because the data in
// this ImageData instance will never change. The method is thus unsafe
// because the array may be manipulated from JS while the reference
// is live.
let ptr = array.as_slice() as *const _;
&*ptr
}
#[allow(unsafe_code)]
pub fn to_shared_memory(&self) -> IpcSharedMemory {
IpcSharedMemory::from_bytes(unsafe { self.as_slice() })
}
#[allow(unsafe_code)]
pub unsafe fn get_rect(&self, rect: Rect<u32>) -> Cow<[u8]> {
pixels::rgba8_get_rect(self.as_slice(), self.get_size(), rect)
}
pub fn get_size(&self) -> Size2D<u32> {
Size2D::new(self.Width(), self.Height())
}
}
impl ImageDataMethods for ImageData {
// https://html.spec.whatwg.org/multipage/#dom-imagedata-width
fn Width(&self) -> u32 |
// https://html.spec.whatwg.org/multipage/#dom-imagedata-height
fn Height(&self) -> u32 {
self.height
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-imagedata-data
unsafe fn Data(&self, _: *mut JSContext) -> NonNull<JSObject> {
NonNull::new(self.data.get()).expect("got a null pointer")
}
}
| {
self.width
} | identifier_body |
imagedata.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::ImageDataBinding;
use crate::dom::bindings::codegen::Bindings::ImageDataBinding::ImageDataMethods;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use euclid::{Rect, Size2D};
use ipc_channel::ipc::IpcSharedMemory;
use js::jsapi::{Heap, JSContext, JSObject};
use js::rust::Runtime;
use js::typedarray::{CreateWith, Uint8ClampedArray};
use std::borrow::Cow;
use std::default::Default;
use std::ptr;
use std::ptr::NonNull;
use std::vec::Vec;
#[dom_struct]
pub struct ImageData {
reflector_: Reflector,
width: u32,
height: u32,
data: Heap<*mut JSObject>,
}
impl ImageData {
#[allow(unsafe_code)]
pub fn new(
global: &GlobalScope,
width: u32,
height: u32,
mut data: Option<Vec<u8>>,
) -> Fallible<DomRoot<ImageData>> {
let len = width * height * 4;
unsafe {
let cx = global.get_cx();
rooted!(in (cx) let mut js_object = ptr::null_mut::<JSObject>());
let data = match data {
Some(ref mut d) => {
d.resize(len as usize, 0);
CreateWith::Slice(&d[..])
},
None => CreateWith::Length(len),
};
Uint8ClampedArray::create(cx, data, js_object.handle_mut()).unwrap();
Self::new_with_jsobject(global, width, Some(height), Some(js_object.get()))
}
}
#[allow(unsafe_code)]
unsafe fn new_with_jsobject(
global: &GlobalScope,
width: u32,
mut opt_height: Option<u32>,
opt_jsobject: Option<*mut JSObject>,
) -> Fallible<DomRoot<ImageData>> {
assert!(opt_jsobject.is_some() || opt_height.is_some());
if width == 0 {
return Err(Error::IndexSize);
}
// checking jsobject type and verifying (height * width * 4 == jsobject.byte_len())
if let Some(jsobject) = opt_jsobject {
let cx = global.get_cx();
typedarray!(in(cx) let array_res: Uint8ClampedArray = jsobject);
let array = array_res.map_err(|_| {
Error::Type("Argument to Image data is not an Uint8ClampedArray".to_owned())
})?;
let byte_len = array.as_slice().len() as u32;
if byte_len % 4 != 0 {
return Err(Error::InvalidState);
}
let len = byte_len / 4;
if width == 0 || len % width != 0 {
return Err(Error::IndexSize);
}
let height = len / width;
if opt_height.map_or(false, |x| height != x) {
return Err(Error::IndexSize);
} else {
opt_height = Some(height);
}
}
let height = opt_height.unwrap();
if height == 0 {
return Err(Error::IndexSize);
}
let imagedata = Box::new(ImageData {
reflector_: Reflector::new(),
width: width,
height: height,
data: Heap::default(),
});
if let Some(jsobject) = opt_jsobject {
(*imagedata).data.set(jsobject);
} else {
let len = width * height * 4;
let cx = global.get_cx();
rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());
Uint8ClampedArray::create(cx, CreateWith::Length(len), array.handle_mut()).unwrap();
(*imagedata).data.set(array.get());
}
Ok(reflect_dom_object(
imagedata,
global,
ImageDataBinding::Wrap,
))
}
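// Worked example of the length check above (an illustration, not normative):
// a 10x4 RGBA image occupies 10 * 4 * 4 = 160 bytes. Given a 160-byte
// Uint8ClampedArray and width = 10, len = 160 / 4 = 40 pixels, 40 % 10 == 0,
// and the derived height is 40 / 10 = 4, which must agree with any explicitly
// supplied height or the constructor fails with IndexSize.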
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-3
#[allow(unsafe_code)]
pub fn | (global: &GlobalScope, width: u32, height: u32) -> Fallible<DomRoot<Self>> {
unsafe { Self::new_with_jsobject(global, width, Some(height), None) }
}
// https://html.spec.whatwg.org/multipage/#pixel-manipulation:dom-imagedata-4
#[allow(unsafe_code)]
#[allow(unused_variables)]
pub unsafe fn Constructor_(
cx: *mut JSContext,
global: &GlobalScope,
jsobject: *mut JSObject,
width: u32,
opt_height: Option<u32>,
) -> Fallible<DomRoot<Self>> {
Self::new_with_jsobject(global, width, opt_height, Some(jsobject))
}
/// Nothing must change the array on the JS side while the slice is live.
#[allow(unsafe_code)]
pub unsafe fn as_slice(&self) -> &[u8] {
assert!(!self.data.get().is_null());
let cx = Runtime::get();
assert!(!cx.is_null());
typedarray!(in(cx) let array: Uint8ClampedArray = self.data.get());
let array = array.as_ref().unwrap();
// NOTE(nox): This is just as unsafe as `as_slice` itself even though we
// are extending the lifetime of the slice, because the data in
// this ImageData instance will never change. The method is thus unsafe
// because the array may be manipulated from JS while the reference
// is live.
let ptr = array.as_slice() as *const _;
&*ptr
}
#[allow(unsafe_code)]
pub fn to_shared_memory(&self) -> IpcSharedMemory {
IpcSharedMemory::from_bytes(unsafe { self.as_slice() })
}
#[allow(unsafe_code)]
pub unsafe fn get_rect(&self, rect: Rect<u32>) -> Cow<[u8]> {
pixels::rgba8_get_rect(self.as_slice(), self.get_size(), rect)
}
pub fn get_size(&self) -> Size2D<u32> {
Size2D::new(self.Width(), self.Height())
}
}
impl ImageDataMethods for ImageData {
// https://html.spec.whatwg.org/multipage/#dom-imagedata-width
fn Width(&self) -> u32 {
self.width
}
// https://html.spec.whatwg.org/multipage/#dom-imagedata-height
fn Height(&self) -> u32 {
self.height
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-imagedata-data
unsafe fn Data(&self, _: *mut JSContext) -> NonNull<JSObject> {
NonNull::new(self.data.get()).expect("got a null pointer")
}
}
| Constructor | identifier_name |
bench.rs | use std::io::process::ExitStatus;
use cargo::ops;
use cargo::core::MultiShell;
use cargo::util::{CliResult, CliError, CargoError};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
use docopt;
docopt!(Options, "
Execute all benchmarks of a local package
Usage:
cargo bench [options] [--] [<args>...]
Options:
-h, --help Print this message
--no-run Compile, but don't run benchmarks
-j N, --jobs N The number of jobs to run in parallel
--target TRIPLE Build for the target triple
--manifest-path PATH Path to the manifest to build benchmarks for
-v, --verbose Use verbose output
All of the trailing arguments are passed to the benchmark binaries generated
for filtering benchmarks and generally providing options configuring how they
run.
", flag_jobs: Option<uint>, flag_target: Option<String>,
flag_manifest_path: Option<String>)
pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> | match err {
None => Ok(None),
Some(err) => {
Err(match err.exit {
Some(ExitStatus(i)) => CliError::new("", i as uint),
_ => CliError::from_boxed(err.mark_human(), 101)
})
}
}
}
| {
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
shell.set_verbose(options.flag_verbose);
let mut ops = ops::TestOptions {
no_run: options.flag_no_run,
compile_opts: ops::CompileOptions {
update: false,
env: "bench",
shell: shell,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| s.as_slice()),
dev_deps: true,
},
};
let err = try!(ops::run_benches(&root, &mut ops,
options.arg_args.as_slice()).map_err(|err| {
CliError::from_boxed(err, 101)
})); | identifier_body |
bench.rs | use std::io::process::ExitStatus;
use cargo::ops;
use cargo::core::MultiShell;
use cargo::util::{CliResult, CliError, CargoError};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
use docopt;
docopt!(Options, "
Execute all benchmarks of a local package
Usage:
cargo bench [options] [--] [<args>...]
Options:
-h, --help Print this message
--no-run Compile, but don't run benchmarks
-j N, --jobs N The number of jobs to run in parallel
--target TRIPLE Build for the target triple
--manifest-path PATH Path to the manifest to build benchmarks for
-v, --verbose Use verbose output
All of the trailing arguments are passed to the benchmark binaries generated
for filtering benchmarks and generally providing options configuring how they
run.
", flag_jobs: Option<uint>, flag_target: Option<String>,
flag_manifest_path: Option<String>)
pub fn | (options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
shell.set_verbose(options.flag_verbose);
let mut ops = ops::TestOptions {
no_run: options.flag_no_run,
compile_opts: ops::CompileOptions {
update: false,
env: "bench",
shell: shell,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| s.as_slice()),
dev_deps: true,
},
};
let err = try!(ops::run_benches(&root, &mut ops,
options.arg_args.as_slice()).map_err(|err| {
CliError::from_boxed(err, 101)
}));
match err {
None => Ok(None),
Some(err) => {
Err(match err.exit {
Some(ExitStatus(i)) => CliError::new("", i as uint),
_ => CliError::from_boxed(err.mark_human(), 101)
})
}
}
}
| execute | identifier_name |
bench.rs | use std::io::process::ExitStatus;
use cargo::ops;
use cargo::core::MultiShell;
use cargo::util::{CliResult, CliError, CargoError};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
use docopt;
docopt!(Options, "
Execute all benchmarks of a local package
Usage:
cargo bench [options] [--] [<args>...]
Options:
-h, --help Print this message
--no-run Compile, but don't run benchmarks
-j N, --jobs N The number of jobs to run in parallel
--target TRIPLE Build for the target triple
--manifest-path PATH Path to the manifest to build benchmarks for
-v, --verbose Use verbose output
All of the trailing arguments are passed to the benchmark binaries generated
for filtering benchmarks and generally providing options configuring how they
run.
", flag_jobs: Option<uint>, flag_target: Option<String>,
flag_manifest_path: Option<String>)
pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
shell.set_verbose(options.flag_verbose);
let mut ops = ops::TestOptions {
no_run: options.flag_no_run,
compile_opts: ops::CompileOptions {
update: false,
env: "bench",
shell: shell,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| s.as_slice()),
dev_deps: true,
},
};
let err = try!(ops::run_benches(&root, &mut ops,
options.arg_args.as_slice()).map_err(|err| {
CliError::from_boxed(err, 101)
}));
match err {
None => Ok(None),
Some(err) => { | Err(match err.exit {
Some(ExitStatus(i)) => CliError::new("", i as uint),
_ => CliError::from_boxed(err.mark_human(), 101)
})
}
}
} | random_line_split |
|
match_single_binding.rs | // run-rustfix
#![warn(clippy::match_single_binding)]
#![allow(unused_variables, clippy::many_single_char_names, clippy::toplevel_ref_arg)]
struct Point {
x: i32,
y: i32,
}
fn coords() -> Point {
Point { x: 1, y: 2 }
}
macro_rules! foo {
($param:expr) => {
match $param {
_ => println!("whatever"),
}
};
}
fn | () {
let a = 1;
let b = 2;
let c = 3;
// Lint
match (a, b, c) {
(x, y, z) => {
println!("{} {} {}", x, y, z);
},
}
// Lint
match (a, b, c) {
(x, y, z) => println!("{} {} {}", x, y, z),
}
// Ok
foo!(a);
// Ok
match a {
2 => println!("2"),
_ => println!("Not 2"),
}
// Ok
let d = Some(5);
match d {
Some(d) => println!("{}", d),
_ => println!("None"),
}
// Lint
match a {
_ => println!("whatever"),
}
// Lint
match a {
_ => {
let x = 29;
println!("x has a value of {}", x);
},
}
// Lint
match a {
_ => {
let e = 5 * a;
if e >= 5 {
println!("e is superior to 5");
}
},
}
// Lint
let p = Point { x: 0, y: 7 };
match p {
Point { x, y } => println!("Coords: ({}, {})", x, y),
}
// Lint
match p {
Point { x: x1, y: y1 } => println!("Coords: ({}, {})", x1, y1),
}
// Lint
let x = 5;
match x {
ref r => println!("Got a reference to {}", r),
}
// Lint
let mut x = 5;
match x {
ref mut mr => println!("Got a mutable reference to {}", mr),
}
// Lint
let product = match coords() {
Point { x, y } => x * y,
};
// Lint
let v = vec![Some(1), Some(2), Some(3), Some(4)];
#[allow(clippy::let_and_return)]
let _ = v
.iter()
.map(|i| match i.unwrap() {
unwrapped => unwrapped,
})
.collect::<Vec<u8>>();
// Ok
let x = 1;
match x {
#[cfg(disabled_feature)]
0 => println!("Disabled branch"),
_ => println!("Enabled branch"),
}
// Ok
let x = 1;
let y = 1;
match match y {
0 => 1,
_ => 2,
} {
#[cfg(disabled_feature)]
0 => println!("Array index start"),
_ => println!("Not an array index start"),
}
// False negative
let x = 1;
match x {
// =>
_ => println!("Not an array index start"),
}
}
| main | identifier_name |
match_single_binding.rs | // run-rustfix
#![warn(clippy::match_single_binding)]
#![allow(unused_variables, clippy::many_single_char_names, clippy::toplevel_ref_arg)]
struct Point {
x: i32,
y: i32,
}
fn coords() -> Point {
Point { x: 1, y: 2 }
}
macro_rules! foo {
($param:expr) => {
match $param {
_ => println!("whatever"),
}
};
}
fn main() | }
// Ok
let d = Some(5);
match d {
Some(d) => println!("{}", d),
_ => println!("None"),
}
// Lint
match a {
_ => println!("whatever"),
}
// Lint
match a {
_ => {
let x = 29;
println!("x has a value of {}", x);
},
}
// Lint
match a {
_ => {
let e = 5 * a;
if e >= 5 {
println!("e is superior to 5");
}
},
}
// Lint
let p = Point { x: 0, y: 7 };
match p {
Point { x, y } => println!("Coords: ({}, {})", x, y),
}
// Lint
match p {
Point { x: x1, y: y1 } => println!("Coords: ({}, {})", x1, y1),
}
// Lint
let x = 5;
match x {
ref r => println!("Got a reference to {}", r),
}
// Lint
let mut x = 5;
match x {
ref mut mr => println!("Got a mutable reference to {}", mr),
}
// Lint
let product = match coords() {
Point { x, y } => x * y,
};
// Lint
let v = vec![Some(1), Some(2), Some(3), Some(4)];
#[allow(clippy::let_and_return)]
let _ = v
.iter()
.map(|i| match i.unwrap() {
unwrapped => unwrapped,
})
.collect::<Vec<u8>>();
// Ok
let x = 1;
match x {
#[cfg(disabled_feature)]
0 => println!("Disabled branch"),
_ => println!("Enabled branch"),
}
// Ok
let x = 1;
let y = 1;
match match y {
0 => 1,
_ => 2,
} {
#[cfg(disabled_feature)]
0 => println!("Array index start"),
_ => println!("Not an array index start"),
}
// False negative
let x = 1;
match x {
// =>
_ => println!("Not an array index start"),
}
}
| {
let a = 1;
let b = 2;
let c = 3;
// Lint
match (a, b, c) {
(x, y, z) => {
println!("{} {} {}", x, y, z);
},
}
// Lint
match (a, b, c) {
(x, y, z) => println!("{} {} {}", x, y, z),
}
// Ok
foo!(a);
// Ok
match a {
2 => println!("2"),
_ => println!("Not 2"), | identifier_body |
match_single_binding.rs | // run-rustfix
#![warn(clippy::match_single_binding)]
#![allow(unused_variables, clippy::many_single_char_names, clippy::toplevel_ref_arg)]
struct Point {
x: i32,
y: i32,
}
fn coords() -> Point {
Point { x: 1, y: 2 } | match $param {
_ => println!("whatever"),
}
};
}
fn main() {
let a = 1;
let b = 2;
let c = 3;
// Lint
match (a, b, c) {
(x, y, z) => {
println!("{} {} {}", x, y, z);
},
}
// Lint
match (a, b, c) {
(x, y, z) => println!("{} {} {}", x, y, z),
}
// Ok
foo!(a);
// Ok
match a {
2 => println!("2"),
_ => println!("Not 2"),
}
// Ok
let d = Some(5);
match d {
Some(d) => println!("{}", d),
_ => println!("None"),
}
// Lint
match a {
_ => println!("whatever"),
}
// Lint
match a {
_ => {
let x = 29;
println!("x has a value of {}", x);
},
}
// Lint
match a {
_ => {
let e = 5 * a;
if e >= 5 {
println!("e is superior to 5");
}
},
}
// Lint
let p = Point { x: 0, y: 7 };
match p {
Point { x, y } => println!("Coords: ({}, {})", x, y),
}
// Lint
match p {
Point { x: x1, y: y1 } => println!("Coords: ({}, {})", x1, y1),
}
// Lint
let x = 5;
match x {
ref r => println!("Got a reference to {}", r),
}
// Lint
let mut x = 5;
match x {
ref mut mr => println!("Got a mutable reference to {}", mr),
}
// Lint
let product = match coords() {
Point { x, y } => x * y,
};
// Lint
let v = vec![Some(1), Some(2), Some(3), Some(4)];
#[allow(clippy::let_and_return)]
let _ = v
.iter()
.map(|i| match i.unwrap() {
unwrapped => unwrapped,
})
.collect::<Vec<u8>>();
// Ok
let x = 1;
match x {
#[cfg(disabled_feature)]
0 => println!("Disabled branch"),
_ => println!("Enabled branch"),
}
// Ok
let x = 1;
let y = 1;
match match y {
0 => 1,
_ => 2,
} {
#[cfg(disabled_feature)]
0 => println!("Array index start"),
_ => println!("Not an array index start"),
}
// False negative
let x = 1;
match x {
// =>
_ => println!("Not an array index start"),
}
} | }
macro_rules! foo {
($param:expr) => { | random_line_split |
builtin-superkinds-double-superkind.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test for traits that inherit from multiple builtin kinds at once,
// testing that all such kinds must be present on implementing types. | trait Foo : Send+Share { }
impl <T: Share> Foo for (T,) { } //~ ERROR cannot implement this trait
impl <T: Send> Foo for (T,T) { } //~ ERROR cannot implement this trait
impl <T: Send+Share> Foo for (T,T,T) { } // (ok)
fn main() { } | random_line_split |
|
builtin-superkinds-double-superkind.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test for traits that inherit from multiple builtin kinds at once,
// testing that all such kinds must be present on implementing types.
trait Foo : Send+Share { }
impl <T: Share> Foo for (T,) { } //~ ERROR cannot implement this trait
impl <T: Send> Foo for (T,T) { } //~ ERROR cannot implement this trait
impl <T: Send+Share> Foo for (T,T,T) { } // (ok)
fn main() | { } | identifier_body |
|
builtin-superkinds-double-superkind.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test for traits that inherit from multiple builtin kinds at once,
// testing that all such kinds must be present on implementing types.
trait Foo : Send+Share { }
impl <T: Share> Foo for (T,) { } //~ ERROR cannot implement this trait
impl <T: Send> Foo for (T,T) { } //~ ERROR cannot implement this trait
impl <T: Send+Share> Foo for (T,T,T) { } // (ok)
fn | () { }
| main | identifier_name |
lib.rs | //! Rust library for Pico encoding.
//!
//! This is a library implementing the Pico file encoding used for storing
//! malware. See http://mons-pico.github.io/ for details on this.
extern crate md5;
extern crate rand;
#[warn(missing_docs)]
pub mod constants;
pub mod errors;
mod pico;
pub mod file;
mod crypt;
mod intbytes;
mod header;
pub use pico::Pico;
pub use header::HeaderFormat;
use constants::{MAGIC, MINOR, MAJOR};
pub use pico::gen_random_key;
/// Obtain the Pico magic number. The "magic number" is used at the start of a
/// file to indicate that it is a Pico-encoded file.
///
/// ```
/// println!("The magic number is {:#04X}", pico::magic());
/// ```
pub fn magic() -> u16 |
/// Obtain the major version number for the encoding implemented by this
/// library. See also `minor`.
pub fn major() -> u16 { MAJOR }
/// Obtain the minor version number for the encoding implemented by this
/// library. See also `major`.
pub fn minor() -> u16 { MINOR }
#[test]
fn check_version() {
assert_eq!(major(), MAJOR);
assert_eq!(minor(), MINOR);
}
#[test]
fn check_magic() {
assert_eq!(magic(), MAGIC);
}
| { MAGIC } | identifier_body |
lib.rs | //! Rust library for Pico encoding.
//!
//! This is a library implementing the Pico file encoding used for storing
//! malware. See http://mons-pico.github.io/ for details on this.
extern crate md5;
extern crate rand;
#[warn(missing_docs)]
pub mod constants;
pub mod errors;
mod pico;
pub mod file;
mod crypt;
mod intbytes;
mod header;
pub use pico::Pico;
pub use header::HeaderFormat;
use constants::{MAGIC, MINOR, MAJOR};
pub use pico::gen_random_key;
/// Obtain the Pico magic number. The "magic number" is used at the start of a
/// file to indicate that it is a Pico-encoded file.
///
/// ```
/// println!("The magic number is {:#04X}", pico::magic()); | pub fn magic() -> u16 { MAGIC }
/// Obtain the major version number for the encoding implemented by this
/// library. See also `minor`.
pub fn major() -> u16 { MAJOR }
/// Obtain the minor version number for the encoding implemented by this
/// library. See also `major`.
pub fn minor() -> u16 { MINOR }
#[test]
fn check_version() {
assert_eq!(major(), MAJOR);
assert_eq!(minor(), MINOR);
}
#[test]
fn check_magic() {
assert_eq!(magic(), MAGIC);
} | /// ``` | random_line_split |
lib.rs | //! Rust library for Pico encoding.
//!
//! This is a library implementing the Pico file encoding used for storing
//! malware. See http://mons-pico.github.io/ for details on this.
extern crate md5;
extern crate rand;
#[warn(missing_docs)]
pub mod constants;
pub mod errors;
mod pico;
pub mod file;
mod crypt;
mod intbytes;
mod header;
pub use pico::Pico;
pub use header::HeaderFormat;
use constants::{MAGIC, MINOR, MAJOR};
pub use pico::gen_random_key;
/// Obtain the Pico magic number. The "magic number" is used at the start of a
/// file to indicate that it is a Pico-encoded file.
///
/// ```
/// println!("The magic number is {:#04X}", pico::magic());
/// ```
pub fn magic() -> u16 { MAGIC }
/// Obtain the major version number for the encoding implemented by this
/// library. See also `minor`.
pub fn major() -> u16 { MAJOR }
/// Obtain the minor version number for the encoding implemented by this
/// library. See also `major`.
pub fn minor() -> u16 { MINOR }
#[test]
fn check_version() {
assert_eq!(major(), MAJOR);
assert_eq!(minor(), MINOR);
}
#[test]
fn | () {
assert_eq!(magic(), MAGIC);
}
| check_magic | identifier_name |
timer.rs | use cpu;
use mem;
#[derive(Default)]
pub struct Timer {
div_counter: u8,
timer_counter: u16,
} | pub fn on_clock(&mut self, mem: &mut mem::Mem) {
self.update_div(mem);
self.update_timer(mem);
}
fn update_div(&mut self, mem: &mut mem::Mem) {
self.div_counter = self.div_counter.overflowing_add(1).0;
if self.div_counter == 0 {
let div = mem.read_reg(mem::RegAddr::DIV);
let (new_div, _) = div.overflowing_add(1);
mem.write_reg(mem::RegAddr::DIV, new_div);
}
}
fn update_timer(&mut self, mem: &mut mem::Mem) {
self.timer_counter = self.timer_counter.overflowing_add(1).0;
let tac = mem.read_reg(mem::RegAddr::TAC);
if tac & 0x4 == 0 {
// Check if enabled
return;
}
let clock_sel = tac & 0x3;
let div = match clock_sel {
0x00 => 1024,
0x01 => 16,
0x02 => 64,
0x03 => 256,
_ => panic!("Impossible"),
};
if self.timer_counter % div!= 0 {
return;
}
let tima = mem.read_reg(mem::RegAddr::TIMA);
let new_tima = if tima == 0xff {
mem.set_interrupt_flag(cpu::Interrupt::Timer, true);
mem.read_reg(mem::RegAddr::TMA)
} else {
tima + 1
};
mem.write_reg(mem::RegAddr::TIMA, new_tima);
// TODO: Handle double speed mode. (divide div by 2)
}
} |
impl Timer { | random_line_split |
timer.rs | use cpu;
use mem;
#[derive(Default)]
pub struct Timer {
div_counter: u8,
timer_counter: u16,
}
impl Timer {
pub fn | (&mut self, mem: &mut mem::Mem) {
self.update_div(mem);
self.update_timer(mem);
}
fn update_div(&mut self, mem: &mut mem::Mem) {
self.div_counter = self.div_counter.overflowing_add(1).0;
if self.div_counter == 0 {
let div = mem.read_reg(mem::RegAddr::DIV);
let (new_div, _) = div.overflowing_add(1);
mem.write_reg(mem::RegAddr::DIV, new_div);
}
}
fn update_timer(&mut self, mem: &mut mem::Mem) {
self.timer_counter = self.timer_counter.overflowing_add(1).0;
let tac = mem.read_reg(mem::RegAddr::TAC);
if tac & 0x4 == 0 {
// Check if enabled
return;
}
let clock_sel = tac & 0x3;
let div = match clock_sel {
0x00 => 1024,
0x01 => 16,
0x02 => 64,
0x03 => 256,
_ => panic!("Impossible"),
};
if self.timer_counter % div != 0 {
return;
}
let tima = mem.read_reg(mem::RegAddr::TIMA);
let new_tima = if tima == 0xff {
mem.set_interrupt_flag(cpu::Interrupt::Timer, true);
mem.read_reg(mem::RegAddr::TMA)
} else {
tima + 1
};
mem.write_reg(mem::RegAddr::TIMA, new_tima);
// TODO: Handle double speed mode. (divide div by 2)
}
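// Worked example of the divider selection above (a sketch; tick counts are in
// units of calls to `update_timer`, i.e. this emulator's input clock):
// TAC = 0b101 means the timer is enabled (bit 2 set) with clock_sel = 0b01,
// so TIMA advances once every 16 ticks; when TIMA overflows past 0xFF it is
// reloaded from TMA and a Timer interrupt is requested.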
}
| on_clock | identifier_name |
timer.rs | use cpu;
use mem;
#[derive(Default)]
pub struct Timer {
div_counter: u8,
timer_counter: u16,
}
impl Timer {
pub fn on_clock(&mut self, mem: &mut mem::Mem) |
fn update_div(&mut self, mem: &mut mem::Mem) {
self.div_counter = self.div_counter.overflowing_add(1).0;
if self.div_counter == 0 {
let div = mem.read_reg(mem::RegAddr::DIV);
let (new_div, _) = div.overflowing_add(1);
mem.write_reg(mem::RegAddr::DIV, new_div);
}
}
fn update_timer(&mut self, mem: &mut mem::Mem) {
self.timer_counter = self.timer_counter.overflowing_add(1).0;
let tac = mem.read_reg(mem::RegAddr::TAC);
if tac & 0x4 == 0 {
// Check if enabled
return;
}
let clock_sel = tac & 0x3;
let div = match clock_sel {
0x00 => 1024,
0x01 => 16,
0x02 => 64,
0x03 => 256,
_ => panic!("Impossible"),
};
if self.timer_counter % div != 0 {
return;
}
let tima = mem.read_reg(mem::RegAddr::TIMA);
let new_tima = if tima == 0xff {
mem.set_interrupt_flag(cpu::Interrupt::Timer, true);
mem.read_reg(mem::RegAddr::TMA)
} else {
tima + 1
};
mem.write_reg(mem::RegAddr::TIMA, new_tima);
// TODO: Handle double speed mode. (divide div by 2)
}
}
| {
self.update_div(mem);
self.update_timer(mem);
} | identifier_body |
fasta_reader.rs | use crate::common::Region;
use anyhow::Context;
use anyhow::Result;
use bio::io::fasta;
use itertools::Itertools;
use serde::Serialize;
use std::collections::HashMap;
use std::path::Path;
pub fn read_fasta<P: AsRef<Path> + std::fmt::Debug>(
path: P,
region: &Region,
compensate_0_basing: bool,
) -> Result<Vec<Nucleobase>> {
let mut reader = fasta::IndexedReader::from_file(&path).unwrap();
let index =
fasta::Index::with_fasta_file(&path).context("error reading index file of input FASTA")?;
let _sequences = index.sequences();
let mut seq: Vec<u8> = Vec::new();
reader.fetch(&region.target, region.start, region.end)?;
reader.read(&mut seq)?;
let mut fasta = Vec::new();
let mut ind = region.start;
if compensate_0_basing {
ind += 1;
}
for a in seq {
let base = char::from(a);
let marker = base.to_uppercase().collect_vec().pop().unwrap();
let b = Nucleobase {
position: ind,
marker_type: marker,
row: 0,
repeat: base.is_lowercase(),
};
fasta.push(b);
ind += 1;
}
Ok(fasta)
}
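// Minimal usage sketch. Building `Region` with a struct literal and the "chr1"
// target name are assumptions for illustration; construct the region however
// `common::Region` actually exposes it. Passing `true` shifts reported positions
// to 1-based coordinates.
pub fn read_first_kilobase(path: &Path) -> Result<Vec<Nucleobase>> {
    let region = Region {
        target: "chr1".to_owned(),
        start: 0,
        end: 1_000,
    };
    read_fasta(path, &region, true)
}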
pub fn | (path: &Path) -> Result<HashMap<String, u64>> {
let index = fasta::Index::with_fasta_file(&path).context("error reading input FASTA")?;
let sequences = index.sequences();
Ok(sequences
.iter()
.map(|s| (s.name.to_owned(), s.len))
.collect())
}
#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct Nucleobase {
position: u64,
marker_type: char,
row: u8,
repeat: bool,
}
impl Nucleobase {
pub fn get_marker_type(&self) -> char {
self.marker_type
}
}
| get_fasta_lengths | identifier_name |
fasta_reader.rs | use crate::common::Region;
use anyhow::Context;
use anyhow::Result;
use bio::io::fasta;
use itertools::Itertools;
use serde::Serialize;
use std::collections::HashMap;
use std::path::Path;
pub fn read_fasta<P: AsRef<Path> + std::fmt::Debug>(
path: P,
region: &Region,
compensate_0_basing: bool,
) -> Result<Vec<Nucleobase>> {
let mut reader = fasta::IndexedReader::from_file(&path).unwrap();
let index =
fasta::Index::with_fasta_file(&path).context("error reading index file of input FASTA")?;
let _sequences = index.sequences();
let mut seq: Vec<u8> = Vec::new();
reader.fetch(&region.target, region.start, region.end)?;
reader.read(&mut seq)?;
let mut fasta = Vec::new();
let mut ind = region.start;
if compensate_0_basing |
for a in seq {
let base = char::from(a);
let marker = base.to_uppercase().collect_vec().pop().unwrap();
let b = Nucleobase {
position: ind,
marker_type: marker,
row: 0,
repeat: base.is_lowercase(),
};
fasta.push(b);
ind += 1;
}
Ok(fasta)
}
pub fn get_fasta_lengths(path: &Path) -> Result<HashMap<String, u64>> {
let index = fasta::Index::with_fasta_file(&path).context("error reading input FASTA")?;
let sequences = index.sequences();
Ok(sequences
.iter()
.map(|s| (s.name.to_owned(), s.len))
.collect())
}
#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct Nucleobase {
position: u64,
marker_type: char,
row: u8,
repeat: bool,
}
impl Nucleobase {
pub fn get_marker_type(&self) -> char {
self.marker_type
}
}
| {
ind += 1;
} | conditional_block |
fasta_reader.rs | use crate::common::Region;
use anyhow::Context;
use anyhow::Result;
use bio::io::fasta;
use itertools::Itertools;
use serde::Serialize;
use std::collections::HashMap;
use std::path::Path;
pub fn read_fasta<P: AsRef<Path> + std::fmt::Debug>(
path: P,
region: &Region,
compensate_0_basing: bool,
) -> Result<Vec<Nucleobase>> {
let mut reader = fasta::IndexedReader::from_file(&path).unwrap();
let index =
fasta::Index::with_fasta_file(&path).context("error reading index file of input FASTA")?;
let _sequences = index.sequences();
let mut seq: Vec<u8> = Vec::new();
reader.fetch(&region.target, region.start, region.end)?;
reader.read(&mut seq)?;
let mut fasta = Vec::new();
let mut ind = region.start;
if compensate_0_basing {
ind += 1;
}
for a in seq {
let base = char::from(a);
let marker = base.to_uppercase().collect_vec().pop().unwrap();
let b = Nucleobase {
position: ind,
marker_type: marker,
row: 0,
repeat: base.is_lowercase(),
};
fasta.push(b);
ind += 1;
}
Ok(fasta)
}
pub fn get_fasta_lengths(path: &Path) -> Result<HashMap<String, u64>> |
#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct Nucleobase {
position: u64,
marker_type: char,
row: u8,
repeat: bool,
}
impl Nucleobase {
pub fn get_marker_type(&self) -> char {
self.marker_type
}
}
| {
let index = fasta::Index::with_fasta_file(&path).context("error reading input FASTA")?;
let sequences = index.sequences();
Ok(sequences
.iter()
.map(|s| (s.name.to_owned(), s.len))
.collect())
} | identifier_body |
fasta_reader.rs | use crate::common::Region;
use anyhow::Context; | use itertools::Itertools;
use serde::Serialize;
use std::collections::HashMap;
use std::path::Path;
pub fn read_fasta<P: AsRef<Path> + std::fmt::Debug>(
path: P,
region: &Region,
compensate_0_basing: bool,
) -> Result<Vec<Nucleobase>> {
let mut reader = fasta::IndexedReader::from_file(&path).unwrap();
let index =
fasta::Index::with_fasta_file(&path).context("error reading index file of input FASTA")?;
let _sequences = index.sequences();
let mut seq: Vec<u8> = Vec::new();
reader.fetch(®ion.target, region.start, region.end)?;
reader.read(&mut seq)?;
let mut fasta = Vec::new();
let mut ind = region.start;
if compensate_0_basing {
ind += 1;
}
for a in seq {
let base = char::from(a);
let marker = base.to_uppercase().collect_vec().pop().unwrap();
let b = Nucleobase {
position: ind,
marker_type: marker,
row: 0,
repeat: base.is_lowercase(),
};
fasta.push(b);
ind += 1;
}
Ok(fasta)
}
pub fn get_fasta_lengths(path: &Path) -> Result<HashMap<String, u64>> {
let index = fasta::Index::with_fasta_file(&path).context("error reading input FASTA")?;
let sequences = index.sequences();
Ok(sequences
.iter()
.map(|s| (s.name.to_owned(), s.len))
.collect())
}
#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct Nucleobase {
position: u64,
marker_type: char,
row: u8,
repeat: bool,
}
impl Nucleobase {
pub fn get_marker_type(&self) -> char {
self.marker_type
}
} | use anyhow::Result;
use bio::io::fasta; | random_line_split |
unique.rs | use crate::convert::From;
use crate::fmt;
use crate::marker::{PhantomData, Unsize};
use crate::mem;
use crate::ops::{CoerceUnsized, DispatchFromDyn};
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. Useful for building abstractions like
/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
///
/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
/// the kind of strong aliasing guarantees an instance of `T` can expect:
/// the referent of the pointer should not be modified without a unique path to
/// its owning Unique.
///
/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
/// consider using `NonNull`, which has weaker semantics.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
/// for any type which upholds Unique's aliasing requirements.
#[unstable(
feature = "ptr_internals",
issue = "none",
reason = "use `NonNull` instead and consider `PhantomData<T>` \
(if you also use `#[may_dangle]`), `Send`, and/or `Sync`"
)]
#[doc(hidden)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
pub struct Unique<T: ?Sized> {
pointer: *const T,
// NOTE: this marker has no consequences for variance, but is necessary
// for dropck to understand that we logically own a `T`.
//
// For details, see:
// https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
_marker: PhantomData<T>,
}
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Send + ?Sized> Send for Unique<T> {}
/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: Sized> Unique<T> {
/// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
#[inline]
pub const fn dangling() -> Self {
// SAFETY: mem::align_of() returns a valid, non-null pointer. The
// conditions to call new_unchecked() are thus respected.
unsafe { Unique::new_unchecked(mem::align_of::<T>() as *mut T) }
}
}
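// Illustrative note (not part of core): the null niche described in the type-level
// docs can be observed through the public `NonNull` counterpart, e.g. in user code:
//
//     use core::mem::size_of;
//     use core::ptr::NonNull;
//     assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*mut u8>());
//
// `Option<Unique<T>>` carries the same layout guarantee, which is what lets
// containers store a possibly-absent owning pointer at no extra cost.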
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Unique<T> {
/// Creates a new `Unique`.
///
/// # Safety
///
/// `ptr` must be non-null.
#[inline]
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
// SAFETY: the caller must guarantee that `ptr` is non-null.
unsafe { Unique { pointer: ptr as _, _marker: PhantomData } }
}
/// Creates a new `Unique` if `ptr` is non-null.
#[inline]
pub fn new(ptr: *mut T) -> Option<Self> {
if !ptr.is_null() {
// SAFETY: The pointer has already been checked and is not null.
Some(unsafe { Unique { pointer: ptr as _, _marker: PhantomData } })
} else |
}
/// Acquires the underlying `*mut` pointer.
#[inline]
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_ref(&self) -> &T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a reference.
unsafe { &*self.as_ptr() }
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a mutable reference.
unsafe { &mut *self.as_ptr() }
}
/// Casts to a pointer of another type.
#[inline]
pub const fn cast<U>(self) -> Unique<U> {
// SAFETY: Unique::new_unchecked() creates a new unique and needs
// the given pointer to not be null.
// Since we are passing self as a pointer, it cannot be null.
unsafe { Unique::new_unchecked(self.as_ptr() as *mut U) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Clone for Unique<T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Copy for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Debug for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Pointer for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> From<&mut T> for Unique<T> {
#[inline]
fn from(reference: &mut T) -> Self {
// SAFETY: A mutable reference cannot be null
unsafe { Unique { pointer: reference as *mut T, _marker: PhantomData } }
}
}
| {
None
} | conditional_block |
unique.rs | use crate::convert::From;
use crate::fmt;
use crate::marker::{PhantomData, Unsize};
use crate::mem;
use crate::ops::{CoerceUnsized, DispatchFromDyn};
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. Useful for building abstractions like
/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
///
/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
/// the kind of strong aliasing guarantees an instance of `T` can expect:
/// the referent of the pointer should not be modified without a unique path to
/// its owning Unique.
///
/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
/// consider using `NonNull`, which has weaker semantics.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
/// for any type which upholds Unique's aliasing requirements.
#[unstable(
feature = "ptr_internals",
issue = "none",
reason = "use `NonNull` instead and consider `PhantomData<T>` \
(if you also use `#[may_dangle]`), `Send`, and/or `Sync`"
)]
#[doc(hidden)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
pub struct Unique<T: ?Sized> {
pointer: *const T,
// NOTE: this marker has no consequences for variance, but is necessary
// for dropck to understand that we logically own a `T`.
//
// For details, see:
// https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
_marker: PhantomData<T>,
}
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Send + ?Sized> Send for Unique<T> {}
/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: Sized> Unique<T> {
/// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
#[inline]
pub const fn dangling() -> Self {
// SAFETY: mem::align_of() returns a valid, non-null pointer. The
// conditions to call new_unchecked() are thus respected.
unsafe { Unique::new_unchecked(mem::align_of::<T>() as *mut T) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Unique<T> {
/// Creates a new `Unique`.
///
/// # Safety
///
/// `ptr` must be non-null.
#[inline]
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
// SAFETY: the caller must guarantee that `ptr` is non-null.
unsafe { Unique { pointer: ptr as _, _marker: PhantomData } }
}
/// Creates a new `Unique` if `ptr` is non-null.
#[inline]
pub fn new(ptr: *mut T) -> Option<Self> {
        if !ptr.is_null() {
// SAFETY: The pointer has already been checked and is not null.
Some(unsafe { Unique { pointer: ptr as _, _marker: PhantomData } })
} else {
None
}
}
/// Acquires the underlying `*mut` pointer.
#[inline]
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_ref(&self) -> &T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a reference.
unsafe { &*self.as_ptr() }
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a mutable reference.
unsafe { &mut *self.as_ptr() }
}
/// Casts to a pointer of another type.
#[inline]
pub const fn cast<U>(self) -> Unique<U> {
// SAFETY: Unique::new_unchecked() creates a new unique and needs
// the given pointer to not be null.
// Since we are passing self as a pointer, it cannot be null.
unsafe { Unique::new_unchecked(self.as_ptr() as *mut U) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Clone for Unique<T> {
#[inline]
fn | (&self) -> Self {
*self
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Copy for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Debug for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Pointer for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> From<&mut T> for Unique<T> {
#[inline]
fn from(reference: &mut T) -> Self {
// SAFETY: A mutable reference cannot be null
unsafe { Unique { pointer: reference as *mut T, _marker: PhantomData } }
}
}
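// --- Illustrative sketch (not part of the original file) ---
// A minimal example of the kind of owning abstraction `Unique<T>` is meant to back.
// It assumes the unstable `ptr_internals` feature and an allocator (`Box`) are
// available, which in practice means code like this lives inside the standard
// library rather than in user crates.
#[allow(dead_code)]
struct MyBox<T> {
    ptr: Unique<T>,
}

#[allow(dead_code)]
impl<T> MyBox<T> {
    fn new(value: T) -> Self {
        // `Box::into_raw` never returns null, so `new_unchecked` is sound here.
        let raw = Box::into_raw(Box::new(value));
        MyBox { ptr: unsafe { Unique::new_unchecked(raw) } }
    }

    fn get(&self) -> &T {
        // SAFETY: `ptr` always points to a live `T` uniquely owned by this `MyBox`.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // SAFETY: the pointer came from `Box::into_raw` and has not been freed yet.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) }
    }
}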
| clone | identifier_name |
unique.rs | use crate::convert::From;
use crate::fmt;
use crate::marker::{PhantomData, Unsize};
use crate::mem;
use crate::ops::{CoerceUnsized, DispatchFromDyn};
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. Useful for building abstractions like
/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
///
/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
/// the kind of strong aliasing guarantees an instance of `T` can expect:
/// the referent of the pointer should not be modified without a unique path to
/// its owning Unique.
///
/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
/// consider using `NonNull`, which has weaker semantics.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
/// for any type which upholds Unique's aliasing requirements.
#[unstable(
feature = "ptr_internals",
issue = "none",
reason = "use `NonNull` instead and consider `PhantomData<T>` \
(if you also use `#[may_dangle]`), `Send`, and/or `Sync`"
)]
#[doc(hidden)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
pub struct Unique<T: ?Sized> {
pointer: *const T,
// NOTE: this marker has no consequences for variance, but is necessary
// for dropck to understand that we logically own a `T`.
//
// For details, see:
// https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
_marker: PhantomData<T>,
} | /// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Send + ?Sized> Send for Unique<T> {}
/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: Sized> Unique<T> {
/// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
#[inline]
pub const fn dangling() -> Self {
// SAFETY: mem::align_of() returns a valid, non-null pointer. The
// conditions to call new_unchecked() are thus respected.
unsafe { Unique::new_unchecked(mem::align_of::<T>() as *mut T) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Unique<T> {
/// Creates a new `Unique`.
///
/// # Safety
///
/// `ptr` must be non-null.
#[inline]
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
// SAFETY: the caller must guarantee that `ptr` is non-null.
unsafe { Unique { pointer: ptr as _, _marker: PhantomData } }
}
/// Creates a new `Unique` if `ptr` is non-null.
#[inline]
pub fn new(ptr: *mut T) -> Option<Self> {
        if !ptr.is_null() {
// SAFETY: The pointer has already been checked and is not null.
Some(unsafe { Unique { pointer: ptr as _, _marker: PhantomData } })
} else {
None
}
}
/// Acquires the underlying `*mut` pointer.
#[inline]
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_ref(&self) -> &T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a reference.
unsafe { &*self.as_ptr() }
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a mutable reference.
unsafe { &mut *self.as_ptr() }
}
/// Casts to a pointer of another type.
#[inline]
pub const fn cast<U>(self) -> Unique<U> {
// SAFETY: Unique::new_unchecked() creates a new unique and needs
// the given pointer to not be null.
// Since we are passing self as a pointer, it cannot be null.
unsafe { Unique::new_unchecked(self.as_ptr() as *mut U) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Clone for Unique<T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Copy for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Debug for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Pointer for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> From<&mut T> for Unique<T> {
#[inline]
fn from(reference: &mut T) -> Self {
// SAFETY: A mutable reference cannot be null
unsafe { Unique { pointer: reference as *mut T, _marker: PhantomData } }
}
} |
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is | random_line_split |
unique.rs | use crate::convert::From;
use crate::fmt;
use crate::marker::{PhantomData, Unsize};
use crate::mem;
use crate::ops::{CoerceUnsized, DispatchFromDyn};
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. Useful for building abstractions like
/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
///
/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
/// the kind of strong aliasing guarantees an instance of `T` can expect:
/// the referent of the pointer should not be modified without a unique path to
/// its owning Unique.
///
/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
/// consider using `NonNull`, which has weaker semantics.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
/// for any type which upholds Unique's aliasing requirements.
#[unstable(
feature = "ptr_internals",
issue = "none",
reason = "use `NonNull` instead and consider `PhantomData<T>` \
(if you also use `#[may_dangle]`), `Send`, and/or `Sync`"
)]
#[doc(hidden)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
pub struct Unique<T: ?Sized> {
pointer: *const T,
// NOTE: this marker has no consequences for variance, but is necessary
// for dropck to understand that we logically own a `T`.
//
// For details, see:
// https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
_marker: PhantomData<T>,
}
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Send + ?Sized> Send for Unique<T> {}
/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "none")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: Sized> Unique<T> {
/// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
#[inline]
pub const fn dangling() -> Self {
// SAFETY: mem::align_of() returns a valid, non-null pointer. The
// conditions to call new_unchecked() are thus respected.
unsafe { Unique::new_unchecked(mem::align_of::<T>() as *mut T) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Unique<T> {
/// Creates a new `Unique`.
///
/// # Safety
///
/// `ptr` must be non-null.
#[inline]
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
// SAFETY: the caller must guarantee that `ptr` is non-null.
unsafe { Unique { pointer: ptr as _, _marker: PhantomData } }
}
/// Creates a new `Unique` if `ptr` is non-null.
#[inline]
pub fn new(ptr: *mut T) -> Option<Self> {
        if !ptr.is_null() {
// SAFETY: The pointer has already been checked and is not null.
Some(unsafe { Unique { pointer: ptr as _, _marker: PhantomData } })
} else {
None
}
}
/// Acquires the underlying `*mut` pointer.
#[inline]
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_ref(&self) -> &T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a reference.
unsafe { &*self.as_ptr() }
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a mutable reference.
unsafe { &mut *self.as_ptr() }
}
/// Casts to a pointer of another type.
#[inline]
pub const fn cast<U>(self) -> Unique<U> {
// SAFETY: Unique::new_unchecked() creates a new unique and needs
// the given pointer to not be null.
// Since we are passing self as a pointer, it cannot be null.
unsafe { Unique::new_unchecked(self.as_ptr() as *mut U) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Clone for Unique<T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> Copy for Unique<T> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Debug for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> fmt::Pointer for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> From<&mut T> for Unique<T> {
#[inline]
fn from(reference: &mut T) -> Self |
}
| {
// SAFETY: A mutable reference cannot be null
unsafe { Unique { pointer: reference as *mut T, _marker: PhantomData } }
} | identifier_body |
window.rs | use std::ops::{Bound, Range, RangeBounds};
/// An owned window around an underlying buffer.
///
/// Normally slices work great for considering sub-portions of a buffer, but
/// unfortunately a slice is a *borrowed* type in Rust which has an associated
/// lifetime. When working with future and async I/O these lifetimes are not
/// always appropriate, and are sometimes difficult to store in tasks. This
/// type strives to fill this gap by providing an "owned slice" around an
/// underlying buffer of bytes.
///
/// A `Window<T>` wraps an underlying buffer, `T`, and has configurable
/// start/end indexes to alter the behavior of the `AsRef<[u8]>` implementation
/// that this type carries.
///
/// This type can be particularly useful when working with the `write_all`
/// combinator in this crate. Data can be sliced via `Window`, consumed by
/// `write_all`, and then earned back once the write operation finishes through
/// the `into_inner` method on this type.
#[derive(Debug)]
pub struct | <T> {
inner: T,
range: Range<usize>,
}
impl<T: AsRef<[u8]>> Window<T> {
/// Creates a new window around the buffer `t` defaulting to the entire
/// slice.
///
/// Further methods can be called on the returned `Window<T>` to alter the
/// window into the data provided.
pub fn new(t: T) -> Self {
Self { range: 0..t.as_ref().len(), inner: t }
}
/// Gets a shared reference to the underlying buffer inside of this
/// `Window`.
pub fn get_ref(&self) -> &T {
&self.inner
}
/// Gets a mutable reference to the underlying buffer inside of this
/// `Window`.
pub fn get_mut(&mut self) -> &mut T {
&mut self.inner
}
/// Consumes this `Window`, returning the underlying buffer.
pub fn into_inner(self) -> T {
self.inner
}
/// Returns the starting index of this window into the underlying buffer
/// `T`.
pub fn start(&self) -> usize {
self.range.start
}
/// Returns the end index of this window into the underlying buffer
/// `T`.
pub fn end(&self) -> usize {
self.range.end
}
/// Changes the range of this window to the range specified.
///
/// # Panics
///
/// This method will panic if `range` is out of bounds for the underlying
/// slice or if [`start_bound()`] of `range` comes after the [`end_bound()`].
///
/// [`start_bound()`]: std::ops::RangeBounds::start_bound
/// [`end_bound()`]: std::ops::RangeBounds::end_bound
pub fn set<R: RangeBounds<usize>>(&mut self, range: R) {
let start = match range.start_bound() {
Bound::Included(n) => *n,
Bound::Excluded(n) => *n + 1,
Bound::Unbounded => 0,
};
let end = match range.end_bound() {
Bound::Included(n) => *n + 1,
Bound::Excluded(n) => *n,
Bound::Unbounded => self.inner.as_ref().len(),
};
assert!(end <= self.inner.as_ref().len());
assert!(start <= end);
self.range.start = start;
self.range.end = end;
}
}
impl<T: AsRef<[u8]>> AsRef<[u8]> for Window<T> {
fn as_ref(&self) -> &[u8] {
&self.inner.as_ref()[self.range.start..self.range.end]
}
}
impl<T: AsMut<[u8]>> AsMut<[u8]> for Window<T> {
fn as_mut(&mut self) -> &mut [u8] {
&mut self.inner.as_mut()[self.range.start..self.range.end]
}
}
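// --- Illustrative usage sketch (not part of the original file) ---
// Wrap an owned buffer, narrow the view with `set`, then take the buffer back
// with `into_inner` once the windowed I/O is done.
fn _window_demo() {
    let buf: Vec<u8> = b"hello world".to_vec();
    let mut window = Window::new(buf);

    // Restrict the window to bytes 6..11 ("world"); `AsRef<[u8]>` now yields only that slice.
    window.set(6..11);
    assert_eq!(window.as_ref(), &b"world"[..]);
    assert_eq!(window.start(), 6);
    assert_eq!(window.end(), 11);

    // Reclaim the full underlying buffer.
    let buf = window.into_inner();
    assert_eq!(buf.len(), 11);
}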
| Window | identifier_name |
window.rs | use std::ops::{Bound, Range, RangeBounds};
/// An owned window around an underlying buffer.
///
/// Normally slices work great for considering sub-portions of a buffer, but
/// unfortunately a slice is a *borrowed* type in Rust which has an associated
/// lifetime. When working with future and async I/O these lifetimes are not
/// always appropriate, and are sometimes difficult to store in tasks. This
/// type strives to fill this gap by providing an "owned slice" around an
/// underlying buffer of bytes.
///
/// A `Window<T>` wraps an underlying buffer, `T`, and has configurable
/// start/end indexes to alter the behavior of the `AsRef<[u8]>` implementation
/// that this type carries.
///
/// This type can be particularly useful when working with the `write_all`
/// combinator in this crate. Data can be sliced via `Window`, consumed by
/// `write_all`, and then earned back once the write operation finishes through
/// the `into_inner` method on this type.
#[derive(Debug)]
pub struct Window<T> {
inner: T,
range: Range<usize>,
}
impl<T: AsRef<[u8]>> Window<T> {
/// Creates a new window around the buffer `t` defaulting to the entire
/// slice.
///
/// Further methods can be called on the returned `Window<T>` to alter the
/// window into the data provided.
pub fn new(t: T) -> Self {
Self { range: 0..t.as_ref().len(), inner: t }
}
/// Gets a shared reference to the underlying buffer inside of this
/// `Window`.
pub fn get_ref(&self) -> &T {
&self.inner
}
/// Gets a mutable reference to the underlying buffer inside of this
/// `Window`.
pub fn get_mut(&mut self) -> &mut T {
&mut self.inner
}
/// Consumes this `Window`, returning the underlying buffer.
pub fn into_inner(self) -> T {
self.inner
}
/// Returns the starting index of this window into the underlying buffer
/// `T`.
pub fn start(&self) -> usize {
self.range.start
}
/// Returns the end index of this window into the underlying buffer
/// `T`.
pub fn end(&self) -> usize {
self.range.end
}
/// Changes the range of this window to the range specified.
///
/// # Panics
///
/// This method will panic if `range` is out of bounds for the underlying
/// slice or if [`start_bound()`] of `range` comes after the [`end_bound()`].
///
/// [`start_bound()`]: std::ops::RangeBounds::start_bound
/// [`end_bound()`]: std::ops::RangeBounds::end_bound
pub fn set<R: RangeBounds<usize>>(&mut self, range: R) {
let start = match range.start_bound() {
Bound::Included(n) => *n,
Bound::Excluded(n) => *n + 1,
Bound::Unbounded => 0,
};
let end = match range.end_bound() {
Bound::Included(n) => *n + 1,
Bound::Excluded(n) => *n,
Bound::Unbounded => self.inner.as_ref().len(),
};
assert!(end <= self.inner.as_ref().len());
assert!(start <= end);
self.range.start = start;
self.range.end = end;
}
}
impl<T: AsRef<[u8]>> AsRef<[u8]> for Window<T> {
fn as_ref(&self) -> &[u8] {
&self.inner.as_ref()[self.range.start..self.range.end]
}
}
impl<T: AsMut<[u8]>> AsMut<[u8]> for Window<T> {
fn as_mut(&mut self) -> &mut [u8] |
}
| {
&mut self.inner.as_mut()[self.range.start..self.range.end]
} | identifier_body |
window.rs | use std::ops::{Bound, Range, RangeBounds};
/// An owned window around an underlying buffer.
/// | /// always appropriate, and are sometimes difficult to store in tasks. This
/// type strives to fill this gap by providing an "owned slice" around an
/// underlying buffer of bytes.
///
/// A `Window<T>` wraps an underlying buffer, `T`, and has configurable
/// start/end indexes to alter the behavior of the `AsRef<[u8]>` implementation
/// that this type carries.
///
/// This type can be particularly useful when working with the `write_all`
/// combinator in this crate. Data can be sliced via `Window`, consumed by
/// `write_all`, and then earned back once the write operation finishes through
/// the `into_inner` method on this type.
#[derive(Debug)]
pub struct Window<T> {
inner: T,
range: Range<usize>,
}
impl<T: AsRef<[u8]>> Window<T> {
/// Creates a new window around the buffer `t` defaulting to the entire
/// slice.
///
/// Further methods can be called on the returned `Window<T>` to alter the
/// window into the data provided.
pub fn new(t: T) -> Self {
Self { range: 0..t.as_ref().len(), inner: t }
}
/// Gets a shared reference to the underlying buffer inside of this
/// `Window`.
pub fn get_ref(&self) -> &T {
&self.inner
}
/// Gets a mutable reference to the underlying buffer inside of this
/// `Window`.
pub fn get_mut(&mut self) -> &mut T {
&mut self.inner
}
/// Consumes this `Window`, returning the underlying buffer.
pub fn into_inner(self) -> T {
self.inner
}
/// Returns the starting index of this window into the underlying buffer
/// `T`.
pub fn start(&self) -> usize {
self.range.start
}
/// Returns the end index of this window into the underlying buffer
/// `T`.
pub fn end(&self) -> usize {
self.range.end
}
/// Changes the range of this window to the range specified.
///
/// # Panics
///
/// This method will panic if `range` is out of bounds for the underlying
/// slice or if [`start_bound()`] of `range` comes after the [`end_bound()`].
///
/// [`start_bound()`]: std::ops::RangeBounds::start_bound
/// [`end_bound()`]: std::ops::RangeBounds::end_bound
pub fn set<R: RangeBounds<usize>>(&mut self, range: R) {
let start = match range.start_bound() {
Bound::Included(n) => *n,
Bound::Excluded(n) => *n + 1,
Bound::Unbounded => 0,
};
let end = match range.end_bound() {
Bound::Included(n) => *n + 1,
Bound::Excluded(n) => *n,
Bound::Unbounded => self.inner.as_ref().len(),
};
assert!(end <= self.inner.as_ref().len());
assert!(start <= end);
self.range.start = start;
self.range.end = end;
}
}
impl<T: AsRef<[u8]>> AsRef<[u8]> for Window<T> {
fn as_ref(&self) -> &[u8] {
&self.inner.as_ref()[self.range.start..self.range.end]
}
}
impl<T: AsMut<[u8]>> AsMut<[u8]> for Window<T> {
fn as_mut(&mut self) -> &mut [u8] {
&mut self.inner.as_mut()[self.range.start..self.range.end]
}
} | /// Normally slices work great for considering sub-portions of a buffer, but
/// unfortunately a slice is a *borrowed* type in Rust which has an associated
/// lifetime. When working with future and async I/O these lifetimes are not | random_line_split |
issue-5554.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(macro_rules)];
use std::default::Default;
pub struct X<T> {
a: T,
}
// reordering these bounds stops the ICE
impl<T: Default + Eq + Default> Default for X<T> {
fn default() -> X<T> {
X { a: Default::default() } | macro_rules! constants {
() => {
let _ : X<int> = Default::default();
}
}
pub fn main() {
constants!();
} | }
}
| random_line_split |
issue-5554.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(macro_rules)];
use std::default::Default;
pub struct X<T> {
a: T,
}
// reordering these bounds stops the ICE
impl<T: Default + Eq + Default> Default for X<T> {
fn default() -> X<T> {
X { a: Default::default() }
}
}
macro_rules! constants {
() => {
let _ : X<int> = Default::default();
}
}
pub fn | () {
constants!();
}
| main | identifier_name |
lib.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
//! This module provides algorithms for accessing and updating a Merkle Accumulator structure
//! persisted in a key-value store. Note that this doesn't write to the storage directly, rather,
//! it reads from it via the `HashReader` trait and yields writes via an in memory `HashMap`.
//!
//! # Merkle Accumulator
//! Given an ever growing (append only) series of "leaf" hashes, we construct an evolving Merkle
//! Tree for which proofs of inclusion/exclusion of a leaf hash at a leaf index in a snapshot
//! of the tree (represented by root hash) can be given.
//!
//! # Leaf Nodes
//! Leaf nodes carry hash values to be stored and proved. They are only appended to the tree but
//! never deleted or updated.
//!
//! # Internal Nodes
//! A non-leaf node carries the hash value derived from both its left and right children.
//!
//! # Placeholder Nodes
//! To make sure each Leaf node has a Merkle Proof towards the root, placeholder nodes are added so
//! that along the route from a leaf to the root, each node has a sibling. Placeholder nodes have
//! the hash value `ACCUMULATOR_PLACEHOLDER_HASH`
//!
//! A placeholder node can appear as either a Leaf node or a non-Leaf node, but there is at most one
//! placeholder leaf at any time.
//!
//! # Frozen Nodes & Non-frozen Nodes
//! As leaves are added to the tree, placeholder nodes get replaced by non-placeholder nodes, and
//! when a node has all its descendants being non-placeholder, it becomes "Frozen" -- its hash value
//! won't change again in the event of new leaves being added. All leaves appended (not counting the
//! one possible placeholder leaf) are by definition Frozen.
//!
//! Other nodes, which have one or more placeholder descendants are Non-Frozen. As new elements are
//! appended to the accumulator the hash value of these nodes will change.
//!
//! # Leaf Count
//! Given a count of the number of leaves in a Merkle Accumulator it is possible to determine the
//! shape of the accumulator -- which nodes are filled and which nodes are placeholder nodes.
//!
//! Example:
//! Logical view of a Merkle Accumulator with 5 leaves:
//! ```text
//! Non-fzn
//! / \
//! / \
//! / \
//! Fzn2 Non-fzn
//! / \ / \
//! / \ / \
//! Fzn1 Fzn3 Non-fzn [Placeholder]
//! / \ / \ / \
//! L0 L1 L2 L3 L4 [Placeholder]
//! ```
//!
//! # Position and Physical Representation
//! As a Merkle Accumulator tree expands to the right and upwards, we number newly frozen nodes
//! monotonically. One way to do it is simply to use in-order index of nodes, and this is what
//! we do for the in-memory representation. We call the stated numbers identifying nodes below
//! simply "Position", and unless otherwise stated, this is the in-order position.
//!
//! For writing to disk however, we write all the children of a node before the parent.
//! Thus for disk write order, it is more convenient to use the post-order position as an index.
//! And with that we can map a Merkle Accumulator into a key-value storage: key is the post-order
//! position of a node, and the value is hash value it carries.
//!
//! We store only Frozen nodes, and generate non-Frozen nodes on the fly when accessing the tree.
//! This way, the physical representation of the tree is append-only, i.e. once written to physical
//! storage, nodes won't be either modified or deleted.
//!
//! Here is what we persist for the logical tree in the above example:
//!
//! ```text
//! Fzn2(6)
//! / \
//! / \
//! Fzn1(2) Fzn3(5)
//! / \ / \
//! L0(0) L1(1) L2(3) L3(4) L4(7)
//! ```
//!
//! When the next leaf node is persisted, the physical representation will be:
//!
//! ```text
//! Fzn2(6)
//! / \
//! / \
//! Fzn1(2) Fzn3(5) Fzn4(9)
//! / \ / \ / \
//! L0(0) L1(1) L2(3) L3(4) L4(7) L5(8)
//! ```
//!
//! The numbering corresponds to the post-order traversal of the tree.
//!
//! To think in key-value pairs:
//! ```text
//! |<-key->|<--value-->|
//! | 0 | hash_L0 |
//! | 1 | hash_L1 |
//! | 2 | hash_Fzn1 |
//! | ... | ... |
//! ```
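// A quick sanity check of the numbering described above (illustrative only; the formula
// below is an observation, not part of the crate's API): the post-order position of leaf
// `i` works out to `2 * i - i.count_ones()`, which reproduces the labels L0(0), L1(1),
// L2(3), L3(4), L4(7) and L5(8) used in the diagrams.
#[cfg(test)]
mod postorder_numbering_sketch {
    #[test]
    fn leaf_postorder_positions_match_the_diagrams() {
        let postorder = |i: u64| 2 * i - u64::from(i.count_ones());
        assert_eq!(postorder(0), 0);
        assert_eq!(postorder(1), 1);
        assert_eq!(postorder(2), 3);
        assert_eq!(postorder(3), 4);
        assert_eq!(postorder(4), 7);
        assert_eq!(postorder(5), 8);
    }
}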
#[cfg(any(test, feature = "fuzzing"))]
pub mod test_helpers;
use anyhow::{ensure, format_err, Result};
use diem_crypto::hash::{CryptoHash, CryptoHasher, HashValue, ACCUMULATOR_PLACEHOLDER_HASH};
use diem_types::proof::{
definition::{LeafCount, MAX_ACCUMULATOR_PROOF_DEPTH},
position::{FrozenSubTreeIterator, FrozenSubtreeSiblingIterator, Position},
AccumulatorConsistencyProof, AccumulatorProof, AccumulatorRangeProof, MerkleTreeInternalNode,
};
use mirai_annotations::*;
use std::marker::PhantomData;
/// Defines the interface between `MerkleAccumulator` and underlying storage.
pub trait HashReader {
/// Return `HashValue` carried by the node at `Position`.
fn get(&self, position: Position) -> Result<HashValue>;
}
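// --- Illustrative sketch (not part of the original file) ---
// A `HashReader` backed by an in-memory map, roughly what a storage layer would provide.
// It assumes `Position` implements `Hash + Eq` (stated here as an assumption about the
// derived impls in `diem_types`). With such a reader, appending goes through
// `MerkleAccumulator::<InMemoryHashReader, H>::append` for some `H: CryptoHasher`.
#[allow(dead_code)]
struct InMemoryHashReader {
    nodes: std::collections::HashMap<Position, HashValue>,
}

impl HashReader for InMemoryHashReader {
    fn get(&self, position: Position) -> Result<HashValue> {
        self.nodes
            .get(&position)
            .cloned()
            .ok_or_else(|| format_err!("node at {:?} not found", position))
    }
}

// Append new leaves and hand back the new root; the frozen nodes are persisted so that
// later reads (proofs, further appends) can find them through the reader.
#[allow(dead_code)]
fn append_and_persist<H: CryptoHasher>(
    reader: &mut InMemoryHashReader,
    num_existing_leaves: LeafCount,
    new_leaves: &[HashValue],
) -> Result<HashValue> {
    let (root_hash, frozen_nodes) = MerkleAccumulator::<InMemoryHashReader, H>::append(
        reader,
        num_existing_leaves,
        new_leaves,
    )?;
    reader.nodes.extend(frozen_nodes);
    Ok(root_hash)
}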
/// A `Node` in a `MerkleAccumulator` tree is a `HashValue` at a `Position`
type Node = (Position, HashValue);
/// This is where the Merkle Accumulator algorithms live.
pub struct MerkleAccumulator<R, H> {
reader: PhantomData<R>,
hasher: PhantomData<H>,
}
impl<R, H> MerkleAccumulator<R, H>
where
R: HashReader,
H: CryptoHasher,
{
/// Given an existing Merkle Accumulator (represented by `num_existing_leaves` and a `reader`
/// that is able to fetch all existing frozen nodes), and a list of leaves to be appended,
/// returns the result root hash and new nodes to be frozen.
pub fn append(
reader: &R,
num_existing_leaves: LeafCount,
new_leaves: &[HashValue],
) -> Result<(HashValue, Vec<Node>)> {
MerkleAccumulatorView::<R, H>::new(reader, num_existing_leaves).append(new_leaves)
}
/// Get proof of inclusion of the leaf at `leaf_index` in this Merkle Accumulator of
/// `num_leaves` leaves in total. Siblings are read via `reader` (or generated dynamically
/// if they are non-frozen).
///
/// See [`diem_types::proof::AccumulatorProof`] for proof format.
pub fn get_proof(
reader: &R,
num_leaves: LeafCount,
leaf_index: u64,
) -> Result<AccumulatorProof<H>> {
MerkleAccumulatorView::<R, H>::new(reader, num_leaves).get_proof(leaf_index)
}
/// Gets a proof that shows the full accumulator is consistent with a smaller accumulator.
///
/// See [`diem_types::proof::AccumulatorConsistencyProof`] for proof format.
pub fn get_consistency_proof(
reader: &R,
full_acc_leaves: LeafCount,
sub_acc_leaves: LeafCount,
) -> Result<AccumulatorConsistencyProof> {
MerkleAccumulatorView::<R, H>::new(reader, full_acc_leaves)
.get_consistency_proof(sub_acc_leaves)
}
/// Gets a proof that shows a range of leaves are part of the accumulator.
///
/// See [`diem_types::proof::AccumulatorRangeProof`] for proof format.
pub fn get_range_proof(
reader: &R,
full_acc_leaves: LeafCount,
first_leaf_index: Option<u64>,
num_leaves: LeafCount,
) -> Result<AccumulatorRangeProof<H>> {
MerkleAccumulatorView::<R, H>::new(reader, full_acc_leaves)
.get_range_proof(first_leaf_index, num_leaves)
}
/// See `get_range_proof`. This is the version of it that returns `Position`s only.
pub fn get_range_proof_positions(
reader: &R,
full_acc_leaves: LeafCount,
first_leaf_index: Option<u64>,
num_leaves: LeafCount,
) -> Result<(Vec<Position>, Vec<Position>)> {
MerkleAccumulatorView::<R, H>::new(reader, full_acc_leaves)
.get_range_proof_positions(first_leaf_index, num_leaves)
}
/// From left to right, gets frozen subtree root hashes of the accumulator. For example, if the
/// accumulator has 5 leaves, `x` and `e` are returned.
/// ```text
/// root
/// / \
/// / \
/// / \
/// x o
/// / \ / \
/// / \ / \
/// o o o placeholder
/// / \ / \ / \
/// a b c d e placeholder
/// ```
pub fn get_frozen_subtree_hashes(reader: &R, num_leaves: LeafCount) -> Result<Vec<HashValue>> {
MerkleAccumulatorView::<R, H>::new(reader, num_leaves).get_frozen_subtree_hashes()
}
/// Get root hash at a specific version (hence num_leaves).
pub fn get_root_hash(reader: &R, num_leaves: LeafCount) -> Result<HashValue> {
MerkleAccumulatorView::<R, H>::new(reader, num_leaves).get_root_hash()
}
}
/// Actual implementation of Merkle Accumulator algorithms, which carries the `reader` and
/// `num_leaves` on an instance for convenience
struct MerkleAccumulatorView<'a, R, H> {
reader: &'a R,
num_leaves: LeafCount,
hasher: PhantomData<H>,
}
impl<'a, R, H> MerkleAccumulatorView<'a, R, H>
where | Self {
reader,
num_leaves,
hasher: PhantomData,
}
}
/// implementation for pub interface `MerkleAccumulator::append`
fn append(&self, new_leaves: &[HashValue]) -> Result<(HashValue, Vec<Node>)> {
// Deal with the case where new_leaves is empty
if new_leaves.is_empty() {
if self.num_leaves == 0 {
return Ok((*ACCUMULATOR_PLACEHOLDER_HASH, Vec::new()));
} else {
let root_hash = self.get_hash(Position::root_from_leaf_count(self.num_leaves))?;
return Ok((root_hash, Vec::new()));
}
}
let num_new_leaves = new_leaves.len();
let last_new_leaf_count = self.num_leaves + num_new_leaves as LeafCount;
let root_level = Position::root_level_from_leaf_count(last_new_leaf_count);
let mut to_freeze = Vec::with_capacity(Self::max_to_freeze(num_new_leaves, root_level));
// Iterate over the new leaves, adding them to to_freeze and then adding any frozen parents
// when right children are encountered. This has the effect of creating frozen nodes in
// perfect post-order, which can be used as a strictly increasing append only index for
// the underlying storage.
//
// We will track newly created left siblings while iterating so we can pair them with their
// right sibling, if and when it becomes frozen. If the frozen left sibling is not created
// in this iteration, it must already exist in storage.
let mut left_siblings: Vec<(_, _)> = Vec::new();
for (leaf_offset, leaf) in new_leaves.iter().enumerate() {
let leaf_pos = Position::from_leaf_index(self.num_leaves + leaf_offset as LeafCount);
let mut hash = *leaf;
to_freeze.push((leaf_pos, hash));
let mut pos = leaf_pos;
while pos.is_right_child() {
let sibling = pos.sibling();
hash = match left_siblings.pop() {
Some((x, left_hash)) => {
assert_eq!(x, sibling);
Self::hash_internal_node(left_hash, hash)
}
None => Self::hash_internal_node(self.reader.get(sibling)?, hash),
};
pos = pos.parent();
to_freeze.push((pos, hash));
}
// The node remaining must be a left child, possibly the root of a complete binary tree.
left_siblings.push((pos, hash));
}
// Now reconstruct the final root hash by walking up to root level and adding
// placeholder hash nodes as needed on the right, and left siblings that have either
// been newly created or read from storage.
let (mut pos, mut hash) = left_siblings.pop().expect("Must have at least one node");
for _ in pos.level()..root_level as u32 {
hash = if pos.is_left_child() {
Self::hash_internal_node(hash, *ACCUMULATOR_PLACEHOLDER_HASH)
} else {
let sibling = pos.sibling();
match left_siblings.pop() {
Some((x, left_hash)) => {
assert_eq!(x, sibling);
Self::hash_internal_node(left_hash, hash)
}
None => Self::hash_internal_node(self.reader.get(sibling)?, hash),
}
};
pos = pos.parent();
}
assert!(left_siblings.is_empty());
Ok((hash, to_freeze))
}
/// upper bound of num of frozen nodes:
/// new leaves and resulting frozen internal nodes forming a complete binary subtree
/// num_new_leaves * 2 - 1 < num_new_leaves * 2
/// and the full route from root of that subtree to the accumulator root turns frozen
/// height - (log2(num_new_leaves) + 1) < height - 1 = root_level
fn max_to_freeze(num_new_leaves: usize, root_level: u32) -> usize {
precondition!(root_level as usize <= MAX_ACCUMULATOR_PROOF_DEPTH);
precondition!(num_new_leaves < (usize::max_value() / 2));
precondition!(num_new_leaves * 2 <= usize::max_value() - root_level as usize);
num_new_leaves * 2 + root_level as usize
}
fn hash_internal_node(left: HashValue, right: HashValue) -> HashValue {
MerkleTreeInternalNode::<H>::new(left, right).hash()
}
fn rightmost_leaf_index(&self) -> u64 {
(self.num_leaves - 1) as u64
}
fn get_hash(&self, position: Position) -> Result<HashValue> {
let idx = self.rightmost_leaf_index();
if position.is_placeholder(idx) {
Ok(*ACCUMULATOR_PLACEHOLDER_HASH)
} else if position.is_freezable(idx) {
self.reader.get(position)
} else {
// non-frozen non-placeholder node
Ok(Self::hash_internal_node(
self.get_hash(position.left_child())?,
self.get_hash(position.right_child())?,
))
}
}
fn get_hashes(&self, positions: &[Position]) -> Result<Vec<HashValue>> {
positions.iter().map(|p| self.get_hash(*p)).collect()
}
fn get_root_hash(&self) -> Result<HashValue> {
self.get_hash(Position::root_from_leaf_count(self.num_leaves))
}
/// implementation for pub interface `MerkleAccumulator::get_proof`
fn get_proof(&self, leaf_index: u64) -> Result<AccumulatorProof<H>> {
ensure!(
leaf_index < self.num_leaves as u64,
"invalid leaf_index {}, num_leaves {}",
leaf_index,
self.num_leaves
);
let siblings = self.get_siblings(leaf_index, |_p| true)?;
Ok(AccumulatorProof::new(siblings))
}
/// Implementation for public interface `MerkleAccumulator::get_consistency_proof`.
fn get_consistency_proof(
&self,
sub_acc_leaves: LeafCount,
) -> Result<AccumulatorConsistencyProof> {
ensure!(
sub_acc_leaves <= self.num_leaves,
"Can't get accumulator consistency proof for a version newer than the local version. \
Local next version: {}, asked next version: {}",
self.num_leaves,
sub_acc_leaves,
);
let subtrees = FrozenSubtreeSiblingIterator::new(sub_acc_leaves, self.num_leaves)
.map(|p| self.reader.get(p))
.collect::<Result<Vec<_>>>()?;
Ok(AccumulatorConsistencyProof::new(subtrees))
}
/// Implementation for public interface `MerkleAccumulator::get_range_proof`.
fn get_range_proof(
&self,
first_leaf_index: Option<u64>,
num_leaves: LeafCount,
) -> Result<AccumulatorRangeProof<H>> {
let (left_siblings, right_siblings) =
self.get_range_proof_positions(first_leaf_index, num_leaves)?;
Ok(AccumulatorRangeProof::new(
self.get_hashes(&left_siblings)?,
self.get_hashes(&right_siblings)?,
))
}
fn get_range_proof_positions(
&self,
first_leaf_index: Option<u64>,
num_leaves: LeafCount,
) -> Result<(Vec<Position>, Vec<Position>)> {
if first_leaf_index.is_none() {
ensure!(
num_leaves == 0,
"num_leaves is not zero while first_leaf_index is None.",
);
return Ok((Vec::new(), Vec::new()));
}
let first_leaf_index = first_leaf_index.expect("first_leaf_index should not be None.");
ensure!(
num_leaves > 0,
"num_leaves is zero while first_leaf_index is not None.",
);
let last_leaf_index = first_leaf_index
.checked_add(num_leaves - 1)
.ok_or_else(|| format_err!("Requesting too many leaves."))?;
ensure!(
last_leaf_index < self.num_leaves as u64,
"Invalid last_leaf_index: {}, num_leaves: {}",
last_leaf_index,
self.num_leaves,
);
let left_siblings = self.get_sibling_positions(first_leaf_index, |p| p.is_left_child());
let right_siblings = self.get_sibling_positions(last_leaf_index, |p| p.is_right_child());
Ok((left_siblings, right_siblings))
}
fn get_siblings(
&self,
leaf_index: u64,
filter: impl Fn(Position) -> bool,
) -> Result<Vec<HashValue>> {
self.get_hashes(&self.get_sibling_positions(leaf_index, filter))
}
/// Helper function to get siblings on the path from the given leaf to the root. An additional
/// filter function can be applied to filter out certain siblings.
fn get_sibling_positions(
&self,
leaf_index: u64,
filter: impl Fn(Position) -> bool,
) -> Vec<Position> {
let root_pos = Position::root_from_leaf_count(self.num_leaves);
Position::from_leaf_index(leaf_index)
.iter_ancestor_sibling()
.take(root_pos.level() as usize)
.filter(|p| filter(*p))
.collect()
}
/// Implementation for public interface `MerkleAccumulator::get_frozen_subtree_hashes`.
fn get_frozen_subtree_hashes(&self) -> Result<Vec<HashValue>> {
FrozenSubTreeIterator::new(self.num_leaves)
.map(|p| self.reader.get(p))
.collect::<Result<Vec<_>>>()
}
}
#[cfg(test)]
mod tests; | R: HashReader,
H: CryptoHasher,
{
fn new(reader: &'a R, num_leaves: LeafCount) -> Self { | random_line_split |