file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
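Each row splits one Rust source file into a `prefix`, a held-out `middle`, and a `suffix`, and tags the held-out span with one of four `fim_type` classes that appear in the rows below (`identifier_name`, `identifier_body`, `conditional_block`, `random_line_split`). The following is a minimal sketch, assuming the rows are materialized as plain records with exactly these five fields: it shows that the original file is recovered as `prefix + middle + suffix`, and how such a record could be turned into a fill-in-the-middle prompt. The `Row` struct, the prompt layout, and the sentinel strings are illustrative assumptions, not something the dataset itself defines.

```rust
/// Minimal sketch of one row; the field names mirror the table columns,
/// but the struct itself is an assumption made for illustration.
struct Row {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    fim_type: String,
}

impl Row {
    /// The original file is the concatenation prefix + middle + suffix.
    fn reassemble(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }

    /// One common prefix-suffix-middle (PSM) prompt layout; the sentinel
    /// tokens are placeholders, not defined by this dataset.
    fn fim_prompt(&self) -> String {
        format!("<fim_prefix>{}<fim_suffix>{}<fim_middle>", self.prefix, self.suffix)
    }
}

fn main() {
    // Toy values modeled on the first row (send_msg.rs, fim_type = identifier_name),
    // where the held-out middle is the identifier `main`.
    let row = Row {
        file_name: "send_msg.rs".to_string(),
        prefix: "fn ".to_string(),
        middle: "main".to_string(),
        suffix: "() {}".to_string(),
        fim_type: "identifier_name".to_string(),
    };
    assert_eq!(row.reassemble(), "fn main() {}");
    println!("{}: {} -> {}", row.file_name, row.fim_type, row.fim_prompt());
}
```

In the first row below, for example, the `middle` cell holds the single identifier `main` and the `fim_type` is `identifier_name`; the later rows mask function bodies, conditional blocks, or randomly chosen line spans in the same way.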
send_msg.rs | //! Client Node Example.
//!
//! The node sends a message (atom) to the specified erlang node.
//!
//! # Usage Examples
//!
//! ```bash
//! $ cargo run --example send_msg -- --help
//! $ cargo run --example send_msg -- --peer foo --destination foo --cookie erlang_cookie -m hello
//! ```
extern crate clap;
extern crate eetf;
extern crate erl_dist;
extern crate fibers;
extern crate futures;
use clap::{App, Arg};
use erl_dist::channel;
use erl_dist::{EpmdClient, Handshake, Message};
use fibers::net::TcpStream;
use fibers::{Executor, InPlaceExecutor, Spawn};
use futures::future::Either;
use futures::{Future, Sink};
use std::io::{Error, ErrorKind};
use std::net::SocketAddr;
fn | () {
let matches = App::new("send_msg")
.arg(
Arg::with_name("EPMD_HOST")
.short("h")
.takes_value(true)
.default_value("127.0.0.1"),
)
.arg(
Arg::with_name("EPMD_PORT")
.short("p")
.takes_value(true)
.default_value("4369"),
)
.arg(
Arg::with_name("PEER_NAME")
.long("peer")
.takes_value(true)
.default_value("foo"),
)
.arg(
Arg::with_name("COOKIE")
.long("cookie")
.takes_value(true)
.default_value("WPKYDIOSJIMJUURLRUHV"),
)
.arg(
Arg::with_name("SELF_NODE")
.long("self")
.takes_value(true)
.default_value("bar@localhost"),
)
.arg(
Arg::with_name("DESTINATION")
.short("d")
.long("destination")
.takes_value(true)
.default_value("foo"),
)
.arg(
Arg::with_name("MESSAGE")
.short("m")
.long("message")
.takes_value(true)
.default_value("hello_world"),
)
.get_matches();
let peer_name = matches.value_of("PEER_NAME").unwrap().to_string();
let self_node = matches.value_of("SELF_NODE").unwrap().to_string();
let cookie = matches.value_of("COOKIE").unwrap().to_string();
let epmd_host = matches.value_of("EPMD_HOST").unwrap();
let epmd_port = matches.value_of("EPMD_PORT").unwrap();
let epmd_addr: SocketAddr = format!("{}:{}", epmd_host, epmd_port)
.parse()
.expect("Invalid epmd address");
let dest_proc = matches.value_of("DESTINATION").unwrap().to_string();
let message = matches.value_of("MESSAGE").unwrap().to_string();
let self_node0 = self_node.to_string();
let mut executor = InPlaceExecutor::new().unwrap();
let monitor = executor.spawn_monitor(
TcpStream::connect(epmd_addr.clone())
.and_then(move |epmd_socket| {
// Gets peer node information from the EPMD
EpmdClient::new().get_node_info(epmd_socket, &peer_name)
})
.and_then(move |info| {
if let Some(addr) = info.map(|i| SocketAddr::new(epmd_addr.ip(), i.port)) {
// Executes the client side handshake
Either::A(TcpStream::connect(addr).and_then(move |socket| {
let handshake = Handshake::new(&self_node, &cookie);
handshake.connect(socket)
}))
} else {
Either::B(futures::failed(Error::new(
ErrorKind::NotFound,
"target node is not found",
)))
}
})
.and_then(move |peer| {
// Sends a message to the peer node
println!("# Connected: {}", peer.name);
println!("# Distribution Flags: {:?}", peer.flags);
let tx = channel::sender(peer.stream);
let from_pid = eetf::Pid::new(self_node0, 0, 0, 0);
let atom = eetf::Atom::from(message);
let message = Message::reg_send(from_pid, dest_proc, atom);
println!("# Send: {:?}", message);
tx.send(message)
}),
);
let _ = executor.run_fiber(monitor).unwrap().expect("Failed");
println!("# DONE");
}
| main | identifier_name |
trait-coercion-generic.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Trait<T> {
fn f(&self, x: T);
}
struct Struct {
x: int,
y: int,
}
impl Trait<&'static str> for Struct {
fn f(&self, x: &'static str) {
println!("Hi, {}!", x);
}
} | let a = Struct { x: 1, y: 2 };
let b: Box<Trait<&'static str>> = box a;
b.f("Mary");
let c: &Trait<&'static str> = &a;
c.f("Joe");
} |
pub fn main() { | random_line_split |
trait-coercion-generic.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Trait<T> {
fn f(&self, x: T);
}
struct Struct {
x: int,
y: int,
}
impl Trait<&'static str> for Struct {
fn f(&self, x: &'static str) {
println!("Hi, {}!", x);
}
}
pub fn | () {
let a = Struct { x: 1, y: 2 };
let b: Box<Trait<&'static str>> = box a;
b.f("Mary");
let c: &Trait<&'static str> = &a;
c.f("Joe");
}
| main | identifier_name |
trait-coercion-generic.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Trait<T> {
fn f(&self, x: T);
}
struct Struct {
x: int,
y: int,
}
impl Trait<&'static str> for Struct {
fn f(&self, x: &'static str) |
}
pub fn main() {
let a = Struct { x: 1, y: 2 };
let b: Box<Trait<&'static str>> = box a;
b.f("Mary");
let c: &Trait<&'static str> = &a;
c.f("Joe");
}
| {
println!("Hi, {}!", x);
} | identifier_body |
basic_block.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm;
use llvm::{BasicBlockRef};
use trans::value::{Users, Value};
use std::iter::{Filter, Map};
pub struct BasicBlock(pub BasicBlockRef);
pub type Preds<'a> = Map<'a, Value, BasicBlock, Filter<'a, Value, Users>>;
/**
* Wrapper for LLVM BasicBlockRef
*/
impl BasicBlock {
pub fn get(&self) -> BasicBlockRef |
pub fn as_value(self) -> Value {
unsafe {
Value(llvm::LLVMBasicBlockAsValue(self.get()))
}
}
pub fn pred_iter(self) -> Preds<'static> {
self.as_value().user_iter()
.filter(|user| user.is_a_terminator_inst())
.map(|user| user.get_parent().unwrap())
}
pub fn get_single_predecessor(self) -> Option<BasicBlock> {
let mut iter = self.pred_iter();
match (iter.next(), iter.next()) {
(Some(first), None) => Some(first),
_ => None
}
}
}
| {
let BasicBlock(v) = *self; v
} | identifier_body |
basic_block.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm;
use llvm::{BasicBlockRef};
use trans::value::{Users, Value};
use std::iter::{Filter, Map};
pub struct | (pub BasicBlockRef);
pub type Preds<'a> = Map<'a, Value, BasicBlock, Filter<'a, Value, Users>>;
/**
* Wrapper for LLVM BasicBlockRef
*/
impl BasicBlock {
pub fn get(&self) -> BasicBlockRef {
let BasicBlock(v) = *self; v
}
pub fn as_value(self) -> Value {
unsafe {
Value(llvm::LLVMBasicBlockAsValue(self.get()))
}
}
pub fn pred_iter(self) -> Preds<'static> {
self.as_value().user_iter()
.filter(|user| user.is_a_terminator_inst())
.map(|user| user.get_parent().unwrap())
}
pub fn get_single_predecessor(self) -> Option<BasicBlock> {
let mut iter = self.pred_iter();
match (iter.next(), iter.next()) {
(Some(first), None) => Some(first),
_ => None
}
}
}
| BasicBlock | identifier_name |
basic_block.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm;
use llvm::{BasicBlockRef};
use trans::value::{Users, Value};
use std::iter::{Filter, Map};
pub struct BasicBlock(pub BasicBlockRef);
pub type Preds<'a> = Map<'a, Value, BasicBlock, Filter<'a, Value, Users>>;
/**
* Wrapper for LLVM BasicBlockRef
*/
impl BasicBlock {
pub fn get(&self) -> BasicBlockRef {
let BasicBlock(v) = *self; v
}
pub fn as_value(self) -> Value {
unsafe {
Value(llvm::LLVMBasicBlockAsValue(self.get()))
}
}
pub fn pred_iter(self) -> Preds<'static> {
self.as_value().user_iter()
.filter(|user| user.is_a_terminator_inst())
.map(|user| user.get_parent().unwrap())
}
pub fn get_single_predecessor(self) -> Option<BasicBlock> { | }
}
} | let mut iter = self.pred_iter();
match (iter.next(), iter.next()) {
(Some(first), None) => Some(first),
_ => None | random_line_split |
array.rs | //! Generic-length array strategy.
// Adapted from proptest's array code
// Copyright 2017 Jason Lingle
use core::{marker::PhantomData, mem::MaybeUninit};
use proptest::{
strategy::{NewTree, Strategy, ValueTree},
test_runner::TestRunner,
};
#[must_use = "strategies do nothing unless used"]
#[derive(Clone, Copy, Debug)]
pub struct UniformArrayStrategy<S, T> {
strategy: S,
_marker: PhantomData<T>,
}
| Self {
strategy,
_marker: PhantomData,
}
}
}
pub struct ArrayValueTree<T> {
tree: T,
shrinker: usize,
last_shrinker: Option<usize>,
}
impl<T, S, const LANES: usize> Strategy for UniformArrayStrategy<S, [T; LANES]>
where
T: core::fmt::Debug,
S: Strategy<Value = T>,
{
type Tree = ArrayValueTree<[S::Tree; LANES]>;
type Value = [T; LANES];
fn new_tree(&self, runner: &mut TestRunner) -> NewTree<Self> {
let tree: [S::Tree; LANES] = unsafe {
let mut tree: [MaybeUninit<S::Tree>; LANES] = MaybeUninit::uninit().assume_init();
for t in tree.iter_mut() {
*t = MaybeUninit::new(self.strategy.new_tree(runner)?)
}
core::mem::transmute_copy(&tree)
};
Ok(ArrayValueTree {
tree,
shrinker: 0,
last_shrinker: None,
})
}
}
impl<T: ValueTree, const LANES: usize> ValueTree for ArrayValueTree<[T; LANES]> {
type Value = [T::Value; LANES];
fn current(&self) -> Self::Value {
unsafe {
let mut value: [MaybeUninit<T::Value>; LANES] = MaybeUninit::uninit().assume_init();
for (tree_elem, value_elem) in self.tree.iter().zip(value.iter_mut()) {
*value_elem = MaybeUninit::new(tree_elem.current());
}
core::mem::transmute_copy(&value)
}
}
fn simplify(&mut self) -> bool {
while self.shrinker < LANES {
if self.tree[self.shrinker].simplify() {
self.last_shrinker = Some(self.shrinker);
return true;
} else {
self.shrinker += 1;
}
}
false
}
fn complicate(&mut self) -> bool {
if let Some(shrinker) = self.last_shrinker {
self.shrinker = shrinker;
if self.tree[shrinker].complicate() {
true
} else {
self.last_shrinker = None;
false
}
} else {
false
}
}
} | impl<S, T> UniformArrayStrategy<S, T> {
pub const fn new(strategy: S) -> Self { | random_line_split |
array.rs | //! Generic-length array strategy.
// Adapted from proptest's array code
// Copyright 2017 Jason Lingle
use core::{marker::PhantomData, mem::MaybeUninit};
use proptest::{
strategy::{NewTree, Strategy, ValueTree},
test_runner::TestRunner,
};
#[must_use = "strategies do nothing unless used"]
#[derive(Clone, Copy, Debug)]
pub struct UniformArrayStrategy<S, T> {
strategy: S,
_marker: PhantomData<T>,
}
impl<S, T> UniformArrayStrategy<S, T> {
pub const fn new(strategy: S) -> Self |
}
pub struct ArrayValueTree<T> {
tree: T,
shrinker: usize,
last_shrinker: Option<usize>,
}
impl<T, S, const LANES: usize> Strategy for UniformArrayStrategy<S, [T; LANES]>
where
T: core::fmt::Debug,
S: Strategy<Value = T>,
{
type Tree = ArrayValueTree<[S::Tree; LANES]>;
type Value = [T; LANES];
fn new_tree(&self, runner: &mut TestRunner) -> NewTree<Self> {
let tree: [S::Tree; LANES] = unsafe {
let mut tree: [MaybeUninit<S::Tree>; LANES] = MaybeUninit::uninit().assume_init();
for t in tree.iter_mut() {
*t = MaybeUninit::new(self.strategy.new_tree(runner)?)
}
core::mem::transmute_copy(&tree)
};
Ok(ArrayValueTree {
tree,
shrinker: 0,
last_shrinker: None,
})
}
}
impl<T: ValueTree, const LANES: usize> ValueTree for ArrayValueTree<[T; LANES]> {
type Value = [T::Value; LANES];
fn current(&self) -> Self::Value {
unsafe {
let mut value: [MaybeUninit<T::Value>; LANES] = MaybeUninit::uninit().assume_init();
for (tree_elem, value_elem) in self.tree.iter().zip(value.iter_mut()) {
*value_elem = MaybeUninit::new(tree_elem.current());
}
core::mem::transmute_copy(&value)
}
}
fn simplify(&mut self) -> bool {
while self.shrinker < LANES {
if self.tree[self.shrinker].simplify() {
self.last_shrinker = Some(self.shrinker);
return true;
} else {
self.shrinker += 1;
}
}
false
}
fn complicate(&mut self) -> bool {
if let Some(shrinker) = self.last_shrinker {
self.shrinker = shrinker;
if self.tree[shrinker].complicate() {
true
} else {
self.last_shrinker = None;
false
}
} else {
false
}
}
}
| {
Self {
strategy,
_marker: PhantomData,
}
} | identifier_body |
array.rs | //! Generic-length array strategy.
// Adapted from proptest's array code
// Copyright 2017 Jason Lingle
use core::{marker::PhantomData, mem::MaybeUninit};
use proptest::{
strategy::{NewTree, Strategy, ValueTree},
test_runner::TestRunner,
};
#[must_use = "strategies do nothing unless used"]
#[derive(Clone, Copy, Debug)]
pub struct | <S, T> {
strategy: S,
_marker: PhantomData<T>,
}
impl<S, T> UniformArrayStrategy<S, T> {
pub const fn new(strategy: S) -> Self {
Self {
strategy,
_marker: PhantomData,
}
}
}
pub struct ArrayValueTree<T> {
tree: T,
shrinker: usize,
last_shrinker: Option<usize>,
}
impl<T, S, const LANES: usize> Strategy for UniformArrayStrategy<S, [T; LANES]>
where
T: core::fmt::Debug,
S: Strategy<Value = T>,
{
type Tree = ArrayValueTree<[S::Tree; LANES]>;
type Value = [T; LANES];
fn new_tree(&self, runner: &mut TestRunner) -> NewTree<Self> {
let tree: [S::Tree; LANES] = unsafe {
let mut tree: [MaybeUninit<S::Tree>; LANES] = MaybeUninit::uninit().assume_init();
for t in tree.iter_mut() {
*t = MaybeUninit::new(self.strategy.new_tree(runner)?)
}
core::mem::transmute_copy(&tree)
};
Ok(ArrayValueTree {
tree,
shrinker: 0,
last_shrinker: None,
})
}
}
impl<T: ValueTree, const LANES: usize> ValueTree for ArrayValueTree<[T; LANES]> {
type Value = [T::Value; LANES];
fn current(&self) -> Self::Value {
unsafe {
let mut value: [MaybeUninit<T::Value>; LANES] = MaybeUninit::uninit().assume_init();
for (tree_elem, value_elem) in self.tree.iter().zip(value.iter_mut()) {
*value_elem = MaybeUninit::new(tree_elem.current());
}
core::mem::transmute_copy(&value)
}
}
fn simplify(&mut self) -> bool {
while self.shrinker < LANES {
if self.tree[self.shrinker].simplify() {
self.last_shrinker = Some(self.shrinker);
return true;
} else {
self.shrinker += 1;
}
}
false
}
fn complicate(&mut self) -> bool {
if let Some(shrinker) = self.last_shrinker {
self.shrinker = shrinker;
if self.tree[shrinker].complicate() {
true
} else {
self.last_shrinker = None;
false
}
} else {
false
}
}
}
| UniformArrayStrategy | identifier_name |
array.rs | //! Generic-length array strategy.
// Adapted from proptest's array code
// Copyright 2017 Jason Lingle
use core::{marker::PhantomData, mem::MaybeUninit};
use proptest::{
strategy::{NewTree, Strategy, ValueTree},
test_runner::TestRunner,
};
#[must_use = "strategies do nothing unless used"]
#[derive(Clone, Copy, Debug)]
pub struct UniformArrayStrategy<S, T> {
strategy: S,
_marker: PhantomData<T>,
}
impl<S, T> UniformArrayStrategy<S, T> {
pub const fn new(strategy: S) -> Self {
Self {
strategy,
_marker: PhantomData,
}
}
}
pub struct ArrayValueTree<T> {
tree: T,
shrinker: usize,
last_shrinker: Option<usize>,
}
impl<T, S, const LANES: usize> Strategy for UniformArrayStrategy<S, [T; LANES]>
where
T: core::fmt::Debug,
S: Strategy<Value = T>,
{
type Tree = ArrayValueTree<[S::Tree; LANES]>;
type Value = [T; LANES];
fn new_tree(&self, runner: &mut TestRunner) -> NewTree<Self> {
let tree: [S::Tree; LANES] = unsafe {
let mut tree: [MaybeUninit<S::Tree>; LANES] = MaybeUninit::uninit().assume_init();
for t in tree.iter_mut() {
*t = MaybeUninit::new(self.strategy.new_tree(runner)?)
}
core::mem::transmute_copy(&tree)
};
Ok(ArrayValueTree {
tree,
shrinker: 0,
last_shrinker: None,
})
}
}
impl<T: ValueTree, const LANES: usize> ValueTree for ArrayValueTree<[T; LANES]> {
type Value = [T::Value; LANES];
fn current(&self) -> Self::Value {
unsafe {
let mut value: [MaybeUninit<T::Value>; LANES] = MaybeUninit::uninit().assume_init();
for (tree_elem, value_elem) in self.tree.iter().zip(value.iter_mut()) {
*value_elem = MaybeUninit::new(tree_elem.current());
}
core::mem::transmute_copy(&value)
}
}
fn simplify(&mut self) -> bool {
while self.shrinker < LANES {
if self.tree[self.shrinker].simplify() {
self.last_shrinker = Some(self.shrinker);
return true;
} else {
self.shrinker += 1;
}
}
false
}
fn complicate(&mut self) -> bool {
if let Some(shrinker) = self.last_shrinker | else {
false
}
}
}
| {
self.shrinker = shrinker;
if self.tree[shrinker].complicate() {
true
} else {
self.last_shrinker = None;
false
}
} | conditional_block |
set_pusher.rs | //! [POST /_matrix/client/r0/pushers/set](https://matrix.org/docs/spec/client_server/r0.6.0#post-matrix-client-r0-pushers-set)
use ruma_api::ruma_api;
use super::Pusher;
ruma_api! {
metadata {
description: "This endpoint allows the creation, modification and deletion of pushers for this user ID.",
method: POST,
name: "set_pusher",
path: "/_matrix/client/r0/pushers/set", |
request {
/// The pusher to configure
#[serde(flatten)]
pub pusher: Pusher,
/// Controls if another pusher with the same pushkey and app id should be created.
/// See the spec for details.
#[serde(default)]
pub append: bool
}
response {}
error: crate::Error
} | rate_limited: true,
requires_authentication: true,
} | random_line_split |
lib.rs | //! rimd is a set of utilities to deal with midi messages and standard
//! midi files (SMF). It handles both standard midi messages and the meta
//! messages that are found in SMFs.
//!
//! rimd is fairly low level, and messages are stored and accessed in
//! their underlying format (i.e. a vector of u8s). There are some
//! utility methods for accessing the various pieces of a message, and
//! for constructing new messages.
//!
//! For example usage see the bin directory.
//!
//! For a description of the underlying format of midi messages see:<br/>
//! http://www.midi.org/techspecs/midimessages.php<br/>
//! For a description of the underlying format of meta messages see:<br/>
//! http://cs.fit.edu/~ryan/cse4051/projects/midi/midi.html#meta_event
extern crate byteorder;
extern crate encoding;
extern crate num_traits;
#[macro_use] extern crate num_derive;
use std::error;
use std::convert::From;
use std::fs::File;
use std::io::{Error,Read};
use std::path::Path;
use std::fmt;
use std::string::FromUtf8Error;
pub use midi:: {
Status,
MidiError,
MidiMessage,
STATUS_MASK,
CHANNEL_MASK,
make_status,
};
pub use meta:: {
MetaCommand,
MetaError,
MetaEvent,
};
pub use builder:: {
SMFBuilder,
AbsoluteEvent,
};
use reader:: {
SMFReader,
};
pub use writer:: {
SMFWriter,
};
pub use util:: {
note_num_to_name,
};
mod builder;
mod midi;
mod meta;
mod reader;
mod writer;
mod util;
/// Format of the SMF
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum SMFFormat {
/// single track file format
Single = 0,
/// multiple track file format
MultiTrack = 1,
/// multiple song file format (i.e., a series of single type files)
MultiSong = 2,
}
impl fmt::Display for SMFFormat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}",match *self {
SMFFormat::Single => "single track",
SMFFormat::MultiTrack => "multiple track",
SMFFormat::MultiSong => "multiple song",
})
}
}
/// An event can be either a midi message or a meta event
#[derive(Debug,Clone)]
pub enum | {
Midi(MidiMessage),
Meta(MetaEvent),
}
impl fmt::Display for Event {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Event::Midi(ref m) => { write!(f, "{}", m) }
Event::Meta(ref m) => { write!(f, "{}", m) }
}
}
}
impl Event {
/// Return the number of bytes this event uses.
pub fn len(&self) -> usize {
match *self {
Event::Midi(ref m) => { m.data.len() }
Event::Meta(ref m) => {
let v = SMFWriter::vtime_to_vec(m.length);
// +1 for command byte +1 for 0xFF to indicate Meta event
v.len() + m.data.len() + 2
}
}
}
}
/// An event occuring in the track.
#[derive(Debug,Clone)]
pub struct TrackEvent {
/// A delta offset, indicating how many ticks after the previous
/// event this event occurs
pub vtime: u64,
/// The actual event
pub event: Event,
}
impl fmt::Display for TrackEvent {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "vtime: {}\t{}",self.vtime,self.event)
}
}
impl TrackEvent {
pub fn fmt_with_time_offset(&self, cur_time: u64) -> String {
format!("time: {}\t{}",(self.vtime+cur_time),self.event)
}
/// Return the number of bytes this event uses in the track,
/// including the space for the time offset.
pub fn len(&self) -> usize {
let v = SMFWriter::vtime_to_vec(self.vtime);
v.len() + self.event.len()
}
}
/// A sequence of midi/meta events
#[derive(Debug, Clone)]
pub struct Track {
/// Optional copyright notice
pub copyright: Option<String>,
/// Optional name for this track
pub name: Option<String>,
/// Vector of the events in this track
pub events: Vec<TrackEvent>
}
impl fmt::Display for Track {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Track, copyright: {}, name: {}",
match self.copyright {
Some(ref c) => &c[..],
None => "[none]"
},
match self.name {
Some(ref n) => &n[..],
None => "[none]"
})
}
}
/// An error that occured in parsing an SMF
#[derive(Debug)]
pub enum SMFError {
InvalidSMFFile(&'static str),
MidiError(MidiError),
MetaError(MetaError),
Error(Error),
}
impl From<Error> for SMFError {
fn from(err: Error) -> SMFError {
SMFError::Error(err)
}
}
impl From<MidiError> for SMFError {
fn from(err: MidiError) -> SMFError {
SMFError::MidiError(err)
}
}
impl From<MetaError> for SMFError {
fn from(err: MetaError) -> SMFError {
SMFError::MetaError(err)
}
}
impl From<FromUtf8Error> for SMFError {
fn from(_: FromUtf8Error) -> SMFError {
SMFError::InvalidSMFFile("Invalid UTF8 data in file")
}
}
impl error::Error for SMFError {
fn description(&self) -> &str {
match *self {
SMFError::InvalidSMFFile(_) => "The SMF file was invalid",
SMFError::Error(ref e) => e.description(),
SMFError::MidiError(ref m) => m.description(),
SMFError::MetaError(ref m) => m.description(),
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
SMFError::MidiError(ref m) => Some(m as &error::Error),
SMFError::MetaError(ref m) => Some(m as &error::Error),
SMFError::Error(ref err) => Some(err as &error::Error),
_ => None,
}
}
}
impl fmt::Display for SMFError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
SMFError::InvalidSMFFile(s) => write!(f,"SMF file is invalid: {}",s),
SMFError::MidiError(ref err) => { write!(f,"{}",err) },
SMFError::MetaError(ref err) => { write!(f,"{}",err) },
SMFError::Error(ref err) => { write!(f,"{}",err) },
}
}
}
/// A standard midi file
#[derive(Debug, Clone)]
pub struct SMF {
/// The format of the SMF
pub format: SMFFormat,
/// Vector holding each track in this SMF
pub tracks: Vec<Track>,
/// The unit of time for delta timing. If the value is positive,
/// then it represents the units per beat. For example, +96 would
/// mean 96 ticks per beat. If the value is negative, delta times
/// are in SMPTE compatible units.
pub division: i16,
}
impl SMF {
/// Read an SMF file at the given path
pub fn from_file(path: &Path) -> Result<SMF,SMFError> {
let mut file = try!(File::open(path));
SMFReader::read_smf(&mut file)
}
/// Read an SMF from the given reader
pub fn from_reader(reader: &mut Read) -> Result<SMF,SMFError> {
SMFReader::read_smf(reader)
}
/// Convert a type 0 (single track) to type 1 (multi track) SMF
/// Does nothing if the SMF is already in type 1
/// Returns None if the SMF is in type 2 (multi song)
pub fn to_multi_track(&self) -> Option<SMF> {
match self.format {
SMFFormat::MultiTrack => Some(self.clone()),
SMFFormat::MultiSong => None,
SMFFormat::Single => {
let mut tracks = vec![Vec::<TrackEvent>::new(); 1 + 16]; // meta track and 16 for the 16 channels
let mut time = 0;
for event in &self.tracks[0].events {
time += event.vtime;
match event.event {
Event::Midi(ref msg) if msg.channel().is_some() => {
let events = &mut tracks[msg.channel().unwrap() as usize + 1];
events.push(TrackEvent {vtime: time, event: event.event.clone()});
}
/*MidiEvent::Meta(ref msg) if [
MetaCommand::MIDIChannelPrefixAssignment,
MetaCommand::MIDIPortPrefixAssignment,
MetaCommand::SequenceOrTrackName,
MetaCommand::InstrumentName,
].contains(&msg.command) => {
println!("prefix: {:?}", event);
}*/
_ => {
tracks[0].push(TrackEvent {vtime: time, event: event.event.clone()});
}
}
}
let mut out = SMF {
format: SMFFormat::MultiTrack,
tracks: vec![],
division: self.division,
};
for events in &mut tracks {
if events.len() > 0 {
let mut time = 0;
for event in events.iter_mut() {
let tmp = event.vtime;
event.vtime -= time;
time = tmp;
}
out.tracks.push(Track {events: events.clone(), copyright: None, name: None});
}
}
out.tracks[0].name = self.tracks[0].name.clone();
out.tracks[0].copyright = self.tracks[0].copyright.clone();
Some(out)
}
}
}
}
| Event | identifier_name |
lib.rs | //! rimd is a set of utilities to deal with midi messages and standard
//! midi files (SMF). It handles both standard midi messages and the meta
//! messages that are found in SMFs.
//!
//! rimd is fairly low level, and messages are stored and accessed in
//! their underlying format (i.e. a vector of u8s). There are some
//! utility methods for accessing the various pieces of a message, and
//! for constructing new messages.
//!
//! For example usage see the bin directory.
//!
//! For a description of the underlying format of midi messages see:<br/>
//! http://www.midi.org/techspecs/midimessages.php<br/>
//! For a description of the underlying format of meta messages see:<br/>
//! http://cs.fit.edu/~ryan/cse4051/projects/midi/midi.html#meta_event
extern crate byteorder;
extern crate encoding;
extern crate num_traits;
#[macro_use] extern crate num_derive;
use std::error;
use std::convert::From;
use std::fs::File;
use std::io::{Error,Read};
use std::path::Path;
use std::fmt;
use std::string::FromUtf8Error;
pub use midi:: {
Status,
MidiError,
MidiMessage,
STATUS_MASK,
CHANNEL_MASK,
make_status,
};
pub use meta:: {
MetaCommand,
MetaError,
MetaEvent,
};
pub use builder:: {
SMFBuilder,
AbsoluteEvent,
};
use reader:: {
SMFReader,
};
pub use writer:: {
SMFWriter,
};
pub use util:: {
note_num_to_name,
};
mod builder;
mod midi;
mod meta;
mod reader;
mod writer;
mod util;
/// Format of the SMF
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum SMFFormat {
/// single track file format
Single = 0,
/// multiple track file format
MultiTrack = 1,
/// multiple song file format (i.e., a series of single type files)
MultiSong = 2,
}
impl fmt::Display for SMFFormat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}",match *self {
SMFFormat::Single => "single track",
SMFFormat::MultiTrack => "multiple track",
SMFFormat::MultiSong => "multiple song",
})
}
}
/// An event can be either a midi message or a meta event
#[derive(Debug,Clone)]
pub enum Event {
Midi(MidiMessage),
Meta(MetaEvent),
}
impl fmt::Display for Event {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Event::Midi(ref m) => { write!(f, "{}", m) }
Event::Meta(ref m) => { write!(f, "{}", m) }
}
}
}
impl Event {
/// Return the number of bytes this event uses.
pub fn len(&self) -> usize {
match *self {
Event::Midi(ref m) => { m.data.len() }
Event::Meta(ref m) => {
let v = SMFWriter::vtime_to_vec(m.length);
// +1 for command byte +1 for 0xFF to indicate Meta event
v.len() + m.data.len() + 2
}
}
}
}
/// An event occuring in the track.
#[derive(Debug,Clone)]
pub struct TrackEvent {
/// A delta offset, indicating how many ticks after the previous
/// event this event occurs
pub vtime: u64,
/// The actual event
pub event: Event,
}
impl fmt::Display for TrackEvent {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "vtime: {}\t{}",self.vtime,self.event)
}
}
impl TrackEvent {
pub fn fmt_with_time_offset(&self, cur_time: u64) -> String {
format!("time: {}\t{}",(self.vtime+cur_time),self.event)
}
/// Return the number of bytes this event uses in the track,
/// including the space for the time offset.
pub fn len(&self) -> usize {
let v = SMFWriter::vtime_to_vec(self.vtime);
v.len() + self.event.len()
}
}
/// A sequence of midi/meta events
#[derive(Debug, Clone)]
pub struct Track {
/// Optional copyright notice
pub copyright: Option<String>,
/// Optional name for this track
pub name: Option<String>,
/// Vector of the events in this track
pub events: Vec<TrackEvent>
}
impl fmt::Display for Track {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Track, copyright: {}, name: {}",
match self.copyright {
Some(ref c) => &c[..],
None => "[none]"
},
match self.name {
Some(ref n) => &n[..],
None => "[none]"
})
}
}
/// An error that occured in parsing an SMF
#[derive(Debug)]
pub enum SMFError {
InvalidSMFFile(&'static str),
MidiError(MidiError),
MetaError(MetaError),
Error(Error),
}
impl From<Error> for SMFError {
fn from(err: Error) -> SMFError {
SMFError::Error(err)
}
}
impl From<MidiError> for SMFError {
fn from(err: MidiError) -> SMFError {
SMFError::MidiError(err)
}
} | fn from(err: MetaError) -> SMFError {
SMFError::MetaError(err)
}
}
impl From<FromUtf8Error> for SMFError {
fn from(_: FromUtf8Error) -> SMFError {
SMFError::InvalidSMFFile("Invalid UTF8 data in file")
}
}
impl error::Error for SMFError {
fn description(&self) -> &str {
match *self {
SMFError::InvalidSMFFile(_) => "The SMF file was invalid",
SMFError::Error(ref e) => e.description(),
SMFError::MidiError(ref m) => m.description(),
SMFError::MetaError(ref m) => m.description(),
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
SMFError::MidiError(ref m) => Some(m as &error::Error),
SMFError::MetaError(ref m) => Some(m as &error::Error),
SMFError::Error(ref err) => Some(err as &error::Error),
_ => None,
}
}
}
impl fmt::Display for SMFError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
SMFError::InvalidSMFFile(s) => write!(f,"SMF file is invalid: {}",s),
SMFError::MidiError(ref err) => { write!(f,"{}",err) },
SMFError::MetaError(ref err) => { write!(f,"{}",err) },
SMFError::Error(ref err) => { write!(f,"{}",err) },
}
}
}
/// A standard midi file
#[derive(Debug, Clone)]
pub struct SMF {
/// The format of the SMF
pub format: SMFFormat,
/// Vector holding each track in this SMF
pub tracks: Vec<Track>,
/// The unit of time for delta timing. If the value is positive,
/// then it represents the units per beat. For example, +96 would
/// mean 96 ticks per beat. If the value is negative, delta times
/// are in SMPTE compatible units.
pub division: i16,
}
impl SMF {
/// Read an SMF file at the given path
pub fn from_file(path: &Path) -> Result<SMF,SMFError> {
let mut file = try!(File::open(path));
SMFReader::read_smf(&mut file)
}
/// Read an SMF from the given reader
pub fn from_reader(reader: &mut Read) -> Result<SMF,SMFError> {
SMFReader::read_smf(reader)
}
/// Convert a type 0 (single track) to type 1 (multi track) SMF
/// Does nothing if the SMF is already in type 1
/// Returns None if the SMF is in type 2 (multi song)
pub fn to_multi_track(&self) -> Option<SMF> {
match self.format {
SMFFormat::MultiTrack => Some(self.clone()),
SMFFormat::MultiSong => None,
SMFFormat::Single => {
let mut tracks = vec![Vec::<TrackEvent>::new(); 1 + 16]; // meta track and 16 for the 16 channels
let mut time = 0;
for event in &self.tracks[0].events {
time += event.vtime;
match event.event {
Event::Midi(ref msg) if msg.channel().is_some() => {
let events = &mut tracks[msg.channel().unwrap() as usize + 1];
events.push(TrackEvent {vtime: time, event: event.event.clone()});
}
/*MidiEvent::Meta(ref msg) if [
MetaCommand::MIDIChannelPrefixAssignment,
MetaCommand::MIDIPortPrefixAssignment,
MetaCommand::SequenceOrTrackName,
MetaCommand::InstrumentName,
].contains(&msg.command) => {
println!("prefix: {:?}", event);
}*/
_ => {
tracks[0].push(TrackEvent {vtime: time, event: event.event.clone()});
}
}
}
let mut out = SMF {
format: SMFFormat::MultiTrack,
tracks: vec![],
division: self.division,
};
for events in &mut tracks {
if events.len() > 0 {
let mut time = 0;
for event in events.iter_mut() {
let tmp = event.vtime;
event.vtime -= time;
time = tmp;
}
out.tracks.push(Track {events: events.clone(), copyright: None, name: None});
}
}
out.tracks[0].name = self.tracks[0].name.clone();
out.tracks[0].copyright = self.tracks[0].copyright.clone();
Some(out)
}
}
}
} |
impl From<MetaError> for SMFError { | random_line_split |
lib.rs | //! rimd is a set of utilities to deal with midi messages and standard
//! midi files (SMF). It handles both standard midi messages and the meta
//! messages that are found in SMFs.
//!
//! rimd is fairly low level, and messages are stored and accessed in
//! their underlying format (i.e. a vector of u8s). There are some
//! utility methods for accessing the various pieces of a message, and
//! for constructing new messages.
//!
//! For example usage see the bin directory.
//!
//! For a description of the underlying format of midi messages see:<br/>
//! http://www.midi.org/techspecs/midimessages.php<br/>
//! For a description of the underlying format of meta messages see:<br/>
//! http://cs.fit.edu/~ryan/cse4051/projects/midi/midi.html#meta_event
extern crate byteorder;
extern crate encoding;
extern crate num_traits;
#[macro_use] extern crate num_derive;
use std::error;
use std::convert::From;
use std::fs::File;
use std::io::{Error,Read};
use std::path::Path;
use std::fmt;
use std::string::FromUtf8Error;
pub use midi:: {
Status,
MidiError,
MidiMessage,
STATUS_MASK,
CHANNEL_MASK,
make_status,
};
pub use meta:: {
MetaCommand,
MetaError,
MetaEvent,
};
pub use builder:: {
SMFBuilder,
AbsoluteEvent,
};
use reader:: {
SMFReader,
};
pub use writer:: {
SMFWriter,
};
pub use util:: {
note_num_to_name,
};
mod builder;
mod midi;
mod meta;
mod reader;
mod writer;
mod util;
/// Format of the SMF
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum SMFFormat {
/// single track file format
Single = 0,
/// multiple track file format
MultiTrack = 1,
/// multiple song file format (i.e., a series of single type files)
MultiSong = 2,
}
impl fmt::Display for SMFFormat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}",match *self {
SMFFormat::Single => "single track",
SMFFormat::MultiTrack => "multiple track",
SMFFormat::MultiSong => "multiple song",
})
}
}
/// An event can be either a midi message or a meta event
#[derive(Debug,Clone)]
pub enum Event {
Midi(MidiMessage),
Meta(MetaEvent),
}
impl fmt::Display for Event {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Event::Midi(ref m) => { write!(f, "{}", m) }
Event::Meta(ref m) => { write!(f, "{}", m) }
}
}
}
impl Event {
/// Return the number of bytes this event uses.
pub fn len(&self) -> usize {
match *self {
Event::Midi(ref m) => { m.data.len() }
Event::Meta(ref m) => {
let v = SMFWriter::vtime_to_vec(m.length);
// +1 for command byte +1 for 0xFF to indicate Meta event
v.len() + m.data.len() + 2
}
}
}
}
/// An event occuring in the track.
#[derive(Debug,Clone)]
pub struct TrackEvent {
/// A delta offset, indicating how many ticks after the previous
/// event this event occurs
pub vtime: u64,
/// The actual event
pub event: Event,
}
impl fmt::Display for TrackEvent {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "vtime: {}\t{}",self.vtime,self.event)
}
}
impl TrackEvent {
pub fn fmt_with_time_offset(&self, cur_time: u64) -> String {
format!("time: {}\t{}",(self.vtime+cur_time),self.event)
}
/// Return the number of bytes this event uses in the track,
/// including the space for the time offset.
pub fn len(&self) -> usize {
let v = SMFWriter::vtime_to_vec(self.vtime);
v.len() + self.event.len()
}
}
/// A sequence of midi/meta events
#[derive(Debug, Clone)]
pub struct Track {
/// Optional copyright notice
pub copyright: Option<String>,
/// Optional name for this track
pub name: Option<String>,
/// Vector of the events in this track
pub events: Vec<TrackEvent>
}
impl fmt::Display for Track {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Track, copyright: {}, name: {}",
match self.copyright {
Some(ref c) => &c[..],
None => "[none]"
},
match self.name {
Some(ref n) => &n[..],
None => "[none]"
})
}
}
/// An error that occured in parsing an SMF
#[derive(Debug)]
pub enum SMFError {
InvalidSMFFile(&'static str),
MidiError(MidiError),
MetaError(MetaError),
Error(Error),
}
impl From<Error> for SMFError {
fn from(err: Error) -> SMFError {
SMFError::Error(err)
}
}
impl From<MidiError> for SMFError {
fn from(err: MidiError) -> SMFError {
SMFError::MidiError(err)
}
}
impl From<MetaError> for SMFError {
fn from(err: MetaError) -> SMFError |
}
impl From<FromUtf8Error> for SMFError {
fn from(_: FromUtf8Error) -> SMFError {
SMFError::InvalidSMFFile("Invalid UTF8 data in file")
}
}
impl error::Error for SMFError {
fn description(&self) -> &str {
match *self {
SMFError::InvalidSMFFile(_) => "The SMF file was invalid",
SMFError::Error(ref e) => e.description(),
SMFError::MidiError(ref m) => m.description(),
SMFError::MetaError(ref m) => m.description(),
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
SMFError::MidiError(ref m) => Some(m as &error::Error),
SMFError::MetaError(ref m) => Some(m as &error::Error),
SMFError::Error(ref err) => Some(err as &error::Error),
_ => None,
}
}
}
impl fmt::Display for SMFError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
SMFError::InvalidSMFFile(s) => write!(f,"SMF file is invalid: {}",s),
SMFError::MidiError(ref err) => { write!(f,"{}",err) },
SMFError::MetaError(ref err) => { write!(f,"{}",err) },
SMFError::Error(ref err) => { write!(f,"{}",err) },
}
}
}
/// A standard midi file
#[derive(Debug, Clone)]
pub struct SMF {
/// The format of the SMF
pub format: SMFFormat,
/// Vector holding each track in this SMF
pub tracks: Vec<Track>,
/// The unit of time for delta timing. If the value is positive,
/// then it represents the units per beat. For example, +96 would
/// mean 96 ticks per beat. If the value is negative, delta times
/// are in SMPTE compatible units.
pub division: i16,
}
impl SMF {
/// Read an SMF file at the given path
pub fn from_file(path: &Path) -> Result<SMF,SMFError> {
let mut file = try!(File::open(path));
SMFReader::read_smf(&mut file)
}
/// Read an SMF from the given reader
pub fn from_reader(reader: &mut Read) -> Result<SMF,SMFError> {
SMFReader::read_smf(reader)
}
/// Convert a type 0 (single track) to type 1 (multi track) SMF
/// Does nothing if the SMF is already in type 1
/// Returns None if the SMF is in type 2 (multi song)
pub fn to_multi_track(&self) -> Option<SMF> {
match self.format {
SMFFormat::MultiTrack => Some(self.clone()),
SMFFormat::MultiSong => None,
SMFFormat::Single => {
let mut tracks = vec![Vec::<TrackEvent>::new(); 1 + 16]; // meta track and 16 for the 16 channels
let mut time = 0;
for event in &self.tracks[0].events {
time += event.vtime;
match event.event {
Event::Midi(ref msg) if msg.channel().is_some() => {
let events = &mut tracks[msg.channel().unwrap() as usize + 1];
events.push(TrackEvent {vtime: time, event: event.event.clone()});
}
/*MidiEvent::Meta(ref msg) if [
MetaCommand::MIDIChannelPrefixAssignment,
MetaCommand::MIDIPortPrefixAssignment,
MetaCommand::SequenceOrTrackName,
MetaCommand::InstrumentName,
].contains(&msg.command) => {
println!("prefix: {:?}", event);
}*/
_ => {
tracks[0].push(TrackEvent {vtime: time, event: event.event.clone()});
}
}
}
let mut out = SMF {
format: SMFFormat::MultiTrack,
tracks: vec![],
division: self.division,
};
for events in &mut tracks {
if events.len() > 0 {
let mut time = 0;
for event in events.iter_mut() {
let tmp = event.vtime;
event.vtime -= time;
time = tmp;
}
out.tracks.push(Track {events: events.clone(), copyright: None, name: None});
}
}
out.tracks[0].name = self.tracks[0].name.clone();
out.tracks[0].copyright = self.tracks[0].copyright.clone();
Some(out)
}
}
}
}
| {
SMFError::MetaError(err)
} | identifier_body |
configuration.rs | //! Konfiguration Datei Managment
//!
use errors::*;
use std::fs::File;
use std::path::Path;
use std::io::Read;
pub struct Configuration;
impl Configuration {
/// Liest die Konfiguration
///
/// # Return values
///
/// Diese Funktion liefert ein Result. Das Result enthält die Konfiguration, als String, oder ein Error,
/// wenn die Konfiguration nicht ausgelesen werden konnte.
///
/// # Parameters
///
/// # Examples
///
/// ```rust
/// assert!(true);
/// ```
pub fn g | ) -> Result<String> {
// TODO: In production nur Konfig von `/boot` verwenden!
let possible_paths = vec![
Path::new("/boot/xMZ-Mod-Touch.json"),
Path::new("/usr/share/xmz-mod-touch-server/xMZ-Mod-Touch.json.production"),
Path::new("xMZ-Mod-Touch.json"),
];
let mut ret = String::new();
for p in possible_paths {
if Path::new(p).exists() {
match File::open(&p) {
Ok(mut file) => {
println!("Verwende Konfigurationsdatei: {}", p.display());
file.read_to_string(&mut ret)?;
}
Err(_) => panic!("Could not open file: {}", p.display()),
};
break;
}
}
Ok(ret)
}
}
| et_config( | identifier_name |
configuration.rs | //! Konfiguration Datei Managment
//!
use errors::*;
use std::fs::File;
use std::path::Path;
use std::io::Read;
pub struct Configuration;
impl Configuration {
/// Liest die Konfiguration
///
/// # Return values
///
/// Diese Funktion liefert ein Result. Das Result enthält die Konfiguration, als String, oder ein Error,
/// wenn die Konfiguration nicht ausgelesen werden konnte.
///
/// # Parameters
///
/// # Examples
///
/// ```rust
/// assert!(true);
/// ```
pub fn get_config() -> Result<String> {
// TODO: In production nur Konfig von `/boot` verwenden!
let possible_paths = vec![
Path::new("/boot/xMZ-Mod-Touch.json"),
Path::new("/usr/share/xmz-mod-touch-server/xMZ-Mod-Touch.json.production"),
Path::new("xMZ-Mod-Touch.json"),
];
let mut ret = String::new();
for p in possible_paths {
if Path::new(p).exists() { | }
Ok(ret)
}
}
|
match File::open(&p) {
Ok(mut file) => {
println!("Verwende Konfigurationsdatei: {}", p.display());
file.read_to_string(&mut ret)?;
}
Err(_) => panic!("Could not open file: {}", p.display()),
};
break;
}
| conditional_block |
configuration.rs | //! Konfiguration Datei Managment
//!
use errors::*;
use std::fs::File;
use std::path::Path;
use std::io::Read;
pub struct Configuration;
impl Configuration {
/// Liest die Konfiguration
///
/// # Return values
///
/// Diese Funktion liefert ein Result. Das Result enthält die Konfiguration, als String, oder ein Error,
/// wenn die Konfiguration nicht ausgelesen werden konnte.
///
/// # Parameters
///
/// # Examples
///
/// ```rust
/// assert!(true);
/// ```
pub fn get_config() -> Result<String> {
// TODO: In production nur Konfig von `/boot` verwenden!
let possible_paths = vec![
Path::new("/boot/xMZ-Mod-Touch.json"),
Path::new("/usr/share/xmz-mod-touch-server/xMZ-Mod-Touch.json.production"),
Path::new("xMZ-Mod-Touch.json"),
];
let mut ret = String::new();
for p in possible_paths {
if Path::new(p).exists() {
match File::open(&p) {
Ok(mut file) => {
println!("Verwende Konfigurationsdatei: {}", p.display());
file.read_to_string(&mut ret)?;
}
Err(_) => panic!("Could not open file: {}", p.display()),
};
break;
}
}
Ok(ret) | }
} | random_line_split |
|
get_last_with_len.rs | //! lint on using `x.get(x.len() - 1)` instead of `x.last()`
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::SpanlessEq;
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Spanned;
use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
/// Checks for using `x.get(x.len() - 1)` instead of
/// `x.last()`.
///
/// ### Why is this bad?
/// Using `x.last()` is easier to read and has the same
/// result.
///
/// Note that using `x[x.len() - 1]` is semantically different from
/// `x.last()`. Indexing into the array will panic on out-of-bounds
/// accesses, while `x.get()` and `x.last()` will return `None`.
///
/// There is another lint (get_unwrap) that covers the case of using
/// `x.get(index).unwrap()` instead of `x[index]`.
///
/// ### Example
/// ```rust
/// // Bad
/// let x = vec![2, 3, 5];
/// let last_element = x.get(x.len() - 1);
///
/// // Good
/// let x = vec![2, 3, 5];
/// let last_element = x.last();
/// ```
pub GET_LAST_WITH_LEN,
complexity,
"Using `x.get(x.len() - 1)` when `x.last()` is correct and simpler"
}
declare_lint_pass!(GetLastWithLen => [GET_LAST_WITH_LEN]);
impl<'tcx> LateLintPass<'tcx> for GetLastWithLen {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) | lhs,
rhs,
) = &get_index_arg.kind;
// LHS of subtraction is "x.len()"
if let ExprKind::MethodCall(arg_lhs_path, _, lhs_args, _) = &lhs.kind;
if arg_lhs_path.ident.name == sym::len;
if let Some(arg_lhs_struct) = lhs_args.get(0);
// The two vectors referenced (x in x.get(...) and in x.len())
if SpanlessEq::new(cx).eq_expr(struct_calling_on, arg_lhs_struct);
// RHS of subtraction is 1
if let ExprKind::Lit(rhs_lit) = &rhs.kind;
if let LitKind::Int(1,..) = rhs_lit.node;
then {
let mut applicability = Applicability::MachineApplicable;
let vec_name = snippet_with_applicability(
cx,
struct_calling_on.span, "vec",
&mut applicability,
);
span_lint_and_sugg(
cx,
GET_LAST_WITH_LEN,
expr.span,
&format!("accessing last element with `{0}.get({0}.len() - 1)`", vec_name),
"try",
format!("{}.last()", vec_name),
applicability,
);
}
}
}
}
| {
if_chain! {
// Is a method call
if let ExprKind::MethodCall(path, _, args, _) = expr.kind;
// Method name is "get"
if path.ident.name == sym!(get);
// Argument 0 (the struct we're calling the method on) is a vector
if let Some(struct_calling_on) = args.get(0);
let struct_ty = cx.typeck_results().expr_ty(struct_calling_on);
if is_type_diagnostic_item(cx, struct_ty, sym::Vec);
// Argument to "get" is a subtraction
if let Some(get_index_arg) = args.get(1);
if let ExprKind::Binary(
Spanned {
node: BinOpKind::Sub,
..
}, | identifier_body |
get_last_with_len.rs | //! lint on using `x.get(x.len() - 1)` instead of `x.last()`
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::SpanlessEq;
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Spanned;
use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
/// Checks for using `x.get(x.len() - 1)` instead of
/// `x.last()`.
///
/// ### Why is this bad?
/// Using `x.last()` is easier to read and has the same
/// result.
///
/// Note that using `x[x.len() - 1]` is semantically different from
/// `x.last()`. Indexing into the array will panic on out-of-bounds
/// accesses, while `x.get()` and `x.last()` will return `None`.
///
/// There is another lint (get_unwrap) that covers the case of using
/// `x.get(index).unwrap()` instead of `x[index]`.
///
/// ### Example
/// ```rust
/// // Bad
/// let x = vec![2, 3, 5];
/// let last_element = x.get(x.len() - 1);
///
/// // Good
/// let x = vec![2, 3, 5];
/// let last_element = x.last();
/// ```
pub GET_LAST_WITH_LEN,
complexity,
"Using `x.get(x.len() - 1)` when `x.last()` is correct and simpler" | fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
// Is a method call
if let ExprKind::MethodCall(path, _, args, _) = expr.kind;
// Method name is "get"
if path.ident.name == sym!(get);
// Argument 0 (the struct we're calling the method on) is a vector
if let Some(struct_calling_on) = args.get(0);
let struct_ty = cx.typeck_results().expr_ty(struct_calling_on);
if is_type_diagnostic_item(cx, struct_ty, sym::Vec);
// Argument to "get" is a subtraction
if let Some(get_index_arg) = args.get(1);
if let ExprKind::Binary(
Spanned {
node: BinOpKind::Sub,
..
},
lhs,
rhs,
) = &get_index_arg.kind;
// LHS of subtraction is "x.len()"
if let ExprKind::MethodCall(arg_lhs_path, _, lhs_args, _) = &lhs.kind;
if arg_lhs_path.ident.name == sym::len;
if let Some(arg_lhs_struct) = lhs_args.get(0);
// The two vectors referenced (x in x.get(...) and in x.len())
if SpanlessEq::new(cx).eq_expr(struct_calling_on, arg_lhs_struct);
// RHS of subtraction is 1
if let ExprKind::Lit(rhs_lit) = &rhs.kind;
if let LitKind::Int(1,..) = rhs_lit.node;
then {
let mut applicability = Applicability::MachineApplicable;
let vec_name = snippet_with_applicability(
cx,
struct_calling_on.span, "vec",
&mut applicability,
);
span_lint_and_sugg(
cx,
GET_LAST_WITH_LEN,
expr.span,
&format!("accessing last element with `{0}.get({0}.len() - 1)`", vec_name),
"try",
format!("{}.last()", vec_name),
applicability,
);
}
}
}
} | }
declare_lint_pass!(GetLastWithLen => [GET_LAST_WITH_LEN]);
impl<'tcx> LateLintPass<'tcx> for GetLastWithLen { | random_line_split |
get_last_with_len.rs | //! lint on using `x.get(x.len() - 1)` instead of `x.last()`
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::SpanlessEq;
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Spanned;
use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
/// Checks for using `x.get(x.len() - 1)` instead of
/// `x.last()`.
///
/// ### Why is this bad?
/// Using `x.last()` is easier to read and has the same
/// result.
///
/// Note that using `x[x.len() - 1]` is semantically different from
/// `x.last()`. Indexing into the array will panic on out-of-bounds
/// accesses, while `x.get()` and `x.last()` will return `None`.
///
/// There is another lint (get_unwrap) that covers the case of using
/// `x.get(index).unwrap()` instead of `x[index]`.
///
/// ### Example
/// ```rust
/// // Bad
/// let x = vec![2, 3, 5];
/// let last_element = x.get(x.len() - 1);
///
/// // Good
/// let x = vec![2, 3, 5];
/// let last_element = x.last();
/// ```
pub GET_LAST_WITH_LEN,
complexity,
"Using `x.get(x.len() - 1)` when `x.last()` is correct and simpler"
}
declare_lint_pass!(GetLastWithLen => [GET_LAST_WITH_LEN]);
impl<'tcx> LateLintPass<'tcx> for GetLastWithLen {
fn | (&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
// Is a method call
if let ExprKind::MethodCall(path, _, args, _) = expr.kind;
// Method name is "get"
if path.ident.name == sym!(get);
// Argument 0 (the struct we're calling the method on) is a vector
if let Some(struct_calling_on) = args.get(0);
let struct_ty = cx.typeck_results().expr_ty(struct_calling_on);
if is_type_diagnostic_item(cx, struct_ty, sym::Vec);
// Argument to "get" is a subtraction
if let Some(get_index_arg) = args.get(1);
if let ExprKind::Binary(
Spanned {
node: BinOpKind::Sub,
..
},
lhs,
rhs,
) = &get_index_arg.kind;
// LHS of subtraction is "x.len()"
if let ExprKind::MethodCall(arg_lhs_path, _, lhs_args, _) = &lhs.kind;
if arg_lhs_path.ident.name == sym::len;
if let Some(arg_lhs_struct) = lhs_args.get(0);
// The two vectors referenced (x in x.get(...) and in x.len())
if SpanlessEq::new(cx).eq_expr(struct_calling_on, arg_lhs_struct);
// RHS of subtraction is 1
if let ExprKind::Lit(rhs_lit) = &rhs.kind;
if let LitKind::Int(1,..) = rhs_lit.node;
then {
let mut applicability = Applicability::MachineApplicable;
let vec_name = snippet_with_applicability(
cx,
struct_calling_on.span, "vec",
&mut applicability,
);
span_lint_and_sugg(
cx,
GET_LAST_WITH_LEN,
expr.span,
&format!("accessing last element with `{0}.get({0}.len() - 1)`", vec_name),
"try",
format!("{}.last()", vec_name),
applicability,
);
}
}
}
}
| check_expr | identifier_name |
svh-a-change-type-arg.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The `svh-a-*.rs` files are all deviations from the base file
//! svh-a-base.rs with some difference (usually in `fn foo`) that
//! should not affect the strict version hash (SVH) computation
//! (#14132).
#![crate_name = "a"]
use std::marker::MarkerTrait;
macro_rules! three {
() => { 3 }
}
pub trait U : MarkerTrait {}
pub trait V : MarkerTrait {}
impl U for () {}
impl V for () {}
static A_CONSTANT : int = 2;
pub fn foo<T:U>(_: i32) -> int {
3
}
pub fn an_unused_name() -> int { | 4
} | random_line_split |
|
svh-a-change-type-arg.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The `svh-a-*.rs` files are all deviations from the base file
//! svh-a-base.rs with some difference (usually in `fn foo`) that
//! should not affect the strict version hash (SVH) computation
//! (#14132).
#![crate_name = "a"]
use std::marker::MarkerTrait;
macro_rules! three {
() => { 3 }
}
pub trait U : MarkerTrait {}
pub trait V : MarkerTrait {}
impl U for () {}
impl V for () {}
static A_CONSTANT : int = 2;
pub fn foo<T:U>(_: i32) -> int {
3
}
pub fn an_unused_name() -> int | {
4
} | identifier_body |
|
svh-a-change-type-arg.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The `svh-a-*.rs` files are all deviations from the base file
//! svh-a-base.rs with some difference (usually in `fn foo`) that
//! should not affect the strict version hash (SVH) computation
//! (#14132).
#![crate_name = "a"]
use std::marker::MarkerTrait;
macro_rules! three {
() => { 3 }
}
pub trait U : MarkerTrait {}
pub trait V : MarkerTrait {}
impl U for () {}
impl V for () {}
static A_CONSTANT : int = 2;
pub fn | <T:U>(_: i32) -> int {
3
}
pub fn an_unused_name() -> int {
4
}
| foo | identifier_name |
input.pp.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// minimal junk
#![feature(no_core)]
#![no_core]
macro_rules! foo /* 60#0 */(( $ x : ident ) => { y + $ x });
fn bar /* 62#0 */() |
fn y /* 61#0 */() { }
| { let x /* 59#2 */ = 1; y /* 61#4 */ + x /* 59#5 */ } | identifier_body |
input.pp.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | // minimal junk
#![feature(no_core)]
#![no_core]
macro_rules! foo /* 60#0 */(( $ x : ident ) => { y + $ x });
fn bar /* 62#0 */() { let x /* 59#2 */ = 1; y /* 61#4 */ + x /* 59#5 */ }
fn y /* 61#0 */() { } | // option. This file may not be copied, modified, or distributed
// except according to those terms.
| random_line_split |
input.pp.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// minimal junk
#![feature(no_core)]
#![no_core]
macro_rules! foo /* 60#0 */(( $ x : ident ) => { y + $ x });
fn | /* 62#0 */() { let x /* 59#2 */ = 1; y /* 61#4 */ + x /* 59#5 */ }
fn y /* 61#0 */() { }
| bar | identifier_name |
rec.rs | use super::Fibonacci;
pub struct | ;
impl Fibonacci for &Recursive {
fn fib(self, n: u64) -> u64 {
if n == 0 || n == 1 {
1
} else {
self.fib(n - 1) + self.fib(n - 2)
}
}
}
#[cfg(test)]
mod tests {
use super::super::Fibonacci;
use super::Recursive;
macro_rules! fib_test {
($name:ident, $($i:expr, $e:expr),+) => {
#[test]
fn $name() {
let r = Recursive;
$({
let o = r.fib($i);
assert_eq!(o, $e);
})*
}
}
}
fib_test!(zero, 0, 1);
fib_test!(one, 1, 1);
fib_test!(two, 2, 2);
fib_test!(three, 3, 3);
}
| Recursive | identifier_name |
rec.rs | use super::Fibonacci;
pub struct Recursive;
impl Fibonacci for &Recursive {
fn fib(self, n: u64) -> u64 {
if n == 0 || n == 1 {
1
} else {
self.fib(n - 1) + self.fib(n - 2)
}
}
}
#[cfg(test)]
mod tests {
use super::super::Fibonacci;
use super::Recursive;
macro_rules! fib_test {
($name:ident, $($i:expr, $e:expr),+) => {
#[test]
fn $name() {
let r = Recursive;
$({
let o = r.fib($i);
assert_eq!(o, $e);
})*
}
}
} | fib_test!(two, 2, 2);
fib_test!(three, 3, 3);
} |
fib_test!(zero, 0, 1);
fib_test!(one, 1, 1); | random_line_split |
rec.rs | use super::Fibonacci;
pub struct Recursive;
impl Fibonacci for &Recursive {
fn fib(self, n: u64) -> u64 {
if n == 0 || n == 1 | else {
self.fib(n - 1) + self.fib(n - 2)
}
}
}
#[cfg(test)]
mod tests {
use super::super::Fibonacci;
use super::Recursive;
macro_rules! fib_test {
($name:ident, $($i:expr, $e:expr),+) => {
#[test]
fn $name() {
let r = Recursive;
$({
let o = r.fib($i);
assert_eq!(o, $e);
})*
}
}
}
fib_test!(zero, 0, 1);
fib_test!(one, 1, 1);
fib_test!(two, 2, 2);
fib_test!(three, 3, 3);
}
| {
1
} | conditional_block |
macro-pat.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
macro_rules! mypat {
() => (
Some('y')
)
}
macro_rules! char_x {
() => (
'x'
)
}
macro_rules! some {
($x:pat) => (
Some($x)
)
}
macro_rules! indirect {
() => (
some!(char_x!())
)
}
macro_rules! ident_pat {
($x:ident) => (
$x
)
}
fn f(c: Option<char>) -> uint {
match c { | mypat!() => 2,
_ => 3,
}
}
pub fn main() {
assert_eq!(1u, f(Some('x')));
assert_eq!(2u, f(Some('y')));
assert_eq!(3u, f(None));
assert_eq!(1, match Some('x') {
Some(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
some!(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
indirect!() => 1,
_ => 2,
});
assert_eq!(3, {
let ident_pat!(x) = 2;
x+1
});
} | Some('x') => 1, | random_line_split |
macro-pat.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
macro_rules! mypat {
() => (
Some('y')
)
}
macro_rules! char_x {
() => (
'x'
)
}
macro_rules! some {
($x:pat) => (
Some($x)
)
}
macro_rules! indirect {
() => (
some!(char_x!())
)
}
macro_rules! ident_pat {
($x:ident) => (
$x
)
}
fn f(c: Option<char>) -> uint {
match c {
Some('x') => 1,
mypat!() => 2,
_ => 3,
}
}
pub fn main() | assert_eq!(3, {
let ident_pat!(x) = 2;
x+1
});
}
| {
assert_eq!(1u, f(Some('x')));
assert_eq!(2u, f(Some('y')));
assert_eq!(3u, f(None));
assert_eq!(1, match Some('x') {
Some(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
some!(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
indirect!() => 1,
_ => 2,
});
| identifier_body |
macro-pat.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
macro_rules! mypat {
() => (
Some('y')
)
}
macro_rules! char_x {
() => (
'x'
)
}
macro_rules! some {
($x:pat) => (
Some($x)
)
}
macro_rules! indirect {
() => (
some!(char_x!())
)
}
macro_rules! ident_pat {
($x:ident) => (
$x
)
}
fn | (c: Option<char>) -> uint {
match c {
Some('x') => 1,
mypat!() => 2,
_ => 3,
}
}
pub fn main() {
assert_eq!(1u, f(Some('x')));
assert_eq!(2u, f(Some('y')));
assert_eq!(3u, f(None));
assert_eq!(1, match Some('x') {
Some(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
some!(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
indirect!() => 1,
_ => 2,
});
assert_eq!(3, {
let ident_pat!(x) = 2;
x+1
});
}
| f | identifier_name |
error.rs | // Copyright 2015-2017 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
use std::error::Error as StdError;
#[derive(Debug, PartialEq, Eq)]
/// Error concerning the RLP decoder.
pub enum DecoderError {
/// Data has additional bytes at the end of the valid RLP fragment.
RlpIsTooBig,
/// Data has too few bytes for valid RLP.
RlpIsTooShort,
/// Expect an encoded list, RLP was something else.
RlpExpectedToBeList,
/// Expect encoded data, RLP was something else.
RlpExpectedToBeData,
/// Expected a different size list.
RlpIncorrectListLen,
/// Data length number has a prefixed zero byte, invalid for numbers.
RlpDataLenWithZeroPrefix,
/// List length number has a prefixed zero byte, invalid for numbers.
RlpListLenWithZeroPrefix,
/// Non-canonical (longer than necessary) representation used for data or list.
RlpInvalidIndirection,
/// Declared length is inconsistent with data specified after.
RlpInconsistentLengthAndData,
/// Custom rlp decoding error.
Custom(&'static str),
}
impl StdError for DecoderError {
fn | (&self) -> &str {
"builder error"
}
}
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self, f)
}
}
| description | identifier_name |
error.rs | // Copyright 2015-2017 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
use std::error::Error as StdError; |
#[derive(Debug, PartialEq, Eq)]
/// Error concerning the RLP decoder.
pub enum DecoderError {
/// Data has additional bytes at the end of the valid RLP fragment.
RlpIsTooBig,
/// Data has too few bytes for valid RLP.
RlpIsTooShort,
/// Expect an encoded list, RLP was something else.
RlpExpectedToBeList,
/// Expect encoded data, RLP was something else.
RlpExpectedToBeData,
/// Expected a different size list.
RlpIncorrectListLen,
/// Data length number has a prefixed zero byte, invalid for numbers.
RlpDataLenWithZeroPrefix,
/// List length number has a prefixed zero byte, invalid for numbers.
RlpListLenWithZeroPrefix,
/// Non-canonical (longer than necessary) representation used for data or list.
RlpInvalidIndirection,
/// Declared length is inconsistent with data specified after.
RlpInconsistentLengthAndData,
/// Custom rlp decoding error.
Custom(&'static str),
}
impl StdError for DecoderError {
fn description(&self) -> &str {
"builder error"
}
}
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self, f)
}
} | random_line_split |
|
utility.rs | // Copyright 2016 Kyle Mayes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Various utilities.
use std::cell::{RefCell};
use std::rc::{Rc};
use rustc_errors::{DiagnosticBuilder, FatalError, Handler, Level};
use rustc_errors::emitter::{Emitter};
use syntax::ext::tt::transcribe;
use syntax::ast::*;
use syntax::codemap::{CodeMap, Span, DUMMY_SP};
use syntax::parse::{ParseSess, PResult};
use syntax::parse::common::{SeqSep};
use syntax::parse::lexer::{Reader, TokenAndSpan};
use syntax::parse::parser::{Parser, PathStyle};
use syntax::parse::token::{BinOpToken, Token};
use syntax::ptr::{P};
use syntax::tokenstream::{Delimited, TokenTree};
/// A result type for reporting errors in plugins.
pub type PluginResult<T> = Result<T, (Span, String)>;
//================================================
// Macros
//================================================
// parse! _______________________________________
/// Defines a parsing method for `TransactionParser` that parses a particular AST entity.
macro_rules! parse {
($name:ident($($argument:expr), *)$(.$method:ident())*, $description:expr, $ty:ty) => {
pub fn $name(&mut self, name: &str) -> PluginResult<(Span, $ty)> {
self.parse_expected($description, name, |p| p.$name($($argument), *))
}
};
(OPTION: $name:ident($($argument:expr), *)$(.$method:ident())*, $description:expr, $ty:ty) => {
pub fn $name(&mut self, name: &str) -> PluginResult<(Span, $ty)> {
self.parse_expected_option($description, name, |p| p.$name($($argument), *))
}
};
}
//================================================
// Structs
//================================================
// SaveEmitter ___________________________________
/// The most recent fatal parsing error, if any.
thread_local! { static ERROR: RefCell<Option<(Span, String)>> = RefCell::default() }
/// A diagnostic emitter that saves fatal parsing errors to a thread local variable.
struct SaveEmitter;
impl SaveEmitter {
//- Static -----------------------------------
/// Returns the last fatal parsing error.
fn get_error() -> (Span, String) {
ERROR.with(|e| e.borrow().clone().unwrap_or_else(|| (DUMMY_SP, "no error".into())))
}
}
impl Emitter for SaveEmitter {
fn emit(&mut self, builder: &DiagnosticBuilder) {
if builder.level == Level::Fatal {
let span = builder.span.primary_span().unwrap_or(DUMMY_SP);
ERROR.with(|e| *e.borrow_mut() = Some((span, builder.message.clone())));
}
}
}
// TokenReader ___________________________________
/// A `Reader` that wraps a slice of `TokenAndSpan`s.
#[derive(Clone)]
struct TokenReader<'s> {
session: &'s ParseSess,
tokens: &'s [TokenAndSpan],
index: usize,
}
impl<'s> TokenReader<'s> {
//- Constructors -----------------------------
/// Constructs a new `TokenReader`.
fn new(session: &'s ParseSess, tokens: &'s [TokenAndSpan]) -> TokenReader<'s> {
TokenReader { session: session, tokens: tokens, index: 0 }
}
}
impl<'s> Reader for TokenReader<'s> {
fn is_eof(&self) -> bool {
self.index + 1 >= self.tokens.len()
}
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
let next = self.tokens[self.index].clone();
if !self.is_eof() {
self.index += 1;
}
Ok(next)
}
fn fatal(&self, _: &str) -> FatalError { unreachable!() }
fn err(&self, _: &str) { }
fn emit_fatal_errors(&mut self) { }
fn peek(&self) -> TokenAndSpan {
self.tokens[self.index].clone()
}
}
// Transaction ___________________________________
/// A parser transaction.
pub struct Transaction(usize);
impl Transaction {
//- Accessors ---------------------------------
/// Resets the parser to the state it was in when this transaction was created.
pub fn rollback(&self, parser: &mut TransactionParser) {
parser.index = self.0;
}
}
// TransactionParser _____________________________
/// A wrapper around a `Parser` which allows for rolling back parsing actions.
#[allow(missing_debug_implementations)]
pub struct TransactionParser {
session: ParseSess,
tokens: Vec<TokenAndSpan>,
index: usize,
span: Span,
}
impl TransactionParser {
//- Constructors -----------------------------
/// Constructs a new `TransactionParser`.
pub fn new(session: &ParseSess, tts: &[TokenTree]) -> TransactionParser {
let handler = Handler::with_emitter(false, false, Box::new(SaveEmitter));
let mut codemap = CodeMap::new();
codemap.files = session.codemap().files.clone();
TransactionParser {
session: ParseSess::with_span_handler(handler, Rc::new(codemap)),
tokens: flatten_tts(session, tts),
index: 0,
span: span_tts(tts),
}
}
//- Accessors --------------------------------
/// Returns the span of current token.
pub fn get_span(&self) -> Span {
self.tokens.get(self.index).map_or(self.span, |t| t.sp)
}
/// Returns the span of the last token processed.
pub fn get_last_span(&self) -> Span {
self.tokens.get(self.index.saturating_sub(1)).map_or(self.span, |t| t.sp)
}
/// Returns whether the current token is the EOF token.
fn is_eof(&self) -> bool {
self.index + 1 >= self.tokens.len()
}
/// Returns the span of the remaining tokens, if any.
pub fn get_remainder_span(&self) -> Option<Span> {
if self.is_eof() {
None
} else {
Some(span_spans(self.get_span(), self.span))
}
}
/// Creates a new transaction which saves the current state of this parser.
pub fn transaction(&self) -> Transaction {
Transaction(self.index)
}
/// Returns a parsing error.
fn get_error(&self, mut span: Span, description: &str, name: Option<&str>) -> (Span, String) {
let mut message = if let Some(name) = name {
format!("expected {}: '{}'", description, name)
} else {
format!("expected {}", description)
};
if self.is_eof() {
span = self.span;
message = format!("unexpected end of arguments: {}", message);
}
(span, message)
}
//- Mutators ---------------------------------
/// Applies a parsing action to this parser, returning the result of the action.
#[cfg_attr(feature="clippy", allow(needless_lifetimes))]
pub fn apply<'s, T, F: FnOnce(&mut Parser<'s>) -> T>(&'s mut self, f: F) -> (Span, T) {
let reader = TokenReader::new(&self.session, &self.tokens[self.index..]);
let mut parser = Parser::new(&self.session, Box::new(reader), None, false);
let start = self.get_span();
let result = f(&mut parser);
self.index += parser.tokens_consumed;
let end = self.get_last_span();
(span_spans(start, end), result)
}
/// Attempts to consume the supplied token, returning whether a token was consumed.
pub fn eat(&mut self, token: &Token) -> bool {
self.apply(|p| p.eat(token)).1
}
/// Returns the next token.
pub fn next_token(
&mut self, description: &str, name: Option<&str>
) -> PluginResult<(Span, Token)> {
match self.tokens[self.index].tok.clone() {
Token::Eof => Err(self.get_error(DUMMY_SP, description, name)),
token => { self.index += 1; Ok((self.get_last_span(), token)) },
}
}
/// Applies a parsing action to this parser, returning the result of the action.
fn parse_expected<'s, T, F: FnOnce(&mut Parser<'s>) -> PResult<'s, T>>(
&'s mut self, description: &str, name: &str, f: F
) -> PluginResult<(Span, T)> {
let this: *const TransactionParser = self as *const TransactionParser;
let span = match self.apply(f) {
(span, Ok(value)) => return Ok((span, value)),
(span, Err(mut err)) => { err.cancel(); span },
};
// FIXME: hack to get around mutable borrow bug
let error = unsafe { (*this).get_error(span, description, Some(name)) };
Err(error)
}
/// Applies a parsing action to this parser, returning the result of the action.
fn parse_expected_option<'s, T, F: FnOnce(&mut Parser<'s>) -> PResult<'s, Option<T>>>(
&'s mut self, description: &str, name: &str, f: F
) -> PluginResult<(Span, T)> {
let this: *const TransactionParser = self as *const TransactionParser;
let span = match self.apply(f) {
(span, Ok(Some(value))) => return Ok((span, value)),
(span, Ok(_)) => { span },
(span, Err(mut err)) => { err.cancel(); span },
};
// FIXME: hack to get around mutable borrow bug
let error = unsafe { (*this).get_error(span, description, Some(name)) };
Err(error)
}
parse!(parse_attribute(true), "attribute", Attribute);
parse!(parse_block(), "block", P<Block>);
parse!(parse_expr(), "expression", P<Expr>);
parse!(parse_ident(), "identifier", Ident);
parse!(OPTION: parse_item(), "item", P<Item>);
parse!(parse_lifetime(), "lifetime", Lifetime);
parse!(parse_lit(), "literal", Lit);
parse!(parse_meta_item(), "meta item", MetaItem);
parse!(parse_pat(), "pattern", P<Pat>);
parse!(parse_path(PathStyle::Type), "path", Path);
parse!(OPTION: parse_stmt(), "statement", Stmt);
parse!(parse_ty(), "type", P<Ty>);
parse!(parse_token_tree(), "token tree", TokenTree);
pub fn parse_binop(&mut self, name: &str) -> PluginResult<(Span, BinOpToken)> {
match try!(self.next_token("binary operator", Some(name))) {
(span, Token::BinOp(binop)) | (span, Token::BinOpEq(binop)) => Ok((span, binop)),
(span, _) => Err((span, "expected binary operator".into())),
}
}
pub fn parse_delim(&mut self, name: &str) -> PluginResult<(Span, Delimited)> {
let (start, delim) = match try!(self.next_token("opening delimiter", Some(name))) {
(span, Token::OpenDelim(delim)) => (span, delim),
(span, _) => return Err((span, "expected opening delimiter".into())),
}; | let end = self.get_last_span();
let delimited = Delimited {
delim: delim,
open_span: start,
tts: try!(tts),
close_span: end,
};
Ok((span_spans(start, end), delimited))
}
pub fn parse_token(&mut self, name: &str) -> PluginResult<(Span, Token)> {
self.next_token("token", Some(name))
}
}
//================================================
// Functions
//================================================
/// Flattens the supplied token trees.
fn flatten_tts(session: &ParseSess, tts: &[TokenTree]) -> Vec<TokenAndSpan> {
let mut reader = transcribe::new_tt_reader(&session.span_diagnostic, None, tts.into());
let mut tokens = vec![];
while reader.peek().tok != Token::Eof {
tokens.push(reader.next_token());
}
tokens.push(reader.next_token());
tokens
}
/// Returns a span that spans the supplied spans.
pub fn span_spans(start: Span, end: Span) -> Span {
Span { lo: start.lo, hi: end.hi, expn_id: start.expn_id }
}
/// Returns a span that spans all of the supplied token trees.
pub fn span_tts(tts: &[TokenTree]) -> Span {
let start = tts.get(0).map_or(DUMMY_SP, TokenTree::get_span);
let end = tts.iter().last().map_or(DUMMY_SP, TokenTree::get_span);
span_spans(start, end)
} | let tts = self.apply(|p| {
let sep = SeqSep { sep: None, trailing_sep_allowed: false };
p.parse_seq_to_end(&Token::CloseDelim(delim), sep, |p| p.parse_token_tree())
}).1.map_err(|mut err| { err.cancel(); SaveEmitter::get_error() }); | random_line_split |
utility.rs | // Copyright 2016 Kyle Mayes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Various utilities.
use std::cell::{RefCell};
use std::rc::{Rc};
use rustc_errors::{DiagnosticBuilder, FatalError, Handler, Level};
use rustc_errors::emitter::{Emitter};
use syntax::ext::tt::transcribe;
use syntax::ast::*;
use syntax::codemap::{CodeMap, Span, DUMMY_SP};
use syntax::parse::{ParseSess, PResult};
use syntax::parse::common::{SeqSep};
use syntax::parse::lexer::{Reader, TokenAndSpan};
use syntax::parse::parser::{Parser, PathStyle};
use syntax::parse::token::{BinOpToken, Token};
use syntax::ptr::{P};
use syntax::tokenstream::{Delimited, TokenTree};
/// A result type for reporting errors in plugins.
pub type PluginResult<T> = Result<T, (Span, String)>;
//================================================
// Macros
//================================================
// parse! _______________________________________
/// Defines a parsing method for `TransactionParser` that parses a particular AST entity.
macro_rules! parse {
($name:ident($($argument:expr), *)$(.$method:ident())*, $description:expr, $ty:ty) => {
pub fn $name(&mut self, name: &str) -> PluginResult<(Span, $ty)> {
self.parse_expected($description, name, |p| p.$name($($argument), *))
}
};
(OPTION: $name:ident($($argument:expr), *)$(.$method:ident())*, $description:expr, $ty:ty) => {
pub fn $name(&mut self, name: &str) -> PluginResult<(Span, $ty)> {
self.parse_expected_option($description, name, |p| p.$name($($argument), *))
}
};
}
//================================================
// Structs
//================================================
// SaveEmitter ___________________________________
/// The most recent fatal parsing error, if any.
thread_local! { static ERROR: RefCell<Option<(Span, String)>> = RefCell::default() }
/// A diagnostic emitter that saves fatal parsing errors to a thread local variable.
struct SaveEmitter;
impl SaveEmitter {
//- Static -----------------------------------
/// Returns the last fatal parsing error.
fn get_error() -> (Span, String) {
ERROR.with(|e| e.borrow().clone().unwrap_or_else(|| (DUMMY_SP, "no error".into())))
}
}
impl Emitter for SaveEmitter {
fn emit(&mut self, builder: &DiagnosticBuilder) {
if builder.level == Level::Fatal {
let span = builder.span.primary_span().unwrap_or(DUMMY_SP);
ERROR.with(|e| *e.borrow_mut() = Some((span, builder.message.clone())));
}
}
}
// TokenReader ___________________________________
/// A `Reader` that wraps a slice of `TokenAndSpan`s.
#[derive(Clone)]
struct TokenReader<'s> {
session: &'s ParseSess,
tokens: &'s [TokenAndSpan],
index: usize,
}
impl<'s> TokenReader<'s> {
//- Constructors -----------------------------
/// Constructs a new `TokenReader`.
fn new(session: &'s ParseSess, tokens: &'s [TokenAndSpan]) -> TokenReader<'s> {
TokenReader { session: session, tokens: tokens, index: 0 }
}
}
impl<'s> Reader for TokenReader<'s> {
fn is_eof(&self) -> bool {
self.index + 1 >= self.tokens.len()
}
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
let next = self.tokens[self.index].clone();
if !self.is_eof() {
self.index += 1;
}
Ok(next)
}
fn fatal(&self, _: &str) -> FatalError { unreachable!() }
fn err(&self, _: &str) { }
fn emit_fatal_errors(&mut self) { }
fn peek(&self) -> TokenAndSpan {
self.tokens[self.index].clone()
}
}
// Transaction ___________________________________
/// A parser transaction.
pub struct Transaction(usize);
impl Transaction {
//- Accessors ---------------------------------
/// Resets the parser to the state it was in when this transaction was created.
pub fn rollback(&self, parser: &mut TransactionParser) {
parser.index = self.0;
}
}
// TransactionParser _____________________________
/// A wrapper around a `Parser` which allows for rolling back parsing actions.
#[allow(missing_debug_implementations)]
pub struct TransactionParser {
session: ParseSess,
tokens: Vec<TokenAndSpan>,
index: usize,
span: Span,
}
impl TransactionParser {
//- Constructors -----------------------------
/// Constructs a new `TransactionParser`.
pub fn new(session: &ParseSess, tts: &[TokenTree]) -> TransactionParser {
let handler = Handler::with_emitter(false, false, Box::new(SaveEmitter));
let mut codemap = CodeMap::new();
codemap.files = session.codemap().files.clone();
TransactionParser {
session: ParseSess::with_span_handler(handler, Rc::new(codemap)),
tokens: flatten_tts(session, tts),
index: 0,
span: span_tts(tts),
}
}
//- Accessors --------------------------------
/// Returns the span of current token.
pub fn get_span(&self) -> Span {
self.tokens.get(self.index).map_or(self.span, |t| t.sp)
}
/// Returns the span of the last token processed.
pub fn get_last_span(&self) -> Span {
self.tokens.get(self.index.saturating_sub(1)).map_or(self.span, |t| t.sp)
}
/// Returns whether the current token is the EOF token.
fn is_eof(&self) -> bool {
self.index + 1 >= self.tokens.len()
}
/// Returns the span of the remaining tokens, if any.
pub fn get_remainder_span(&self) -> Option<Span> {
if self.is_eof() {
None
} else {
Some(span_spans(self.get_span(), self.span))
}
}
/// Creates a new transaction which saves the current state of this parser.
pub fn transaction(&self) -> Transaction {
Transaction(self.index)
}
/// Returns a parsing error.
fn get_error(&self, mut span: Span, description: &str, name: Option<&str>) -> (Span, String) {
let mut message = if let Some(name) = name {
format!("expected {}: '{}'", description, name)
} else {
format!("expected {}", description)
};
if self.is_eof() {
span = self.span;
message = format!("unexpected end of arguments: {}", message);
}
(span, message)
}
//- Mutators ---------------------------------
/// Applies a parsing action to this parser, returning the result of the action.
#[cfg_attr(feature="clippy", allow(needless_lifetimes))]
pub fn apply<'s, T, F: FnOnce(&mut Parser<'s>) -> T>(&'s mut self, f: F) -> (Span, T) {
let reader = TokenReader::new(&self.session, &self.tokens[self.index..]);
let mut parser = Parser::new(&self.session, Box::new(reader), None, false);
let start = self.get_span();
let result = f(&mut parser);
self.index += parser.tokens_consumed;
let end = self.get_last_span();
(span_spans(start, end), result)
}
/// Attempts to consume the supplied token, returning whether a token was consumed.
pub fn eat(&mut self, token: &Token) -> bool {
self.apply(|p| p.eat(token)).1
}
/// Returns the next token.
pub fn next_token(
&mut self, description: &str, name: Option<&str>
) -> PluginResult<(Span, Token)> {
match self.tokens[self.index].tok.clone() {
Token::Eof => Err(self.get_error(DUMMY_SP, description, name)),
token => { self.index += 1; Ok((self.get_last_span(), token)) },
}
}
/// Applies a parsing action to this parser, returning the result of the action.
fn parse_expected<'s, T, F: FnOnce(&mut Parser<'s>) -> PResult<'s, T>>(
&'s mut self, description: &str, name: &str, f: F
) -> PluginResult<(Span, T)> {
let this: *const TransactionParser = self as *const TransactionParser;
let span = match self.apply(f) {
(span, Ok(value)) => return Ok((span, value)),
(span, Err(mut err)) => { err.cancel(); span },
};
// FIXME: hack to get around mutable borrow bug
let error = unsafe { (*this).get_error(span, description, Some(name)) };
Err(error)
}
/// Applies a parsing action to this parser, returning the result of the action.
fn parse_expected_option<'s, T, F: FnOnce(&mut Parser<'s>) -> PResult<'s, Option<T>>>(
&'s mut self, description: &str, name: &str, f: F
) -> PluginResult<(Span, T)> {
let this: *const TransactionParser = self as *const TransactionParser;
let span = match self.apply(f) {
(span, Ok(Some(value))) => return Ok((span, value)),
(span, Ok(_)) => { span },
(span, Err(mut err)) => { err.cancel(); span },
};
// FIXME: hack to get around mutable borrow bug
let error = unsafe { (*this).get_error(span, description, Some(name)) };
Err(error)
}
parse!(parse_attribute(true), "attribute", Attribute);
parse!(parse_block(), "block", P<Block>);
parse!(parse_expr(), "expression", P<Expr>);
parse!(parse_ident(), "identifier", Ident);
parse!(OPTION: parse_item(), "item", P<Item>);
parse!(parse_lifetime(), "lifetime", Lifetime);
parse!(parse_lit(), "literal", Lit);
parse!(parse_meta_item(), "meta item", MetaItem);
parse!(parse_pat(), "pattern", P<Pat>);
parse!(parse_path(PathStyle::Type), "path", Path);
parse!(OPTION: parse_stmt(), "statement", Stmt);
parse!(parse_ty(), "type", P<Ty>);
parse!(parse_token_tree(), "token tree", TokenTree);
pub fn parse_binop(&mut self, name: &str) -> PluginResult<(Span, BinOpToken)> {
match try!(self.next_token("binary operator", Some(name))) {
(span, Token::BinOp(binop)) | (span, Token::BinOpEq(binop)) => Ok((span, binop)),
(span, _) => Err((span, "expected binary operator".into())),
}
}
pub fn | (&mut self, name: &str) -> PluginResult<(Span, Delimited)> {
let (start, delim) = match try!(self.next_token("opening delimiter", Some(name))) {
(span, Token::OpenDelim(delim)) => (span, delim),
(span, _) => return Err((span, "expected opening delimiter".into())),
};
let tts = self.apply(|p| {
let sep = SeqSep { sep: None, trailing_sep_allowed: false };
p.parse_seq_to_end(&Token::CloseDelim(delim), sep, |p| p.parse_token_tree())
}).1.map_err(|mut err| { err.cancel(); SaveEmitter::get_error() });
let end = self.get_last_span();
let delimited = Delimited {
delim: delim,
open_span: start,
tts: try!(tts),
close_span: end,
};
Ok((span_spans(start, end), delimited))
}
pub fn parse_token(&mut self, name: &str) -> PluginResult<(Span, Token)> {
self.next_token("token", Some(name))
}
}
//================================================
// Functions
//================================================
/// Flattens the supplied token trees.
fn flatten_tts(session: &ParseSess, tts: &[TokenTree]) -> Vec<TokenAndSpan> {
let mut reader = transcribe::new_tt_reader(&session.span_diagnostic, None, tts.into());
let mut tokens = vec![];
while reader.peek().tok != Token::Eof {
tokens.push(reader.next_token());
}
tokens.push(reader.next_token());
tokens
}
/// Returns a span that spans the supplied spans.
pub fn span_spans(start: Span, end: Span) -> Span {
Span { lo: start.lo, hi: end.hi, expn_id: start.expn_id }
}
/// Returns a span that spans all of the supplied token trees.
pub fn span_tts(tts: &[TokenTree]) -> Span {
let start = tts.get(0).map_or(DUMMY_SP, TokenTree::get_span);
let end = tts.iter().last().map_or(DUMMY_SP, TokenTree::get_span);
span_spans(start, end)
}
| parse_delim | identifier_name |
utility.rs | // Copyright 2016 Kyle Mayes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Various utilities.
use std::cell::{RefCell};
use std::rc::{Rc};
use rustc_errors::{DiagnosticBuilder, FatalError, Handler, Level};
use rustc_errors::emitter::{Emitter};
use syntax::ext::tt::transcribe;
use syntax::ast::*;
use syntax::codemap::{CodeMap, Span, DUMMY_SP};
use syntax::parse::{ParseSess, PResult};
use syntax::parse::common::{SeqSep};
use syntax::parse::lexer::{Reader, TokenAndSpan};
use syntax::parse::parser::{Parser, PathStyle};
use syntax::parse::token::{BinOpToken, Token};
use syntax::ptr::{P};
use syntax::tokenstream::{Delimited, TokenTree};
/// A result type for reporting errors in plugins.
pub type PluginResult<T> = Result<T, (Span, String)>;
//================================================
// Macros
//================================================
// parse! _______________________________________
/// Defines a parsing method for `TransactionParser` that parses a particular AST entity.
macro_rules! parse {
($name:ident($($argument:expr), *)$(.$method:ident())*, $description:expr, $ty:ty) => {
pub fn $name(&mut self, name: &str) -> PluginResult<(Span, $ty)> {
self.parse_expected($description, name, |p| p.$name($($argument), *))
}
};
(OPTION: $name:ident($($argument:expr), *)$(.$method:ident())*, $description:expr, $ty:ty) => {
pub fn $name(&mut self, name: &str) -> PluginResult<(Span, $ty)> {
self.parse_expected_option($description, name, |p| p.$name($($argument), *))
}
};
}
//================================================
// Structs
//================================================
// SaveEmitter ___________________________________
/// The most recent fatal parsing error, if any.
thread_local! { static ERROR: RefCell<Option<(Span, String)>> = RefCell::default() }
/// A diagnostic emitter that saves fatal parsing errors to a thread local variable.
struct SaveEmitter;
impl SaveEmitter {
//- Static -----------------------------------
/// Returns the last fatal parsing error.
fn get_error() -> (Span, String) {
ERROR.with(|e| e.borrow().clone().unwrap_or_else(|| (DUMMY_SP, "no error".into())))
}
}
impl Emitter for SaveEmitter {
fn emit(&mut self, builder: &DiagnosticBuilder) {
if builder.level == Level::Fatal {
let span = builder.span.primary_span().unwrap_or(DUMMY_SP);
ERROR.with(|e| *e.borrow_mut() = Some((span, builder.message.clone())));
}
}
}
// TokenReader ___________________________________
/// A `Reader` that wraps a slice of `TokenAndSpan`s.
#[derive(Clone)]
struct TokenReader<'s> {
session: &'s ParseSess,
tokens: &'s [TokenAndSpan],
index: usize,
}
impl<'s> TokenReader<'s> {
//- Constructors -----------------------------
/// Constructs a new `TokenReader`.
fn new(session: &'s ParseSess, tokens: &'s [TokenAndSpan]) -> TokenReader<'s> {
TokenReader { session: session, tokens: tokens, index: 0 }
}
}
impl<'s> Reader for TokenReader<'s> {
fn is_eof(&self) -> bool {
self.index + 1 >= self.tokens.len()
}
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
let next = self.tokens[self.index].clone();
if !self.is_eof() {
self.index += 1;
}
Ok(next)
}
fn fatal(&self, _: &str) -> FatalError { unreachable!() }
fn err(&self, _: &str) { }
fn emit_fatal_errors(&mut self) { }
fn peek(&self) -> TokenAndSpan {
self.tokens[self.index].clone()
}
}
// Transaction ___________________________________
/// A parser transaction.
pub struct Transaction(usize);
impl Transaction {
//- Accessors ---------------------------------
/// Resets the parser to the state it was in when this transaction was created.
pub fn rollback(&self, parser: &mut TransactionParser) {
parser.index = self.0;
}
}
// TransactionParser _____________________________
/// A wrapper around a `Parser` which allows for rolling back parsing actions.
#[allow(missing_debug_implementations)]
pub struct TransactionParser {
session: ParseSess,
tokens: Vec<TokenAndSpan>,
index: usize,
span: Span,
}
impl TransactionParser {
//- Constructors -----------------------------
/// Constructs a new `TransactionParser`.
pub fn new(session: &ParseSess, tts: &[TokenTree]) -> TransactionParser {
let handler = Handler::with_emitter(false, false, Box::new(SaveEmitter));
let mut codemap = CodeMap::new();
codemap.files = session.codemap().files.clone();
TransactionParser {
session: ParseSess::with_span_handler(handler, Rc::new(codemap)),
tokens: flatten_tts(session, tts),
index: 0,
span: span_tts(tts),
}
}
//- Accessors --------------------------------
/// Returns the span of current token.
pub fn get_span(&self) -> Span {
self.tokens.get(self.index).map_or(self.span, |t| t.sp)
}
/// Returns the span of the last token processed.
pub fn get_last_span(&self) -> Span {
self.tokens.get(self.index.saturating_sub(1)).map_or(self.span, |t| t.sp)
}
/// Returns whether the current token is the EOF token.
fn is_eof(&self) -> bool {
self.index + 1 >= self.tokens.len()
}
/// Returns the span of the remaining tokens, if any.
pub fn get_remainder_span(&self) -> Option<Span> |
/// Creates a new transaction which saves the current state of this parser.
pub fn transaction(&self) -> Transaction {
Transaction(self.index)
}
/// Returns a parsing error.
fn get_error(&self, mut span: Span, description: &str, name: Option<&str>) -> (Span, String) {
let mut message = if let Some(name) = name {
format!("expected {}: '{}'", description, name)
} else {
format!("expected {}", description)
};
if self.is_eof() {
span = self.span;
message = format!("unexpected end of arguments: {}", message);
}
(span, message)
}
//- Mutators ---------------------------------
/// Applies a parsing action to this parser, returning the result of the action.
#[cfg_attr(feature="clippy", allow(needless_lifetimes))]
pub fn apply<'s, T, F: FnOnce(&mut Parser<'s>) -> T>(&'s mut self, f: F) -> (Span, T) {
let reader = TokenReader::new(&self.session, &self.tokens[self.index..]);
let mut parser = Parser::new(&self.session, Box::new(reader), None, false);
let start = self.get_span();
let result = f(&mut parser);
self.index += parser.tokens_consumed;
let end = self.get_last_span();
(span_spans(start, end), result)
}
/// Attempts to consume the supplied token, returning whether a token was consumed.
pub fn eat(&mut self, token: &Token) -> bool {
self.apply(|p| p.eat(token)).1
}
/// Returns the next token.
pub fn next_token(
&mut self, description: &str, name: Option<&str>
) -> PluginResult<(Span, Token)> {
match self.tokens[self.index].tok.clone() {
Token::Eof => Err(self.get_error(DUMMY_SP, description, name)),
token => { self.index += 1; Ok((self.get_last_span(), token)) },
}
}
/// Applies a parsing action to this parser, returning the result of the action.
fn parse_expected<'s, T, F: FnOnce(&mut Parser<'s>) -> PResult<'s, T>>(
&'s mut self, description: &str, name: &str, f: F
) -> PluginResult<(Span, T)> {
let this: *const TransactionParser = self as *const TransactionParser;
let span = match self.apply(f) {
(span, Ok(value)) => return Ok((span, value)),
(span, Err(mut err)) => { err.cancel(); span },
};
// FIXME: hack to get around mutable borrow bug
let error = unsafe { (*this).get_error(span, description, Some(name)) };
Err(error)
}
/// Applies a parsing action to this parser, returning the result of the action.
fn parse_expected_option<'s, T, F: FnOnce(&mut Parser<'s>) -> PResult<'s, Option<T>>>(
&'s mut self, description: &str, name: &str, f: F
) -> PluginResult<(Span, T)> {
let this: *const TransactionParser = self as *const TransactionParser;
let span = match self.apply(f) {
(span, Ok(Some(value))) => return Ok((span, value)),
(span, Ok(_)) => { span },
(span, Err(mut err)) => { err.cancel(); span },
};
// FIXME: hack to get around mutable borrow bug
let error = unsafe { (*this).get_error(span, description, Some(name)) };
Err(error)
}
parse!(parse_attribute(true), "attribute", Attribute);
parse!(parse_block(), "block", P<Block>);
parse!(parse_expr(), "expression", P<Expr>);
parse!(parse_ident(), "identifier", Ident);
parse!(OPTION: parse_item(), "item", P<Item>);
parse!(parse_lifetime(), "lifetime", Lifetime);
parse!(parse_lit(), "literal", Lit);
parse!(parse_meta_item(), "meta item", MetaItem);
parse!(parse_pat(), "pattern", P<Pat>);
parse!(parse_path(PathStyle::Type), "path", Path);
parse!(OPTION: parse_stmt(), "statement", Stmt);
parse!(parse_ty(), "type", P<Ty>);
parse!(parse_token_tree(), "token tree", TokenTree);
pub fn parse_binop(&mut self, name: &str) -> PluginResult<(Span, BinOpToken)> {
match try!(self.next_token("binary operator", Some(name))) {
(span, Token::BinOp(binop)) | (span, Token::BinOpEq(binop)) => Ok((span, binop)),
(span, _) => Err((span, "expected binary operator".into())),
}
}
pub fn parse_delim(&mut self, name: &str) -> PluginResult<(Span, Delimited)> {
let (start, delim) = match try!(self.next_token("opening delimiter", Some(name))) {
(span, Token::OpenDelim(delim)) => (span, delim),
(span, _) => return Err((span, "expected opening delimiter".into())),
};
let tts = self.apply(|p| {
let sep = SeqSep { sep: None, trailing_sep_allowed: false };
p.parse_seq_to_end(&Token::CloseDelim(delim), sep, |p| p.parse_token_tree())
}).1.map_err(|mut err| { err.cancel(); SaveEmitter::get_error() });
let end = self.get_last_span();
let delimited = Delimited {
delim: delim,
open_span: start,
tts: try!(tts),
close_span: end,
};
Ok((span_spans(start, end), delimited))
}
pub fn parse_token(&mut self, name: &str) -> PluginResult<(Span, Token)> {
self.next_token("token", Some(name))
}
}
//================================================
// Functions
//================================================
/// Flattens the supplied token trees.
fn flatten_tts(session: &ParseSess, tts: &[TokenTree]) -> Vec<TokenAndSpan> {
let mut reader = transcribe::new_tt_reader(&session.span_diagnostic, None, tts.into());
let mut tokens = vec![];
while reader.peek().tok != Token::Eof {
tokens.push(reader.next_token());
}
tokens.push(reader.next_token());
tokens
}
/// Returns a span that spans the supplied spans.
pub fn span_spans(start: Span, end: Span) -> Span {
Span { lo: start.lo, hi: end.hi, expn_id: start.expn_id }
}
/// Returns a span that spans all of the supplied token trees.
pub fn span_tts(tts: &[TokenTree]) -> Span {
let start = tts.get(0).map_or(DUMMY_SP, TokenTree::get_span);
let end = tts.iter().last().map_or(DUMMY_SP, TokenTree::get_span);
span_spans(start, end)
}
| {
if self.is_eof() {
None
} else {
Some(span_spans(self.get_span(), self.span))
}
} | identifier_body |
main.rs | extern crate rand;
use std::io;
use rand::Rng; // provide a random number generator
use std::cmp::Ordering;
fn | () {
let lower_bound = 1;
let upper_bound = 101;
println!("Guess the num! {} ~ {}", lower_bound, upper_bound);
let secret = rand::thread_rng() // get a copy of the rng
.gen_range(lower_bound, upper_bound);
// println!("The secret num is: {}", secret);
loop {
let mut guess = String::new();
println!("Input your guess:");
// print!("Input your guess:");
// io::stdout().flush();
io::stdin().read_line(&mut guess)
.ok()
.expect("Failed to read line");
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => {
println!("Please Enter a number!\n");
continue;
}
};
println!("You guessed: {}", guess);
match guess.cmp(&secret) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
}
| main | identifier_name |
main.rs | extern crate rand;
use std::io;
use rand::Rng; // provide a random number generator
use std::cmp::Ordering;
fn main() | Err(_) => {
println!("Please Enter a number!\n");
continue;
}
};
println!("You guessed: {}", guess);
match guess.cmp(&secret) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
}
| {
let lower_bound = 1;
let upper_bound = 101;
println!("Guess the num! {} ~ {}", lower_bound, upper_bound);
let secret = rand::thread_rng() // get a copy of the rng
.gen_range(lower_bound, upper_bound);
// println!("The secret num is: {}", secret);
loop {
let mut guess = String::new();
println!("Input your guess:");
// print!("Input your guess:");
// io::stdout().flush();
io::stdin().read_line(&mut guess)
.ok()
.expect("Failed to read line");
let guess: u32 = match guess.trim().parse() {
Ok(num) => num, | identifier_body |
main.rs | extern crate rand;
use std::io;
use rand::Rng; // provide a random number generator
use std::cmp::Ordering;
fn main() {
let lower_bound = 1;
let upper_bound = 101;
println!("Guess the num! {} ~ {}", lower_bound, upper_bound);
let secret = rand::thread_rng() // get a copy of the rng
.gen_range(lower_bound, upper_bound);
// println!("The secret num is: {}", secret);
loop {
let mut guess = String::new();
println!("Input your guess:");
// print!("Input your guess:");
// io::stdout().flush();
io::stdin().read_line(&mut guess)
.ok()
.expect("Failed to read line");
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => |
};
println!("You guessed: {}", guess);
match guess.cmp(&secret) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
}
| {
println!("Please Enter a number!\n");
continue;
} | conditional_block |
main.rs | extern crate rand;
use std::io;
use rand::Rng; // provide a random number generator
use std::cmp::Ordering;
fn main() {
let lower_bound = 1;
let upper_bound = 101;
println!("Guess the num! {} ~ {}", lower_bound, upper_bound);
let secret = rand::thread_rng() // get a copy of the rng
.gen_range(lower_bound, upper_bound);
// println!("The secret num is: {}", secret);
loop {
let mut guess = String::new();
println!("Input your guess:");
// print!("Input your guess:");
// io::stdout().flush();
io::stdin().read_line(&mut guess)
.ok()
.expect("Failed to read line");
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => {
println!("Please Enter a number!\n");
continue;
}
};
| Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
} | println!("You guessed: {}", guess);
match guess.cmp(&secret) { | random_line_split |
main.rs | extern crate tcod;
extern crate rand;
use std::cmp;
use tcod::console::*;
use tcod::colors::{self, Color};
use rand::Rng;
// Actual size of the window
const SCREEN_WIDTH: i32 = 80;
const SCREEN_HEIGHT: i32 = 50;
// Size of the map in the window
const MAP_WIDTH: i32 = 80;
const MAP_HEIGHT: i32 = 45;
// Parameters for the autodungeon generator
const ROOM_MAX_SIZE: i32 = 10;
const ROOM_MIN_SIZE: i32 = 6;
const MAX_ROOMS: i32 = 30;
const LIMIT_FPS: i32 = 20;
const COLOR_DARK_WALL: Color = Color { r: 0, g: 0, b: 100};
const COLOR_DARK_GROUND: Color = Color {r: 50, g:50, b: 150};
type Map = Vec<Vec<Tile>>;
#[derive(Clone, Copy, Debug)]
struct Tile {
blocked: bool,
block_sight: bool,
}
impl Tile {
pub fn empty() -> Self {
Tile {blocked: false, block_sight: false}
}
pub fn wall() -> Self {
Tile {blocked: true, block_sight: true}
}
}
#[derive(Clone, Copy, Debug)]
struct Rect {
x1: i32,
y1: i32,
x2: i32,
y2: i32,
}
impl Rect {
pub fn new(x: i32, y:i32, w: i32, h: i32) -> Self {
Rect { x1: x, y1: y, x2: x + w, y2: y + h }
}
pub fn center(&self) -> (i32, i32) {
let center_x=(self.x1 + self.x2) / 2;
let center_y=(self.y1 + self.y2) / 2;
(center_x, center_y)
}
pub fn intersects_with(&self, other: &Rect) -> bool {
(self.x1 <= other.x2) && (self.x2 >= other.x1) &&
(self.y1 <= other.y2) && (self.y2 >= other.y1)
}
}
#[derive(Debug)]
struct Object {
x: i32,
y: i32,
char: char,
color: Color,
}
impl Object {
pub fn new (x: i32, y: i32, char: char, color: Color) -> Self {
Object {
x: x,
y: y,
char: char,
color: color,
}
}
pub fn move_by(&mut self, dx: i32, dy: i32, map: &Map){
if !map[(self.x + dx) as usize][(self.y + dy) as usize].blocked {
self.x += dx;
self.y += dy;
}
}
pub fn | (&self, con: &mut Console)
{
con.set_default_foreground(self.color);
con.put_char(self.x, self.y, self.char, BackgroundFlag::None);
}
pub fn clear(&self, con: &mut Console)
{
con.put_char(self.x, self.y, ' ', BackgroundFlag::None)
}
}
fn create_room(room: Rect, map: &mut Map) {
for x in (room.x1 + 1)..room.x2 {
for y in (room.y1 + 1)..room.y2 {
map[x as usize][y as usize] = Tile::empty();
}
}
}
fn create_h_tunnel(x1: i32, x2: i32, y: i32, map: &mut Map) {
for x in cmp::min(x1, x2)..cmp::max(x1, x2) + 1 {
map[x as usize][y as usize] = Tile::empty();
}
}
fn create_v_tunnel(y1: i32, y2: i32, y: i32, map: &mut Map) {
for x in cmp::min(y1, y2)..cmp::max(y1, y2) + 1 {
map[x as usize][y as usize] = Tile::empty();
}
}
fn make_map() -> (Map, (i32, i32)) {
let mut map = vec![vec![Tile::wall(); MAP_HEIGHT as usize]; MAP_WIDTH as usize];
let mut rooms = vec![];
let mut starting_position = (0,0);
for _ in 0..MAX_ROOMS {
let w = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
let h = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
let x = rand::thread_rng().gen_range(0, MAP_WIDTH - w);
let y = rand::thread_rng().gen_range(0, MAP_HEIGHT - h);
let new_room = Rect::new(x, y, w, h);
let failed = rooms.iter().any(|other_room| new_room.intersects_with(other_room));
if !failed {
create_room(new_room, &mut map);
let (new_x, new_y) = new_room.center();
if rooms.is_empty() {
starting_position = (new_x, new_y);
} else {
let (prev_x, prev_y) = rooms[rooms.len() - 1].center();
if rand::random() {
create_h_tunnel(prev_x, new_x, prev_y, &mut map);
create_v_tunnel(prev_x, new_x, new_y, &mut map);
}
}
rooms.push(new_room);
}
}
(map, starting_position)
}
fn render_all(root: &mut Root, con: &mut Offscreen, objects: &[Object], map: &Map) {
for y in 0..MAP_HEIGHT {
for x in 0..MAP_WIDTH {
let wall = map[x as usize][y as usize].block_sight;
if wall {
con.set_char_background(x, y, COLOR_DARK_WALL, BackgroundFlag::Set);
} else {
con.set_char_background(x, y, COLOR_DARK_GROUND, BackgroundFlag::Set);
}
}
}
for object in objects {
object.draw(con);
}
blit (con, (0,0), (MAP_WIDTH, MAP_HEIGHT), root, (0,0), 1.0, 1.0);
}
fn handle_keys(root: &mut Root, player: &mut Object, map: &Map) -> bool {
use tcod::input::Key;
use tcod::input::KeyCode::*;
let key = root.wait_for_keypress(true);
match key {
Key { code: Enter, alt: true,.. } => {
let fullscreen = root.is_fullscreen();
root.set_fullscreen(!fullscreen);
}
Key { code: Escape,..} => return true,
Key { code: Up,..} => player.move_by(0, -1, map),
Key { code: Down,.. } => player.move_by(0, 1, map),
Key { code: Left,.. } => player.move_by(-1, 0, map),
Key { code: Right,.. } => player.move_by(1, 0, map),
_ => {},
}
false
}
fn main (){
let mut root = Root::initializer()
.font("arial10x10.png", FontLayout::Tcod)
.font_type(FontType::Greyscale)
.size(SCREEN_WIDTH, SCREEN_WIDTH)
.title("Dungeon Crawler")
.init();
tcod::system::set_fps(LIMIT_FPS);
let mut con = Offscreen::new(MAP_WIDTH, MAP_HEIGHT);
let (map, (player_x, player_y)) = make_map();
let player = Object::new(player_x, player_y, '@', colors::WHITE);
let npc = Object::new(SCREEN_WIDTH /2 -5, SCREEN_HEIGHT /2, '@', colors::YELLOW);
let mut objects = [player, npc];
while !root.window_closed() {
render_all(&mut root, &mut con, &objects, &map);
root.flush();
for object in &objects {
object.clear(&mut con)
}
let player = &mut objects[0];
let exit = handle_keys(&mut root, player, &map);
if exit {
break
}
}
}
| draw | identifier_name |
main.rs | extern crate tcod;
extern crate rand;
use std::cmp;
use tcod::console::*;
use tcod::colors::{self, Color};
use rand::Rng;
// Actual size of the window
const SCREEN_WIDTH: i32 = 80;
const SCREEN_HEIGHT: i32 = 50;
// Size of the map in the window
const MAP_WIDTH: i32 = 80;
const MAP_HEIGHT: i32 = 45;
// Parameters for the autodungeon generator
const ROOM_MAX_SIZE: i32 = 10;
const ROOM_MIN_SIZE: i32 = 6;
const MAX_ROOMS: i32 = 30;
const LIMIT_FPS: i32 = 20;
const COLOR_DARK_WALL: Color = Color { r: 0, g: 0, b: 100};
const COLOR_DARK_GROUND: Color = Color {r: 50, g:50, b: 150};
type Map = Vec<Vec<Tile>>;
#[derive(Clone, Copy, Debug)]
struct Tile {
blocked: bool,
block_sight: bool,
}
impl Tile {
pub fn empty() -> Self {
Tile {blocked: false, block_sight: false}
}
pub fn wall() -> Self {
Tile {blocked: true, block_sight: true}
}
}
#[derive(Clone, Copy, Debug)]
struct Rect {
x1: i32,
y1: i32,
x2: i32,
y2: i32,
}
impl Rect {
pub fn new(x: i32, y:i32, w: i32, h: i32) -> Self {
Rect { x1: x, y1: y, x2: x + w, y2: y + h }
}
pub fn center(&self) -> (i32, i32) {
let center_x=(self.x1 + self.x2) / 2;
let center_y=(self.y1 + self.y2) / 2;
(center_x, center_y)
}
pub fn intersects_with(&self, other: &Rect) -> bool {
(self.x1 <= other.x2) && (self.x2 >= other.x1) &&
(self.y1 <= other.y2) && (self.y2 >= other.y1)
}
}
#[derive(Debug)]
struct Object {
x: i32,
y: i32,
char: char,
color: Color,
}
impl Object {
pub fn new (x: i32, y: i32, char: char, color: Color) -> Self {
Object {
x: x,
y: y,
char: char,
color: color,
}
}
pub fn move_by(&mut self, dx: i32, dy: i32, map: &Map){
if !map[(self.x + dx) as usize][(self.y + dy) as usize].blocked {
self.x += dx;
self.y += dy;
}
}
pub fn draw(&self, con: &mut Console)
{
con.set_default_foreground(self.color); | pub fn clear(&self, con: &mut Console)
{
con.put_char(self.x, self.y, ' ', BackgroundFlag::None)
}
}
fn create_room(room: Rect, map: &mut Map) {
for x in (room.x1 + 1)..room.x2 {
for y in (room.y1 + 1)..room.y2 {
map[x as usize][y as usize] = Tile::empty();
}
}
}
fn create_h_tunnel(x1: i32, x2: i32, y: i32, map: &mut Map) {
for x in cmp::min(x1, x2)..cmp::max(x1, x2) + 1 {
map[x as usize][y as usize] = Tile::empty();
}
}
fn create_v_tunnel(y1: i32, y2: i32, y: i32, map: &mut Map) {
for x in cmp::min(y1, y2)..cmp::max(y1, y2) + 1 {
map[x as usize][y as usize] = Tile::empty();
}
}
fn make_map() -> (Map, (i32, i32)) {
let mut map = vec![vec![Tile::wall(); MAP_HEIGHT as usize]; MAP_WIDTH as usize];
let mut rooms = vec![];
let mut starting_position = (0,0);
for _ in 0..MAX_ROOMS {
let w = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
let h = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
let x = rand::thread_rng().gen_range(0, MAP_WIDTH - w);
let y = rand::thread_rng().gen_range(0, MAP_HEIGHT - h);
let new_room = Rect::new(x, y, w, h);
let failed = rooms.iter().any(|other_room| new_room.intersects_with(other_room));
if !failed {
create_room(new_room, &mut map);
let (new_x, new_y) = new_room.center();
if rooms.is_empty() {
starting_position = (new_x, new_y);
} else {
let (prev_x, prev_y) = rooms[rooms.len() - 1].center();
if rand::random() {
create_h_tunnel(prev_x, new_x, prev_y, &mut map);
create_v_tunnel(prev_x, new_x, new_y, &mut map);
}
}
rooms.push(new_room);
}
}
(map, starting_position)
}
fn render_all(root: &mut Root, con: &mut Offscreen, objects: &[Object], map: &Map) {
for y in 0..MAP_HEIGHT {
for x in 0..MAP_WIDTH {
let wall = map[x as usize][y as usize].block_sight;
if wall {
con.set_char_background(x, y, COLOR_DARK_WALL, BackgroundFlag::Set);
} else {
con.set_char_background(x, y, COLOR_DARK_GROUND, BackgroundFlag::Set);
}
}
}
for object in objects {
object.draw(con);
}
blit (con, (0,0), (MAP_WIDTH, MAP_HEIGHT), root, (0,0), 1.0, 1.0);
}
fn handle_keys(root: &mut Root, player: &mut Object, map: &Map) -> bool {
use tcod::input::Key;
use tcod::input::KeyCode::*;
let key = root.wait_for_keypress(true);
match key {
Key { code: Enter, alt: true,.. } => {
let fullscreen = root.is_fullscreen();
root.set_fullscreen(!fullscreen);
}
Key { code: Escape,..} => return true,
Key { code: Up,..} => player.move_by(0, -1, map),
Key { code: Down,.. } => player.move_by(0, 1, map),
Key { code: Left,.. } => player.move_by(-1, 0, map),
Key { code: Right,.. } => player.move_by(1, 0, map),
_ => {},
}
false
}
fn main (){
let mut root = Root::initializer()
.font("arial10x10.png", FontLayout::Tcod)
.font_type(FontType::Greyscale)
.size(SCREEN_WIDTH, SCREEN_WIDTH)
.title("Dungeon Crawler")
.init();
tcod::system::set_fps(LIMIT_FPS);
let mut con = Offscreen::new(MAP_WIDTH, MAP_HEIGHT);
let (map, (player_x, player_y)) = make_map();
let player = Object::new(player_x, player_y, '@', colors::WHITE);
let npc = Object::new(SCREEN_WIDTH /2 -5, SCREEN_HEIGHT /2, '@', colors::YELLOW);
let mut objects = [player, npc];
while !root.window_closed() {
render_all(&mut root, &mut con, &objects, &map);
root.flush();
for object in &objects {
object.clear(&mut con)
}
let player = &mut objects[0];
let exit = handle_keys(&mut root, player, &map);
if exit {
break
}
}
} | con.put_char(self.x, self.y, self.char, BackgroundFlag::None);
}
| random_line_split |
main.rs | extern crate tcod;
extern crate rand;
use std::cmp;
use tcod::console::*;
use tcod::colors::{self, Color};
use rand::Rng;
// Actual size of the window
const SCREEN_WIDTH: i32 = 80;
const SCREEN_HEIGHT: i32 = 50;
// Size of the map in the window
const MAP_WIDTH: i32 = 80;
const MAP_HEIGHT: i32 = 45;
// Parameters for the autodungeon generator
const ROOM_MAX_SIZE: i32 = 10;
const ROOM_MIN_SIZE: i32 = 6;
const MAX_ROOMS: i32 = 30;
const LIMIT_FPS: i32 = 20;
const COLOR_DARK_WALL: Color = Color { r: 0, g: 0, b: 100};
const COLOR_DARK_GROUND: Color = Color {r: 50, g:50, b: 150};
type Map = Vec<Vec<Tile>>;
#[derive(Clone, Copy, Debug)]
struct Tile {
blocked: bool,
block_sight: bool,
}
impl Tile {
pub fn empty() -> Self {
Tile {blocked: false, block_sight: false}
}
pub fn wall() -> Self {
Tile {blocked: true, block_sight: true}
}
}
#[derive(Clone, Copy, Debug)]
struct Rect {
x1: i32,
y1: i32,
x2: i32,
y2: i32,
}
impl Rect {
pub fn new(x: i32, y:i32, w: i32, h: i32) -> Self {
Rect { x1: x, y1: y, x2: x + w, y2: y + h }
}
pub fn center(&self) -> (i32, i32) {
let center_x=(self.x1 + self.x2) / 2;
let center_y=(self.y1 + self.y2) / 2;
(center_x, center_y)
}
pub fn intersects_with(&self, other: &Rect) -> bool {
(self.x1 <= other.x2) && (self.x2 >= other.x1) &&
(self.y1 <= other.y2) && (self.y2 >= other.y1)
}
}
#[derive(Debug)]
struct Object {
x: i32,
y: i32,
char: char,
color: Color,
}
impl Object {
pub fn new (x: i32, y: i32, char: char, color: Color) -> Self {
Object {
x: x,
y: y,
char: char,
color: color,
}
}
pub fn move_by(&mut self, dx: i32, dy: i32, map: &Map){
if !map[(self.x + dx) as usize][(self.y + dy) as usize].blocked {
self.x += dx;
self.y += dy;
}
}
pub fn draw(&self, con: &mut Console)
{
con.set_default_foreground(self.color);
con.put_char(self.x, self.y, self.char, BackgroundFlag::None);
}
pub fn clear(&self, con: &mut Console)
{
con.put_char(self.x, self.y, ' ', BackgroundFlag::None)
}
}
fn create_room(room: Rect, map: &mut Map) {
for x in (room.x1 + 1)..room.x2 {
for y in (room.y1 + 1)..room.y2 {
map[x as usize][y as usize] = Tile::empty();
}
}
}
fn create_h_tunnel(x1: i32, x2: i32, y: i32, map: &mut Map) {
for x in cmp::min(x1, x2)..cmp::max(x1, x2) + 1 {
map[x as usize][y as usize] = Tile::empty();
}
}
fn create_v_tunnel(y1: i32, y2: i32, x: i32, map: &mut Map) {
for y in cmp::min(y1, y2)..cmp::max(y1, y2) + 1 {
map[x as usize][y as usize] = Tile::empty();
}
}
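// Builds the dungeon: carves up to MAX_ROOMS random, non-overlapping rooms,
// connects each new room to the previous one with an L-shaped tunnel, and
// returns the map plus the player's starting position (the center of the
// first room).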
fn make_map() -> (Map, (i32, i32)) {
let mut map = vec![vec![Tile::wall(); MAP_HEIGHT as usize]; MAP_WIDTH as usize];
let mut rooms = vec![];
let mut starting_position = (0,0);
for _ in 0..MAX_ROOMS {
let w = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
let h = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
let x = rand::thread_rng().gen_range(0, MAP_WIDTH - w);
let y = rand::thread_rng().gen_range(0, MAP_HEIGHT - h);
let new_room = Rect::new(x, y, w, h);
let failed = rooms.iter().any(|other_room| new_room.intersects_with(other_room));
if !failed {
create_room(new_room, &mut map);
let (new_x, new_y) = new_room.center();
if rooms.is_empty() {
starting_position = (new_x, new_y);
} else {
let (prev_x, prev_y) = rooms[rooms.len() - 1].center();
if rand::random() {
create_h_tunnel(prev_x, new_x, prev_y, &mut map);
create_v_tunnel(prev_y, new_y, new_x, &mut map);
} else {
create_v_tunnel(prev_y, new_y, prev_x, &mut map);
create_h_tunnel(prev_x, new_x, new_y, &mut map);
}
}
rooms.push(new_room);
}
}
(map, starting_position)
}
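// Renders one frame: paints every tile as wall or ground on the offscreen
// console, draws all objects on top, then blits the result onto the root
// console.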
fn render_all(root: &mut Root, con: &mut Offscreen, objects: &[Object], map: &Map) {
for y in 0..MAP_HEIGHT {
for x in 0..MAP_WIDTH {
let wall = map[x as usize][y as usize].block_sight;
if wall | else {
con.set_char_background(x, y, COLOR_DARK_GROUND, BackgroundFlag::Set);
}
}
}
for object in objects {
object.draw(con);
}
blit (con, (0,0), (MAP_WIDTH, MAP_HEIGHT), root, (0,0), 1.0, 1.0);
}
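// Handles a single keypress: Alt+Enter toggles fullscreen, Escape returns
// true to request exit, and the arrow keys move the player one tile if the
// destination is not blocked.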
fn handle_keys(root: &mut Root, player: &mut Object, map: &Map) -> bool {
use tcod::input::Key;
use tcod::input::KeyCode::*;
let key = root.wait_for_keypress(true);
match key {
Key { code: Enter, alt: true,.. } => {
let fullscreen = root.is_fullscreen();
root.set_fullscreen(!fullscreen);
}
Key { code: Escape,..} => return true,
Key { code: Up,..} => player.move_by(0, -1, map),
Key { code: Down,.. } => player.move_by(0, 1, map),
Key { code: Left,.. } => player.move_by(-1, 0, map),
Key { code: Right,.. } => player.move_by(1, 0, map),
_ => {},
}
false
}
fn main (){
let mut root = Root::initializer()
.font("arial10x10.png", FontLayout::Tcod)
.font_type(FontType::Greyscale)
.size(SCREEN_WIDTH, SCREEN_HEIGHT)
.title("Dungeon Crawler")
.init();
tcod::system::set_fps(LIMIT_FPS);
let mut con = Offscreen::new(MAP_WIDTH, MAP_HEIGHT);
let (map, (player_x, player_y)) = make_map();
let player = Object::new(player_x, player_y, '@', colors::WHITE);
let npc = Object::new(SCREEN_WIDTH /2 -5, SCREEN_HEIGHT /2, '@', colors::YELLOW);
let mut objects = [player, npc];
while !root.window_closed() {
render_all(&mut root, &mut con, &objects, &map);
root.flush();
for object in &objects {
object.clear(&mut con)
}
let player = &mut objects[0];
let exit = handle_keys(&mut root, player, &map);
if exit {
break
}
}
}
| {
con.set_char_background(x, y, COLOR_DARK_WALL, BackgroundFlag::Set);
} | conditional_block |
timeout.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use libc::c_int;
use std::mem;
use std::rt::task::BlockedTask;
use std::rt::rtio::IoResult;
use access;
use homing::{HomeHandle, HomingMissile};
use timer::TimerWatcher;
use uvll;
use uvio::UvIoFactory;
use {Loop, UvError, uv_error_to_io_error, Request, wakeup};
use {UvHandle, wait_until_woken_after};
/// Management of a timeout when gaining access to a portion of a duplex stream.
pub struct AccessTimeout<T> {
state: TimeoutState,
timer: Option<Box<TimerWatcher>>,
pub access: access::Access<T>,
}
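// RAII guard handed out by `AccessTimeout::grant`: while it is alive the
// holder has access to its half of the duplex stream, and dropping it resets
// a pending `RequestPending` marker back to `NoWaiter` so a later timeout
// does not try to cancel a request that no longer exists.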
pub struct Guard<'a, T> {
state: &'a mut TimeoutState,
pub access: access::Guard<'a, T>,
pub can_timeout: bool,
}
#[deriving(PartialEq)]
enum TimeoutState {
NoTimeout,
TimeoutPending(ClientState),
TimedOut,
}
#[deriving(PartialEq)]
enum ClientState {
NoWaiter,
AccessPending,
RequestPending,
}
struct TimerContext {
timeout: *mut AccessTimeout<()>,
callback: fn(*mut AccessTimeout<()>, &TimerContext),
user_unblock: fn(uint) -> Option<BlockedTask>,
user_payload: uint,
}
impl<T: Send> AccessTimeout<T> {
pub fn new(data: T) -> AccessTimeout<T> {
AccessTimeout {
state: NoTimeout,
timer: None,
access: access::Access::new(data),
}
}
/// Grants access to half of a duplex stream, timing out if necessary.
///
/// On success, Ok(Guard) is returned and access has been granted to the
/// stream. If a timeout occurs, then Err is returned with an appropriate
/// error.
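///
/// Rough usage sketch (hypothetical caller; `missile` is assumed to be a
/// `HomingMissile` already obtained from the owning I/O object):
///
/// ```ignore
/// let mut timeout = AccessTimeout::new(stream_state);
/// let guard = try!(timeout.grant(missile));
/// // use guard.access here; access is released when the guard is dropped
/// ```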
pub fn grant<'a>(&'a mut self, m: HomingMissile) -> IoResult<Guard<'a, T>> {
// First, flag that we're attempting to acquire access. This will allow
// us to cancel the pending grant if we time out while waiting for a
// grant.
match self.state {
NoTimeout => {},
TimeoutPending(ref mut client) => *client = AccessPending,
TimedOut => return Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
let access = self.access.grant(self as *mut _ as uint, m);
// After acquiring the grant, we need to flag ourselves as having a
// pending request so the timeout knows to cancel the request.
let can_timeout = match self.state {
NoTimeout => false,
TimeoutPending(ref mut client) => { *client = RequestPending; true }
TimedOut => return Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
};
Ok(Guard {
access: access,
state: &mut self.state,
can_timeout: can_timeout
})
}
pub fn timed_out(&self) -> bool {
match self.state {
TimedOut => true,
_ => false,
}
}
/// Sets the pending timeout to the value specified.
///
/// The home/loop variables are used to construct a timer if one has not
/// been previously constructed.
///
/// The callback will be invoked if the timeout elapses, and the data of
/// the timer will be set to `data`.
pub fn set_timeout(&mut self, ms: Option<u64>,
home: &HomeHandle,
loop_: &Loop,
cb: fn(uint) -> Option<BlockedTask>,
data: uint) {
self.state = NoTimeout;
let ms = match ms {
Some(ms) => ms,
None => return match self.timer {
Some(ref mut t) => t.stop(),
None => {}
}
};
// If we have a timeout, lazily initialize the timer which will be used
// to fire when the timeout runs out.
if self.timer.is_none() {
let mut timer = box TimerWatcher::new_home(loop_, home.clone());
let mut cx = box TimerContext {
timeout: self as *mut _ as *mut AccessTimeout<()>,
callback: real_cb::<T>,
user_unblock: cb,
user_payload: data,
};
unsafe {
timer.set_data(&mut *cx);
mem::forget(cx);
}
self.timer = Some(timer);
}
let timer = self.timer.get_mut_ref();
unsafe {
let cx = uvll::get_data_for_uv_handle(timer.handle);
let cx = cx as *mut TimerContext;
(*cx).user_unblock = cb;
(*cx).user_payload = data;
}
timer.stop();
timer.start(timer_cb, ms, 0);
self.state = TimeoutPending(NoWaiter);
extern fn timer_cb(timer: *mut uvll::uv_timer_t) {
let cx: &TimerContext = unsafe {
&*(uvll::get_data_for_uv_handle(timer) as *const TimerContext)
};
(cx.callback)(cx.timeout, cx);
}
fn real_cb<T: Send>(timeout: *mut AccessTimeout<()>, cx: &TimerContext) {
let timeout = timeout as *mut AccessTimeout<T>;
let me = unsafe { &mut *timeout };
match mem::replace(&mut me.state, TimedOut) {
TimedOut | NoTimeout => unreachable!(),
TimeoutPending(NoWaiter) => {}
TimeoutPending(AccessPending) => {
match unsafe { me.access.dequeue(me as *mut _ as uint) } {
Some(task) => task.reawaken(),
None => unreachable!(),
}
}
TimeoutPending(RequestPending) => {
match (cx.user_unblock)(cx.user_payload) {
Some(task) => task.reawaken(),
None => unreachable!(),
}
}
}
}
}
}
impl<T: Send> Clone for AccessTimeout<T> {
fn clone(&self) -> AccessTimeout<T> {
AccessTimeout {
access: self.access.clone(),
state: NoTimeout,
timer: None,
}
}
}
#[unsafe_destructor]
impl<'a, T> Drop for Guard<'a, T> {
fn drop(&mut self) |
}
#[unsafe_destructor]
impl<T> Drop for AccessTimeout<T> {
fn drop(&mut self) {
match self.timer {
Some(ref timer) => unsafe {
let data = uvll::get_data_for_uv_handle(timer.handle);
let _data: Box<TimerContext> = mem::transmute(data);
},
None => {}
}
}
}
////////////////////////////////////////////////////////////////////////////////
// Connect timeouts
////////////////////////////////////////////////////////////////////////////////
pub struct ConnectCtx {
pub status: c_int,
pub task: Option<BlockedTask>,
pub timer: Option<Box<TimerWatcher>>,
}
impl ConnectCtx {
pub fn connect<T>(
mut self, obj: T, timeout: Option<u64>, io: &mut UvIoFactory,
f: |&Request, &T, uvll::uv_connect_cb| -> c_int
) -> Result<T, UvError> {
let mut req = Request::new(uvll::UV_CONNECT);
let r = f(&req, &obj, connect_cb);
return match r {
0 => {
req.defuse(); // uv callback now owns this request
match timeout {
Some(t) => {
let mut timer = TimerWatcher::new(io);
timer.start(timer_cb, t, 0);
self.timer = Some(timer);
}
None => {}
}
wait_until_woken_after(&mut self.task, &io.loop_, || {
let data = &self as *const _ as *mut ConnectCtx;
match self.timer {
Some(ref mut timer) => unsafe { timer.set_data(data) },
None => {}
}
req.set_data(data);
});
// Make sure an erroneously fired callback doesn't have access
// to the context any more.
req.set_data(0 as *mut int);
// If we failed because of a timeout, drop the TcpWatcher as
// soon as possible because its data is now set to null and we
// want to cancel the callback ASAP.
match self.status {
0 => Ok(obj),
n => { drop(obj); Err(UvError(n)) }
}
}
n => Err(UvError(n))
};
extern fn timer_cb(handle: *mut uvll::uv_timer_t) {
// Don't close the corresponding tcp request, just wake up the task
// and let RAII take care of the pending watcher.
let cx: &mut ConnectCtx = unsafe {
&mut *(uvll::get_data_for_uv_handle(handle) as *mut ConnectCtx)
};
cx.status = uvll::ECANCELED;
wakeup(&mut cx.task);
}
extern fn connect_cb(req: *mut uvll::uv_connect_t, status: c_int) {
// This callback can be invoked with ECANCELED if the watcher is
// closed by the timeout callback. In that case we just want to free
// the request and be along our merry way.
let req = Request::wrap(req);
if status == uvll::ECANCELED { return }
// Apparently on windows when the handle is closed this callback may
// not be invoked with ECANCELED but rather another error code.
// Either way, if the data is null, then our timeout has expired
// and there's nothing we can do.
let data = unsafe { uvll::get_data_for_req(req.handle) };
if data.is_null() { return }
let cx: &mut ConnectCtx = unsafe { &mut *(data as *mut ConnectCtx) };
cx.status = status;
match cx.timer {
Some(ref mut t) => t.stop(),
None => {}
}
// Note that the timer callback doesn't cancel the connect request
// (that's the job of uv_close()), so it's possible for this
// callback to get triggered after the timeout callback fires, but
// before the task wakes up. In that case, we did indeed
// successfully connect, but we don't need to wake someone up. We
// updated the status above (correctly so), and the task will pick
// up on this when it wakes up.
if cx.task.is_some() {
wakeup(&mut cx.task);
}
}
}
}
pub struct AcceptTimeout<T> {
access: AccessTimeout<AcceptorState<T>>,
}
struct AcceptorState<T> {
blocked_acceptor: Option<BlockedTask>,
pending: Vec<IoResult<T>>,
}
impl<T: Send> AcceptTimeout<T> {
pub fn new() -> AcceptTimeout<T> {
AcceptTimeout {
access: AccessTimeout::new(AcceptorState {
blocked_acceptor: None,
pending: Vec::new(),
})
}
}
pub fn accept(&mut self,
missile: HomingMissile,
loop_: &Loop) -> IoResult<T> {
// If we've timed out but we're not closed yet, poll the state of the
// queue to see if we can peel off a connection.
if self.access.timed_out() && !self.access.access.is_closed(&missile) {
let tmp = self.access.access.get_mut(&missile);
return match tmp.pending.remove(0) {
Some(msg) => msg,
None => Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
}
// Now that we're not polling, attempt to gain access and then peel off
// a connection. If we have no pending connections, then we need to go
// to sleep and wait for one.
//
// Note that if we're woken up for a pending connection then we're
// guaranteed that the check above will not steal our connection due to
// the single-threaded nature of the event loop.
let mut guard = try!(self.access.grant(missile));
if guard.access.is_closed() {
return Err(uv_error_to_io_error(UvError(uvll::EOF)))
}
match guard.access.pending.remove(0) {
Some(msg) => return msg,
None => {}
}
wait_until_woken_after(&mut guard.access.blocked_acceptor, loop_, || {});
match guard.access.pending.remove(0) {
_ if guard.access.is_closed() => {
Err(uv_error_to_io_error(UvError(uvll::EOF)))
}
Some(msg) => msg,
None => Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
}
pub unsafe fn push(&mut self, t: IoResult<T>) {
let state = self.access.access.unsafe_get();
(*state).pending.push(t);
let _ = (*state).blocked_acceptor.take().map(|t| t.reawaken());
}
pub fn set_timeout(&mut self,
ms: Option<u64>,
loop_: &Loop,
home: &HomeHandle) {
self.access.set_timeout(ms, home, loop_, cancel_accept::<T>,
self as *mut _ as uint);
fn cancel_accept<T: Send>(me: uint) -> Option<BlockedTask> {
unsafe {
let me: &mut AcceptTimeout<T> = mem::transmute(me);
(*me.access.access.unsafe_get()).blocked_acceptor.take()
}
}
}
pub fn close(&mut self, m: HomingMissile) {
self.access.access.close(&m);
let task = self.access.access.get_mut(&m).blocked_acceptor.take();
drop(m);
let _ = task.map(|t| t.reawaken());
}
}
impl<T: Send> Clone for AcceptTimeout<T> {
fn clone(&self) -> AcceptTimeout<T> {
AcceptTimeout { access: self.access.clone() }
}
}
| {
match *self.state {
TimeoutPending(NoWaiter) | TimeoutPending(AccessPending) =>
unreachable!(),
NoTimeout | TimedOut => {}
TimeoutPending(RequestPending) => {
*self.state = TimeoutPending(NoWaiter);
}
}
} | identifier_body |
timeout.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use libc::c_int;
use std::mem;
use std::rt::task::BlockedTask;
use std::rt::rtio::IoResult;
use access;
use homing::{HomeHandle, HomingMissile};
use timer::TimerWatcher;
use uvll;
use uvio::UvIoFactory;
use {Loop, UvError, uv_error_to_io_error, Request, wakeup};
use {UvHandle, wait_until_woken_after};
/// Management of a timeout when gaining access to a portion of a duplex stream.
pub struct AccessTimeout<T> {
state: TimeoutState,
timer: Option<Box<TimerWatcher>>,
pub access: access::Access<T>,
}
pub struct Guard<'a, T> {
state: &'a mut TimeoutState,
pub access: access::Guard<'a, T>,
pub can_timeout: bool,
}
#[deriving(PartialEq)]
enum TimeoutState {
NoTimeout,
TimeoutPending(ClientState),
TimedOut,
}
#[deriving(PartialEq)]
enum ClientState {
NoWaiter,
AccessPending,
RequestPending,
}
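// Context stored behind the uv timer's data pointer. The C timer callback
// only receives the raw handle, so this struct carries everything needed to
// get back to the owning `AccessTimeout` and the user's unblock function.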
struct TimerContext {
timeout: *mut AccessTimeout<()>,
callback: fn(*mut AccessTimeout<()>, &TimerContext),
user_unblock: fn(uint) -> Option<BlockedTask>,
user_payload: uint,
}
impl<T: Send> AccessTimeout<T> {
pub fn new(data: T) -> AccessTimeout<T> {
AccessTimeout {
state: NoTimeout,
timer: None,
access: access::Access::new(data),
}
}
/// Grants access to half of a duplex stream, timing out if necessary.
///
/// On success, Ok(Guard) is returned and access has been granted to the
/// stream. If a timeout occurs, then Err is returned with an appropriate
/// error.
pub fn grant<'a>(&'a mut self, m: HomingMissile) -> IoResult<Guard<'a, T>> {
// First, flag that we're attempting to acquire access. This will allow
// us to cancel the pending grant if we time out while waiting for a
// grant.
match self.state {
NoTimeout => {},
TimeoutPending(ref mut client) => *client = AccessPending,
TimedOut => return Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
let access = self.access.grant(self as *mut _ as uint, m);
// After acquiring the grant, we need to flag ourselves as having a
// pending request so the timeout knows to cancel the request.
let can_timeout = match self.state {
NoTimeout => false,
TimeoutPending(ref mut client) => { *client = RequestPending; true }
TimedOut => return Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
};
Ok(Guard {
access: access,
state: &mut self.state,
can_timeout: can_timeout
})
}
pub fn timed_out(&self) -> bool {
match self.state {
TimedOut => true,
_ => false,
}
}
/// Sets the pending timeout to the value specified.
///
/// The home/loop variables are used to construct a timer if one has not
/// been previously constructed.
///
/// The callback will be invoked if the timeout elapses, and the data of
/// the timer will be set to `data`.
pub fn set_timeout(&mut self, ms: Option<u64>,
home: &HomeHandle,
loop_: &Loop,
cb: fn(uint) -> Option<BlockedTask>,
data: uint) {
self.state = NoTimeout;
let ms = match ms {
Some(ms) => ms,
None => return match self.timer {
Some(ref mut t) => t.stop(),
None => {}
}
};
// If we have a timeout, lazily initialize the timer which will be used
// to fire when the timeout runs out.
if self.timer.is_none() {
let mut timer = box TimerWatcher::new_home(loop_, home.clone());
let mut cx = box TimerContext {
timeout: self as *mut _ as *mut AccessTimeout<()>,
callback: real_cb::<T>,
user_unblock: cb,
user_payload: data,
};
unsafe {
timer.set_data(&mut *cx);
mem::forget(cx);
}
self.timer = Some(timer);
}
let timer = self.timer.get_mut_ref();
unsafe {
let cx = uvll::get_data_for_uv_handle(timer.handle);
let cx = cx as *mut TimerContext;
(*cx).user_unblock = cb;
(*cx).user_payload = data;
}
timer.stop();
timer.start(timer_cb, ms, 0);
self.state = TimeoutPending(NoWaiter);
extern fn timer_cb(timer: *mut uvll::uv_timer_t) {
let cx: &TimerContext = unsafe {
&*(uvll::get_data_for_uv_handle(timer) as *const TimerContext)
};
(cx.callback)(cx.timeout, cx);
}
fn real_cb<T: Send>(timeout: *mut AccessTimeout<()>, cx: &TimerContext) {
let timeout = timeout as *mut AccessTimeout<T>;
let me = unsafe { &mut *timeout };
match mem::replace(&mut me.state, TimedOut) {
TimedOut | NoTimeout => unreachable!(),
TimeoutPending(NoWaiter) => {}
TimeoutPending(AccessPending) => {
match unsafe { me.access.dequeue(me as *mut _ as uint) } {
Some(task) => task.reawaken(),
None => unreachable!(),
}
}
TimeoutPending(RequestPending) => {
match (cx.user_unblock)(cx.user_payload) {
Some(task) => task.reawaken(),
None => unreachable!(),
}
}
}
}
}
}
impl<T: Send> Clone for AccessTimeout<T> {
fn clone(&self) -> AccessTimeout<T> {
AccessTimeout {
access: self.access.clone(),
state: NoTimeout,
timer: None,
}
}
}
#[unsafe_destructor]
impl<'a, T> Drop for Guard<'a, T> {
fn | (&mut self) {
match *self.state {
TimeoutPending(NoWaiter) | TimeoutPending(AccessPending) =>
unreachable!(),
NoTimeout | TimedOut => {}
TimeoutPending(RequestPending) => {
*self.state = TimeoutPending(NoWaiter);
}
}
}
}
#[unsafe_destructor]
impl<T> Drop for AccessTimeout<T> {
fn drop(&mut self) {
match self.timer {
Some(ref timer) => unsafe {
let data = uvll::get_data_for_uv_handle(timer.handle);
let _data: Box<TimerContext> = mem::transmute(data);
},
None => {}
}
}
}
////////////////////////////////////////////////////////////////////////////////
// Connect timeouts
////////////////////////////////////////////////////////////////////////////////
pub struct ConnectCtx {
pub status: c_int,
pub task: Option<BlockedTask>,
pub timer: Option<Box<TimerWatcher>>,
}
impl ConnectCtx {
pub fn connect<T>(
mut self, obj: T, timeout: Option<u64>, io: &mut UvIoFactory,
f: |&Request, &T, uvll::uv_connect_cb| -> c_int
) -> Result<T, UvError> {
let mut req = Request::new(uvll::UV_CONNECT);
let r = f(&req, &obj, connect_cb);
return match r {
0 => {
req.defuse(); // uv callback now owns this request
match timeout {
Some(t) => {
let mut timer = TimerWatcher::new(io);
timer.start(timer_cb, t, 0);
self.timer = Some(timer);
}
None => {}
}
wait_until_woken_after(&mut self.task, &io.loop_, || {
let data = &self as *const _ as *mut ConnectCtx;
match self.timer {
Some(ref mut timer) => unsafe { timer.set_data(data) },
None => {}
}
req.set_data(data);
});
// Make sure an erroneously fired callback doesn't have access
// to the context any more.
req.set_data(0 as *mut int);
// If we failed because of a timeout, drop the TcpWatcher as
// soon as possible because its data is now set to null and we
// want to cancel the callback ASAP.
match self.status {
0 => Ok(obj),
n => { drop(obj); Err(UvError(n)) }
}
}
n => Err(UvError(n))
};
extern fn timer_cb(handle: *mut uvll::uv_timer_t) {
// Don't close the corresponding tcp request, just wake up the task
// and let RAII take care of the pending watcher.
let cx: &mut ConnectCtx = unsafe {
&mut *(uvll::get_data_for_uv_handle(handle) as *mut ConnectCtx)
};
cx.status = uvll::ECANCELED;
wakeup(&mut cx.task);
}
extern fn connect_cb(req: *mut uvll::uv_connect_t, status: c_int) {
// This callback can be invoked with ECANCELED if the watcher is
// closed by the timeout callback. In that case we just want to free
// the request and be along our merry way.
let req = Request::wrap(req);
if status == uvll::ECANCELED { return }
// Apparently on windows when the handle is closed this callback may
// not be invoked with ECANCELED but rather another error code.
// Either way, if the data is null, then our timeout has expired
// and there's nothing we can do.
let data = unsafe { uvll::get_data_for_req(req.handle) };
if data.is_null() { return }
let cx: &mut ConnectCtx = unsafe { &mut *(data as *mut ConnectCtx) };
cx.status = status;
match cx.timer {
Some(ref mut t) => t.stop(),
None => {}
}
// Note that the timer callback doesn't cancel the connect request
// (that's the job of uv_close()), so it's possible for this
// callback to get triggered after the timeout callback fires, but
// before the task wakes up. In that case, we did indeed
// successfully connect, but we don't need to wake someone up. We
// updated the status above (correctly so), and the task will pick
// up on this when it wakes up.
if cx.task.is_some() {
wakeup(&mut cx.task);
}
}
}
}
pub struct AcceptTimeout<T> {
access: AccessTimeout<AcceptorState<T>>,
}
struct AcceptorState<T> {
blocked_acceptor: Option<BlockedTask>,
pending: Vec<IoResult<T>>,
}
impl<T: Send> AcceptTimeout<T> {
pub fn new() -> AcceptTimeout<T> {
AcceptTimeout {
access: AccessTimeout::new(AcceptorState {
blocked_acceptor: None,
pending: Vec::new(),
})
}
}
pub fn accept(&mut self,
missile: HomingMissile,
loop_: &Loop) -> IoResult<T> {
// If we've timed out but we're not closed yet, poll the state of the
// queue to see if we can peel off a connection.
if self.access.timed_out() && !self.access.access.is_closed(&missile) {
let tmp = self.access.access.get_mut(&missile);
return match tmp.pending.remove(0) {
Some(msg) => msg,
None => Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
}
// Now that we're not polling, attempt to gain access and then peel off
// a connection. If we have no pending connections, then we need to go
// to sleep and wait for one.
//
// Note that if we're woken up for a pending connection then we're
// guaranteed that the check above will not steal our connection due to
// the single-threaded nature of the event loop.
let mut guard = try!(self.access.grant(missile));
if guard.access.is_closed() {
return Err(uv_error_to_io_error(UvError(uvll::EOF)))
}
match guard.access.pending.remove(0) {
Some(msg) => return msg,
None => {}
}
wait_until_woken_after(&mut guard.access.blocked_acceptor, loop_, || {});
match guard.access.pending.remove(0) {
_ if guard.access.is_closed() => {
Err(uv_error_to_io_error(UvError(uvll::EOF)))
}
Some(msg) => msg,
None => Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
}
pub unsafe fn push(&mut self, t: IoResult<T>) {
let state = self.access.access.unsafe_get();
(*state).pending.push(t);
let _ = (*state).blocked_acceptor.take().map(|t| t.reawaken());
}
pub fn set_timeout(&mut self,
ms: Option<u64>,
loop_: &Loop,
home: &HomeHandle) {
self.access.set_timeout(ms, home, loop_, cancel_accept::<T>,
self as *mut _ as uint);
fn cancel_accept<T: Send>(me: uint) -> Option<BlockedTask> {
unsafe {
let me: &mut AcceptTimeout<T> = mem::transmute(me);
(*me.access.access.unsafe_get()).blocked_acceptor.take()
}
}
}
pub fn close(&mut self, m: HomingMissile) {
self.access.access.close(&m);
let task = self.access.access.get_mut(&m).blocked_acceptor.take();
drop(m);
let _ = task.map(|t| t.reawaken());
}
}
impl<T: Send> Clone for AcceptTimeout<T> {
fn clone(&self) -> AcceptTimeout<T> {
AcceptTimeout { access: self.access.clone() }
}
}
| drop | identifier_name |
timeout.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use libc::c_int;
use std::mem;
use std::rt::task::BlockedTask;
use std::rt::rtio::IoResult;
use access;
use homing::{HomeHandle, HomingMissile};
use timer::TimerWatcher;
use uvll;
use uvio::UvIoFactory;
use {Loop, UvError, uv_error_to_io_error, Request, wakeup};
use {UvHandle, wait_until_woken_after};
/// Management of a timeout when gaining access to a portion of a duplex stream.
pub struct AccessTimeout<T> {
state: TimeoutState,
timer: Option<Box<TimerWatcher>>,
pub access: access::Access<T>,
}
pub struct Guard<'a, T> {
state: &'a mut TimeoutState,
pub access: access::Guard<'a, T>,
pub can_timeout: bool,
}
#[deriving(PartialEq)]
enum TimeoutState {
NoTimeout,
TimeoutPending(ClientState),
TimedOut,
}
#[deriving(PartialEq)]
enum ClientState {
NoWaiter,
AccessPending,
RequestPending,
}
struct TimerContext {
timeout: *mut AccessTimeout<()>,
callback: fn(*mut AccessTimeout<()>, &TimerContext),
user_unblock: fn(uint) -> Option<BlockedTask>,
user_payload: uint,
}
impl<T: Send> AccessTimeout<T> {
pub fn new(data: T) -> AccessTimeout<T> {
AccessTimeout {
state: NoTimeout,
timer: None,
access: access::Access::new(data),
}
}
/// Grants access to half of a duplex stream, timing out if necessary.
///
/// On success, Ok(Guard) is returned and access has been granted to the
/// stream. If a timeout occurs, then Err is returned with an appropriate
/// error.
pub fn grant<'a>(&'a mut self, m: HomingMissile) -> IoResult<Guard<'a, T>> {
// First, flag that we're attempting to acquire access. This will allow
// us to cancel the pending grant if we time out while waiting for a
// grant.
match self.state {
NoTimeout => {},
TimeoutPending(ref mut client) => *client = AccessPending,
TimedOut => return Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
let access = self.access.grant(self as *mut _ as uint, m);
// After acquiring the grant, we need to flag ourselves as having a
// pending request so the timeout knows to cancel the request.
let can_timeout = match self.state {
NoTimeout => false,
TimeoutPending(ref mut client) => { *client = RequestPending; true }
TimedOut => return Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
};
Ok(Guard {
access: access,
state: &mut self.state,
can_timeout: can_timeout
})
}
pub fn timed_out(&self) -> bool {
match self.state {
TimedOut => true,
_ => false,
}
}
/// Sets the pending timeout to the value specified.
///
/// The home/loop variables are used to construct a timer if one has not
/// been previously constructed.
///
/// The callback will be invoked if the timeout elapses, and the data of
/// the timer will be set to `data`.
pub fn set_timeout(&mut self, ms: Option<u64>,
home: &HomeHandle,
loop_: &Loop,
cb: fn(uint) -> Option<BlockedTask>,
data: uint) {
self.state = NoTimeout;
let ms = match ms {
Some(ms) => ms,
None => return match self.timer {
Some(ref mut t) => t.stop(),
None => {}
}
};
// If we have a timeout, lazily initialize the timer which will be used
// to fire when the timeout runs out.
if self.timer.is_none() {
let mut timer = box TimerWatcher::new_home(loop_, home.clone());
let mut cx = box TimerContext {
timeout: self as *mut _ as *mut AccessTimeout<()>,
callback: real_cb::<T>,
user_unblock: cb,
user_payload: data,
};
unsafe {
timer.set_data(&mut *cx);
mem::forget(cx);
}
self.timer = Some(timer);
}
let timer = self.timer.get_mut_ref();
unsafe {
let cx = uvll::get_data_for_uv_handle(timer.handle);
let cx = cx as *mut TimerContext;
(*cx).user_unblock = cb;
(*cx).user_payload = data;
}
timer.stop();
timer.start(timer_cb, ms, 0);
self.state = TimeoutPending(NoWaiter);
extern fn timer_cb(timer: *mut uvll::uv_timer_t) {
let cx: &TimerContext = unsafe {
&*(uvll::get_data_for_uv_handle(timer) as *const TimerContext)
};
(cx.callback)(cx.timeout, cx);
}
fn real_cb<T: Send>(timeout: *mut AccessTimeout<()>, cx: &TimerContext) {
let timeout = timeout as *mut AccessTimeout<T>;
let me = unsafe { &mut *timeout };
match mem::replace(&mut me.state, TimedOut) {
TimedOut | NoTimeout => unreachable!(),
TimeoutPending(NoWaiter) => {}
TimeoutPending(AccessPending) => {
match unsafe { me.access.dequeue(me as *mut _ as uint) } {
Some(task) => task.reawaken(),
None => unreachable!(),
}
}
TimeoutPending(RequestPending) => {
match (cx.user_unblock)(cx.user_payload) {
Some(task) => task.reawaken(),
None => unreachable!(),
}
}
}
}
}
}
impl<T: Send> Clone for AccessTimeout<T> {
fn clone(&self) -> AccessTimeout<T> {
AccessTimeout {
access: self.access.clone(),
state: NoTimeout,
timer: None,
}
}
}
#[unsafe_destructor]
impl<'a, T> Drop for Guard<'a, T> {
fn drop(&mut self) {
match *self.state {
TimeoutPending(NoWaiter) | TimeoutPending(AccessPending) =>
unreachable!(),
NoTimeout | TimedOut => {}
TimeoutPending(RequestPending) => {
*self.state = TimeoutPending(NoWaiter);
}
}
}
}
#[unsafe_destructor]
impl<T> Drop for AccessTimeout<T> {
fn drop(&mut self) { | match self.timer {
Some(ref timer) => unsafe {
let data = uvll::get_data_for_uv_handle(timer.handle);
let _data: Box<TimerContext> = mem::transmute(data);
},
None => {}
}
}
}
////////////////////////////////////////////////////////////////////////////////
// Connect timeouts
////////////////////////////////////////////////////////////////////////////////
pub struct ConnectCtx {
pub status: c_int,
pub task: Option<BlockedTask>,
pub timer: Option<Box<TimerWatcher>>,
}
impl ConnectCtx {
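// Starts the connect request via `f`, optionally arms a timer that cancels
// it after `timeout` milliseconds, then blocks the current task until either
// the connect callback or the timer callback wakes it up again.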
pub fn connect<T>(
mut self, obj: T, timeout: Option<u64>, io: &mut UvIoFactory,
f: |&Request, &T, uvll::uv_connect_cb| -> c_int
) -> Result<T, UvError> {
let mut req = Request::new(uvll::UV_CONNECT);
let r = f(&req, &obj, connect_cb);
return match r {
0 => {
req.defuse(); // uv callback now owns this request
match timeout {
Some(t) => {
let mut timer = TimerWatcher::new(io);
timer.start(timer_cb, t, 0);
self.timer = Some(timer);
}
None => {}
}
wait_until_woken_after(&mut self.task, &io.loop_, || {
let data = &self as *const _ as *mut ConnectCtx;
match self.timer {
Some(ref mut timer) => unsafe { timer.set_data(data) },
None => {}
}
req.set_data(data);
});
// Make sure an erroneously fired callback doesn't have access
// to the context any more.
req.set_data(0 as *mut int);
// If we failed because of a timeout, drop the TcpWatcher as
// soon as possible because its data is now set to null and we
// want to cancel the callback ASAP.
match self.status {
0 => Ok(obj),
n => { drop(obj); Err(UvError(n)) }
}
}
n => Err(UvError(n))
};
extern fn timer_cb(handle: *mut uvll::uv_timer_t) {
// Don't close the corresponding tcp request, just wake up the task
// and let RAII take care of the pending watcher.
let cx: &mut ConnectCtx = unsafe {
&mut *(uvll::get_data_for_uv_handle(handle) as *mut ConnectCtx)
};
cx.status = uvll::ECANCELED;
wakeup(&mut cx.task);
}
extern fn connect_cb(req: *mut uvll::uv_connect_t, status: c_int) {
// This callback can be invoked with ECANCELED if the watcher is
// closed by the timeout callback. In that case we just want to free
// the request and be along our merry way.
let req = Request::wrap(req);
if status == uvll::ECANCELED { return }
// Apparently on windows when the handle is closed this callback may
// not be invoked with ECANCELED but rather another error code.
// Either way, if the data is null, then our timeout has expired
// and there's nothing we can do.
let data = unsafe { uvll::get_data_for_req(req.handle) };
if data.is_null() { return }
let cx: &mut ConnectCtx = unsafe { &mut *(data as *mut ConnectCtx) };
cx.status = status;
match cx.timer {
Some(ref mut t) => t.stop(),
None => {}
}
// Note that the timer callback doesn't cancel the connect request
// (that's the job of uv_close()), so it's possible for this
// callback to get triggered after the timeout callback fires, but
// before the task wakes up. In that case, we did indeed
// successfully connect, but we don't need to wake someone up. We
// updated the status above (correctly so), and the task will pick
// up on this when it wakes up.
if cx.task.is_some() {
wakeup(&mut cx.task);
}
}
}
}
pub struct AcceptTimeout<T> {
access: AccessTimeout<AcceptorState<T>>,
}
struct AcceptorState<T> {
blocked_acceptor: Option<BlockedTask>,
pending: Vec<IoResult<T>>,
}
impl<T: Send> AcceptTimeout<T> {
pub fn new() -> AcceptTimeout<T> {
AcceptTimeout {
access: AccessTimeout::new(AcceptorState {
blocked_acceptor: None,
pending: Vec::new(),
})
}
}
pub fn accept(&mut self,
missile: HomingMissile,
loop_: &Loop) -> IoResult<T> {
// If we've timed out but we're not closed yet, poll the state of the
// queue to see if we can peel off a connection.
if self.access.timed_out() && !self.access.access.is_closed(&missile) {
let tmp = self.access.access.get_mut(&missile);
return match tmp.pending.remove(0) {
Some(msg) => msg,
None => Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
}
// Now that we're not polling, attempt to gain access and then peel off
// a connection. If we have no pending connections, then we need to go
// to sleep and wait for one.
//
// Note that if we're woken up for a pending connection then we're
// guaranteed that the check above will not steal our connection due to
// the single-threaded nature of the event loop.
let mut guard = try!(self.access.grant(missile));
if guard.access.is_closed() {
return Err(uv_error_to_io_error(UvError(uvll::EOF)))
}
match guard.access.pending.remove(0) {
Some(msg) => return msg,
None => {}
}
wait_until_woken_after(&mut guard.access.blocked_acceptor, loop_, || {});
match guard.access.pending.remove(0) {
_ if guard.access.is_closed() => {
Err(uv_error_to_io_error(UvError(uvll::EOF)))
}
Some(msg) => msg,
None => Err(uv_error_to_io_error(UvError(uvll::ECANCELED)))
}
}
pub unsafe fn push(&mut self, t: IoResult<T>) {
let state = self.access.access.unsafe_get();
(*state).pending.push(t);
let _ = (*state).blocked_acceptor.take().map(|t| t.reawaken());
}
pub fn set_timeout(&mut self,
ms: Option<u64>,
loop_: &Loop,
home: &HomeHandle) {
self.access.set_timeout(ms, home, loop_, cancel_accept::<T>,
self as *mut _ as uint);
fn cancel_accept<T: Send>(me: uint) -> Option<BlockedTask> {
unsafe {
let me: &mut AcceptTimeout<T> = mem::transmute(me);
(*me.access.access.unsafe_get()).blocked_acceptor.take()
}
}
}
pub fn close(&mut self, m: HomingMissile) {
self.access.access.close(&m);
let task = self.access.access.get_mut(&m).blocked_acceptor.take();
drop(m);
let _ = task.map(|t| t.reawaken());
}
}
impl<T: Send> Clone for AcceptTimeout<T> {
fn clone(&self) -> AcceptTimeout<T> {
AcceptTimeout { access: self.access.clone() }
}
} | random_line_split |
|
login.rs | use command_prelude::*;
use std::io::{self, BufRead};
use cargo::core::{Source, SourceId};
use cargo::sources::RegistrySource;
use cargo::util::{CargoError, CargoResultExt};
use cargo::ops;
pub fn cli() -> App {
subcommand("login")
.about(
"Save an api token from the registry locally. \
If token is not specified, it will be read from stdin.",
)
.arg(Arg::with_name("token"))
.arg(opt("host", "Host to set the token for").value_name("HOST"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult | .unwrap_or(config.api.unwrap())
}
};
println!("please visit {}me and paste the API Token below", host);
let mut line = String::new();
let input = io::stdin();
input
.lock()
.read_line(&mut line)
.chain_err(|| "failed to read stdin")
.map_err(CargoError::from)?;
line.trim().to_string()
}
};
ops::registry_login(config, token, registry)?;
Ok(())
}
| {
let registry = args.registry(config)?;
let token = match args.value_of("token") {
Some(token) => token.to_string(),
None => {
let host = match registry {
Some(ref _registry) => {
return Err(format_err!(
"token must be provided when \
--registry is provided."
).into());
}
None => {
let src = SourceId::crates_io(config)?;
let mut src = RegistrySource::remote(&src, config);
src.update()?;
let config = src.config()?.unwrap();
args.value_of("host")
.map(|s| s.to_string()) | identifier_body |
login.rs | use command_prelude::*;
use std::io::{self, BufRead};
use cargo::core::{Source, SourceId};
use cargo::sources::RegistrySource;
use cargo::util::{CargoError, CargoResultExt};
use cargo::ops;
pub fn cli() -> App {
subcommand("login")
.about(
"Save an api token from the registry locally. \
If token is not specified, it will be read from stdin.",
)
.arg(Arg::with_name("token"))
.arg(opt("host", "Host to set the token for").value_name("HOST"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
}
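// Illustrative invocations (not taken from cargo's docs): `cargo login <token>`
// stores a token for the default registry, while
// `cargo login --registry <name> <token>` targets an alternate registry.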
pub fn | (config: &mut Config, args: &ArgMatches) -> CliResult {
let registry = args.registry(config)?;
let token = match args.value_of("token") {
Some(token) => token.to_string(),
None => {
let host = match registry {
Some(ref _registry) => {
return Err(format_err!(
"token must be provided when \
--registry is provided."
).into());
}
None => {
let src = SourceId::crates_io(config)?;
let mut src = RegistrySource::remote(&src, config);
src.update()?;
let config = src.config()?.unwrap();
args.value_of("host")
.map(|s| s.to_string())
.unwrap_or(config.api.unwrap())
}
};
println!("please visit {}me and paste the API Token below", host);
let mut line = String::new();
let input = io::stdin();
input
.lock()
.read_line(&mut line)
.chain_err(|| "failed to read stdin")
.map_err(CargoError::from)?;
line.trim().to_string()
}
};
ops::registry_login(config, token, registry)?;
Ok(())
}
| exec | identifier_name |
login.rs | use command_prelude::*;
use std::io::{self, BufRead};
use cargo::core::{Source, SourceId}; | use cargo::ops;
pub fn cli() -> App {
subcommand("login")
.about(
"Save an api token from the registry locally. \
If token is not specified, it will be read from stdin.",
)
.arg(Arg::with_name("token"))
.arg(opt("host", "Host to set the token for").value_name("HOST"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let registry = args.registry(config)?;
let token = match args.value_of("token") {
Some(token) => token.to_string(),
None => {
let host = match registry {
Some(ref _registry) => {
return Err(format_err!(
"token must be provided when \
--registry is provided."
).into());
}
None => {
let src = SourceId::crates_io(config)?;
let mut src = RegistrySource::remote(&src, config);
src.update()?;
let config = src.config()?.unwrap();
args.value_of("host")
.map(|s| s.to_string())
.unwrap_or(config.api.unwrap())
}
};
println!("please visit {}me and paste the API Token below", host);
let mut line = String::new();
let input = io::stdin();
input
.lock()
.read_line(&mut line)
.chain_err(|| "failed to read stdin")
.map_err(CargoError::from)?;
line.trim().to_string()
}
};
ops::registry_login(config, token, registry)?;
Ok(())
} | use cargo::sources::RegistrySource;
use cargo::util::{CargoError, CargoResultExt}; | random_line_split |
abi.rs | use std::{fmt::Debug, result::Result};
use wasm_bindgen::{
convert::FromWasmAbi,
describe::{inform, WasmDescribe},
JsValue,
};
pub type JsResult<T> = Result<T, JsValue>;
pub trait Context<T> {
fn context(self, msg: &dyn Debug) -> JsResult<T>;
}
impl<T, TError> Context<T> for Result<T, TError>
where
TError: Debug,
{
fn context(self, msg: &dyn Debug) -> JsResult<T> {
match self {
Ok(v) => Ok(v),
Err(err) => Err(format!("{:?} {:?}", msg, err).into()),
}
}
}
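// Hedged usage sketch (names are illustrative only): any `Result` whose error
// type implements `Debug` can be lifted into a `JsResult` with extra context,
// e.g. `parse_settings(raw).context(&"parsing settings")?` inside a function
// returning `JsResult<T>`.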
pub struct LogLevel(pub log::Level);
impl WasmDescribe for LogLevel {
fn describe() {
inform(wasm_bindgen::describe::I8);
}
}
impl FromWasmAbi for LogLevel {
type Abi = u32;
unsafe fn from_abi(js: Self::Abi) -> Self {
match js { | 2 => LogLevel(log::Level::Info),
3 => LogLevel(log::Level::Warn),
_ => LogLevel(log::Level::Error),
}
}
} | 0 => LogLevel(log::Level::Trace),
1 => LogLevel(log::Level::Debug), | random_line_split |
abi.rs | use std::{fmt::Debug, result::Result};
use wasm_bindgen::{
convert::FromWasmAbi,
describe::{inform, WasmDescribe},
JsValue,
};
pub type JsResult<T> = Result<T, JsValue>;
pub trait Context<T> {
fn context(self, msg: &dyn Debug) -> JsResult<T>;
}
impl<T, TError> Context<T> for Result<T, TError>
where
TError: Debug,
{
fn context(self, msg: &dyn Debug) -> JsResult<T> |
}
pub struct LogLevel(pub log::Level);
impl WasmDescribe for LogLevel {
fn describe() {
inform(wasm_bindgen::describe::I8);
}
}
impl FromWasmAbi for LogLevel {
type Abi = u32;
unsafe fn from_abi(js: Self::Abi) -> Self {
match js {
0 => LogLevel(log::Level::Trace),
1 => LogLevel(log::Level::Debug),
2 => LogLevel(log::Level::Info),
3 => LogLevel(log::Level::Warn),
_ => LogLevel(log::Level::Error),
}
}
}
| {
match self {
Ok(v) => Ok(v),
Err(err) => Err(format!("{:?} {:?}", msg, err).into()),
}
} | identifier_body |
abi.rs | use std::{fmt::Debug, result::Result};
use wasm_bindgen::{
convert::FromWasmAbi,
describe::{inform, WasmDescribe},
JsValue,
};
pub type JsResult<T> = Result<T, JsValue>;
pub trait Context<T> {
fn context(self, msg: &dyn Debug) -> JsResult<T>;
}
impl<T, TError> Context<T> for Result<T, TError>
where
TError: Debug,
{
fn | (self, msg: &dyn Debug) -> JsResult<T> {
match self {
Ok(v) => Ok(v),
Err(err) => Err(format!("{:?} {:?}", msg, err).into()),
}
}
}
pub struct LogLevel(pub log::Level);
impl WasmDescribe for LogLevel {
fn describe() {
inform(wasm_bindgen::describe::I8);
}
}
impl FromWasmAbi for LogLevel {
type Abi = u32;
unsafe fn from_abi(js: Self::Abi) -> Self {
match js {
0 => LogLevel(log::Level::Trace),
1 => LogLevel(log::Level::Debug),
2 => LogLevel(log::Level::Info),
3 => LogLevel(log::Level::Warn),
_ => LogLevel(log::Level::Error),
}
}
}
| context | identifier_name |
map.rs | use std::ops::Range;
use cgmath::{Vector3, InnerSpace, Zero};
use noise::{BasicMulti, Seedable, NoiseModule};
use spherical_voronoi as sv;
use rand::{thread_rng, Rng, Rand};
use color::Color;
use ideal::{IdVec, IdsIter};
use settings;
#[derive(Copy, Clone, PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum CornerKind {
Water,
Land,
Coast,
River,
}
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct CornerData {
regions: Vec<Region>,
position: Vector3<f64>,
kind: CornerKind,
elevation: f64,
}
create_id!(Corner);
#[derive(Copy, Clone, PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum BorderKind {
River,
Coast,
None,
}
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct BorderData {
kind: BorderKind,
corners: (Corner, Corner),
regions: (Region, Region),
}
create_id!(Border);
#[derive(Clone, PartialEq, Debug)]
#[derive(Serialize, Deserialize)]
pub struct BiomeData {
name: String,
color: Color,
is_land: bool,
}
create_id!(Biome);
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct RegionData {
biome: Biome,
corners: Vec<Corner>,
borders: Vec<Border>,
center: Vector3<f64>,
elevation: f64,
}
create_id!(Region);
#[derive(Serialize, Deserialize)]
pub struct Map {
biomes: IdVec<Biome, BiomeData>,
corners: IdVec<Corner, CornerData>,
borders: IdVec<Border, BorderData>,
regions: IdVec<Region, RegionData>,
}
pub struct Neighbors<'a, 'b> {
borders: &'a IdVec<Border, BorderData>,
region: Region,
inner: ::std::slice::Iter<'b, Border>,
}
impl<'a, 'b> ::std::iter::Iterator for Neighbors<'a, 'b> {
type Item = Region;
fn next(&mut self) -> Option<Self::Item> {
if let Some(border) = self.inner.next() {
let (region0, region1) = self.borders[*border].regions;
if region0 == self.region {
Some(region1)
} else {
Some(region0)
}
} else {
None
}
}
}
impl Map {
pub fn regions(&self) -> IdsIter<Region> {
self.regions.ids()
}
pub fn borders(&self) -> IdsIter<Border> {
self.borders.ids()
}
pub fn corners(&self) -> IdsIter<Corner> {
self.corners.ids()
}
pub fn neighbors(&self, region: Region) -> Neighbors {
Neighbors {
borders: &self.borders,
region: region,
inner: self.regions[region].borders.iter(),
}
}
pub fn border_corners(&self, border: Border) -> (Corner, Corner) {
self.borders[border].corners
}
pub fn border_regions(&self, border: Border) -> (Region, Region) {
self.borders[border].regions
}
pub fn border_kind(&self, border: Border) -> BorderKind {
self.borders[border].kind
}
pub fn biome(&self, region: Region) -> Biome {
self.regions[region].biome
}
pub fn is_land(&self, region: Region) -> bool {
self.biomes[self.biome(region)].is_land
}
pub fn biome_color(&self, biome: Biome) -> Color {
self.biomes[biome].color
}
pub fn region_center(&self, region: Region) -> Vector3<f64> {
self.regions[region].center
}
pub fn region_borders(&self, region: Region) -> &[Border] {
&self.regions[region].borders
}
pub fn region_elevation(&self, region: Region) -> f64 {
self.regions[region].elevation
}
pub fn corner_elevation(&self, corner: Corner) -> f64 {
self.corners[corner].elevation
}
pub fn corner_position(&self, corner: Corner) -> Vector3<f64> {
self.corners[corner].position
}
pub fn corner_regions(&self, corner: Corner) -> &[Region] {
&self.corners[corner].regions
}
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Generator {
size: usize,
relaxations: usize,
sea_level: f64,
elevation_equator_bias: f64,
temperature_equator_bias: f64,
moisture_equator_bias: f64,
noise: settings::NoiseSettings,
biomes: Vec<BiomeData>,
ocean: Biome,
sea: Biome,
land_biomes: Vec<Vec<Biome>>,
}
impl Generator {
pub fn generate(&self) -> Map {
let mut visitor = Visitor::default();
sv::build_relaxed(&generate_points(self.size), &mut visitor, self.relaxations);
let Visitor { mut corners, mut borders, mut regions } = visitor;
for region in regions.ids() {
regions[region].center = regions[region].corners
.iter()
.fold(Vector3::zero(), |acc, &corner| acc + corners[corner].position)
.normalize()
.into();
}
let mut biomes = IdVec::default();
for biome in self.biomes.iter() {
biomes.push(biome.clone());
}
let mut map = Map {
biomes: biomes,
corners: corners,
borders: borders,
regions: regions,
};
self.generate_biomes(&mut map);
//self.generate_rivers(&mut map);
map
}
fn generate_biomes(&self, map: &mut Map) {
let elevations = self.generate_noise(map, self.elevation_equator_bias);
let temperatures = self.generate_noise(map, self.temperature_equator_bias);
let moistures = self.generate_noise(map, self.moisture_equator_bias);
for region in map.regions() {
let elevation = elevations[region];
map.regions[region].elevation = elevation;
if elevation < self.sea_level {
map.regions[region].biome = self.ocean;
} else {
map.regions[region].biome = self.get_biome(temperatures[region], moistures[region]);
}
}
for region in map.regions() {
if map.biome(region) == self.ocean &&
map.neighbors(region).any(|neighbor| map.is_land(neighbor)) |
}
for corner in map.corners() {
let elevation = {
let regions = map.corner_regions(corner);
let len = regions.len() as f64;
regions.iter().map(|&region| map.region_elevation(region)).sum::<f64>() / len
};
map.corners[corner].elevation = elevation;
}
}
fn get_biome(&self, temperature: f64, moisture: f64) -> Biome {
let biomes = get_element(self.land_biomes.as_slice(), temperature);
*get_element(biomes, moisture)
}
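// Samples fractal noise at every region center, adds an optional bias toward
// the equator, then rank-normalizes the values so they are evenly spread over
// [0, 1).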
fn generate_noise(&self, map: &Map, equator_bias: f64) -> IdVec<Region, f64> {
let noise = BasicMulti::new().set_seed(thread_rng().gen());
let mut pairs = Vec::new();
for region in map.regions() {
let center = map.region_center(region);
pairs.push((region, noise.get([center.x, center.y, center.z]) + equator_bias * (1.0 - center[2].abs())));
}
pairs.sort_by(|left, right| left.1.partial_cmp(&right.1).unwrap());
let mut values = IdVec::from(vec![0.0; pairs.len()]);
for (index, &(region, _)) in pairs.iter().enumerate() {
values[region] = index as f64 / pairs.len() as f64;
}
values
}
// fn generate_rivers(&self, map: &mut Map) {
// for vertex in map.diagram.vertices() {
// let mut elevation = map.vertex_elevation(vertex);
// if elevation > 0.8 {
// let mut vertex = vertex;
// while map.vertex_kind(vertex) == VertexKind::Land {
// map.vertices[vertex.index()].kind = VertexKind::River;
// let mut edge = None;
// let mut next = vertex;
// // find the lowest adjacent corner
// for &e in map.diagram.vertex_edges(vertex) {
// let v = map.diagram.other_edge_vertex(e, vertex);
// let el = map.vertex_elevation(v);
// if el < elevation {
// elevation = el;
// edge = Some(e);
// next = v;
// }
// }
// if let Some(edge) = edge {
// map.edges[edge.index()].kind = EdgeKind::River;
// vertex = next;
// } else {
// break;
// }
// }
// }
// }
// }
}
#[derive(Default)]
struct Visitor {
corners: IdVec<Corner, CornerData>,
borders: IdVec<Border, BorderData>,
regions: IdVec<Region, RegionData>,
}
impl Visitor {
fn common_regions(&self, corner0: Corner, corner1: Corner) -> (Region, Region) {
let mut regions = (Region::invalid(), Region::invalid());
for &region0 in self.corners[corner0].regions.iter() {
for &region1 in self.corners[corner1].regions.iter() {
if region0 == region1 {
if regions.0.is_invalid() {
regions.0 = region0;
} else {
regions.1 = region0;
}
}
}
}
regions
}
}
impl sv::Visitor for Visitor {
fn vertex(&mut self, position: Vector3<f64>, cells: [usize; 3]) {
let region0 = Region(cells[0]);
let region1 = Region(cells[1]);
let region2 = Region(cells[2]);
let corner = self.corners.push(CornerData {
regions: vec![region0, region1, region2],
position: position,
elevation: 0.0,
kind: CornerKind::Water,
});
self.regions[region0].corners.push(corner);
self.regions[region1].corners.push(corner);
self.regions[region2].corners.push(corner);
}
fn edge(&mut self, vertices: [usize; 2]) {
let corner0 = Corner(vertices[0]);
let corner1 = Corner(vertices[1]);
let (region0, region1) = self.common_regions(corner0, corner1);
let border = self.borders.push(BorderData {
kind: BorderKind::None,
corners: (corner0, corner1),
regions: (region0, region1),
});
self.regions[region0].borders.push(border);
self.regions[region1].borders.push(border);
}
fn cell(&mut self) {
self.regions.push(RegionData {
corners: Vec::new(),
borders: Vec::new(),
center: Vector3::zero(),
elevation: 0.0,
biome: Biome(0),
});
}
}
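// Rejection-samples `count` points uniformly on the unit sphere using
// Marsaglia's method: draw (x1, x2) in the unit disc and map the pair onto
// the sphere.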
fn generate_points(count: usize) -> Vec<Vector3<f64>> {
let mut points = Vec::with_capacity(count);
let mut created = 0;
while created < count {
let x1 = thread_rng().gen_range(-1.0f64, 1.0);
let x2 = thread_rng().gen_range(-1.0f64, 1.0);
let norm2 = x1 * x1 + x2 * x2;
if norm2 < 1.0 {
created += 1;
let x = 2.0 * x1 * (1.0 - norm2).sqrt();
let y = 2.0 * x2 * (1.0 - norm2).sqrt();
let z = 1.0 - 2.0 * norm2;
points.push(Vector3::new(x, y, z));
}
}
points
}
fn get_element<T>(items: &[T], index: f64) -> &T {
&items[((items.len() as f64) * index).floor() as usize]
}
| {
map.regions[region].biome = self.sea;
} | conditional_block |
map.rs | use std::ops::Range;
use cgmath::{Vector3, InnerSpace, Zero};
use noise::{BasicMulti, Seedable, NoiseModule};
use spherical_voronoi as sv;
use rand::{thread_rng, Rng, Rand};
use color::Color;
use ideal::{IdVec, IdsIter};
use settings;
#[derive(Copy, Clone, PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum CornerKind {
Water,
Land,
Coast,
River,
}
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct | {
regions: Vec<Region>,
position: Vector3<f64>,
kind: CornerKind,
elevation: f64,
}
create_id!(Corner);
#[derive(Copy, Clone, PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum BorderKind {
River,
Coast,
None,
}
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct BorderData {
kind: BorderKind,
corners: (Corner, Corner),
regions: (Region, Region),
}
create_id!(Border);
#[derive(Clone, PartialEq, Debug)]
#[derive(Serialize, Deserialize)]
pub struct BiomeData {
name: String,
color: Color,
is_land: bool,
}
create_id!(Biome);
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct RegionData {
biome: Biome,
corners: Vec<Corner>,
borders: Vec<Border>,
center: Vector3<f64>,
elevation: f64,
}
create_id!(Region);
#[derive(Serialize, Deserialize)]
pub struct Map {
biomes: IdVec<Biome, BiomeData>,
corners: IdVec<Corner, CornerData>,
borders: IdVec<Border, BorderData>,
regions: IdVec<Region, RegionData>,
}
pub struct Neighbors<'a, 'b> {
borders: &'a IdVec<Border, BorderData>,
region: Region,
inner: ::std::slice::Iter<'b, Border>,
}
impl<'a, 'b> ::std::iter::Iterator for Neighbors<'a, 'b> {
type Item = Region;
fn next(&mut self) -> Option<Self::Item> {
if let Some(border) = self.inner.next() {
let (region0, region1) = self.borders[*border].regions;
if region0 == self.region {
Some(region1)
} else {
Some(region0)
}
} else {
None
}
}
}
impl Map {
pub fn regions(&self) -> IdsIter<Region> {
self.regions.ids()
}
pub fn borders(&self) -> IdsIter<Border> {
self.borders.ids()
}
pub fn corners(&self) -> IdsIter<Corner> {
self.corners.ids()
}
pub fn neighbors(&self, region: Region) -> Neighbors {
Neighbors {
borders: &self.borders,
region: region,
inner: self.regions[region].borders.iter(),
}
}
pub fn border_corners(&self, border: Border) -> (Corner, Corner) {
self.borders[border].corners
}
pub fn border_regions(&self, border: Border) -> (Region, Region) {
self.borders[border].regions
}
pub fn border_kind(&self, border: Border) -> BorderKind {
self.borders[border].kind
}
pub fn biome(&self, region: Region) -> Biome {
self.regions[region].biome
}
pub fn is_land(&self, region: Region) -> bool {
self.biomes[self.biome(region)].is_land
}
pub fn biome_color(&self, biome: Biome) -> Color {
self.biomes[biome].color
}
pub fn region_center(&self, region: Region) -> Vector3<f64> {
self.regions[region].center
}
pub fn region_borders(&self, region: Region) -> &[Border] {
&self.regions[region].borders
}
pub fn region_elevation(&self, region: Region) -> f64 {
self.regions[region].elevation
}
pub fn corner_elevation(&self, corner: Corner) -> f64 {
self.corners[corner].elevation
}
pub fn corner_position(&self, corner: Corner) -> Vector3<f64> {
self.corners[corner].position
}
pub fn corner_regions(&self, corner: Corner) -> &[Region] {
&self.corners[corner].regions
}
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Generator {
size: usize,
relaxations: usize,
sea_level: f64,
elevation_equator_bias: f64,
temperature_equator_bias: f64,
moisture_equator_bias: f64,
noise: settings::NoiseSettings,
biomes: Vec<BiomeData>,
ocean: Biome,
sea: Biome,
land_biomes: Vec<Vec<Biome>>,
}
impl Generator {
pub fn generate(&self) -> Map {
let mut visitor = Visitor::default();
sv::build_relaxed(&generate_points(self.size), &mut visitor, self.relaxations);
let Visitor { mut corners, mut borders, mut regions } = visitor;
for region in regions.ids() {
regions[region].center = regions[region].corners
.iter()
.fold(Vector3::zero(), |acc, &corner| acc + corners[corner].position)
.normalize()
.into();
}
let mut biomes = IdVec::default();
for biome in self.biomes.iter() {
biomes.push(biome.clone());
}
let mut map = Map {
biomes: biomes,
corners: corners,
borders: borders,
regions: regions,
};
self.generate_biomes(&mut map);
//self.generate_rivers(&mut map);
map
}
fn generate_biomes(&self, map: &mut Map) {
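// Pipeline summary (editorial note): three independent noise fields drive elevation,
// temperature and moisture; regions below `sea_level` become ocean, the rest pick a
// biome from the temperature/moisture lookup grid, ocean cells touching land are
// reclassified as sea, and corner elevations are averaged from their adjacent regions.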
let elevations = self.generate_noise(map, self.elevation_equator_bias);
let temperatures = self.generate_noise(map, self.temperature_equator_bias);
let moistures = self.generate_noise(map, self.moisture_equator_bias);
for region in map.regions() {
let elevation = elevations[region];
map.regions[region].elevation = elevation;
if elevation < self.sea_level {
map.regions[region].biome = self.ocean;
} else {
map.regions[region].biome = self.get_biome(temperatures[region], moistures[region]);
}
}
for region in map.regions() {
if map.biome(region) == self.ocean &&
map.neighbors(region).any(|neighbor| map.is_land(neighbor)) {
map.regions[region].biome = self.sea;
}
}
for corner in map.corners() {
let elevation = {
let regions = map.corner_regions(corner);
let len = regions.len() as f64;
regions.iter().map(|&region| map.region_elevation(region)).sum::<f64>() / len
};
map.corners[corner].elevation = elevation;
}
}
fn get_biome(&self, temperature: f64, moisture: f64) -> Biome {
let biomes = get_element(self.land_biomes.as_slice(), temperature);
*get_element(biomes, moisture)
}
fn generate_noise(&self, map: &Map, equator_bias: f64) -> IdVec<Region, f64> {
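// The raw noise is rank-normalised: regions are sorted by (noise + equator bias) and
// assigned evenly spaced values in [0, 1), so downstream thresholds such as `sea_level`
// behave like percentiles regardless of the noise module's output range.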
let noise = BasicMulti::new().set_seed(thread_rng().gen());
let mut pairs = Vec::new();
for region in map.regions() {
let center = map.region_center(region);
pairs.push((region, noise.get([center.x, center.y, center.z]) + equator_bias * (1.0 - center[2].abs())));
}
pairs.sort_by(|left, right| left.1.partial_cmp(&right.1).unwrap());
let mut values = IdVec::from(vec![0.0; pairs.len()]);
for (index, &(region, _)) in pairs.iter().enumerate() {
values[region] = index as f64 / pairs.len() as f64;
}
values
}
// fn generate_rivers(&self, map: &mut Map) {
// for vertex in map.diagram.vertices() {
// let mut elevation = map.vertex_elevation(vertex);
// if elevation > 0.8 {
// let mut vertex = vertex;
// while map.vertex_kind(vertex) == VertexKind::Land {
// map.vertices[vertex.index()].kind = VertexKind::River;
// let mut edge = None;
// let mut next = vertex;
// // find the lowest adjacent corner
// for &e in map.diagram.vertex_edges(vertex) {
// let v = map.diagram.other_edge_vertex(e, vertex);
// let el = map.vertex_elevation(v);
// if el < elevation {
// elevation = el;
// edge = Some(e);
// next = v;
// }
// }
// if let Some(edge) = edge {
// map.edges[edge.index()].kind = EdgeKind::River;
// vertex = next;
// } else {
// break;
// }
// }
// }
// }
// }
}
#[derive(Default)]
struct Visitor {
corners: IdVec<Corner, CornerData>,
borders: IdVec<Border, BorderData>,
regions: IdVec<Region, RegionData>,
}
impl Visitor {
fn common_regions(&self, corner0: Corner, corner1: Corner) -> (Region, Region) {
let mut regions = (Region::invalid(), Region::invalid());
for &region0 in self.corners[corner0].regions.iter() {
for &region1 in self.corners[corner1].regions.iter() {
if region0 == region1 {
if regions.0.is_invalid() {
regions.0 = region0;
} else {
regions.1 = region0;
}
}
}
}
regions
}
}
impl sv::Visitor for Visitor {
fn vertex(&mut self, position: Vector3<f64>, cells: [usize; 3]) {
let region0 = Region(cells[0]);
let region1 = Region(cells[1]);
let region2 = Region(cells[2]);
let corner = self.corners.push(CornerData {
regions: vec![region0, region1, region2],
position: position,
elevation: 0.0,
kind: CornerKind::Water,
});
self.regions[region0].corners.push(corner);
self.regions[region1].corners.push(corner);
self.regions[region2].corners.push(corner);
}
fn edge(&mut self, vertices: [usize; 2]) {
let corner0 = Corner(vertices[0]);
let corner1 = Corner(vertices[1]);
let (region0, region1) = self.common_regions(corner0, corner1);
let border = self.borders.push(BorderData {
kind: BorderKind::None,
corners: (corner0, corner1),
regions: (region0, region1),
});
self.regions[region0].borders.push(border);
self.regions[region1].borders.push(border);
}
fn cell(&mut self) {
self.regions.push(RegionData {
corners: Vec::new(),
borders: Vec::new(),
center: Vector3::zero(),
elevation: 0.0,
biome: Biome(0),
});
}
}
fn generate_points(count: usize) -> Vec<Vector3<f64>> {
let mut points = Vec::with_capacity(count);
let mut created = 0;
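// Marsaglia-style rejection sampling: draw (x1, x2) uniformly in the unit square,
// keep pairs inside the unit disc, and project them onto the unit sphere, which yields
// uniformly distributed points without any trigonometry.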
while created < count {
let x1 = thread_rng().gen_range(-1.0f64, 1.0);
let x2 = thread_rng().gen_range(-1.0f64, 1.0);
let norm2 = x1 * x1 + x2 * x2;
if norm2 < 1.0 {
created += 1;
let x = 2.0 * x1 * (1.0 - norm2).sqrt();
let y = 2.0 * x2 * (1.0 - norm2).sqrt();
let z = 1.0 - 2.0 * norm2;
points.push(Vector3::new(x, y, z));
}
}
points
}
fn get_element<T>(items: &[T], index: f64) -> &T {
&items[((items.len() as f64) * index).floor() as usize]
}
| CornerData | identifier_name |
map.rs | use std::ops::Range;
use cgmath::{Vector3, InnerSpace, Zero};
use noise::{BasicMulti, Seedable, NoiseModule};
use spherical_voronoi as sv;
use rand::{thread_rng, Rng, Rand};
use color::Color;
use ideal::{IdVec, IdsIter};
use settings;
#[derive(Copy, Clone, PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum CornerKind {
Water,
Land,
Coast,
River,
}
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct CornerData {
regions: Vec<Region>,
position: Vector3<f64>,
kind: CornerKind,
elevation: f64,
}
create_id!(Corner);
#[derive(Copy, Clone, PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum BorderKind {
River,
Coast,
None,
}
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct BorderData {
kind: BorderKind,
corners: (Corner, Corner),
regions: (Region, Region),
}
create_id!(Border);
#[derive(Clone, PartialEq, Debug)]
#[derive(Serialize, Deserialize)]
pub struct BiomeData {
name: String,
color: Color,
is_land: bool,
}
create_id!(Biome);
#[derive(Clone)]
#[derive(Serialize, Deserialize)]
pub struct RegionData {
biome: Biome,
corners: Vec<Corner>,
borders: Vec<Border>,
center: Vector3<f64>,
elevation: f64,
}
create_id!(Region);
#[derive(Serialize, Deserialize)]
pub struct Map {
biomes: IdVec<Biome, BiomeData>,
corners: IdVec<Corner, CornerData>,
borders: IdVec<Border, BorderData>,
regions: IdVec<Region, RegionData>, | borders: &'a IdVec<Border, BorderData>,
region: Region,
inner: ::std::slice::Iter<'b, Border>,
}
impl<'a, 'b> ::std::iter::Iterator for Neighbors<'a, 'b> {
type Item = Region;
fn next(&mut self) -> Option<Self::Item> {
if let Some(border) = self.inner.next() {
let (region0, region1) = self.borders[*border].regions;
if region0 == self.region {
Some(region1)
} else {
Some(region0)
}
} else {
None
}
}
}
impl Map {
pub fn regions(&self) -> IdsIter<Region> {
self.regions.ids()
}
pub fn borders(&self) -> IdsIter<Border> {
self.borders.ids()
}
pub fn corners(&self) -> IdsIter<Corner> {
self.corners.ids()
}
pub fn neighbors(&self, region: Region) -> Neighbors {
Neighbors {
borders: &self.borders,
region: region,
inner: self.regions[region].borders.iter(),
}
}
pub fn border_corners(&self, border: Border) -> (Corner, Corner) {
self.borders[border].corners
}
pub fn border_regions(&self, border: Border) -> (Region, Region) {
self.borders[border].regions
}
pub fn border_kind(&self, border: Border) -> BorderKind {
self.borders[border].kind
}
pub fn biome(&self, region: Region) -> Biome {
self.regions[region].biome
}
pub fn is_land(&self, region: Region) -> bool {
self.biomes[self.biome(region)].is_land
}
pub fn biome_color(&self, biome: Biome) -> Color {
self.biomes[biome].color
}
pub fn region_center(&self, region: Region) -> Vector3<f64> {
self.regions[region].center
}
pub fn region_borders(&self, region: Region) -> &[Border] {
&self.regions[region].borders
}
pub fn region_elevation(&self, region: Region) -> f64 {
self.regions[region].elevation
}
pub fn corner_elevation(&self, corner: Corner) -> f64 {
self.corners[corner].elevation
}
pub fn corner_position(&self, corner: Corner) -> Vector3<f64> {
self.corners[corner].position
}
pub fn corner_regions(&self, corner: Corner) -> &[Region] {
&self.corners[corner].regions
}
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Generator {
size: usize,
relaxations: usize,
sea_level: f64,
elevation_equator_bias: f64,
temperature_equator_bias: f64,
moisture_equator_bias: f64,
noise: settings::NoiseSettings,
biomes: Vec<BiomeData>,
ocean: Biome,
sea: Biome,
land_biomes: Vec<Vec<Biome>>,
}
impl Generator {
pub fn generate(&self) -> Map {
let mut visitor = Visitor::default();
sv::build_relaxed(&generate_points(self.size), &mut visitor, self.relaxations);
let Visitor { mut corners, mut borders, mut regions } = visitor;
for region in regions.ids() {
regions[region].center = regions[region].corners
.iter()
.fold(Vector3::zero(), |acc, &corner| acc + corners[corner].position)
.normalize()
.into();
}
let mut biomes = IdVec::default();
for biome in self.biomes.iter() {
biomes.push(biome.clone());
}
let mut map = Map {
biomes: biomes,
corners: corners,
borders: borders,
regions: regions,
};
self.generate_biomes(&mut map);
//self.generate_rivers(&mut map);
map
}
fn generate_biomes(&self, map: &mut Map) {
let elevations = self.generate_noise(map, self.elevation_equator_bias);
let temperatures = self.generate_noise(map, self.temperature_equator_bias);
let moistures = self.generate_noise(map, self.moisture_equator_bias);
for region in map.regions() {
let elevation = elevations[region];
map.regions[region].elevation = elevation;
if elevation < self.sea_level {
map.regions[region].biome = self.ocean;
} else {
map.regions[region].biome = self.get_biome(temperatures[region], moistures[region]);
}
}
for region in map.regions() {
if map.biome(region) == self.ocean &&
map.neighbors(region).any(|neighbor| map.is_land(neighbor)) {
map.regions[region].biome = self.sea;
}
}
for corner in map.corners() {
let elevation = {
let regions = map.corner_regions(corner);
let len = regions.len() as f64;
regions.iter().map(|&region| map.region_elevation(region)).sum::<f64>() / len
};
map.corners[corner].elevation = elevation;
}
}
fn get_biome(&self, temperature: f64, moisture: f64) -> Biome {
let biomes = get_element(self.land_biomes.as_slice(), temperature);
*get_element(biomes, moisture)
}
fn generate_noise(&self, map: &Map, equator_bias: f64) -> IdVec<Region, f64> {
let noise = BasicMulti::new().set_seed(thread_rng().gen());
let mut pairs = Vec::new();
for region in map.regions() {
let center = map.region_center(region);
pairs.push((region, noise.get([center.x, center.y, center.z]) + equator_bias * (1.0 - center[2].abs())));
}
pairs.sort_by(|left, right| left.1.partial_cmp(&right.1).unwrap());
let mut values = IdVec::from(vec![0.0; pairs.len()]);
for (index, &(region, _)) in pairs.iter().enumerate() {
values[region] = index as f64 / pairs.len() as f64;
}
values
}
// fn generate_rivers(&self, map: &mut Map) {
// for vertex in map.diagram.vertices() {
// let mut elevation = map.vertex_elevation(vertex);
// if elevation > 0.8 {
// let mut vertex = vertex;
// while map.vertex_kind(vertex) == VertexKind::Land {
// map.vertices[vertex.index()].kind = VertexKind::River;
// let mut edge = None;
// let mut next = vertex;
// // find the lowest adjacent corner
// for &e in map.diagram.vertex_edges(vertex) {
// let v = map.diagram.other_edge_vertex(e, vertex);
// let el = map.vertex_elevation(v);
// if el < elevation {
// elevation = el;
// edge = Some(e);
// next = v;
// }
// }
// if let Some(edge) = edge {
// map.edges[edge.index()].kind = EdgeKind::River;
// vertex = next;
// } else {
// break;
// }
// }
// }
// }
// }
}
#[derive(Default)]
struct Visitor {
corners: IdVec<Corner, CornerData>,
borders: IdVec<Border, BorderData>,
regions: IdVec<Region, RegionData>,
}
impl Visitor {
fn common_regions(&self, corner0: Corner, corner1: Corner) -> (Region, Region) {
let mut regions = (Region::invalid(), Region::invalid());
for &region0 in self.corners[corner0].regions.iter() {
for &region1 in self.corners[corner1].regions.iter() {
if region0 == region1 {
if regions.0.is_invalid() {
regions.0 = region0;
} else {
regions.1 = region0;
}
}
}
}
regions
}
}
impl sv::Visitor for Visitor {
fn vertex(&mut self, position: Vector3<f64>, cells: [usize; 3]) {
let region0 = Region(cells[0]);
let region1 = Region(cells[1]);
let region2 = Region(cells[2]);
let corner = self.corners.push(CornerData {
regions: vec![region0, region1, region2],
position: position,
elevation: 0.0,
kind: CornerKind::Water,
});
self.regions[region0].corners.push(corner);
self.regions[region1].corners.push(corner);
self.regions[region2].corners.push(corner);
}
fn edge(&mut self, vertices: [usize; 2]) {
let corner0 = Corner(vertices[0]);
let corner1 = Corner(vertices[1]);
let (region0, region1) = self.common_regions(corner0, corner1);
let border = self.borders.push(BorderData {
kind: BorderKind::None,
corners: (corner0, corner1),
regions: (region0, region1),
});
self.regions[region0].borders.push(border);
self.regions[region1].borders.push(border);
}
fn cell(&mut self) {
self.regions.push(RegionData {
corners: Vec::new(),
borders: Vec::new(),
center: Vector3::zero(),
elevation: 0.0,
biome: Biome(0),
});
}
}
fn generate_points(count: usize) -> Vec<Vector3<f64>> {
let mut points = Vec::with_capacity(count);
let mut created = 0;
while created < count {
let x1 = thread_rng().gen_range(-1.0f64, 1.0);
let x2 = thread_rng().gen_range(-1.0f64, 1.0);
let norm2 = x1 * x1 + x2 * x2;
if norm2 < 1.0 {
created += 1;
let x = 2.0 * x1 * (1.0 - norm2).sqrt();
let y = 2.0 * x2 * (1.0 - norm2).sqrt();
let z = 1.0 - 2.0 * norm2;
points.push(Vector3::new(x, y, z));
}
}
points
}
fn get_element<T>(items: &[T], index: f64) -> &T {
&items[((items.len() as f64) * index).floor() as usize]
} | }
pub struct Neighbors<'a, 'b> { | random_line_split |
lib.rs | //! This crate provides the basic environments for Kailua.
//!
//! * Location types ([`Unit`](./struct.Unit.html), [`Pos`](./struct.Pos.html),
//! [`Span`](./struct.Span.html)) and a location-bundled container | //!
//! * The resolver for locations
//! ([`kailua_env::source`](./source/index.html))
//!
//! * An arbitrary mapping from location ranges to values
//! ([`kailua_env::spanmap`](./spanmap/index.html))
mod loc;
pub mod scope;
pub mod source;
pub mod spanmap;
pub use loc::{Unit, Pos, Span, Spanned, WithLoc};
pub use scope::{Scope, ScopedId, ScopeMap};
pub use source::{Source, SourceFile, SourceSlice, SourceData};
pub use spanmap::SpanMap; | //! ([`Spanned`](./struct.Spanned.html))
//!
//! * Scope identifiers and a location-to-scope map
//! ([`kailua_env::scope`](./scope/index.html)) | random_line_split |
extensions.rs | use serde::{Deserialize, Serialize, Serializer};
use std::convert::From;
use std::fmt;
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
/// Represents all the possible [CloudEvents extension](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) values
pub enum ExtensionValue {
/// Represents a [`String`] value.
String(String),
/// Represents a [`bool`] value.
Boolean(bool),
/// Represents an integer [`i64`] value.
Integer(i64),
}
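// Note (editorial): `#[serde(untagged)]` keeps the wire format bare (e.g. "abc", 42,
// true) rather than wrapping the variant name. Typical construction goes through the
// From impls below, e.g. `let v: ExtensionValue = "abc".into();` or
// `ExtensionValue::from_i64(42)`.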
impl From<&str> for ExtensionValue {
fn from(s: &str) -> Self {
ExtensionValue::String(String::from(s))
}
}
impl From<String> for ExtensionValue {
fn from(s: String) -> Self |
}
impl From<bool> for ExtensionValue {
fn from(s: bool) -> Self {
ExtensionValue::Boolean(s)
}
}
impl From<i64> for ExtensionValue {
fn from(s: i64) -> Self {
ExtensionValue::Integer(s)
}
}
impl ExtensionValue {
pub fn from_string<S>(s: S) -> Self
where
S: Into<String>,
{
ExtensionValue::from(s.into())
}
pub fn from_i64<S>(s: S) -> Self
where
S: Into<i64>,
{
ExtensionValue::from(s.into())
}
pub fn from_bool<S>(s: S) -> Self
where
S: Into<bool>,
{
ExtensionValue::from(s.into())
}
}
impl fmt::Display for ExtensionValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ExtensionValue::String(s) => f.write_str(s),
ExtensionValue::Boolean(b) => f.serialize_bool(*b),
ExtensionValue::Integer(i) => f.serialize_i64(*i),
}
}
}
| {
ExtensionValue::String(s)
} | identifier_body |
extensions.rs | use serde::{Deserialize, Serialize, Serializer};
use std::convert::From;
use std::fmt;
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
/// Represents all the possible [CloudEvents extension](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) values
pub enum ExtensionValue {
/// Represents a [`String`] value.
String(String),
/// Represents a [`bool`] value.
Boolean(bool),
/// Represents an integer [`i64`] value.
Integer(i64),
}
impl From<&str> for ExtensionValue {
fn from(s: &str) -> Self {
ExtensionValue::String(String::from(s))
}
}
impl From<String> for ExtensionValue {
fn | (s: String) -> Self {
ExtensionValue::String(s)
}
}
impl From<bool> for ExtensionValue {
fn from(s: bool) -> Self {
ExtensionValue::Boolean(s)
}
}
impl From<i64> for ExtensionValue {
fn from(s: i64) -> Self {
ExtensionValue::Integer(s)
}
}
impl ExtensionValue {
pub fn from_string<S>(s: S) -> Self
where
S: Into<String>,
{
ExtensionValue::from(s.into())
}
pub fn from_i64<S>(s: S) -> Self
where
S: Into<i64>,
{
ExtensionValue::from(s.into())
}
pub fn from_bool<S>(s: S) -> Self
where
S: Into<bool>,
{
ExtensionValue::from(s.into())
}
}
impl fmt::Display for ExtensionValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ExtensionValue::String(s) => f.write_str(s),
ExtensionValue::Boolean(b) => f.serialize_bool(*b),
ExtensionValue::Integer(i) => f.serialize_i64(*i),
}
}
}
| from | identifier_name |
extensions.rs | use std::convert::From;
use std::fmt;
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
/// Represents all the possible [CloudEvents extension](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) values
pub enum ExtensionValue {
/// Represents a [`String`] value.
String(String),
/// Represents a [`bool`] value.
Boolean(bool),
/// Represents an integer [`i64`] value.
Integer(i64),
}
impl From<&str> for ExtensionValue {
fn from(s: &str) -> Self {
ExtensionValue::String(String::from(s))
}
}
impl From<String> for ExtensionValue {
fn from(s: String) -> Self {
ExtensionValue::String(s)
}
}
impl From<bool> for ExtensionValue {
fn from(s: bool) -> Self {
ExtensionValue::Boolean(s)
}
}
impl From<i64> for ExtensionValue {
fn from(s: i64) -> Self {
ExtensionValue::Integer(s)
}
}
impl ExtensionValue {
pub fn from_string<S>(s: S) -> Self
where
S: Into<String>,
{
ExtensionValue::from(s.into())
}
pub fn from_i64<S>(s: S) -> Self
where
S: Into<i64>,
{
ExtensionValue::from(s.into())
}
pub fn from_bool<S>(s: S) -> Self
where
S: Into<bool>,
{
ExtensionValue::from(s.into())
}
}
impl fmt::Display for ExtensionValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ExtensionValue::String(s) => f.write_str(s),
ExtensionValue::Boolean(b) => f.serialize_bool(*b),
ExtensionValue::Integer(i) => f.serialize_i64(*i),
}
}
} | use serde::{Deserialize, Serialize, Serializer}; | random_line_split |
|
issue-7867.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum A { B, C }
mod foo { pub fn bar() | }
fn main() {
match (true, false) {
A::B => (), //~ ERROR expected `(bool, bool)`, found `A` (expected tuple, found enum A)
_ => ()
}
match &Some(42i) {
Some(x) => (), //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<_>`
None => () //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<_>`
}
}
| {} | identifier_body |
issue-7867.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum A { B, C }
mod foo { pub fn | () {} }
fn main() {
match (true, false) {
A::B => (), //~ ERROR expected `(bool, bool)`, found `A` (expected tuple, found enum A)
_ => ()
}
match &Some(42i) {
Some(x) => (), //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<_>`
None => () //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<_>`
}
}
| bar | identifier_name |
issue-7867.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
| match (true, false) {
A::B => (), //~ ERROR expected `(bool, bool)`, found `A` (expected tuple, found enum A)
_ => ()
}
match &Some(42i) {
Some(x) => (), //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<_>`
None => () //~ ERROR expected `&core::option::Option<int>`,
// found `core::option::Option<_>`
}
} | enum A { B, C }
mod foo { pub fn bar() {} }
fn main() { | random_line_split |
asm-target-clobbers.rs | // only-x86_64
// revisions: base avx512
// [avx512]compile-flags: -C target-feature=+avx512f
#![crate_type = "rlib"]
#![feature(asm)]
// CHECK-LABEL: @avx512_clobber
// base: call void asm sideeffect inteldialect "", "~{xmm31}"()
// avx512: call float asm sideeffect inteldialect "", "=&{xmm31}"()
#[no_mangle]
pub unsafe fn avx512_clobber() |
// CHECK-LABEL: @eax_clobber
// CHECK: call i32 asm sideeffect inteldialect "", "=&{ax}"()
#[no_mangle]
pub unsafe fn eax_clobber() {
asm!("", out("eax") _, options(nostack, nomem, preserves_flags));
}
| {
asm!("", out("zmm31") _, options(nostack, nomem, preserves_flags));
} | identifier_body |
asm-target-clobbers.rs | // only-x86_64
// revisions: base avx512
// [avx512]compile-flags: -C target-feature=+avx512f
#![crate_type = "rlib"]
#![feature(asm)]
// CHECK-LABEL: @avx512_clobber
// base: call void asm sideeffect inteldialect "", "~{xmm31}"()
// avx512: call float asm sideeffect inteldialect "", "=&{xmm31}"()
#[no_mangle]
pub unsafe fn | () {
asm!("", out("zmm31") _, options(nostack, nomem, preserves_flags));
}
// CHECK-LABEL: @eax_clobber
// CHECK: call i32 asm sideeffect inteldialect "", "=&{ax}"()
#[no_mangle]
pub unsafe fn eax_clobber() {
asm!("", out("eax") _, options(nostack, nomem, preserves_flags));
}
| avx512_clobber | identifier_name |
asm-target-clobbers.rs | // only-x86_64
// revisions: base avx512
// [avx512]compile-flags: -C target-feature=+avx512f
#![crate_type = "rlib"]
#![feature(asm)]
// CHECK-LABEL: @avx512_clobber
// base: call void asm sideeffect inteldialect "", "~{xmm31}"() | pub unsafe fn avx512_clobber() {
asm!("", out("zmm31") _, options(nostack, nomem, preserves_flags));
}
// CHECK-LABEL: @eax_clobber
// CHECK: call i32 asm sideeffect inteldialect "", "=&{ax}"()
#[no_mangle]
pub unsafe fn eax_clobber() {
asm!("", out("eax") _, options(nostack, nomem, preserves_flags));
} | // avx512: call float asm sideeffect inteldialect "", "=&{xmm31}"()
#[no_mangle] | random_line_split |
ecies_hkdf_recipient_kem.rs | // Copyright 2021 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
use crate::{subtle, subtle::EcPrivateKey};
use tink_core::TinkError;
use tink_proto::{EcPointFormat, HashType};
/// Represents a HKDF-based KEM (key encapsulation mechanism) for ECIES recipient.
pub(crate) struct | <'a> {
recipient_private_key: &'a EcPrivateKey,
}
impl<'a> EciesHkdfRecipientKem<'a> {
pub fn new(priv_key: &'a EcPrivateKey) -> Self {
Self {
recipient_private_key: priv_key,
}
}
/// Uses the KEM to generate a new HKDF-based key.
pub(crate) fn decapsulate(
&self,
kem: &[u8],
hash_alg: HashType,
salt: &[u8],
info: &[u8],
key_size: usize,
point_format: EcPointFormat,
) -> Result<Vec<u8>, TinkError> {
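// ECIES recipient side: decode the sender's ephemeral public point from `kem`, run
// ECDH against our private key, then feed kem || shared_secret through HKDF to derive
// `key_size` bytes of symmetric key material.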
let pub_point = subtle::point_decode(
self.recipient_private_key.public_key().curve(),
point_format,
kem,
)?;
let secret = subtle::compute_shared_secret(&pub_point, self.recipient_private_key)?;
let mut i = kem.to_vec();
i.extend_from_slice(&secret);
tink_core::subtle::compute_hkdf(hash_alg, &i, salt, info, key_size)
}
}
| EciesHkdfRecipientKem | identifier_name |
ecies_hkdf_recipient_kem.rs | // Copyright 2021 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
use crate::{subtle, subtle::EcPrivateKey};
use tink_core::TinkError;
use tink_proto::{EcPointFormat, HashType};
/// Represents a HKDF-based KEM (key encapsulation mechanism) for ECIES recipient.
pub(crate) struct EciesHkdfRecipientKem<'a> {
recipient_private_key: &'a EcPrivateKey,
}
impl<'a> EciesHkdfRecipientKem<'a> {
pub fn new(priv_key: &'a EcPrivateKey) -> Self {
Self { | recipient_private_key: priv_key,
}
}
/// Uses the KEM to generate a new HKDF-based key.
pub(crate) fn decapsulate(
&self,
kem: &[u8],
hash_alg: HashType,
salt: &[u8],
info: &[u8],
key_size: usize,
point_format: EcPointFormat,
) -> Result<Vec<u8>, TinkError> {
let pub_point = subtle::point_decode(
self.recipient_private_key.public_key().curve(),
point_format,
kem,
)?;
let secret = subtle::compute_shared_secret(&pub_point, self.recipient_private_key)?;
let mut i = kem.to_vec();
i.extend_from_slice(&secret);
tink_core::subtle::compute_hkdf(hash_alg, &i, salt, info, key_size)
}
} | random_line_split |
|
ecies_hkdf_recipient_kem.rs | // Copyright 2021 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
use crate::{subtle, subtle::EcPrivateKey};
use tink_core::TinkError;
use tink_proto::{EcPointFormat, HashType};
/// Represents a HKDF-based KEM (key encapsulation mechanism) for ECIES recipient.
pub(crate) struct EciesHkdfRecipientKem<'a> {
recipient_private_key: &'a EcPrivateKey,
}
impl<'a> EciesHkdfRecipientKem<'a> {
pub fn new(priv_key: &'a EcPrivateKey) -> Self {
Self {
recipient_private_key: priv_key,
}
}
/// Uses the KEM to generate a new HKDF-based key.
pub(crate) fn decapsulate(
&self,
kem: &[u8],
hash_alg: HashType,
salt: &[u8],
info: &[u8],
key_size: usize,
point_format: EcPointFormat,
) -> Result<Vec<u8>, TinkError> |
}
| {
let pub_point = subtle::point_decode(
self.recipient_private_key.public_key().curve(),
point_format,
kem,
)?;
let secret = subtle::compute_shared_secret(&pub_point, self.recipient_private_key)?;
let mut i = kem.to_vec();
i.extend_from_slice(&secret);
tink_core::subtle::compute_hkdf(hash_alg, &i, salt, info, key_size)
} | identifier_body |
year.rs | //
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use std::str::FromStr;
use filters::filter::Filter;
use chrono::NaiveDateTime;
use libimagerror::trace::trace_error;
use libimagerror::trace::MapErrTrace;
use libimagerror::iter::TraceIterator;
use libimagstore::store::FileLockEntry;
use libimagtimetrack::timetrackingstore::TimeTrackStore;
use libimagtimetrack::timetracking::TimeTracking;
use libimagtimetrack::tag::TimeTrackingTag;
use libimagtimetrack::iter::filter::*;
use libimagrt::runtime::Runtime;
pub fn year(rt: &Runtime) -> i32 | None => {
NaiveDate::from_ymd(now.year() + 1, 1, 1).and_hms(0, 0, 0)
},
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let tags = cmd
.values_of("tags")
.map(|ts| ts.into_iter().map(String::from).map(TimeTrackingTag::from).collect());
let start_time_filter = has_start_time_where(move |dt: &NaiveDateTime| {
start <= *dt
});
let end_time_filter = has_end_time_where(move |dt: &NaiveDateTime| {
end >= *dt
});
let tags_filter = move |fle: &FileLockEntry| {
match tags {
Some(ref tags) => has_one_of_tags(&tags).filter(fle),
None => true,
}
};
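// Combine the predicates with `filters::Filter::and`: an entry is kept only when it
// matches the requested tags (if any) and both its start and end timestamps fall
// inside the selected year window.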
tags_filter.and(start_time_filter).and(end_time_filter)
};
rt.store()
.get_timetrackings()
.and_then(|iter| {
iter.trace_unwrap()
.filter(|e| filter.filter(e))
.fold(Ok(()), |acc, e| {
acc.and_then(|_| {
debug!("Processing {:?}", e.get_location());
let tag = e.get_timetrack_tag()?;
debug!(" -> tag = {:?}", tag);
let start = e.get_start_datetime()?;
debug!(" -> start = {:?}", start);
let end = e.get_end_datetime()?;
debug!(" -> end = {:?}", end);
match (start, end) {
(None, _) => println!("{} has no start time.", tag),
(Some(s), None) => println!("{} | {} - ...", tag, s),
(Some(s), Some(e)) => println!("{} | {} - {}", tag, s, e),
}
Ok(())
})
})
})
.map(|_| 0)
.map_err_trace()
.unwrap_or(1)
}
| {
let cmd = rt.cli().subcommand().1.unwrap(); // checked in main
let filter = {
use chrono::offset::Local;
use chrono::naive::NaiveDate;
use chrono::Datelike;
let now = Local::now();
let start = match cmd.value_of("start").map(::chrono::naive::NaiveDateTime::from_str) {
None => NaiveDate::from_ymd(now.year(), 1, 1).and_hms(0, 0, 0),
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let end = match cmd.value_of("end").map(::chrono::naive::NaiveDateTime::from_str) { | identifier_body |
year.rs | //
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use std::str::FromStr;
use filters::filter::Filter;
use chrono::NaiveDateTime;
use libimagerror::trace::trace_error;
use libimagerror::trace::MapErrTrace;
use libimagerror::iter::TraceIterator;
use libimagstore::store::FileLockEntry;
use libimagtimetrack::timetrackingstore::TimeTrackStore;
use libimagtimetrack::timetracking::TimeTracking;
use libimagtimetrack::tag::TimeTrackingTag;
use libimagtimetrack::iter::filter::*;
use libimagrt::runtime::Runtime;
pub fn year(rt: &Runtime) -> i32 {
let cmd = rt.cli().subcommand().1.unwrap(); // checked in main
let filter = {
use chrono::offset::Local;
use chrono::naive::NaiveDate;
use chrono::Datelike;
let now = Local::now();
let start = match cmd.value_of("start").map(::chrono::naive::NaiveDateTime::from_str) {
None => NaiveDate::from_ymd(now.year(), 1, 1).and_hms(0, 0, 0),
Some(Ok(dt)) => dt,
Some(Err(e)) => |
};
let end = match cmd.value_of("end").map(::chrono::naive::NaiveDateTime::from_str) {
None => {
NaiveDate::from_ymd(now.year() + 1, 1, 1).and_hms(0, 0, 0)
},
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let tags = cmd
.values_of("tags")
.map(|ts| ts.into_iter().map(String::from).map(TimeTrackingTag::from).collect());
let start_time_filter = has_start_time_where(move |dt: &NaiveDateTime| {
start <= *dt
});
let end_time_filter = has_end_time_where(move |dt: &NaiveDateTime| {
end >= *dt
});
let tags_filter = move |fle: &FileLockEntry| {
match tags {
Some(ref tags) => has_one_of_tags(&tags).filter(fle),
None => true,
}
};
tags_filter.and(start_time_filter).and(end_time_filter)
};
rt.store()
.get_timetrackings()
.and_then(|iter| {
iter.trace_unwrap()
.filter(|e| filter.filter(e))
.fold(Ok(()), |acc, e| {
acc.and_then(|_| {
debug!("Processing {:?}", e.get_location());
let tag = e.get_timetrack_tag()?;
debug!(" -> tag = {:?}", tag);
let start = e.get_start_datetime()?;
debug!(" -> start = {:?}", start);
let end = e.get_end_datetime()?;
debug!(" -> end = {:?}", end);
match (start, end) {
(None, _) => println!("{} has no start time.", tag),
(Some(s), None) => println!("{} | {} - ...", tag, s),
(Some(s), Some(e)) => println!("{} | {} - {}", tag, s, e),
}
Ok(())
})
})
})
.map(|_| 0)
.map_err_trace()
.unwrap_or(1)
}
| {
trace_error(&e);
return 1
} | conditional_block |
year.rs | //
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use std::str::FromStr;
use filters::filter::Filter;
use chrono::NaiveDateTime;
use libimagerror::trace::trace_error;
use libimagerror::trace::MapErrTrace;
use libimagerror::iter::TraceIterator;
use libimagstore::store::FileLockEntry;
use libimagtimetrack::timetrackingstore::TimeTrackStore;
use libimagtimetrack::timetracking::TimeTracking;
use libimagtimetrack::tag::TimeTrackingTag;
use libimagtimetrack::iter::filter::*;
use libimagrt::runtime::Runtime;
pub fn | (rt: &Runtime) -> i32 {
let cmd = rt.cli().subcommand().1.unwrap(); // checked in main
let filter = {
use chrono::offset::Local;
use chrono::naive::NaiveDate;
use chrono::Datelike;
let now = Local::now();
let start = match cmd.value_of("start").map(::chrono::naive::NaiveDateTime::from_str) {
None => NaiveDate::from_ymd(now.year(), 1, 1).and_hms(0, 0, 0),
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let end = match cmd.value_of("end").map(::chrono::naive::NaiveDateTime::from_str) {
None => {
NaiveDate::from_ymd(now.year() + 1, 1, 1).and_hms(0, 0, 0)
},
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let tags = cmd
.values_of("tags")
.map(|ts| ts.into_iter().map(String::from).map(TimeTrackingTag::from).collect());
let start_time_filter = has_start_time_where(move |dt: &NaiveDateTime| {
start <= *dt
});
let end_time_filter = has_end_time_where(move |dt: &NaiveDateTime| {
end >= *dt
});
let tags_filter = move |fle: &FileLockEntry| {
match tags {
Some(ref tags) => has_one_of_tags(&tags).filter(fle),
None => true,
}
};
tags_filter.and(start_time_filter).and(end_time_filter)
};
rt.store()
.get_timetrackings()
.and_then(|iter| {
iter.trace_unwrap()
.filter(|e| filter.filter(e))
.fold(Ok(()), |acc, e| {
acc.and_then(|_| {
debug!("Processing {:?}", e.get_location());
let tag = e.get_timetrack_tag()?;
debug!(" -> tag = {:?}", tag);
let start = e.get_start_datetime()?;
debug!(" -> start = {:?}", start);
let end = e.get_end_datetime()?;
debug!(" -> end = {:?}", end);
match (start, end) {
(None, _) => println!("{} has no start time.", tag),
(Some(s), None) => println!("{} | {} - ...", tag, s),
(Some(s), Some(e)) => println!("{} | {} - {}", tag, s, e),
}
Ok(())
})
})
})
.map(|_| 0)
.map_err_trace()
.unwrap_or(1)
}
| year | identifier_name |
year.rs | //
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use std::str::FromStr;
use filters::filter::Filter;
use chrono::NaiveDateTime;
use libimagerror::trace::trace_error;
use libimagerror::trace::MapErrTrace;
use libimagerror::iter::TraceIterator;
use libimagstore::store::FileLockEntry;
use libimagtimetrack::timetrackingstore::TimeTrackStore;
use libimagtimetrack::timetracking::TimeTracking;
use libimagtimetrack::tag::TimeTrackingTag;
use libimagtimetrack::iter::filter::*;
use libimagrt::runtime::Runtime;
pub fn year(rt: &Runtime) -> i32 {
let cmd = rt.cli().subcommand().1.unwrap(); // checked in main
let filter = {
use chrono::offset::Local; | let start = match cmd.value_of("start").map(::chrono::naive::NaiveDateTime::from_str) {
None => NaiveDate::from_ymd(now.year(), 1, 1).and_hms(0, 0, 0),
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let end = match cmd.value_of("end").map(::chrono::naive::NaiveDateTime::from_str) {
None => {
NaiveDate::from_ymd(now.year() + 1, 1, 1).and_hms(0, 0, 0)
},
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let tags = cmd
.values_of("tags")
.map(|ts| ts.into_iter().map(String::from).map(TimeTrackingTag::from).collect());
let start_time_filter = has_start_time_where(move |dt: &NaiveDateTime| {
start <= *dt
});
let end_time_filter = has_end_time_where(move |dt: &NaiveDateTime| {
end >= *dt
});
let tags_filter = move |fle: &FileLockEntry| {
match tags {
Some(ref tags) => has_one_of_tags(&tags).filter(fle),
None => true,
}
};
tags_filter.and(start_time_filter).and(end_time_filter)
};
rt.store()
.get_timetrackings()
.and_then(|iter| {
iter.trace_unwrap()
.filter(|e| filter.filter(e))
.fold(Ok(()), |acc, e| {
acc.and_then(|_| {
debug!("Processing {:?}", e.get_location());
let tag = e.get_timetrack_tag()?;
debug!(" -> tag = {:?}", tag);
let start = e.get_start_datetime()?;
debug!(" -> start = {:?}", start);
let end = e.get_end_datetime()?;
debug!(" -> end = {:?}", end);
match (start, end) {
(None, _) => println!("{} has no start time.", tag),
(Some(s), None) => println!("{} | {} - ...", tag, s),
(Some(s), Some(e)) => println!("{} | {} - {}", tag, s, e),
}
Ok(())
})
})
})
.map(|_| 0)
.map_err_trace()
.unwrap_or(1)
} | use chrono::naive::NaiveDate;
use chrono::Datelike;
let now = Local::now();
| random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
extern crate ipc_channel;
extern crate msg;
#[macro_use]
extern crate serde_derive;
pub extern crate rust_webvr as webvr;
mod webvr_traits;
pub use webvr::VRDisplayData as WebVRDisplayData;
pub use webvr::VRDisplayCapabilities as WebVRDisplayCapabilities;
pub use webvr::VRDisplayEvent as WebVRDisplayEvent;
pub use webvr::VRDisplayEventReason as WebVRDisplayEventReason;
pub use webvr::VREye as WebVREye;
pub use webvr::VREyeParameters as WebVREyeParameters;
pub use webvr::VRFieldOfView as WebVRFieldOfView;
pub use webvr::VRFrameData as WebVRFrameData;
pub use webvr::VRLayer as WebVRLayer; | pub use webvr::VRStageParameters as WebVRStageParameters;
pub use webvr_traits::{WebVRMsg, WebVRResult}; | pub use webvr::VRPose as WebVRPose; | random_line_split |
os_release.rs | use std::fs;
pub enum OsReleaseId {
Amazon,
CentOs,
Debian,
Ubuntu,
}
const OS_RELEASE_PATH: &str = "/etc/os-release";
impl OsReleaseId {
fn from_os_release_str(s: &str) -> Option<Self> {
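// /etc/os-release is a simple KEY=VALUE file; only the ID= line is inspected, with
// optional surrounding quotes stripped before matching known distribution identifiers.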
let id_line = s.lines().find(|l| l.starts_with("ID="))?; | let id = id_line.trim_start_matches("ID=").trim_matches('"');
match id {
"amzn" => Some(OsReleaseId::Amazon),
"centos" => Some(OsReleaseId::CentOs),
"debian" => Some(OsReleaseId::Debian),
"ubuntu" => Some(OsReleaseId::Ubuntu),
_ => None,
}
}
pub fn parse_os_release() -> Option<Self> {
fs::read_to_string(OS_RELEASE_PATH)
.ok()
.as_deref()
.and_then(Self::from_os_release_str)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_from_os_release() {
let actual =
OsReleaseId::from_os_release_str(include_str!("os-release-data/amazonlinux-2"));
assert!(matches!(actual, Some(OsReleaseId::Amazon)));
let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/centos-7.8"));
assert!(matches!(actual, Some(OsReleaseId::CentOs)));
let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/debian-8"));
assert!(matches!(actual, Some(OsReleaseId::Debian)));
let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/ubuntu-14.04"));
assert!(matches!(actual, Some(OsReleaseId::Ubuntu)));
}
} | random_line_split |
|
os_release.rs | use std::fs;
pub enum OsReleaseId {
Amazon,
CentOs,
Debian,
Ubuntu,
}
const OS_RELEASE_PATH: &str = "/etc/os-release";
impl OsReleaseId {
fn from_os_release_str(s: &str) -> Option<Self> {
let id_line = s.lines().find(|l| l.starts_with("ID="))?;
let id = id_line.trim_start_matches("ID=").trim_matches('"');
match id {
"amzn" => Some(OsReleaseId::Amazon),
"centos" => Some(OsReleaseId::CentOs),
"debian" => Some(OsReleaseId::Debian),
"ubuntu" => Some(OsReleaseId::Ubuntu),
_ => None,
}
}
pub fn | () -> Option<Self> {
fs::read_to_string(OS_RELEASE_PATH)
.ok()
.as_deref()
.and_then(Self::from_os_release_str)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_from_os_release() {
let actual =
OsReleaseId::from_os_release_str(include_str!("os-release-data/amazonlinux-2"));
assert!(matches!(actual, Some(OsReleaseId::Amazon)));
let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/centos-7.8"));
assert!(matches!(actual, Some(OsReleaseId::CentOs)));
let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/debian-8"));
assert!(matches!(actual, Some(OsReleaseId::Debian)));
let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/ubuntu-14.04"));
assert!(matches!(actual, Some(OsReleaseId::Ubuntu)));
}
}
| parse_os_release | identifier_name |
mpmc_bounded_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
#![allow(missing_docs, dead_code)]
// http://www.1024cores.net/home/lock-free-algorithms/queues/bounded-mpmc-queue
// This queue is copy pasted from old rust stdlib.
use std::sync::Arc;
use std::cell::UnsafeCell;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Relaxed, Release, Acquire};
struct Node<T> {
sequence: AtomicUsize,
value: Option<T>,
}
unsafe impl<T: Send> Send for Node<T> {}
unsafe impl<T: Sync> Sync for Node<T> {}
struct State<T> {
pad0: [u8; 64],
buffer: Vec<UnsafeCell<Node<T>>>,
mask: usize,
pad1: [u8; 64],
enqueue_pos: AtomicUsize,
pad2: [u8; 64],
dequeue_pos: AtomicUsize,
pad3: [u8; 64],
}
unsafe impl<T: Send> Send for State<T> {}
unsafe impl<T: Send + Sync> Sync for State<T> {}
pub struct Queue<T> {
state: Arc<State<T>>,
}
impl<T> State<T> {
fn with_capacity(capacity: usize) -> State<T> {
let capacity = if capacity < 2 || (capacity & (capacity - 1)) != 0 {
if capacity < 2 {
2
} else {
// use next power of 2 as capacity
capacity.next_power_of_two()
}
} else {
capacity
};
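// Capacity is forced to a power of two (minimum 2) so that indexing can use
// `pos & mask` with `mask = capacity - 1` instead of a modulo on every access.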
let buffer = (0..capacity).map(|i| {
UnsafeCell::new(Node { sequence:AtomicUsize::new(i), value: None })
}).collect::<Vec<_>>();
State{
pad0: [0; 64],
buffer: buffer,
mask: capacity-1,
pad1: [0; 64],
enqueue_pos: AtomicUsize::new(0),
pad2: [0; 64],
dequeue_pos: AtomicUsize::new(0),
pad3: [0; 64],
}
}
fn push(&self, value: T) -> Result<(), T> {
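// Vyukov's sequence protocol: a slot at `pos` is free when its sequence equals `pos`
// (diff == 0); publishing stores `pos + 1` so readers see the value, while diff < 0
// means the buffer is currently full and the push fails with Err(value).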
let mask = self.mask;
let mut pos = self.enqueue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - pos as isize;
if diff == 0 {
let enqueue_pos = self.enqueue_pos.compare_and_swap(pos, pos+1, Relaxed);
if enqueue_pos == pos {
unsafe {
(*node.get()).value = Some(value);
(*node.get()).sequence.store(pos+1, Release);
}
break
} else {
pos = enqueue_pos;
}
} else if diff < 0 {
return Err(value);
} else {
pos = self.enqueue_pos.load(Relaxed);
}
}
Ok(())
}
fn pop(&self) -> Option<T> {
let mask = self.mask;
let mut pos = self.dequeue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - (pos + 1) as isize;
if diff == 0 {
let dequeue_pos = self.dequeue_pos.compare_and_swap(pos, pos+1, Relaxed);
if dequeue_pos == pos {
unsafe {
let value = (*node.get()).value.take();
(*node.get()).sequence.store(pos + mask + 1, Release);
return value
}
} else {
pos = dequeue_pos;
}
} else if diff < 0 {
return None
} else {
pos = self.dequeue_pos.load(Relaxed);
}
}
}
}
impl<T> Queue<T> {
pub fn with_capacity(capacity: usize) -> Queue<T> {
Queue{
state: Arc::new(State::with_capacity(capacity))
}
}
pub fn push(&self, value: T) -> Result<(), T> {
self.state.push(value)
}
pub fn pop(&self) -> Option<T> {
self.state.pop()
}
}
impl<T> Clone for Queue<T> {
fn clone(&self) -> Queue<T> {
Queue { state: self.state.clone() }
}
}
#[cfg(test)]
mod tests {
use std::thread;
use std::sync::mpsc::channel;
use super::Queue;
| let q = Queue::with_capacity(nthreads*nmsgs);
assert_eq!(None, q.pop());
let (tx, rx) = channel();
for _ in 0..nthreads {
let q = q.clone();
let tx = tx.clone();
thread::spawn(move || {
let q = q;
for i in 0..nmsgs {
assert!(q.push(i).is_ok());
}
tx.send(()).unwrap();
});
}
let mut completion_rxs = vec![];
for _ in 0..nthreads {
let (tx, rx) = channel();
completion_rxs.push(rx);
let q = q.clone();
thread::spawn(move || {
let q = q;
let mut i = 0;
loop {
match q.pop() {
None => {},
Some(_) => {
i += 1;
if i == nmsgs { break }
}
}
}
tx.send(i).unwrap();
});
}
for rx in &mut completion_rxs {
assert_eq!(nmsgs, rx.recv().unwrap());
}
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
} | #[test]
fn test() {
let nthreads = 8;
let nmsgs = 1000; | random_line_split |
mpmc_bounded_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
#![allow(missing_docs, dead_code)]
// http://www.1024cores.net/home/lock-free-algorithms/queues/bounded-mpmc-queue
// This queue is copy pasted from old rust stdlib.
use std::sync::Arc;
use std::cell::UnsafeCell;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Relaxed, Release, Acquire};
struct Node<T> {
sequence: AtomicUsize,
value: Option<T>,
}
unsafe impl<T: Send> Send for Node<T> {}
unsafe impl<T: Sync> Sync for Node<T> {}
struct State<T> {
pad0: [u8; 64],
buffer: Vec<UnsafeCell<Node<T>>>,
mask: usize,
pad1: [u8; 64],
enqueue_pos: AtomicUsize,
pad2: [u8; 64],
dequeue_pos: AtomicUsize,
pad3: [u8; 64],
}
unsafe impl<T: Send> Send for State<T> {}
unsafe impl<T: Send + Sync> Sync for State<T> {}
pub struct | <T> {
state: Arc<State<T>>,
}
impl<T> State<T> {
fn with_capacity(capacity: usize) -> State<T> {
let capacity = if capacity < 2 || (capacity & (capacity - 1)) != 0 {
if capacity < 2 {
2
} else {
// use next power of 2 as capacity
capacity.next_power_of_two()
}
} else {
capacity
};
let buffer = (0..capacity).map(|i| {
UnsafeCell::new(Node { sequence:AtomicUsize::new(i), value: None })
}).collect::<Vec<_>>();
State{
pad0: [0; 64],
buffer: buffer,
mask: capacity-1,
pad1: [0; 64],
enqueue_pos: AtomicUsize::new(0),
pad2: [0; 64],
dequeue_pos: AtomicUsize::new(0),
pad3: [0; 64],
}
}
fn push(&self, value: T) -> Result<(), T> {
let mask = self.mask;
let mut pos = self.enqueue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - pos as isize;
if diff == 0 {
let enqueue_pos = self.enqueue_pos.compare_and_swap(pos, pos+1, Relaxed);
if enqueue_pos == pos {
unsafe {
(*node.get()).value = Some(value);
(*node.get()).sequence.store(pos+1, Release);
}
break
} else {
pos = enqueue_pos;
}
} else if diff < 0 {
return Err(value);
} else {
pos = self.enqueue_pos.load(Relaxed);
}
}
Ok(())
}
fn pop(&self) -> Option<T> {
let mask = self.mask;
let mut pos = self.dequeue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - (pos + 1) as isize;
if diff == 0 {
let dequeue_pos = self.dequeue_pos.compare_and_swap(pos, pos+1, Relaxed);
if dequeue_pos == pos {
unsafe {
let value = (*node.get()).value.take();
(*node.get()).sequence.store(pos + mask + 1, Release);
return value
}
} else {
pos = dequeue_pos;
}
} else if diff < 0 {
return None
} else {
pos = self.dequeue_pos.load(Relaxed);
}
}
}
}
impl<T> Queue<T> {
pub fn with_capacity(capacity: usize) -> Queue<T> {
Queue{
state: Arc::new(State::with_capacity(capacity))
}
}
pub fn push(&self, value: T) -> Result<(), T> {
self.state.push(value)
}
pub fn pop(&self) -> Option<T> {
self.state.pop()
}
}
impl<T> Clone for Queue<T> {
fn clone(&self) -> Queue<T> {
Queue { state: self.state.clone() }
}
}
#[cfg(test)]
mod tests {
use std::thread;
use std::sync::mpsc::channel;
use super::Queue;
#[test]
fn test() {
let nthreads = 8;
let nmsgs = 1000;
let q = Queue::with_capacity(nthreads*nmsgs);
assert_eq!(None, q.pop());
let (tx, rx) = channel();
for _ in 0..nthreads {
let q = q.clone();
let tx = tx.clone();
thread::spawn(move || {
let q = q;
for i in 0..nmsgs {
assert!(q.push(i).is_ok());
}
tx.send(()).unwrap();
});
}
let mut completion_rxs = vec![];
for _ in 0..nthreads {
let (tx, rx) = channel();
completion_rxs.push(rx);
let q = q.clone();
thread::spawn(move || {
let q = q;
let mut i = 0;
loop {
match q.pop() {
None => {},
Some(_) => {
i += 1;
if i == nmsgs { break }
}
}
}
tx.send(i).unwrap();
});
}
for rx in &mut completion_rxs {
assert_eq!(nmsgs, rx.recv().unwrap());
}
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
}
| Queue | identifier_name |
mpmc_bounded_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
#![allow(missing_docs, dead_code)]
// http://www.1024cores.net/home/lock-free-algorithms/queues/bounded-mpmc-queue
// This queue is copy pasted from old rust stdlib.
use std::sync::Arc;
use std::cell::UnsafeCell;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Relaxed, Release, Acquire};
struct Node<T> {
sequence: AtomicUsize,
value: Option<T>,
}
unsafe impl<T: Send> Send for Node<T> {}
unsafe impl<T: Sync> Sync for Node<T> {}
struct State<T> {
pad0: [u8; 64],
buffer: Vec<UnsafeCell<Node<T>>>,
mask: usize,
pad1: [u8; 64],
enqueue_pos: AtomicUsize,
pad2: [u8; 64],
dequeue_pos: AtomicUsize,
pad3: [u8; 64],
}
unsafe impl<T: Send> Send for State<T> {}
unsafe impl<T: Send + Sync> Sync for State<T> {}
pub struct Queue<T> {
state: Arc<State<T>>,
}
impl<T> State<T> {
fn with_capacity(capacity: usize) -> State<T> {
let capacity = if capacity < 2 || (capacity & (capacity - 1)) != 0 {
if capacity < 2 {
2
} else {
// use next power of 2 as capacity
capacity.next_power_of_two()
}
} else {
capacity
};
let buffer = (0..capacity).map(|i| {
UnsafeCell::new(Node { sequence: AtomicUsize::new(i), value: None })
}).collect::<Vec<_>>();
State{
pad0: [0; 64],
buffer: buffer,
mask: capacity-1,
pad1: [0; 64],
enqueue_pos: AtomicUsize::new(0),
pad2: [0; 64],
dequeue_pos: AtomicUsize::new(0),
pad3: [0; 64],
}
}
fn push(&self, value: T) -> Result<(), T> {
let mask = self.mask;
let mut pos = self.enqueue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - pos as isize;
if diff == 0 {
let enqueue_pos = self.enqueue_pos.compare_and_swap(pos, pos+1, Relaxed);
if enqueue_pos == pos {
unsafe {
(*node.get()).value = Some(value);
(*node.get()).sequence.store(pos+1, Release);
}
break
} else {
pos = enqueue_pos;
}
} else if diff < 0 {
return Err(value);
} else {
pos = self.enqueue_pos.load(Relaxed);
}
}
Ok(())
}
fn pop(&self) -> Option<T> {
let mask = self.mask;
let mut pos = self.dequeue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - (pos + 1) as isize;
if diff == 0 {
let dequeue_pos = self.dequeue_pos.compare_and_swap(pos, pos+1, Relaxed);
if dequeue_pos == pos {
unsafe {
let value = (*node.get()).value.take();
(*node.get()).sequence.store(pos + mask + 1, Release);
return value
}
} else {
pos = dequeue_pos;
}
} else if diff < 0 {
return None
} else {
pos = self.dequeue_pos.load(Relaxed);
}
}
}
}
impl<T> Queue<T> {
pub fn with_capacity(capacity: usize) -> Queue<T> {
Queue{
state: Arc::new(State::with_capacity(capacity))
}
}
pub fn push(&self, value: T) -> Result<(), T> {
self.state.push(value)
}
pub fn pop(&self) -> Option<T> {
self.state.pop()
}
}
impl<T> Clone for Queue<T> {
fn clone(&self) -> Queue<T> {
Queue { state: self.state.clone() }
}
}
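// A hypothetical usage sketch (not from the original file): a minimal,
// single-threaded round trip through the API above, assuming the `Queue`
// type defined in this file. The #[cfg(test)] module below exercises the
// same API under contention from several threads.
#[allow(dead_code)]
fn usage_sketch() {
    let q: Queue<u32> = Queue::with_capacity(4); // capacities are rounded up to a power of two
    assert!(q.push(1).is_ok());
    assert!(q.push(2).is_ok());
    // Clones share the same Arc<State<T>>, so both handles see the same buffer.
    let q2 = q.clone();
    assert_eq!(q2.pop(), Some(1));
    assert_eq!(q2.pop(), Some(2));
    assert_eq!(q.pop(), None); // drained
}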
#[cfg(test)]
mod tests {
use std::thread;
use std::sync::mpsc::channel;
use super::Queue;
#[test]
fn test() {
let nthreads = 8;
let nmsgs = 1000;
let q = Queue::with_capacity(nthreads*nmsgs);
assert_eq!(None, q.pop());
let (tx, rx) = channel();
for _ in 0..nthreads {
let q = q.clone();
let tx = tx.clone();
thread::spawn(move || {
let q = q;
for i in 0..nmsgs {
assert!(q.push(i).is_ok());
}
tx.send(()).unwrap();
});
}
let mut completion_rxs = vec![];
for _ in 0..nthreads {
let (tx, rx) = channel();
completion_rxs.push(rx);
let q = q.clone();
thread::spawn(move || {
let q = q;
let mut i = 0;
loop {
match q.pop() {
None => | ,
Some(_) => {
i += 1;
if i == nmsgs { break }
}
}
}
tx.send(i).unwrap();
});
}
for rx in &mut completion_rxs {
assert_eq!(nmsgs, rx.recv().unwrap());
}
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
}
| {} | conditional_block |
wrapper.rs | t a potentially non-trivial
/// performance cost) by implementing Drop and making LayoutFoo non-Copy.
#[derive(Clone, Copy)]
pub struct GeckoNode<'ln>(pub &'ln RawGeckoNode);
impl<'ln> fmt::Debug for GeckoNode<'ln> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(el) = self.as_element() {
el.fmt(f)
} else {
if self.is_text_node() {
write!(f, "<text node> ({:#x})", self.opaque().0)
} else {
write!(f, "<non-text node> ({:#x})", self.opaque().0)
}
}
}
}
impl<'ln> GeckoNode<'ln> {
fn from_content(content: &'ln nsIContent) -> Self {
GeckoNode(&content._base)
}
fn node_info(&self) -> &structs::NodeInfo {
debug_assert!(!self.0.mNodeInfo.mRawPtr.is_null());
unsafe { &*self.0.mNodeInfo.mRawPtr }
}
fn first_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mFirstChild.as_ref().map(GeckoNode::from_content) }
}
fn last_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { Gecko_GetLastChild(self.0).map(GeckoNode) }
}
fn prev_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mPreviousSibling.as_ref().map(GeckoNode::from_content) }
}
fn next_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mNextSibling.as_ref().map(GeckoNode::from_content) }
}
}
impl<'ln> NodeInfo for GeckoNode<'ln> {
fn is_element(&self) -> bool {
use gecko_bindings::structs::nsINode_BooleanFlag;
self.0.mBoolFlags & (1u32 << nsINode_BooleanFlag::NodeIsElement as u32) != 0
}
fn is_text_node(&self) -> bool {
// This is a DOM constant that isn't going to change.
const TEXT_NODE: u16 = 3;
self.node_info().mInner.mNodeType == TEXT_NODE
}
}
impl<'ln> TNode for GeckoNode<'ln> {
type ConcreteElement = GeckoElement<'ln>;
type ConcreteChildrenIterator = GeckoChildrenIterator<'ln>;
fn to_unsafe(&self) -> UnsafeNode {
(self.0 as *const _ as usize, 0)
}
unsafe fn from_unsafe(n: &UnsafeNode) -> Self {
GeckoNode(&*(n.0 as *mut RawGeckoNode))
}
fn children(self) -> LayoutIterator<GeckoChildrenIterator<'ln>> {
let maybe_iter = unsafe { Gecko_MaybeCreateStyleChildrenIterator(self.0) };
if let Some(iter) = maybe_iter.into_owned_opt() {
LayoutIterator(GeckoChildrenIterator::GeckoIterator(iter))
} else {
LayoutIterator(GeckoChildrenIterator::Current(self.first_child()))
}
}
fn opaque(&self) -> OpaqueNode {
let ptr: usize = self.0 as *const _ as usize;
OpaqueNode(ptr)
}
fn debug_id(self) -> usize {
unimplemented!()
}
fn as_element(&self) -> Option<GeckoElement<'ln>> {
if self.is_element() {
unsafe { Some(GeckoElement(&*(self.0 as *const _ as *const RawGeckoElement))) }
} else {
None
}
}
fn can_be_fragmented(&self) -> bool {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
false
}
unsafe fn set_can_be_fragmented(&self, _value: bool) {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
}
fn parent_node(&self) -> Option<Self> {
unsafe { bindings::Gecko_GetParentNode(self.0).map(GeckoNode) }
}
fn is_in_doc(&self) -> bool {
unsafe { bindings::Gecko_IsInDocument(self.0) }
}
fn needs_dirty_on_viewport_size_changed(&self) -> bool {
// Gecko's node doesn't have the DIRTY_ON_VIEWPORT_SIZE_CHANGE flag,
// so we force them to be dirtied on viewport size change, regardless of
// whether they use viewport percentage sizes or not.
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
true
}
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
unsafe fn set_dirty_on_viewport_size_changed(&self) {}
}
/// A wrapper on top of two kinds of iterators, depending on the parent being
/// iterated.
///
/// We generally iterate children by traversing the light-tree siblings of the
/// first child like Servo does.
///
/// However, for nodes with anonymous children, we use a custom (heavier-weight)
/// Gecko-implemented iterator.
///
/// FIXME(emilio): If we take into account shadow DOM, we're going to need the
/// flat tree pretty much always. We can try to optimize the case where there's
/// no shadow root sibling, probably.
pub enum GeckoChildrenIterator<'a> {
/// A simple iterator that tracks the current node being iterated and
/// replaces it with the next sibling when requested.
Current(Option<GeckoNode<'a>>),
/// A Gecko-implemented iterator we need to drop appropriately.
GeckoIterator(bindings::StyleChildrenIteratorOwned),
}
impl<'a> Drop for GeckoChildrenIterator<'a> {
fn drop(&mut self) {
if let GeckoChildrenIterator::GeckoIterator(ref it) = *self {
unsafe {
Gecko_DropStyleChildrenIterator(ptr::read(it as *const _));
}
}
}
}
impl<'a> Iterator for GeckoChildrenIterator<'a> {
type Item = GeckoNode<'a>;
fn next(&mut self) -> Option<GeckoNode<'a>> {
match *self {
GeckoChildrenIterator::Current(curr) => {
let next = curr.and_then(|node| node.next_sibling());
*self = GeckoChildrenIterator::Current(next);
curr
},
GeckoChildrenIterator::GeckoIterator(ref mut it) => unsafe {
Gecko_GetNextStyleChild(it).map(GeckoNode)
}
}
}
}
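// A hypothetical, FFI-free sketch (not from the original file) of the pattern
// used by the GeckoChildrenIterator::Current arm above: yield the current
// node, then replace it with its next sibling. `ToyNode` and `SiblingIter`
// are invented stand-ins for GeckoNode and the real iterator.
#[allow(dead_code)]
mod sibling_iter_sketch {
    pub struct ToyNode {
        pub next_sibling: Option<Box<ToyNode>>,
    }

    pub struct SiblingIter<'a> {
        pub current: Option<&'a ToyNode>,
    }

    impl<'a> Iterator for SiblingIter<'a> {
        type Item = &'a ToyNode;
        fn next(&mut self) -> Option<Self::Item> {
            // Return the current node and advance to its next sibling,
            // mirroring GeckoChildrenIterator::Current above.
            let curr = self.current;
            self.current = curr.and_then(|n| n.next_sibling.as_ref().map(|b| &**b));
            curr
        }
    }
}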
/// A simple wrapper over a non-null Gecko `Element` pointer.
#[derive(Clone, Copy)]
pub struct GeckoElement<'le>(pub &'le RawGeckoElement);
impl<'le> fmt::Debug for GeckoElement<'le> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "<{}", self.get_local_name()));
if let Some(id) = self.get_id() {
try!(write!(f, " id={}", id));
}
write!(f, "> ({:#x})", self.as_node().opaque().0)
}
}
impl<'le> GeckoElement<'le> {
/// Parse the style attribute of an element.
pub fn parse_style_attribute(value: &str) -> PropertyDeclarationBlock {
// FIXME(bholley): Real base URL and error reporter.
let base_url = &*DUMMY_BASE_URL;
// FIXME(heycam): Needs real ParserContextExtraData so that URLs parse
// properly.
let extra_data = ParserContextExtraData::default();
parse_style_attribute(value, &base_url, Box::new(StdoutErrorReporter), extra_data)
}
fn flags(&self) -> u32 {
self.raw_node()._base._base_1.mFlags
}
fn raw_node(&self) -> &RawGeckoNode {
&(self.0)._base._base._base
}
// FIXME: We can implement this without OOL calls, but we can't easily given
// GeckoNode is a raw reference.
//
// We can use a Cell<T>, but that's a bit of a pain.
fn set_flags(&self, flags: u32) {
unsafe { Gecko_SetNodeFlags(self.as_node().0, flags) }
}
fn unset_flags(&self, flags: u32) {
unsafe { Gecko_UnsetNodeFlags(self.as_node().0, flags) }
}
/// Clear the element data for a given element.
pub fn clear_data(&self) {
let ptr = self.0.mServoData.get();
if !ptr.is_null() {
debug!("Dropping ElementData for {:?}", self);
let data = unsafe { Box::from_raw(self.0.mServoData.get()) };
self.0.mServoData.set(ptr::null_mut());
// Perform a mutable borrow of the data in debug builds. This
// serves as an assertion that there are no outstanding borrows
// when we destroy the data.
debug_assert!({ let _ = data.borrow_mut(); true });
}
}
/// Ensures the element has data, returning the existing data or allocating
/// it.
///
/// Only safe to call with exclusive access to the element, given otherwise
/// it could race to allocate and leak.
pub unsafe fn ensure_data(&self) -> &AtomicRefCell<ElementData> {
match self.get_data() {
Some(x) => x,
None => {
debug!("Creating ElementData for {:?}", self);
let ptr = Box::into_raw(Box::new(AtomicRefCell::new(ElementData::new(None))));
self.0.mServoData.set(ptr);
unsafe { &* ptr }
},
}
}
/// Creates a blank snapshot for this element.
pub fn create_snapshot(&self) -> Snapshot {
Snapshot::new(*self)
}
}
lazy_static! {
/// A dummy base url in order to get it where we don't have any available.
///
/// We need to get rid of this sooner than later.
pub static ref DUMMY_BASE_URL: ServoUrl = {
ServoUrl::parse("http://www.example.org").unwrap()
};
}
impl<'le> TElement for GeckoElement<'le> {
type ConcreteNode = GeckoNode<'le>;
fn as_node(&self) -> Self::ConcreteNode {
unsafe { GeckoNode(&*(self.0 as *const _ as *const RawGeckoNode)) }
}
fn style_attribute(&self) -> Option<&Arc<RwLock<PropertyDeclarationBlock>>> {
let declarations = unsafe { Gecko_GetServoDeclarationBlock(self.0) };
declarations.map(|s| s.as_arc_opt()).unwrap_or(None)
}
fn get_state(&self) -> ElementState {
unsafe {
ElementState::from_bits_truncate(Gecko_ElementState(self.0))
}
}
#[inline]
fn has_attr(&self, namespace: &Namespace, attr: &Atom) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
namespace.0.as_ptr(),
attr.as_ptr())
}
}
#[inline]
fn attr_equals(&self, namespace: &Namespace, attr: &Atom, val: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
namespace.0.as_ptr(),
attr.as_ptr(),
val.as_ptr(),
/* ignoreCase = */ false)
}
}
fn existing_style_for_restyle_damage<'a>(&'a self,
current_cv: Option<&'a Arc<ComputedValues>>,
pseudo: Option<&PseudoElement>)
-> Option<&'a nsStyleContext> {
if current_cv.is_none() {
// Don't bother in doing an ffi call to get null back.
return None;
}
unsafe {
let atom_ptr = pseudo.map(|p| p.as_atom().as_ptr())
.unwrap_or(ptr::null_mut());
let context_ptr = Gecko_GetStyleContext(self.as_node().0, atom_ptr);
context_ptr.as_ref()
}
}
fn has_dirty_descendants(&self) -> bool {
self.flags() & (NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
}
unsafe fn set_dirty_descendants(&self) {
debug!("Setting dirty descendants: {:?}", self);
self.set_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
unsafe fn unset_dirty_descendants(&self) {
self.unset_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
fn store_children_to_process(&self, _: isize) {
// This is only used for bottom-up traversal, and is thus a no-op for Gecko.
}
fn did_process_child(&self) -> isize {
panic!("Atomic child count not implemented in Gecko");
}
fn get_data(&self) -> Option<&AtomicRefCell<ElementData>> {
unsafe { self.0.mServoData.get().as_ref() }
}
fn skip_root_and_item_based_display_fixup(&self) -> bool {
// We don't want to fix up display values of native anonymous content.
// Additionally, we want to skip root-based display fixup for document
// level native anonymous content subtree roots, since they're not
// really roots from the style fixup perspective. Checking that we
// are NAC handles both cases.
self.flags() & (NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE as u32) != 0
}
}
impl<'le> PartialEq for GeckoElement<'le> {
fn eq(&self, other: &Self) -> bool {
self.0 as *const _ == other.0 as *const _
}
}
impl<'le> PresentationalHintsSynthetizer for GeckoElement<'le> {
fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, _hints: &mut V)
where V: Push<ApplicableDeclarationBlock>,
{
// FIXME(bholley) - Need to implement this.
}
}
impl<'le> ::selectors::Element for GeckoElement<'le> {
fn parent_element(&self) -> Option<Self> {
| fn first_child_element(&self) -> Option<Self> {
let mut child = self.as_node().first_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.next_sibling();
}
None
}
fn last_child_element(&self) -> Option<Self> {
let mut child = self.as_node().last_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.prev_sibling();
}
None
}
fn prev_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().prev_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.prev_sibling();
}
None
}
fn next_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().next_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.next_sibling();
}
None
}
fn is_root(&self) -> bool {
unsafe {
Gecko_IsRootElement(self.0)
}
}
fn is_empty(&self) -> bool {
// XXX(emilio): Implement this properly.
false
}
fn get_local_name(&self) -> &WeakAtom {
unsafe {
WeakAtom::new(self.as_node().node_info().mInner.mName.raw())
}
}
fn get_namespace(&self) -> &WeakNamespace {
unsafe {
WeakNamespace::new(Gecko_Namespace(self.0))
}
}
fn match_non_ts_pseudo_class(&self, pseudo_class: NonTSPseudoClass) -> bool {
match pseudo_class {
// https://github.com/servo/servo/issues/8718
NonTSPseudoClass::AnyLink => unsafe { Gecko_IsLink(self.0) },
NonTSPseudoClass::Link => unsafe { Gecko_IsUnvisitedLink(self.0) },
NonTSPseudoClass::Visited => unsafe { Gecko_IsVisitedLink(self.0) },
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus |
NonTSPseudoClass::Hover |
NonTSPseudoClass::Enabled |
NonTSPseudoClass::Disabled |
NonTSPseudoClass::Checked |
NonTSPseudoClass::ReadWrite |
NonTSPseudoClass::Fullscreen |
NonTSPseudoClass::Indeterminate => {
self.get_state().contains(pseudo_class.state_flag())
},
NonTSPseudoClass::ReadOnly => {
!self.get_state().contains(pseudo_class.state_flag())
}
NonTSPseudoClass::MozBrowserFrame => unsafe {
Gecko_MatchesElement(pseudo_class.to_gecko_pseudoclasstype().unwrap(), self.0)
}
}
}
fn get_id(&self) -> Option<Atom> {
let ptr = unsafe {
bindings::Gecko_AtomAttrValue(self.0,
atom!("id").as_ptr())
};
if ptr.is_null() {
None
} else {
Some(Atom::from(ptr))
}
}
fn has_class(&self, name: &Atom) -> bool {
snapshot_helpers::has_class(self.0,
name,
Gecko_ClassOrClassList)
}
fn each_class<F>(&self, callback: F)
where F: FnMut(&Atom)
{
snapshot_helpers::each_class(self.0,
callback,
Gecko_ClassOrClassList)
}
fn is_html_element_in_html_document(&self) -> bool {
unsafe {
Gecko_IsHTMLElementInHTMLDocument(self.0)
}
}
}
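// A hypothetical sketch (not from the original file): the state-backed
// pseudo-class arms in match_non_ts_pseudo_class above reduce to a bit test
// against the element's ElementState. A stripped-down model of that check,
// with the flag values invented for illustration:
#[allow(dead_code)]
mod state_flag_sketch {
    pub const IN_HOVER_STATE: u32 = 1 << 0;
    pub const IN_FOCUS_STATE: u32 = 1 << 1;

    /// True if every bit of `flag` is set in `state`, which is what
    /// ElementState::contains does for the real bitflags type.
    pub fn state_contains(state: u32, flag: u32) -> bool {
        state & flag == flag
    }
}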
/// A few helpers to help with attribute selectors and snapshotting.
pub trait AttrSelectorHelpers {
/// Returns the namespace of the selector, or null otherwise.
fn ns_or_null(&self) -> *mut nsIAtom;
/// Returns the proper selector name depending on whether the requesting
/// element is an HTML element in an HTML document or not.
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom;
}
impl AttrSelectorHelpers for AttrSelector<SelectorImpl> {
fn ns_or_null(&self) -> *mut nsIAtom {
match self.namespace {
NamespaceConstraint::Any => ptr::null_mut(),
NamespaceConstraint::Specific(ref ns) => ns.url.0.as_ptr(),
}
}
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom {
if is_html_element_in_html_document {
self.lower_name.as_ptr()
} else {
self.name.as_ptr()
}
}
}
impl<'le> ::selectors::MatchAttr for GeckoElement<'le> {
type Impl = SelectorImpl;
fn match_attr_has(&self, attr: &AttrSelector<Self::Impl>) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()))
}
}
fn match_attr_equals(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
attr.ns_or_null(),
| unsafe { bindings::Gecko_GetParentElement(self.0).map(GeckoElement) }
}
| identifier_body |
wrapper.rs | t a potentially non-trivial
/// performance cost) by implementing Drop and making LayoutFoo non-Copy.
#[derive(Clone, Copy)]
pub struct GeckoNode<'ln>(pub &'ln RawGeckoNode);
impl<'ln> fmt::Debug for GeckoNode<'ln> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(el) = self.as_element() {
el.fmt(f)
} else {
if self.is_text_node() {
write!(f, "<text node> ({:#x})", self.opaque().0)
} else {
write!(f, "<non-text node> ({:#x})", self.opaque().0)
}
}
}
}
impl<'ln> GeckoNode<'ln> {
fn from_content(content: &'ln nsIContent) -> Self {
GeckoNode(&content._base)
}
fn node_info(&self) -> &structs::NodeInfo {
debug_assert!(!self.0.mNodeInfo.mRawPtr.is_null());
unsafe { &*self.0.mNodeInfo.mRawPtr }
}
fn first_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mFirstChild.as_ref().map(GeckoNode::from_content) }
}
fn last_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { Gecko_GetLastChild(self.0).map(GeckoNode) }
}
fn prev_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mPreviousSibling.as_ref().map(GeckoNode::from_content) }
}
fn next_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mNextSibling.as_ref().map(GeckoNode::from_content) }
}
}
impl<'ln> NodeInfo for GeckoNode<'ln> {
fn is_element(&self) -> bool {
use gecko_bindings::structs::nsINode_BooleanFlag;
self.0.mBoolFlags & (1u32 << nsINode_BooleanFlag::NodeIsElement as u32) != 0
}
fn is_text_node(&self) -> bool {
// This is a DOM constant that isn't going to change.
const TEXT_NODE: u16 = 3;
self.node_info().mInner.mNodeType == TEXT_NODE
}
}
impl<'ln> TNode for GeckoNode<'ln> {
type ConcreteElement = GeckoElement<'ln>;
type ConcreteChildrenIterator = GeckoChildrenIterator<'ln>;
fn to_unsafe(&self) -> UnsafeNode {
(self.0 as *const _ as usize, 0)
}
unsafe fn from_unsafe(n: &UnsafeNode) -> Self {
GeckoNode(&*(n.0 as *mut RawGeckoNode))
}
fn children(self) -> LayoutIterator<GeckoChildrenIterator<'ln>> {
let maybe_iter = unsafe { Gecko_MaybeCreateStyleChildrenIterator(self.0) };
if let Some(iter) = maybe_iter.into_owned_opt() {
LayoutIterator(GeckoChildrenIterator::GeckoIterator(iter))
} else {
LayoutIterator(GeckoChildrenIterator::Current(self.first_child()))
}
}
fn opaque(&self) -> OpaqueNode {
let ptr: usize = self.0 as *const _ as usize;
OpaqueNode(ptr)
}
fn debug_id(self) -> usize {
unimplemented!()
}
fn as_element(&self) -> Option<GeckoElement<'ln>> {
if self.is_element() {
unsafe { Some(GeckoElement(&*(self.0 as *const _ as *const RawGeckoElement))) }
} else {
None
}
}
fn can_be_fragmented(&self) -> bool {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
false
}
unsafe fn set_can_be_fragmented(&self, _value: bool) {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
}
fn parent_node(&self) -> Option<Self> {
unsafe { bindings::Gecko_GetParentNode(self.0).map(GeckoNode) }
}
fn is_in_doc(&self) -> bool {
unsafe { bindings::Gecko_IsInDocument(self.0) }
}
fn needs_dirty_on_viewport_size_changed(&self) -> bool {
// Gecko's node doesn't have the DIRTY_ON_VIEWPORT_SIZE_CHANGE flag,
// so we force them to be dirtied on viewport size change, regardless of
// whether they use viewport percentage sizes or not.
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
true
}
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
unsafe fn set_dirty_on_viewport_size_changed(&self) {}
}
/// A wrapper on top of two kinds of iterators, depending on the parent being
/// iterated.
///
/// We generally iterate children by traversing the light-tree siblings of the
/// first child like Servo does.
///
/// However, for nodes with anonymous children, we use a custom (heavier-weight)
/// Gecko-implemented iterator.
///
/// FIXME(emilio): If we take into account shadow DOM, we're going to need the
/// flat tree pretty much always. We can try to optimize the case where there's
/// no shadow root sibling, probably.
pub enum GeckoChildrenIterator<'a> {
/// A simple iterator that tracks the current node being iterated and
/// replaces it with the next sibling when requested.
Current(Option<GeckoNode<'a>>),
/// A Gecko-implemented iterator we need to drop appropriately.
GeckoIterator(bindings::StyleChildrenIteratorOwned),
}
impl<'a> Drop for GeckoChildrenIterator<'a> {
fn drop(&mut self) {
if let GeckoChildrenIterator::GeckoIterator(ref it) = *self {
unsafe {
Gecko_DropStyleChildrenIterator(ptr::read(it as *const _));
}
}
}
}
impl<'a> Iterator for GeckoChildrenIterator<'a> {
type Item = GeckoNode<'a>;
fn next(&mut self) -> Option<GeckoNode<'a>> {
match *self {
GeckoChildrenIterator::Current(curr) => {
let next = curr.and_then(|node| node.next_sibling());
*self = GeckoChildrenIterator::Current(next);
curr
},
GeckoChildrenIterator::GeckoIterator(ref mut it) => unsafe {
Gecko_GetNextStyleChild(it).map(GeckoNode)
}
}
}
}
/// A simple wrapper over a non-null Gecko `Element` pointer.
#[derive(Clone, Copy)]
pub struct GeckoElement<'le>(pub &'le RawGeckoElement);
impl<'le> fmt::Debug for GeckoElement<'le> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "<{}", self.get_local_name()));
if let Some(id) = self.get_id() {
try!(write!(f, " id={}", id));
}
write!(f, "> ({:#x})", self.as_node().opaque().0)
}
}
impl<'le> GeckoElement<'le> {
/// Parse the style attribute of an element.
pub fn parse_style_attribute(value: &str) -> PropertyDeclarationBlock {
// FIXME(bholley): Real base URL and error reporter.
let base_url = &*DUMMY_BASE_URL;
// FIXME(heycam): Needs real ParserContextExtraData so that URLs parse
// properly.
let extra_data = ParserContextExtraData::default();
parse_style_attribute(value, &base_url, Box::new(StdoutErrorReporter), extra_data)
}
fn flags(&self) -> u32 {
self.raw_node()._base._base_1.mFlags
}
fn raw_node(&self) -> &RawGeckoNode {
&(self.0)._base._base._base
}
// FIXME: We can implement this without OOL calls, but we can't easily given
// GeckoNode is a raw reference.
//
// We can use a Cell<T>, but that's a bit of a pain.
fn set_flags(&self, flags: u32) {
unsafe { Gecko_SetNodeFlags(self.as_node().0, flags) }
}
fn unset_flags(&self, flags: u32) {
unsafe { Gecko_UnsetNodeFlags(self.as_node().0, flags) }
}
/// Clear the element data for a given element.
pub fn clear_data(&self) {
let ptr = self.0.mServoData.get();
if !ptr.is_null() {
debug!("Dropping ElementData for {:?}", self);
let data = unsafe { Box::from_raw(self.0.mServoData.get()) };
self.0.mServoData.set(ptr::null_mut());
// Perform a mutable borrow of the data in debug builds. This
// serves as an assertion that there are no outstanding borrows
// when we destroy the data.
debug_assert!({ let _ = data.borrow_mut(); true });
}
}
/// Ensures the element has data, returning the existing data or allocating
/// it.
///
/// Only safe to call with exclusive access to the element, given otherwise
/// it could race to allocate and leak.
pub unsafe fn ensure_data(&self) -> &AtomicRefCell<ElementData> {
match self.get_data() {
Some(x) => x,
None => {
debug!("Creating ElementData for {:?}", self);
let ptr = Box::into_raw(Box::new(AtomicRefCell::new(ElementData::new(None))));
self.0.mServoData.set(ptr);
unsafe { &* ptr }
},
}
}
/// Creates a blank snapshot for this element.
pub fn create_snapshot(&self) -> Snapshot {
Snapshot::new(*self)
}
}
lazy_static! {
/// A dummy base url in order to get it where we don't have any available.
///
/// We need to get rid of this sooner than later.
pub static ref DUMMY_BASE_URL: ServoUrl = {
ServoUrl::parse("http://www.example.org").unwrap()
};
}
impl<'le> TElement for GeckoElement<'le> {
type ConcreteNode = GeckoNode<'le>;
fn as_node(&self) -> Self::ConcreteNode {
unsafe { GeckoNode(&*(self.0 as *const _ as *const RawGeckoNode)) }
}
fn style_attribute(&self) -> Option<&Arc<RwLock<PropertyDeclarationBlock>>> {
let declarations = unsafe { Gecko_GetServoDeclarationBlock(self.0) };
declarations.map(|s| s.as_arc_opt()).unwrap_or(None)
}
fn get_state(&self) -> ElementState {
unsafe {
ElementState::from_bits_truncate(Gecko_ElementState(self.0))
}
}
#[inline]
fn has_attr(&self, namespace: &Namespace, attr: &Atom) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
namespace.0.as_ptr(),
attr.as_ptr())
}
}
#[inline]
fn attr_equals(&self, namespace: &Namespace, attr: &Atom, val: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
namespace.0.as_ptr(),
attr.as_ptr(),
val.as_ptr(),
/* ignoreCase = */ false)
}
}
fn existing_style_for_restyle_damage<'a>(&'a self,
current_cv: Option<&'a Arc<ComputedValues>>,
pseudo: Option<&PseudoElement>)
-> Option<&'a nsStyleContext> {
if current_cv.is_none() {
// Don't bother in doing an ffi call to get null back.
return None;
}
unsafe {
let atom_ptr = pseudo.map(|p| p.as_atom().as_ptr())
.unwrap_or(ptr::null_mut());
let context_ptr = Gecko_GetStyleContext(self.as_node().0, atom_ptr);
context_ptr.as_ref()
}
}
fn has_dirty_descendants(&self) -> bool {
self.flags() & (NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
}
unsafe fn set_dirty_descendants(&self) {
debug!("Setting dirty descendants: {:?}", self);
self.set_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
unsafe fn unset_dirty_descendants(&self) {
self.unset_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
fn store_children_to_process(&self, _: isize) {
// This is only used for bottom-up traversal, and is thus a no-op for Gecko.
}
fn did_process_child(&self) -> isize {
panic!("Atomic child count not implemented in Gecko");
}
fn get_data(&self) -> Option<&AtomicRefCell<ElementData>> {
unsafe { self.0.mServoData.get().as_ref() }
}
fn skip_root_and_item_based_display_fixup(&self) -> bool {
// We don't want to fix up display values of native anonymous content.
// Additionally, we want to skip root-based display fixup for document
// level native anonymous content subtree roots, since they're not
// really roots from the style fixup perspective. Checking that we
// are NAC handles both cases.
self.flags() & (NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE as u32) != 0
}
}
impl<'le> PartialEq for GeckoElement<'le> {
fn eq(&self, other: &Self) -> bool {
self.0 as *const _ == other.0 as *const _
}
}
impl<'le> PresentationalHintsSynthetizer for GeckoElement<'le> {
fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, _hints: &mut V)
where V: Push<ApplicableDeclarationBlock>,
{
// FIXME(bholley) - Need to implement this.
}
}
impl<'le> ::selectors::Element for GeckoElement<'le> {
fn parent_element(&self) -> Option<Self> {
unsafe { bindings::Gecko_GetParentElement(self.0).map(GeckoElement) }
}
fn first_child_element(&self) -> Option<Self> {
let mut child = self.as_node().first_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.next_sibling();
}
None
}
fn last_child_element(&self) -> Option<Self> {
let mut child = self.as_node().last_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.prev_sibling();
}
None
}
fn prev_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().prev_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.prev_sibling();
}
None
}
fn next_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().next_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.next_sibling();
}
None
}
fn is_root(&self) -> bool {
unsafe {
Gecko_IsRootElement(self.0)
}
}
fn is_empty(&self) -> bool {
// XXX(emilio): Implement this properly.
false
}
fn get_local_name(&self) -> &WeakAtom {
unsafe {
WeakAtom::new(self.as_node().node_info().mInner.mName.raw())
}
}
fn get_namespace(&self) -> &WeakNamespace {
unsafe {
WeakNamespace::new(Gecko_Namespace(self.0))
}
}
fn match_non_ts_pseudo_class(&self, pseudo_class: NonTSPseudoClass) -> bool {
match pseudo_class {
// https://github.com/servo/servo/issues/8718
NonTSPseudoClass::AnyLink => unsafe { Gecko_IsLink(self.0) },
NonTSPseudoClass::Link => unsafe { Gecko_IsUnvisitedLink(self.0) },
NonTSPseudoClass::Visited => unsafe { Gecko_IsVisitedLink(self.0) },
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus |
NonTSPseudoClass::Hover |
NonTSPseudoClass::Enabled |
NonTSPseudoClass::Disabled |
NonTSPseudoClass::Checked |
NonTSPseudoClass::ReadWrite |
NonTSPseudoClass::Fullscreen |
NonTSPseudoClass::Indeterminate => {
| NonTSPseudoClass::ReadOnly => {
!self.get_state().contains(pseudo_class.state_flag())
}
NonTSPseudoClass::MozBrowserFrame => unsafe {
Gecko_MatchesElement(pseudo_class.to_gecko_pseudoclasstype().unwrap(), self.0)
}
}
}
fn get_id(&self) -> Option<Atom> {
let ptr = unsafe {
bindings::Gecko_AtomAttrValue(self.0,
atom!("id").as_ptr())
};
if ptr.is_null() {
None
} else {
Some(Atom::from(ptr))
}
}
fn has_class(&self, name: &Atom) -> bool {
snapshot_helpers::has_class(self.0,
name,
Gecko_ClassOrClassList)
}
fn each_class<F>(&self, callback: F)
where F: FnMut(&Atom)
{
snapshot_helpers::each_class(self.0,
callback,
Gecko_ClassOrClassList)
}
fn is_html_element_in_html_document(&self) -> bool {
unsafe {
Gecko_IsHTMLElementInHTMLDocument(self.0)
}
}
}
/// A few helpers to help with attribute selectors and snapshotting.
pub trait AttrSelectorHelpers {
/// Returns the namespace of the selector, or null otherwise.
fn ns_or_null(&self) -> *mut nsIAtom;
/// Returns the proper selector name depending on whether the requesting
/// element is an HTML element in an HTML document or not.
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom;
}
impl AttrSelectorHelpers for AttrSelector<SelectorImpl> {
fn ns_or_null(&self) -> *mut nsIAtom {
match self.namespace {
NamespaceConstraint::Any => ptr::null_mut(),
NamespaceConstraint::Specific(ref ns) => ns.url.0.as_ptr(),
}
}
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom {
if is_html_element_in_html_document {
self.lower_name.as_ptr()
} else {
self.name.as_ptr()
}
}
}
impl<'le> ::selectors::MatchAttr for GeckoElement<'le> {
type Impl = SelectorImpl;
fn match_attr_has(&self, attr: &AttrSelector<Self::Impl>) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()))
}
}
fn match_attr_equals(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
attr.ns_or_null(),
| self.get_state().contains(pseudo_class.state_flag())
},
| conditional_block |
wrapper.rs | // TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
true
}
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
unsafe fn set_dirty_on_viewport_size_changed(&self) {}
}
/// A wrapper on top of two kinds of iterators, depending on the parent being
/// iterated.
///
/// We generally iterate children by traversing the light-tree siblings of the
/// first child like Servo does.
///
/// However, for nodes with anonymous children, we use a custom (heavier-weight)
/// Gecko-implemented iterator.
///
/// FIXME(emilio): If we take into account shadow DOM, we're going to need the
/// flat tree pretty much always. We can try to optimize the case where there's
/// no shadow root sibling, probably.
pub enum GeckoChildrenIterator<'a> {
/// A simple iterator that tracks the current node being iterated and
/// replaces it with the next sibling when requested.
Current(Option<GeckoNode<'a>>),
/// A Gecko-implemented iterator we need to drop appropriately.
GeckoIterator(bindings::StyleChildrenIteratorOwned),
}
impl<'a> Drop for GeckoChildrenIterator<'a> {
fn drop(&mut self) {
if let GeckoChildrenIterator::GeckoIterator(ref it) = *self {
unsafe {
Gecko_DropStyleChildrenIterator(ptr::read(it as *const _));
}
}
}
}
impl<'a> Iterator for GeckoChildrenIterator<'a> {
type Item = GeckoNode<'a>;
fn next(&mut self) -> Option<GeckoNode<'a>> {
match *self {
GeckoChildrenIterator::Current(curr) => {
let next = curr.and_then(|node| node.next_sibling());
*self = GeckoChildrenIterator::Current(next);
curr
},
GeckoChildrenIterator::GeckoIterator(ref mut it) => unsafe {
Gecko_GetNextStyleChild(it).map(GeckoNode)
}
}
}
}
/// A simple wrapper over a non-null Gecko `Element` pointer.
#[derive(Clone, Copy)]
pub struct GeckoElement<'le>(pub &'le RawGeckoElement);
impl<'le> fmt::Debug for GeckoElement<'le> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "<{}", self.get_local_name()));
if let Some(id) = self.get_id() {
try!(write!(f, " id={}", id));
}
write!(f, "> ({:#x})", self.as_node().opaque().0)
}
}
impl<'le> GeckoElement<'le> {
/// Parse the style attribute of an element.
pub fn parse_style_attribute(value: &str) -> PropertyDeclarationBlock {
// FIXME(bholley): Real base URL and error reporter.
let base_url = &*DUMMY_BASE_URL;
// FIXME(heycam): Needs real ParserContextExtraData so that URLs parse
// properly.
let extra_data = ParserContextExtraData::default();
parse_style_attribute(value, &base_url, Box::new(StdoutErrorReporter), extra_data)
}
fn flags(&self) -> u32 {
self.raw_node()._base._base_1.mFlags
}
fn raw_node(&self) -> &RawGeckoNode {
&(self.0)._base._base._base
}
// FIXME: We can implement this without OOL calls, but we can't easily given
// GeckoNode is a raw reference.
//
// We can use a Cell<T>, but that's a bit of a pain.
fn set_flags(&self, flags: u32) {
unsafe { Gecko_SetNodeFlags(self.as_node().0, flags) }
}
fn unset_flags(&self, flags: u32) {
unsafe { Gecko_UnsetNodeFlags(self.as_node().0, flags) }
}
/// Clear the element data for a given element.
pub fn clear_data(&self) {
let ptr = self.0.mServoData.get();
if !ptr.is_null() {
debug!("Dropping ElementData for {:?}", self);
let data = unsafe { Box::from_raw(self.0.mServoData.get()) };
self.0.mServoData.set(ptr::null_mut());
// Perform a mutable borrow of the data in debug builds. This
// serves as an assertion that there are no outstanding borrows
// when we destroy the data.
debug_assert!({ let _ = data.borrow_mut(); true });
}
}
/// Ensures the element has data, returning the existing data or allocating
/// it.
///
/// Only safe to call with exclusive access to the element, given otherwise
/// it could race to allocate and leak.
pub unsafe fn ensure_data(&self) -> &AtomicRefCell<ElementData> {
match self.get_data() {
Some(x) => x,
None => {
debug!("Creating ElementData for {:?}", self);
let ptr = Box::into_raw(Box::new(AtomicRefCell::new(ElementData::new(None))));
self.0.mServoData.set(ptr);
unsafe { &* ptr }
},
}
}
/// Creates a blank snapshot for this element.
pub fn create_snapshot(&self) -> Snapshot {
Snapshot::new(*self)
}
}
lazy_static! {
/// A dummy base url in order to get it where we don't have any available.
///
/// We need to get rid of this sooner than later.
pub static ref DUMMY_BASE_URL: ServoUrl = {
ServoUrl::parse("http://www.example.org").unwrap()
};
}
impl<'le> TElement for GeckoElement<'le> {
type ConcreteNode = GeckoNode<'le>;
fn as_node(&self) -> Self::ConcreteNode {
unsafe { GeckoNode(&*(self.0 as *const _ as *const RawGeckoNode)) }
}
fn style_attribute(&self) -> Option<&Arc<RwLock<PropertyDeclarationBlock>>> {
let declarations = unsafe { Gecko_GetServoDeclarationBlock(self.0) };
declarations.map(|s| s.as_arc_opt()).unwrap_or(None)
}
fn get_state(&self) -> ElementState {
unsafe {
ElementState::from_bits_truncate(Gecko_ElementState(self.0))
}
}
#[inline]
fn has_attr(&self, namespace: &Namespace, attr: &Atom) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
namespace.0.as_ptr(),
attr.as_ptr())
}
}
#[inline]
fn attr_equals(&self, namespace: &Namespace, attr: &Atom, val: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
namespace.0.as_ptr(),
attr.as_ptr(),
val.as_ptr(),
/* ignoreCase = */ false)
}
}
fn existing_style_for_restyle_damage<'a>(&'a self,
current_cv: Option<&'a Arc<ComputedValues>>,
pseudo: Option<&PseudoElement>)
-> Option<&'a nsStyleContext> {
if current_cv.is_none() {
// Don't bother in doing an ffi call to get null back.
return None;
}
unsafe {
let atom_ptr = pseudo.map(|p| p.as_atom().as_ptr())
.unwrap_or(ptr::null_mut());
let context_ptr = Gecko_GetStyleContext(self.as_node().0, atom_ptr);
context_ptr.as_ref()
}
}
fn has_dirty_descendants(&self) -> bool {
self.flags() & (NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
}
unsafe fn set_dirty_descendants(&self) {
debug!("Setting dirty descendants: {:?}", self);
self.set_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
unsafe fn unset_dirty_descendants(&self) {
self.unset_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
fn store_children_to_process(&self, _: isize) {
// This is only used for bottom-up traversal, and is thus a no-op for Gecko.
}
fn did_process_child(&self) -> isize {
panic!("Atomic child count not implemented in Gecko");
}
fn get_data(&self) -> Option<&AtomicRefCell<ElementData>> {
unsafe { self.0.mServoData.get().as_ref() }
}
fn skip_root_and_item_based_display_fixup(&self) -> bool {
// We don't want to fix up display values of native anonymous content.
// Additionally, we want to skip root-based display fixup for document
// level native anonymous content subtree roots, since they're not
// really roots from the style fixup perspective. Checking that we
// are NAC handles both cases.
self.flags() & (NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE as u32) != 0
}
}
impl<'le> PartialEq for GeckoElement<'le> {
fn eq(&self, other: &Self) -> bool {
self.0 as *const _ == other.0 as *const _
}
}
impl<'le> PresentationalHintsSynthetizer for GeckoElement<'le> {
fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, _hints: &mut V)
where V: Push<ApplicableDeclarationBlock>,
{
// FIXME(bholley) - Need to implement this.
}
}
impl<'le> ::selectors::Element for GeckoElement<'le> {
fn parent_element(&self) -> Option<Self> {
unsafe { bindings::Gecko_GetParentElement(self.0).map(GeckoElement) }
}
fn first_child_element(&self) -> Option<Self> {
let mut child = self.as_node().first_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.next_sibling();
}
None
}
fn last_child_element(&self) -> Option<Self> {
let mut child = self.as_node().last_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.prev_sibling();
}
None
}
fn prev_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().prev_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.prev_sibling();
}
None
}
fn next_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().next_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.next_sibling();
}
None
}
fn is_root(&self) -> bool {
unsafe {
Gecko_IsRootElement(self.0)
}
}
fn is_empty(&self) -> bool {
// XXX(emilio): Implement this properly.
false
}
fn get_local_name(&self) -> &WeakAtom {
unsafe {
WeakAtom::new(self.as_node().node_info().mInner.mName.raw())
}
}
fn get_namespace(&self) -> &WeakNamespace {
unsafe {
WeakNamespace::new(Gecko_Namespace(self.0))
}
}
fn match_non_ts_pseudo_class(&self, pseudo_class: NonTSPseudoClass) -> bool {
match pseudo_class {
// https://github.com/servo/servo/issues/8718
NonTSPseudoClass::AnyLink => unsafe { Gecko_IsLink(self.0) },
NonTSPseudoClass::Link => unsafe { Gecko_IsUnvisitedLink(self.0) },
NonTSPseudoClass::Visited => unsafe { Gecko_IsVisitedLink(self.0) },
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus |
NonTSPseudoClass::Hover |
NonTSPseudoClass::Enabled |
NonTSPseudoClass::Disabled |
NonTSPseudoClass::Checked |
NonTSPseudoClass::ReadWrite |
NonTSPseudoClass::Fullscreen |
NonTSPseudoClass::Indeterminate => {
self.get_state().contains(pseudo_class.state_flag())
},
NonTSPseudoClass::ReadOnly => {
!self.get_state().contains(pseudo_class.state_flag())
}
NonTSPseudoClass::MozBrowserFrame => unsafe {
Gecko_MatchesElement(pseudo_class.to_gecko_pseudoclasstype().unwrap(), self.0)
}
}
}
fn get_id(&self) -> Option<Atom> {
let ptr = unsafe {
bindings::Gecko_AtomAttrValue(self.0,
atom!("id").as_ptr())
};
if ptr.is_null() {
None
} else {
Some(Atom::from(ptr))
}
}
fn has_class(&self, name: &Atom) -> bool {
snapshot_helpers::has_class(self.0,
name,
Gecko_ClassOrClassList)
}
fn each_class<F>(&self, callback: F)
where F: FnMut(&Atom)
{
snapshot_helpers::each_class(self.0,
callback,
Gecko_ClassOrClassList)
}
fn is_html_element_in_html_document(&self) -> bool {
unsafe {
Gecko_IsHTMLElementInHTMLDocument(self.0)
}
}
}
/// A few helpers to help with attribute selectors and snapshotting.
pub trait AttrSelectorHelpers {
/// Returns the namespace of the selector, or null otherwise.
fn ns_or_null(&self) -> *mut nsIAtom;
/// Returns the proper selector name depending on whether the requesting
/// element is an HTML element in an HTML document or not.
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom;
}
impl AttrSelectorHelpers for AttrSelector<SelectorImpl> {
fn ns_or_null(&self) -> *mut nsIAtom {
match self.namespace {
NamespaceConstraint::Any => ptr::null_mut(),
NamespaceConstraint::Specific(ref ns) => ns.url.0.as_ptr(),
}
}
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom {
if is_html_element_in_html_document {
self.lower_name.as_ptr()
} else {
self.name.as_ptr()
}
}
}
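// A hypothetical sketch (not from the original file): select_name above picks
// the pre-lowercased attribute name only when the matching element is an HTML
// element in an HTML document, which is the case where attribute-name matching
// is ASCII case-insensitive. The same decision over plain strings, with
// invented names:
#[allow(dead_code)]
fn pick_attr_name<'a>(name: &'a str, lower_name: &'a str, is_html_in_html_doc: bool) -> &'a str {
    if is_html_in_html_doc {
        lower_name
    } else {
        name
    }
}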
impl<'le> ::selectors::MatchAttr for GeckoElement<'le> {
type Impl = SelectorImpl;
fn match_attr_has(&self, attr: &AttrSelector<Self::Impl>) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()))
}
}
fn match_attr_equals(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()),
value.as_ptr(),
/* ignoreCase = */ false)
}
}
fn match_attr_equals_ignore_ascii_case(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()),
value.as_ptr(),
/* ignoreCase = */ false)
}
}
fn match_attr_includes(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrIncludes(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()),
value.as_ptr())
}
}
fn match_attr_dash(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrDashEquals(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()),
value.as_ptr())
}
}
fn match_attr_prefix(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrHasPrefix(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()),
value.as_ptr())
}
}
fn match_attr_substring(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrHasSubstring(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()),
value.as_ptr())
}
}
fn match_attr_suffix(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool { | unsafe {
bindings::Gecko_AttrHasSuffix(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()),
value.as_ptr()) | random_line_split |
|
wrapper.rs | a potentially non-trivial
/// performance cost) by implementing Drop and making LayoutFoo non-Copy.
#[derive(Clone, Copy)]
pub struct GeckoNode<'ln>(pub &'ln RawGeckoNode);
impl<'ln> fmt::Debug for GeckoNode<'ln> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(el) = self.as_element() {
el.fmt(f)
} else {
if self.is_text_node() {
write!(f, "<text node> ({:#x})", self.opaque().0)
} else {
write!(f, "<non-text node> ({:#x})", self.opaque().0)
}
}
}
}
impl<'ln> GeckoNode<'ln> {
fn from_content(content: &'ln nsIContent) -> Self {
GeckoNode(&content._base)
}
fn node_info(&self) -> &structs::NodeInfo {
debug_assert!(!self.0.mNodeInfo.mRawPtr.is_null());
unsafe { &*self.0.mNodeInfo.mRawPtr }
}
fn first_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mFirstChild.as_ref().map(GeckoNode::from_content) }
}
fn last_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { Gecko_GetLastChild(self.0).map(GeckoNode) }
}
fn prev_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mPreviousSibling.as_ref().map(GeckoNode::from_content) }
}
fn next_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mNextSibling.as_ref().map(GeckoNode::from_content) }
}
}
impl<'ln> NodeInfo for GeckoNode<'ln> {
fn is_element(&self) -> bool {
use gecko_bindings::structs::nsINode_BooleanFlag;
self.0.mBoolFlags & (1u32 << nsINode_BooleanFlag::NodeIsElement as u32) != 0
}
fn is_text_node(&self) -> bool {
// This is a DOM constant that isn't going to change.
const TEXT_NODE: u16 = 3;
self.node_info().mInner.mNodeType == TEXT_NODE
}
}
impl<'ln> TNode for GeckoNode<'ln> {
type ConcreteElement = GeckoElement<'ln>;
type ConcreteChildrenIterator = GeckoChildrenIterator<'ln>;
fn to_unsafe(&self) -> UnsafeNode {
(self.0 as *const _ as usize, 0)
}
unsafe fn from_unsafe(n: &UnsafeNode) -> Self {
GeckoNode(&*(n.0 as *mut RawGeckoNode))
}
fn children(self) -> LayoutIterator<GeckoChildrenIterator<'ln>> {
let maybe_iter = unsafe { Gecko_MaybeCreateStyleChildrenIterator(self.0) };
if let Some(iter) = maybe_iter.into_owned_opt() {
LayoutIterator(GeckoChildrenIterator::GeckoIterator(iter))
} else {
LayoutIterator(GeckoChildrenIterator::Current(self.first_child()))
}
}
fn opaque(&self) -> OpaqueNode {
let ptr: usize = self.0 as *const _ as usize;
OpaqueNode(ptr)
}
fn debug_id(self) -> usize {
unimplemented!()
}
fn as_element(&self) -> Option<GeckoElement<'ln>> {
if self.is_element() {
unsafe { Some(GeckoElement(&*(self.0 as *const _ as *const RawGeckoElement))) }
} else {
None
}
}
fn can_be_fragmented(&self) -> bool {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
false
}
unsafe fn set_can_be_fragmented(&self, _value: bool) {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
}
fn parent_node(&self) -> Option<Self> {
unsafe { bindings::Gecko_GetParentNode(self.0).map(GeckoNode) }
}
fn is_in_doc(&self) -> bool {
unsafe { bindings::Gecko_IsInDocument(self.0) }
}
fn needs_dirty_on_viewport_size_changed(&self) -> bool {
// Gecko's node doesn't have the DIRTY_ON_VIEWPORT_SIZE_CHANGE flag,
// so we force them to be dirtied on viewport size change, regardless of
// whether they use viewport percentage sizes or not.
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
true
}
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
unsafe fn set_dirty_on_viewport_size_changed(&self) {}
}
/// A wrapper on top of two kinds of iterators, depending on the parent being
/// iterated.
///
/// We generally iterate children by traversing the light-tree siblings of the
/// first child like Servo does.
///
/// However, for nodes with anonymous children, we use a custom (heavier-weight)
/// Gecko-implemented iterator.
///
/// FIXME(emilio): If we take into account shadow DOM, we're going to need the
/// flat tree pretty much always. We can try to optimize the case where there's
/// no shadow root sibling, probably.
pub enum GeckoChildrenIterator<'a> {
/// A simple iterator that tracks the current node being iterated and
/// replaces it with the next sibling when requested.
Current(Option<GeckoNode<'a>>),
/// A Gecko-implemented iterator we need to drop appropriately.
GeckoIterator(bindings::StyleChildrenIteratorOwned),
}
impl<'a> Drop for GeckoChildrenIterator<'a> {
fn drop(&mut self) {
if let GeckoChildrenIterator::GeckoIterator(ref it) = *self {
unsafe {
Gecko_DropStyleChildrenIterator(ptr::read(it as *const _));
}
}
}
}
impl<'a> Iterator for GeckoChildrenIterator<'a> {
type Item = GeckoNode<'a>;
fn next(&mut self) -> Option<GeckoNode<'a>> {
match *self {
GeckoChildrenIterator::Current(curr) => {
let next = curr.and_then(|node| node.next_sibling());
*self = GeckoChildrenIterator::Current(next);
curr
},
GeckoChildrenIterator::GeckoIterator(ref mut it) => unsafe {
Gecko_GetNextStyleChild(it).map(GeckoNode)
}
}
}
}
/// A simple wrapper over a non-null Gecko `Element` pointer.
#[derive(Clone, Copy)]
pub struct GeckoElement<'le>(pub &'le RawGeckoElement);
impl<'le> fmt::Debug for GeckoElement<'le> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "<{}", self.get_local_name()));
if let Some(id) = self.get_id() {
try!(write!(f, " id={}", id));
}
write!(f, "> ({:#x})", self.as_node().opaque().0)
}
}
impl<'le> GeckoElement<'le> {
/// Parse the style attribute of an element.
pub fn parse_style_attribute(value: &str) -> PropertyDeclarationBlock {
// FIXME(bholley): Real base URL and error reporter.
let base_url = &*DUMMY_BASE_URL;
// FIXME(heycam): Needs real ParserContextExtraData so that URLs parse
// properly.
let extra_data = ParserContextExtraData::default();
parse_style_attribute(value, &base_url, Box::new(StdoutErrorReporter), extra_data)
}
fn flags(&self) -> u32 {
self.raw_node()._base._base_1.mFlags
}
fn raw_node(&self) -> &RawGeckoNode {
&(self.0)._base._base._base
}
// FIXME: We can implement this without OOL calls, but we can't easily given
// GeckoNode is a raw reference.
//
// We can use a Cell<T>, but that's a bit of a pain.
fn set_flags(&self, flags: u32) {
unsafe { Gecko_SetNodeFlags(self.as_node().0, flags) }
}
fn unset_flags(&self, flags: u32) {
unsafe { Gecko_UnsetNodeFlags(self.as_node().0, flags) }
}
/// Clear the element data for a given element.
pub fn clear_data(&self) {
let ptr = self.0.mServoData.get();
if !ptr.is_null() {
debug!("Dropping ElementData for {:?}", self);
let data = unsafe { Box::from_raw(self.0.mServoData.get()) };
self.0.mServoData.set(ptr::null_mut());
// Perform a mutable borrow of the data in debug builds. This
// serves as an assertion that there are no outstanding borrows
// when we destroy the data.
debug_assert!({ let _ = data.borrow_mut(); true });
}
}
/// Ensures the element has data, returning the existing data or allocating
/// it.
///
/// Only safe to call with exclusive access to the element, given otherwise
/// it could race to allocate and leak.
pub unsafe fn ensure_data(&self) -> &AtomicRefCell<ElementData> {
match self.get_data() {
Some(x) => x,
None => {
debug!("Creating ElementData for {:?}", self);
let ptr = Box::into_raw(Box::new(AtomicRefCell::new(ElementData::new(None))));
self.0.mServoData.set(ptr);
unsafe { &* ptr }
},
}
}
/// Creates a blank snapshot for this element.
pub fn create_snapshot(&self) -> Snapshot {
Snapshot::new(*self)
}
}
lazy_static! {
/// A dummy base url in order to get it where we don't have any available.
///
/// We need to get rid of this sooner than later.
pub static ref DUMMY_BASE_URL: ServoUrl = {
ServoUrl::parse("http://www.example.org").unwrap()
};
}
impl<'le> TElement for GeckoElement<'le> {
type ConcreteNode = GeckoNode<'le>;
fn as_node(&self) -> Self::ConcreteNode {
unsafe { GeckoNode(&*(self.0 as *const _ as *const RawGeckoNode)) }
}
fn style_attribute(&self) -> Option<&Arc<RwLock<PropertyDeclarationBlock>>> {
let declarations = unsafe { Gecko_GetServoDeclarationBlock(self.0) };
declarations.map(|s| s.as_arc_opt()).unwrap_or(None)
}
fn get_state(&self) -> ElementState {
unsafe {
ElementState::from_bits_truncate(Gecko_ElementState(self.0))
}
}
#[inline]
fn has_attr(&self, namespace: &Namespace, attr: &Atom) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
namespace.0.as_ptr(),
attr.as_ptr())
}
}
#[inline]
fn attr_equals(&self, namespace: &Namespace, attr: &Atom, val: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
namespace.0.as_ptr(),
attr.as_ptr(),
val.as_ptr(),
/* ignoreCase = */ false)
}
}
fn existing_style_for_restyle_damage<'a>(&'a self,
current_cv: Option<&'a Arc<ComputedValues>>,
pseudo: Option<&PseudoElement>)
-> Option<&'a nsStyleContext> {
if current_cv.is_none() {
// Don't bother in doing an ffi call to get null back.
return None;
}
unsafe {
let atom_ptr = pseudo.map(|p| p.as_atom().as_ptr())
.unwrap_or(ptr::null_mut());
let context_ptr = Gecko_GetStyleContext(self.as_node().0, atom_ptr);
context_ptr.as_ref()
}
}
fn has_dirty_descendants(&self) -> bool {
self.flags() & (NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
}
unsafe fn set_dirty_descendants(&self) {
debug!("Setting dirty descendants: {:?}", self);
self.set_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
unsafe fn unse | lf) {
self.unset_flags(NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
}
fn store_children_to_process(&self, _: isize) {
// This is only used for bottom-up traversal, and is thus a no-op for Gecko.
}
fn did_process_child(&self) -> isize {
panic!("Atomic child count not implemented in Gecko");
}
fn get_data(&self) -> Option<&AtomicRefCell<ElementData>> {
unsafe { self.0.mServoData.get().as_ref() }
}
fn skip_root_and_item_based_display_fixup(&self) -> bool {
// We don't want to fix up display values of native anonymous content.
// Additionally, we want to skip root-based display fixup for document
// level native anonymous content subtree roots, since they're not
// really roots from the style fixup perspective. Checking that we
// are NAC handles both cases.
self.flags() & (NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE as u32) != 0
}
}
impl<'le> PartialEq for GeckoElement<'le> {
fn eq(&self, other: &Self) -> bool {
self.0 as *const _ == other.0 as *const _
}
}
impl<'le> PresentationalHintsSynthetizer for GeckoElement<'le> {
fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, _hints: &mut V)
where V: Push<ApplicableDeclarationBlock>,
{
// FIXME(bholley) - Need to implement this.
}
}
impl<'le> ::selectors::Element for GeckoElement<'le> {
fn parent_element(&self) -> Option<Self> {
unsafe { bindings::Gecko_GetParentElement(self.0).map(GeckoElement) }
}
fn first_child_element(&self) -> Option<Self> {
let mut child = self.as_node().first_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.next_sibling();
}
None
}
fn last_child_element(&self) -> Option<Self> {
let mut child = self.as_node().last_child();
while let Some(child_node) = child {
if let Some(el) = child_node.as_element() {
return Some(el)
}
child = child_node.prev_sibling();
}
None
}
fn prev_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().prev_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.prev_sibling();
}
None
}
fn next_sibling_element(&self) -> Option<Self> {
let mut sibling = self.as_node().next_sibling();
while let Some(sibling_node) = sibling {
if let Some(el) = sibling_node.as_element() {
return Some(el)
}
sibling = sibling_node.next_sibling();
}
None
}
fn is_root(&self) -> bool {
unsafe {
Gecko_IsRootElement(self.0)
}
}
fn is_empty(&self) -> bool {
// XXX(emilio): Implement this properly.
false
}
fn get_local_name(&self) -> &WeakAtom {
unsafe {
WeakAtom::new(self.as_node().node_info().mInner.mName.raw())
}
}
fn get_namespace(&self) -> &WeakNamespace {
unsafe {
WeakNamespace::new(Gecko_Namespace(self.0))
}
}
fn match_non_ts_pseudo_class(&self, pseudo_class: NonTSPseudoClass) -> bool {
match pseudo_class {
// https://github.com/servo/servo/issues/8718
NonTSPseudoClass::AnyLink => unsafe { Gecko_IsLink(self.0) },
NonTSPseudoClass::Link => unsafe { Gecko_IsUnvisitedLink(self.0) },
NonTSPseudoClass::Visited => unsafe { Gecko_IsVisitedLink(self.0) },
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus |
NonTSPseudoClass::Hover |
NonTSPseudoClass::Enabled |
NonTSPseudoClass::Disabled |
NonTSPseudoClass::Checked |
NonTSPseudoClass::ReadWrite |
NonTSPseudoClass::Fullscreen |
NonTSPseudoClass::Indeterminate => {
self.get_state().contains(pseudo_class.state_flag())
},
NonTSPseudoClass::ReadOnly => {
!self.get_state().contains(pseudo_class.state_flag())
}
NonTSPseudoClass::MozBrowserFrame => unsafe {
Gecko_MatchesElement(pseudo_class.to_gecko_pseudoclasstype().unwrap(), self.0)
}
}
}
fn get_id(&self) -> Option<Atom> {
let ptr = unsafe {
bindings::Gecko_AtomAttrValue(self.0,
atom!("id").as_ptr())
};
if ptr.is_null() {
None
} else {
Some(Atom::from(ptr))
}
}
fn has_class(&self, name: &Atom) -> bool {
snapshot_helpers::has_class(self.0,
name,
Gecko_ClassOrClassList)
}
fn each_class<F>(&self, callback: F)
where F: FnMut(&Atom)
{
snapshot_helpers::each_class(self.0,
callback,
Gecko_ClassOrClassList)
}
fn is_html_element_in_html_document(&self) -> bool {
unsafe {
Gecko_IsHTMLElementInHTMLDocument(self.0)
}
}
}
/// A few helpers to help with attribute selectors and snapshotting.
pub trait AttrSelectorHelpers {
/// Returns the namespace of the selector, or null otherwise.
fn ns_or_null(&self) -> *mut nsIAtom;
/// Returns the proper selector name depending on whether the requesting
/// element is an HTML element in an HTML document or not.
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom;
}
impl AttrSelectorHelpers for AttrSelector<SelectorImpl> {
fn ns_or_null(&self) -> *mut nsIAtom {
match self.namespace {
NamespaceConstraint::Any => ptr::null_mut(),
NamespaceConstraint::Specific(ref ns) => ns.url.0.as_ptr(),
}
}
fn select_name(&self, is_html_element_in_html_document: bool) -> *mut nsIAtom {
if is_html_element_in_html_document {
self.lower_name.as_ptr()
} else {
self.name.as_ptr()
}
}
}
impl<'le> ::selectors::MatchAttr for GeckoElement<'le> {
type Impl = SelectorImpl;
fn match_attr_has(&self, attr: &AttrSelector<Self::Impl>) -> bool {
unsafe {
bindings::Gecko_HasAttr(self.0,
attr.ns_or_null(),
attr.select_name(self.is_html_element_in_html_document()))
}
}
fn match_attr_equals(&self, attr: &AttrSelector<Self::Impl>, value: &Atom) -> bool {
unsafe {
bindings::Gecko_AttrEquals(self.0,
attr.ns_or_null(),
| t_dirty_descendants(&se | identifier_name |
extensions.rs | // Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use crate::auth::csrf_token;
use iml_wire_types::{GroupType, Session};
use seed::{fetch, prelude::*, *};
/// Extension methods for the Session API object.
pub(crate) trait SessionExt {
/// Does the user need to login?
fn needs_login(&self) -> bool;
/// Does a logged in user exist?
fn has_user(&self) -> bool;
/// Does the user fall within the group?
fn group_allowed(&self, group: GroupType) -> bool;
}
impl SessionExt for Session {
fn needs_login(&self) -> bool {
self.user.is_none() && !self.read_enabled
}
fn has_user(&self) -> bool {
self.user.is_some()
}
fn group_allowed(&self, group: GroupType) -> bool {
self.user
.as_ref()
.and_then(|x| x.groups.as_ref())
.and_then(|xs| {
xs.iter().find(|y| {
//Superusers can do everything.
if y.name == GroupType::Superusers {
return true;
}
//Filesystem administrators can do everything a filesystem user can do.
if y.name == GroupType::FilesystemAdministrators && group == GroupType::FilesystemUsers {
return true;
}
// Fallback to matching on names.
y.name == group
})
})
.is_some()
}
}
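// A minimal usage sketch (added for illustration, not part of the original
// file): gates an admin-only view with the helpers above. The function name
// and the chosen group are arbitrary examples.
#[allow(dead_code)]
fn example_can_show_admin_view(session: &Session) -> bool {
    !session.needs_login() && session.group_allowed(GroupType::FilesystemAdministrators)
}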
/// Extension methods for `fetch::Request`
pub(crate) trait RequestExt: Sized {
fn api_call(path: impl ToString) -> Self;
fn api_query(path: impl ToString, args: impl serde::Serialize) -> Result<Self, serde_urlencoded::ser::Error>;
fn api_item(path: impl ToString, item: impl ToString) -> Self;
fn graphql_query<T: serde::Serialize>(x: &T) -> Self;
fn with_auth(self: Self) -> Self;
}
impl RequestExt for fetch::Request {
fn api_call(path: impl ToString) -> Self {
Self::new(format!("/api/{}/", path.to_string()))
}
fn api_query(path: impl ToString, args: impl serde::Serialize) -> Result<Self, serde_urlencoded::ser::Error> {
let qs = format!("?{}", serde_urlencoded::to_string(args)?);
Ok(Self::new(format!("/api/{}/{}", path.to_string(), qs)))
}
fn api_item(path: impl ToString, item: impl ToString) -> Self {
Self::api_call(format!("{}/{}", path.to_string(), item.to_string()))
}
fn graphql_query<T: serde::Serialize>(x: &T) -> Self {
Self::new("/graphql")
.with_auth()
.method(fetch::Method::Post)
.send_json(x)
}
fn with_auth(self) -> Self |
}
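// A minimal usage sketch (added for illustration, not part of the original
// file): builds an authenticated request for a single API item. The "host"
// path and the id 42 are made-up values.
#[allow(dead_code)]
fn example_item_request() -> fetch::Request {
    fetch::Request::api_item("host", 42).with_auth()
}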
/// Allows for merging attributes onto an existing item
pub(crate) trait MergeAttrs {
fn merge_attrs(self, attrs: Attrs) -> Self;
}
impl MergeAttrs for Attrs {
fn merge_attrs(mut self, attrs: Attrs) -> Self {
self.merge(attrs);
self
}
}
impl<T> MergeAttrs for Node<T> {
fn merge_attrs(self, attrs: Attrs) -> Self {
if let Self::Element(mut el) = self {
el.attrs.merge(attrs);
Self::Element(el)
} else {
self
}
}
}
pub(crate) trait NodeExt<T> {
fn with_listener(self, event_handler: EventHandler<T>) -> Self;
fn with_style(self, key: impl Into<St>, val: impl Into<CSSValue>) -> Self;
}
impl<T> NodeExt<T> for Node<T> {
fn with_listener(mut self, event_handler: EventHandler<T>) -> Self {
self.add_listener(event_handler);
self
}
fn with_style(mut self, key: impl Into<St>, val: impl Into<CSSValue>) -> Self {
self.add_style(key, val);
self
}
}
/// Extension methods for `fetch::FailReason`
pub(crate) trait FailReasonExt {
fn message(&self) -> String;
}
impl<T> FailReasonExt for fetch::FailReason<T> {
fn message(&self) -> String {
match self {
Self::RequestError(err, _) => match err {
fetch::RequestError::DomException(e) => e.message(),
},
Self::Status(status, _) => format!("Status: {}", status.code),
Self::DataError(err, _) => match err {
fetch::DataError::DomException(e) => e.message(),
fetch::DataError::SerdeError(e, _) => format!("Serde error: {}", e),
},
}
}
}
/// Extension methods for `seed::browser::url::Url`
pub(crate) trait UrlExt {
/// Returns the path of the `Url`.
/// This fn will account for
/// the base (via the `base`) tag
/// and remove it from the path
fn get_path(&self) -> Vec<String>;
}
impl UrlExt for Url {
fn get_path(&self) -> Vec<String> {
let mut path = self.path.clone();
let base = match crate::UI_BASE.as_ref() {
Some(x) => x,
None => return path,
};
let has_base = path.get(0).filter(|x| x == &base).is_some();
if has_base {
path.remove(0);
}
path
}
}
| {
match csrf_token() {
Some(csrf) => self.header("X-CSRFToken", &csrf),
None => self,
}
} | identifier_body |
extensions.rs | // Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use crate::auth::csrf_token;
use iml_wire_types::{GroupType, Session};
use seed::{fetch, prelude::*, *};
/// Extension methods for the Session API object.
pub(crate) trait SessionExt {
/// Does the user need to login?
fn needs_login(&self) -> bool;
/// Does a logged in user exist?
fn has_user(&self) -> bool;
/// Does the user fall within the group?
fn group_allowed(&self, group: GroupType) -> bool;
}
impl SessionExt for Session {
fn needs_login(&self) -> bool {
self.user.is_none() && !self.read_enabled
}
fn has_user(&self) -> bool {
self.user.is_some()
}
fn group_allowed(&self, group: GroupType) -> bool {
self.user
.as_ref()
.and_then(|x| x.groups.as_ref())
.and_then(|xs| {
xs.iter().find(|y| {
//Superusers can do everything.
if y.name == GroupType::Superusers {
return true;
}
//Filesystem administrators can do everything a filesystem user can do.
if y.name == GroupType::FilesystemAdministrators && group == GroupType::FilesystemUsers {
return true;
}
// Fallback to matching on names.
y.name == group
})
})
.is_some()
}
}
/// Extension methods for`fetch::Request` | fn graphql_query<T: serde::Serialize>(x: &T) -> Self;
fn with_auth(self: Self) -> Self;
}
impl RequestExt for fetch::Request {
fn api_call(path: impl ToString) -> Self {
Self::new(format!("/api/{}/", path.to_string()))
}
fn api_query(path: impl ToString, args: impl serde::Serialize) -> Result<Self, serde_urlencoded::ser::Error> {
let qs = format!("?{}", serde_urlencoded::to_string(args)?);
Ok(Self::new(format!("/api/{}/{}", path.to_string(), qs)))
}
fn api_item(path: impl ToString, item: impl ToString) -> Self {
Self::api_call(format!("{}/{}", path.to_string(), item.to_string()))
}
fn graphql_query<T: serde::Serialize>(x: &T) -> Self {
Self::new("/graphql")
.with_auth()
.method(fetch::Method::Post)
.send_json(x)
}
fn with_auth(self) -> Self {
match csrf_token() {
Some(csrf) => self.header("X-CSRFToken", &csrf),
None => self,
}
}
}
/// Allows for merging attributes onto an existing item
pub(crate) trait MergeAttrs {
fn merge_attrs(self, attrs: Attrs) -> Self;
}
impl MergeAttrs for Attrs {
fn merge_attrs(mut self, attrs: Attrs) -> Self {
self.merge(attrs);
self
}
}
impl<T> MergeAttrs for Node<T> {
fn merge_attrs(self, attrs: Attrs) -> Self {
if let Self::Element(mut el) = self {
el.attrs.merge(attrs);
Self::Element(el)
} else {
self
}
}
}
pub(crate) trait NodeExt<T> {
fn with_listener(self, event_handler: EventHandler<T>) -> Self;
fn with_style(self, key: impl Into<St>, val: impl Into<CSSValue>) -> Self;
}
impl<T> NodeExt<T> for Node<T> {
fn with_listener(mut self, event_handler: EventHandler<T>) -> Self {
self.add_listener(event_handler);
self
}
fn with_style(mut self, key: impl Into<St>, val: impl Into<CSSValue>) -> Self {
self.add_style(key, val);
self
}
}
/// Extension methods for `fetch::FailReason`
pub(crate) trait FailReasonExt {
fn message(&self) -> String;
}
impl<T> FailReasonExt for fetch::FailReason<T> {
fn message(&self) -> String {
match self {
Self::RequestError(err, _) => match err {
fetch::RequestError::DomException(e) => e.message(),
},
Self::Status(status, _) => format!("Status: {}", status.code),
Self::DataError(err, _) => match err {
fetch::DataError::DomException(e) => e.message(),
fetch::DataError::SerdeError(e, _) => format!("Serde error: {}", e),
},
}
}
}
/// Extension methods for `seed::browser::url::Url`
pub(crate) trait UrlExt {
/// Returns the path of the `Url`.
/// This fn will account for
/// the base (via the `base`) tag
/// and remove it from the path
fn get_path(&self) -> Vec<String>;
}
impl UrlExt for Url {
fn get_path(&self) -> Vec<String> {
let mut path = self.path.clone();
let base = match crate::UI_BASE.as_ref() {
Some(x) => x,
None => return path,
};
let has_base = path.get(0).filter(|x| x == &base).is_some();
if has_base {
path.remove(0);
}
path
}
} | pub(crate) trait RequestExt: Sized {
fn api_call(path: impl ToString) -> Self;
fn api_query(path: impl ToString, args: impl serde::Serialize) -> Result<Self, serde_urlencoded::ser::Error>;
fn api_item(path: impl ToString, item: impl ToString) -> Self; | random_line_split |
extensions.rs | // Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use crate::auth::csrf_token;
use iml_wire_types::{GroupType, Session};
use seed::{fetch, prelude::*, *};
/// Extension methods for the Session API object.
pub(crate) trait SessionExt {
/// Does the user need to login?
fn needs_login(&self) -> bool;
/// Does a logged in user exist?
fn has_user(&self) -> bool;
/// Does the user fall within the group?
fn group_allowed(&self, group: GroupType) -> bool;
}
impl SessionExt for Session {
fn needs_login(&self) -> bool {
self.user.is_none() && !self.read_enabled
}
fn has_user(&self) -> bool {
self.user.is_some()
}
fn group_allowed(&self, group: GroupType) -> bool {
self.user
.as_ref()
.and_then(|x| x.groups.as_ref())
.and_then(|xs| {
xs.iter().find(|y| {
//Superusers can do everything.
if y.name == GroupType::Superusers {
return true;
}
//Filesystem administrators can do everything a filesystem user can do.
if y.name == GroupType::FilesystemAdministrators && group == GroupType::FilesystemUsers {
return true;
}
// Fallback to matching on names.
y.name == group
})
})
.is_some()
}
}
/// Extension methods for `fetch::Request`
pub(crate) trait RequestExt: Sized {
fn api_call(path: impl ToString) -> Self;
fn api_query(path: impl ToString, args: impl serde::Serialize) -> Result<Self, serde_urlencoded::ser::Error>;
fn api_item(path: impl ToString, item: impl ToString) -> Self;
fn graphql_query<T: serde::Serialize>(x: &T) -> Self;
fn with_auth(self: Self) -> Self;
}
impl RequestExt for fetch::Request {
fn api_call(path: impl ToString) -> Self {
Self::new(format!("/api/{}/", path.to_string()))
}
fn api_query(path: impl ToString, args: impl serde::Serialize) -> Result<Self, serde_urlencoded::ser::Error> {
let qs = format!("?{}", serde_urlencoded::to_string(args)?);
Ok(Self::new(format!("/api/{}/{}", path.to_string(), qs)))
}
fn api_item(path: impl ToString, item: impl ToString) -> Self {
Self::api_call(format!("{}/{}", path.to_string(), item.to_string()))
}
fn graphql_query<T: serde::Serialize>(x: &T) -> Self {
Self::new("/graphql")
.with_auth()
.method(fetch::Method::Post)
.send_json(x)
}
fn with_auth(self) -> Self {
match csrf_token() {
Some(csrf) => self.header("X-CSRFToken", &csrf),
None => self,
}
}
}
/// Allows for merging attributes onto an existing item
pub(crate) trait MergeAttrs {
fn merge_attrs(self, attrs: Attrs) -> Self;
}
impl MergeAttrs for Attrs {
fn | (mut self, attrs: Attrs) -> Self {
self.merge(attrs);
self
}
}
impl<T> MergeAttrs for Node<T> {
fn merge_attrs(self, attrs: Attrs) -> Self {
if let Self::Element(mut el) = self {
el.attrs.merge(attrs);
Self::Element(el)
} else {
self
}
}
}
pub(crate) trait NodeExt<T> {
fn with_listener(self, event_handler: EventHandler<T>) -> Self;
fn with_style(self, key: impl Into<St>, val: impl Into<CSSValue>) -> Self;
}
impl<T> NodeExt<T> for Node<T> {
fn with_listener(mut self, event_handler: EventHandler<T>) -> Self {
self.add_listener(event_handler);
self
}
fn with_style(mut self, key: impl Into<St>, val: impl Into<CSSValue>) -> Self {
self.add_style(key, val);
self
}
}
/// Extension methods for `fetch::FailReason`
pub(crate) trait FailReasonExt {
fn message(&self) -> String;
}
impl<T> FailReasonExt for fetch::FailReason<T> {
fn message(&self) -> String {
match self {
Self::RequestError(err, _) => match err {
fetch::RequestError::DomException(e) => e.message(),
},
Self::Status(status, _) => format!("Status: {}", status.code),
Self::DataError(err, _) => match err {
fetch::DataError::DomException(e) => e.message(),
fetch::DataError::SerdeError(e, _) => format!("Serde error: {}", e),
},
}
}
}
/// Extension methods for `seed::browser::url::Url`
pub(crate) trait UrlExt {
/// Returns the path of the `Url`.
/// This fn will account for
/// the base (via the `base`) tag
/// and remove it from the path
fn get_path(&self) -> Vec<String>;
}
impl UrlExt for Url {
fn get_path(&self) -> Vec<String> {
let mut path = self.path.clone();
let base = match crate::UI_BASE.as_ref() {
Some(x) => x,
None => return path,
};
let has_base = path.get(0).filter(|x| x == &base).is_some();
if has_base {
path.remove(0);
}
path
}
}
| merge_attrs | identifier_name |
ast.rs | use std::cell::Cell;
use std::fmt;
use std::vec::Vec;
pub type Var = String;
pub type Atom = String;
pub enum TopLevel {
Fact(Term),
Query(Term)
}
#[derive(Clone, Copy)]
pub enum Level {
Shallow, Deep
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Level::Shallow => write!(f, "A"),
&Level::Deep => write!(f, "X")
}
}
}
#[derive(Clone, Copy)]
pub enum Reg {
ArgAndNorm(usize, usize),
Norm(usize)
}
impl Reg {
pub fn has_arg(&self) -> bool {
match self {
&Reg::ArgAndNorm(_, _) => true,
_ => false
}
}
pub fn | (&self) -> usize {
match self {
&Reg::ArgAndNorm(_, norm) | &Reg::Norm(norm) => norm
}
}
}
pub enum Term {
Atom(Cell<usize>, Atom),
Clause(Cell<usize>, Atom, Vec<Box<Term>>),
Var(Cell<Reg>, Var)
}
pub enum TermRef<'a> {
Atom(Level, &'a Cell<usize>, &'a Atom),
Clause(Level, &'a Cell<usize>, &'a Atom, &'a Vec<Box<Term>>),
Var(Level, &'a Cell<Reg>, &'a Var)
}
#[derive(Clone)]
pub enum FactInstruction {
GetStructure(Level, Atom, usize, usize),
GetValue(usize, usize),
GetVariable(usize, usize),
Proceed,
UnifyVariable(usize),
UnifyValue(usize)
}
pub enum QueryInstruction {
Call(Atom, usize),
PutStructure(Level, Atom, usize, usize),
PutValue(usize, usize),
PutVariable(usize, usize),
SetVariable(usize),
SetValue(usize),
}
pub type CompiledFact = Vec<FactInstruction>;
pub type CompiledQuery = Vec<QueryInstruction>;
#[derive(Clone, Copy, PartialEq)]
pub enum Addr {
HeapCell(usize),
RegNum(usize)
}
#[derive(Clone)]
pub enum HeapCellValue {
NamedStr(usize, Atom),
Ref(usize),
Str(usize),
}
pub type Heap = Vec<HeapCellValue>;
pub type Registers = Vec<HeapCellValue>;
impl Term {
pub fn subterms(&self) -> usize {
match self {
&Term::Clause(_, _, ref terms) => terms.len(),
_ => 1
}
}
pub fn name(&self) -> &Atom {
match self {
&Term::Atom(_, ref atom)
| &Term::Var(_, ref atom)
| &Term::Clause(_, ref atom, _) => atom
}
}
pub fn arity(&self) -> usize {
match self {
&Term::Atom(_, _) | &Term::Var(_, _) => 0,
&Term::Clause(_, _, ref child_terms) => child_terms.len()
}
}
}
| norm | identifier_name |
ast.rs | use std::cell::Cell;
use std::fmt;
use std::vec::Vec;
pub type Var = String;
pub type Atom = String;
pub enum TopLevel {
Fact(Term),
Query(Term)
}
#[derive(Clone, Copy)]
pub enum Level {
Shallow, Deep
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Level::Shallow => write!(f, "A"),
&Level::Deep => write!(f, "X")
}
}
}
#[derive(Clone, Copy)]
pub enum Reg {
ArgAndNorm(usize, usize),
Norm(usize)
}
impl Reg {
pub fn has_arg(&self) -> bool {
match self {
&Reg::ArgAndNorm(_, _) => true,
_ => false
}
}
pub fn norm(&self) -> usize {
match self {
&Reg::ArgAndNorm(_, norm) | &Reg::Norm(norm) => norm
}
}
}
pub enum Term {
Atom(Cell<usize>, Atom),
Clause(Cell<usize>, Atom, Vec<Box<Term>>),
Var(Cell<Reg>, Var)
}
pub enum TermRef<'a> {
Atom(Level, &'a Cell<usize>, &'a Atom),
Clause(Level, &'a Cell<usize>, &'a Atom, &'a Vec<Box<Term>>),
Var(Level, &'a Cell<Reg>, &'a Var)
}
#[derive(Clone)]
pub enum FactInstruction {
GetStructure(Level, Atom, usize, usize),
GetValue(usize, usize),
GetVariable(usize, usize),
Proceed,
UnifyVariable(usize),
UnifyValue(usize)
}
pub enum QueryInstruction {
Call(Atom, usize),
PutStructure(Level, Atom, usize, usize),
PutValue(usize, usize),
PutVariable(usize, usize),
SetVariable(usize),
SetValue(usize),
}
pub type CompiledFact = Vec<FactInstruction>;
pub type CompiledQuery = Vec<QueryInstruction>;
#[derive(Clone, Copy, PartialEq)]
pub enum Addr {
HeapCell(usize),
RegNum(usize)
}
#[derive(Clone)]
pub enum HeapCellValue {
NamedStr(usize, Atom),
Ref(usize),
Str(usize),
}
pub type Heap = Vec<HeapCellValue>;
pub type Registers = Vec<HeapCellValue>;
impl Term {
pub fn subterms(&self) -> usize {
match self {
&Term::Clause(_, _, ref terms) => terms.len(),
_ => 1
}
}
pub fn name(&self) -> &Atom {
match self {
&Term::Atom(_, ref atom)
| &Term::Var(_, ref atom)
| &Term::Clause(_, ref atom, _) => atom
}
}
pub fn arity(&self) -> usize { | match self {
&Term::Atom(_, _) | &Term::Var(_, _) => 0,
&Term::Clause(_, _, ref child_terms) => child_terms.len()
}
}
} | random_line_split |
|
lib.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! A library for interacting with Twitter.
//!
//! [Repository](https://github.com/QuietMisdreavus/twitter-rs)
//!
//! egg-mode is a Twitter library that aims to make as few assumptions about the user's codebase as
//! possible. Endpoints are exposed as bare functions where authentication details are passed in as
//! arguments, rather than as builder functions of a root "service" manager. The only exceptions to
//! this guideline are endpoints with many optional parameters, like posting a status update or
//! updating the metadata of a list.
//!
//! # About the examples in this documentation
//!
//! There are a couple prerequisites to using egg-mode, which its examples also assume:
//!
//! * All methods that hit the twitter API are `async` and should be awaited with the `.await` syntax.
//! All such calls return a result type with the `Error` enum as their Error value.
//! The resulting future must be executed on a `tokio` executor.
//! For more information, check out the [Rust `async` book][rust-futures] and the
//! [Tokio documentation guides][].
//!
//! * Twitter tracks API use through "tokens" which are managed by Twitter and processed separately
//! for each "authenticated user" you wish to connect to your app. egg-mode's [Token]
//! documentation describes how you can obtain one of these, but each example outside of the
//! authentication documentation brings in a `Token` "offscreen", to avoid distracting from the
//! rest of the example.
//!
//! [Token]: enum.Token.html
//! [tokio]: https://tokio.rs
//! [rust-futures]: https://rust-lang.github.io/async-book/
//! [Tokio documentation guides]: https://tokio.rs/docs/overview
//!
//! To load the profile information of a single user:
//!
//! ```rust,no_run
//! # use egg_mode::Token;
//! # #[tokio::main]
//! # async fn main() {
//! # let token: Token = unimplemented!();
//! let rustlang = egg_mode::user::show("rustlang", &token).await.unwrap();
//!
//! println!("{} (@{})", rustlang.name, rustlang.screen_name);
//! # }
//! ``` | //!
//! ```rust,no_run
//! # use egg_mode::Token;
//! use egg_mode::tweet::DraftTweet;
//! # #[tokio::main]
//! # async fn main() {
//! # let token: Token = unimplemented!();
//!
//! let post = DraftTweet::new("Hey Twitter!").send(&token).await.unwrap();
//! # }
//! ```
//!
//! # Types and Functions
//!
//! All of the main content of egg-mode is in submodules, but there are a few things here in the
//! crate root. To wit, it contains items related to authentication and a couple items that all the
//! submodules use.
//!
//! ## `Response<T>`
//!
//! Every method that calls Twitter and carries rate-limit information wraps its return value in a
//! [`Response`][] struct, that transmits this information to your app. From there, you can handle
//! the rate-limit information to hold off on that kind of request, or simply grab its `response`
//! field to get the output of whatever method you called. `Response` also implements `Deref`, so
//! for the most part you can access fields of the final result without having to grab the
//! `response` field directly.
//!
//! [`Response`]: struct.Response.html
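//!
//! Since the wrapper derefs to its contents, the `rustlang` lookup shown
//! earlier can read fields either way; a minimal sketch reusing only the calls
//! from the example above:
//!
//! ```rust,no_run
//! # use egg_mode::Token;
//! # #[tokio::main]
//! # async fn main() {
//! # let token: Token = unimplemented!();
//! let resp = egg_mode::user::show("rustlang", &token).await.unwrap();
//! println!("{}", resp.screen_name); // through `Deref`
//! println!("{}", resp.response.screen_name); // through the `response` field
//! # }
//! ```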
//!
//! ## Authentication
//!
//! The remaining types and methods are explained as part of the [authentication overview][Token],
//! with the exception of `verify_tokens`, which is a simple method to ensure a given token is
//! still valid.
//!
//! # Modules
//!
//! As there are many actions available in the Twitter API, egg-mode divides them roughly into
//! several modules by their shared purpose. Here's a sort of high-level overview, in rough order
//! from "most important" to "less directly used":
//!
//! ## Primary actions
//!
//! These could be considered the "core" actions within the Twitter API that egg-mode has made
//! available.
//!
//! * `tweet`: This module lets you act on tweets. Here you can find actions to load a user's
//! timeline, post a new tweet, or like and retweet individual posts.
//! * `user`: This module lets you act on users, be it by following or unfollowing them, loading
//! their profile information, blocking or muting them, or showing the relationship between two
//! users.
//! * `search`: Due to the complexity of searching for tweets, it gets its own module.
//! * `direct`: Here you can work with a user's Direct Messages, either by loading DMs they've sent
//! or received, or by sending new ones.
//! * `list`: This module lets you act on lists, from creating and deleting them, adding and
//! removing users, or loading the posts made by their members.
//! * `media`: This module lets you upload images, GIFs, and videos to Twitter so you can attach
//! them to tweets.
//!
//! ## Secondary actions
//!
//! These modules still contain direct actions for Twitter, but they can be considered as having
//! more of a helper role than something you might use directly.
//!
//! * `place`: Here are actions that look up physical locations that can be attached to tweets, as
//! well at the `Place` struct that appears on tweets with locations attached.
//! * `service`: These are some miscellaneous methods that show information about the Twitter
//! service as a whole, like loading the maximum length of t.co URLs or loading the current Terms
//! of Service or Privacy Policy.
//!
//! ## Helper structs
//!
//! These modules contain some implementations that wrap some pattern seen in multiple "action"
//! modules.
//!
//! * `cursor`: This contains a helper trait and some helper structs that allow effective cursoring
//! through certain collections of results from Twitter.
//! * `entities`: Whenever some text can be returned that may contain links, hashtags, media, or
//! user mentions, its metadata is parsed into something that lives in this module.
//! * `error`: Any interaction with Twitter may result in an error condition, be it from finding a
//! tweet or user that doesn't exist or the network connection being unavailable. All the error
//! types are aggregated into an enum in this module.
#![warn(missing_docs)]
#![warn(unused_extern_crates)]
#![warn(unused_qualifications)]
#[macro_use]
mod common;
mod auth;
pub mod cursor;
pub mod direct;
pub mod entities;
pub mod error;
mod links;
pub mod list;
pub mod media;
pub mod place;
pub mod search;
pub mod service;
pub mod stream;
pub mod tweet;
pub mod user;
pub use crate::auth::{
access_token, authenticate_url, authorize_url, bearer_token, invalidate_bearer, request_token,
verify_tokens, KeyPair, Token,
};
pub use crate::common::Response; | //!
//! To post a new tweet: | random_line_split |
minimum_window_substring.rs | use std::collections::hash_map::Entry::Occupied;
use std::collections::HashMap;
struct Solution {}
impl Solution {
pub fn min_window(&self, s: String, t: String) -> String {
if s.len() < t.len() {
return String::from("");
}
let target_hm = t.chars().fold(HashMap::new(), |mut acc, c| {
*acc.entry(c).or_insert(0) += 1;
acc
});
let s_vec: Vec<char> = s.chars().collect();
let filtered_s: Vec<(usize, char)> = s
.chars()
.enumerate()
.collect::<Vec<(usize, char)>>()
.into_iter()
.filter(|(_, c)| target_hm.get(c).is_some())
.collect();
let mut min_window_chars: Option<(usize, usize)> = None;
let mut curr_hm: HashMap<char, u32> = HashMap::new();
let mut l_idx = 0 as usize;
let mut r_idx = 0 as usize;
while r_idx < filtered_s.len() {
let curr_char = filtered_s[r_idx];
*curr_hm.entry(curr_char.1).or_insert(0) += 1;
// advance l_idx while we still have a complete set of letters with the required occurrences
while hm_is_valid(&target_hm, &curr_hm) && l_idx <= r_idx {
// if we don't have a window yet, or the length of the curr window is too small
match min_window_chars {
None => {
min_window_chars = Some((filtered_s[l_idx].0, filtered_s[r_idx].0));
}
Some(mwc) => {
if (mwc.1 - mwc.0) > (filtered_s[r_idx].0 - filtered_s[l_idx].0) {
min_window_chars = Some((filtered_s[l_idx].0, filtered_s[r_idx].0));
}
}
}
match curr_hm.entry(filtered_s[l_idx].1) {
Occupied(mut e) => {
let v = e.get_mut();
if v > &mut 1 {
*v -= 1;
} else {
e.remove();
}
}
_ => panic!("char vec curr_hm filled the wrong way"),
};
// dbg!(
// &l_idx,
// &s_vec[filtered_s[l_idx].0..=filtered_s[r_idx].0]
// .to_vec()
// .iter()
// .collect::<String>(),
// &curr_hm,
// );
l_idx += 1;
}
r_idx += 1;
}
match min_window_chars {
None => String::from(""),
Some(mwc) => s_vec[mwc.0..=mwc.1].to_vec().iter().collect(),
}
}
}
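/// Returns true when `test_hm` contains every character of `target_hm` with at
/// least the required count, i.e. when the current window still covers the
/// whole target string.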
pub fn hm_is_valid(target_hm: &HashMap<char, u32>, test_hm: &HashMap<char, u32>) -> bool {
if target_hm == test_hm {
return true;
}
for (c, count) in target_hm.iter() {
match test_hm.get(c) {
None => return false,
Some(test_count) if test_count < count => return false,
_ => (),
}
}
true
} |
#[test]
fn solution() {
let sol = Solution {};
assert_eq!(
sol.min_window(String::from("ADOBECODEBANC"), String::from("ABC")),
"BANC"
);
assert_eq!(sol.min_window(String::from(""), String::from("ABC")), "");
assert_eq!(sol.min_window(String::from("RTY"), String::from("ABC")), "");
assert_eq!(
sol.min_window(String::from("AAAAAAAAA"), String::from("A")),
"A"
);
assert_eq!(sol.min_window(String::from("a"), String::from("aa")), "");
assert_eq!(sol.min_window(String::from("aa"), String::from("aa")), "aa");
assert_eq!(
sol.min_window(String::from("bba"), String::from("ab")),
"ba"
);
}
} |
#[cfg(test)]
mod tests {
use super::Solution; | random_line_split |
minimum_window_substring.rs | use std::collections::hash_map::Entry::Occupied;
use std::collections::HashMap;
struct Solution {}
impl Solution {
pub fn min_window(&self, s: String, t: String) -> String | let mut l_idx = 0 as usize;
let mut r_idx = 0 as usize;
while r_idx < filtered_s.len() {
let curr_char = filtered_s[r_idx];
*curr_hm.entry(curr_char.1).or_insert(0) += 1;
// advance l_idx while we still have a complete set of letters with the required occurrences
while hm_is_valid(&target_hm, &curr_hm) && l_idx <= r_idx {
// if we don't have a window yet, or the length of the curr window is too small
match min_window_chars {
None => {
min_window_chars = Some((filtered_s[l_idx].0, filtered_s[r_idx].0));
}
Some(mwc) => {
if (mwc.1 - mwc.0) > (filtered_s[r_idx].0 - filtered_s[l_idx].0) {
min_window_chars = Some((filtered_s[l_idx].0, filtered_s[r_idx].0));
}
}
}
match curr_hm.entry(filtered_s[l_idx].1) {
Occupied(mut e) => {
let v = e.get_mut();
if v > &mut 1 {
*v -= 1;
} else {
e.remove();
}
}
_ => panic!("char vec curr_hm filled the wrong way"),
};
// dbg!(
// &l_idx,
// &s_vec[filtered_s[l_idx].0..=filtered_s[r_idx].0]
// .to_vec()
// .iter()
// .collect::<String>(),
// &curr_hm,
// );
l_idx += 1;
}
r_idx += 1;
}
match min_window_chars {
None => String::from(""),
Some(mwc) => s_vec[mwc.0..=mwc.1].to_vec().iter().collect(),
}
}
}
pub fn hm_is_valid(target_hm: &HashMap<char, u32>, test_hm: &HashMap<char, u32>) -> bool {
if target_hm == test_hm {
return true;
}
for (c, count) in target_hm.iter() {
match test_hm.get(c) {
None => return false,
Some(test_count) if test_count < count => return false,
_ => (),
}
}
true
}
#[cfg(test)]
mod tests {
use super::Solution;
#[test]
fn solution() {
let sol = Solution {};
assert_eq!(
sol.min_window(String::from("ADOBECODEBANC"), String::from("ABC")),
"BANC"
);
assert_eq!(sol.min_window(String::from(""), String::from("ABC")), "");
assert_eq!(sol.min_window(String::from("RTY"), String::from("ABC")), "");
assert_eq!(
sol.min_window(String::from("AAAAAAAAA"), String::from("A")),
"A"
);
assert_eq!(sol.min_window(String::from("a"), String::from("aa")), "");
assert_eq!(sol.min_window(String::from("aa"), String::from("aa")), "aa");
assert_eq!(
sol.min_window(String::from("bba"), String::from("ab")),
"ba"
);
}
}
| {
if s.len() < t.len() {
return String::from("");
}
let target_hm = t.chars().fold(HashMap::new(), |mut acc, c| {
*acc.entry(c).or_insert(0) += 1;
acc
});
let s_vec: Vec<char> = s.chars().collect();
let filtered_s: Vec<(usize, char)> = s
.chars()
.enumerate()
.collect::<Vec<(usize, char)>>()
.into_iter()
.filter(|(_, c)| target_hm.get(c).is_some())
.collect();
let mut min_window_chars: Option<(usize, usize)> = None;
let mut curr_hm: HashMap<char, u32> = HashMap::new(); | identifier_body |
minimum_window_substring.rs | use std::collections::hash_map::Entry::Occupied;
use std::collections::HashMap;
struct Solution {}
impl Solution {
pub fn min_window(&self, s: String, t: String) -> String {
if s.len() < t.len() {
return String::from("");
}
let target_hm = t.chars().fold(HashMap::new(), |mut acc, c| {
*acc.entry(c).or_insert(0) += 1;
acc
});
let s_vec: Vec<char> = s.chars().collect();
let filtered_s: Vec<(usize, char)> = s
.chars()
.enumerate()
.collect::<Vec<(usize, char)>>()
.into_iter()
.filter(|(_, c)| target_hm.get(c).is_some())
.collect();
let mut min_window_chars: Option<(usize, usize)> = None;
let mut curr_hm: HashMap<char, u32> = HashMap::new();
let mut l_idx = 0 as usize;
let mut r_idx = 0 as usize;
while r_idx < filtered_s.len() {
let curr_char = filtered_s[r_idx];
*curr_hm.entry(curr_char.1).or_insert(0) += 1;
// advance l_idx while we still have a complete set of letters with the required occurrences
while hm_is_valid(&target_hm, &curr_hm) && l_idx <= r_idx {
// if we don't have a window yet, or the length of the curr window is too small
match min_window_chars {
None => {
min_window_chars = Some((filtered_s[l_idx].0, filtered_s[r_idx].0));
}
Some(mwc) => {
if (mwc.1 - mwc.0) > (filtered_s[r_idx].0 - filtered_s[l_idx].0) {
min_window_chars = Some((filtered_s[l_idx].0, filtered_s[r_idx].0));
}
}
}
match curr_hm.entry(filtered_s[l_idx].1) {
Occupied(mut e) => {
let v = e.get_mut();
if v > &mut 1 {
*v -= 1;
} else {
e.remove();
}
}
_ => panic!("char vec curr_hm filled the wrong way"),
};
// dbg!(
// &l_idx,
// &s_vec[filtered_s[l_idx].0..=filtered_s[r_idx].0]
// .to_vec()
// .iter()
// .collect::<String>(),
// &curr_hm,
// );
l_idx += 1;
}
r_idx += 1;
}
match min_window_chars {
None => String::from(""),
Some(mwc) => s_vec[mwc.0..=mwc.1].to_vec().iter().collect(),
}
}
}
pub fn hm_is_valid(target_hm: &HashMap<char, u32>, test_hm: &HashMap<char, u32>) -> bool {
if target_hm == test_hm {
return true;
}
for (c, count) in target_hm.iter() {
match test_hm.get(c) {
None => return false,
Some(test_count) if test_count < count => return false,
_ => (),
}
}
true
}
#[cfg(test)]
mod tests {
use super::Solution;
#[test]
fn | () {
let sol = Solution {};
assert_eq!(
sol.min_window(String::from("ADOBECODEBANC"), String::from("ABC")),
"BANC"
);
assert_eq!(sol.min_window(String::from(""), String::from("ABC")), "");
assert_eq!(sol.min_window(String::from("RTY"), String::from("ABC")), "");
assert_eq!(
sol.min_window(String::from("AAAAAAAAA"), String::from("A")),
"A"
);
assert_eq!(sol.min_window(String::from("a"), String::from("aa")), "");
assert_eq!(sol.min_window(String::from("aa"), String::from("aa")), "aa");
assert_eq!(
sol.min_window(String::from("bba"), String::from("ab")),
"ba"
);
}
}
| solution | identifier_name |
small-enum-range-edge.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Tests the range assertion wraparound case in trans::middle::adt::load_discr.
*/
#[repr(u8)]
#[derive(Copy)]
enum | { Lu = 0, Hu = 255 }
static CLu: Eu = Eu::Lu;
static CHu: Eu = Eu::Hu;
#[repr(i8)]
#[derive(Copy)]
enum Es { Ls = -128, Hs = 127 }
static CLs: Es = Es::Ls;
static CHs: Es = Es::Hs;
pub fn main() {
assert_eq!((Eu::Hu as u8) + 1, Eu::Lu as u8);
assert_eq!((Es::Hs as i8) + 1, Es::Ls as i8);
assert_eq!(CLu as u8, Eu::Lu as u8);
assert_eq!(CHu as u8, Eu::Hu as u8);
assert_eq!(CLs as i8, Es::Ls as i8);
assert_eq!(CHs as i8, Es::Hs as i8);
}
| Eu | identifier_name |
small-enum-range-edge.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Tests the range assertion wraparound case in trans::middle::adt::load_discr.
*/
#[repr(u8)]
#[derive(Copy)]
enum Eu { Lu = 0, Hu = 255 }
static CLu: Eu = Eu::Lu;
static CHu: Eu = Eu::Hu;
#[repr(i8)]
#[derive(Copy)]
enum Es { Ls = -128, Hs = 127 }
static CLs: Es = Es::Ls;
static CHs: Es = Es::Hs;
pub fn main() | {
assert_eq!((Eu::Hu as u8) + 1, Eu::Lu as u8);
assert_eq!((Es::Hs as i8) + 1, Es::Ls as i8);
assert_eq!(CLu as u8, Eu::Lu as u8);
assert_eq!(CHu as u8, Eu::Hu as u8);
assert_eq!(CLs as i8, Es::Ls as i8);
assert_eq!(CHs as i8, Es::Hs as i8);
} | identifier_body |
|
small-enum-range-edge.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Tests the range assertion wraparound case in trans::middle::adt::load_discr. | #[repr(u8)]
#[derive(Copy)]
enum Eu { Lu = 0, Hu = 255 }
static CLu: Eu = Eu::Lu;
static CHu: Eu = Eu::Hu;
#[repr(i8)]
#[derive(Copy)]
enum Es { Ls = -128, Hs = 127 }
static CLs: Es = Es::Ls;
static CHs: Es = Es::Hs;
pub fn main() {
assert_eq!((Eu::Hu as u8) + 1, Eu::Lu as u8);
assert_eq!((Es::Hs as i8) + 1, Es::Ls as i8);
assert_eq!(CLu as u8, Eu::Lu as u8);
assert_eq!(CHu as u8, Eu::Hu as u8);
assert_eq!(CLs as i8, Es::Ls as i8);
assert_eq!(CHs as i8, Es::Hs as i8);
} | */
| random_line_split |
uio.rs | //! Vectored I/O
use crate::Result;
use crate::errno::Errno;
use libc::{self, c_int, c_void, size_t, off_t};
use std::marker::PhantomData;
use std::os::unix::io::RawFd;
/// Low-level vectored write to a raw file descriptor
///
/// See also [writev(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/writev.html)
pub fn writev(fd: RawFd, iov: &[IoVec<&[u8]>]) -> Result<usize> {
let res = unsafe { libc::writev(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int) };
Errno::result(res).map(|r| r as usize)
}
/// Low-level vectored read from a raw file descriptor
///
/// See also [readv(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/readv.html)
pub fn readv(fd: RawFd, iov: &mut [IoVec<&mut [u8]>]) -> Result<usize> {
let res = unsafe { libc::readv(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int) };
Errno::result(res).map(|r| r as usize)
}
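// A minimal usage sketch (added for illustration, not part of the original
// file): gathers two buffers into a single write. Assumes `fd` is any valid
// open file descriptor; error handling is left to the caller.
#[allow(dead_code)]
fn example_gather_write(fd: RawFd) -> Result<usize> {
    let iov = [IoVec::from_slice(b"hello "), IoVec::from_slice(b"world\n")];
    writev(fd, &iov)
}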
/// Write to `fd` at `offset` from buffers in `iov`.
///
/// Buffers in `iov` will be written in order until all buffers have been written
/// or an error occurs. The file offset is not changed.
///
/// See also: [`writev`](fn.writev.html) and [`pwrite`](fn.pwrite.html)
#[cfg(not(target_os = "redox"))]
#[cfg_attr(docsrs, doc(cfg(all())))]
pub fn pwritev(fd: RawFd, iov: &[IoVec<&[u8]>],
offset: off_t) -> Result<usize> {
#[cfg(target_env = "uclibc")]
let offset = offset as libc::off64_t; // uclibc doesn't use off_t
let res = unsafe {
libc::pwritev(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int, offset)
};
Errno::result(res).map(|r| r as usize)
}
/// Read from `fd` at `offset` filling buffers in `iov`.
///
/// Buffers in `iov` will be filled in order until all buffers have been filled,
/// no more bytes are available, or an error occurs. The file offset is not
/// changed.
///
/// See also: [`readv`](fn.readv.html) and [`pread`](fn.pread.html)
#[cfg(not(target_os = "redox"))]
#[cfg_attr(docsrs, doc(cfg(all())))]
pub fn preadv(fd: RawFd, iov: &[IoVec<&mut [u8]>],
offset: off_t) -> Result<usize> {
#[cfg(target_env = "uclibc")]
let offset = offset as libc::off64_t; // uclibc doesn't use off_t
let res = unsafe {
libc::preadv(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int, offset)
};
Errno::result(res).map(|r| r as usize)
}
/// Low-level write to a file, with specified offset.
///
/// See also [pwrite(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/pwrite.html)
// TODO: move to unistd
pub fn pwrite(fd: RawFd, buf: &[u8], offset: off_t) -> Result<usize> {
let res = unsafe {
libc::pwrite(fd, buf.as_ptr() as *const c_void, buf.len() as size_t,
offset)
};
Errno::result(res).map(|r| r as usize)
}
/// Low-level read from a file, with specified offset.
///
/// See also [pread(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/pread.html)
// TODO: move to unistd
pub fn pread(fd: RawFd, buf: &mut [u8], offset: off_t) -> Result<usize>{
let res = unsafe {
libc::pread(fd, buf.as_mut_ptr() as *mut c_void, buf.len() as size_t,
offset)
};
Errno::result(res).map(|r| r as usize)
}
/// A slice of memory in a remote process, starting at address `base`
/// and consisting of `len` bytes.
///
/// This is the same underlying C structure as [`IoVec`](struct.IoVec.html),
/// except that it refers to memory in some other process, and is
/// therefore not represented in Rust by an actual slice as `IoVec` is. It
/// is used with [`process_vm_readv`](fn.process_vm_readv.html)
/// and [`process_vm_writev`](fn.process_vm_writev.html).
#[cfg(any(target_os = "linux", target_os = "android"))]
#[cfg_attr(docsrs, doc(cfg(all())))]
#[repr(C)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct RemoteIoVec {
/// The starting address of this slice (`iov_base`).
pub base: usize,
/// The number of bytes in this slice (`iov_len`).
pub len: usize,
}
feature! {
#![feature = "process"]
/// Write data directly to another process's virtual memory
/// (see [`process_vm_writev`(2)]).
///
/// `local_iov` is a list of [`IoVec`]s containing the data to be written,
/// and `remote_iov` is a list of [`RemoteIoVec`]s identifying where the
/// data should be written in the target process. On success, returns the
/// number of bytes written, which will always be a whole
/// number of `remote_iov` chunks.
///
/// This requires the same permissions as debugging the process using
/// [ptrace]: you must either be a privileged process (with
/// `CAP_SYS_PTRACE`), or you must be running as the same user as the
/// target process and the OS must have unprivileged debugging enabled.
///
/// This function is only available on Linux and Android(SDK23+).
///
/// [`process_vm_writev`(2)]: https://man7.org/linux/man-pages/man2/process_vm_writev.2.html
/// [ptrace]:../ptrace/index.html
/// [`IoVec`]: struct.IoVec.html
/// [`RemoteIoVec`]: struct.RemoteIoVec.html
#[cfg(all(any(target_os = "linux", target_os = "android"), not(target_env = "uclibc")))]
pub fn process_vm_writev(
pid: crate::unistd::Pid,
local_iov: &[IoVec<&[u8]>],
remote_iov: &[RemoteIoVec]) -> Result<usize>
{
let res = unsafe {
libc::process_vm_writev(pid.into(),
local_iov.as_ptr() as *const libc::iovec, local_iov.len() as libc::c_ulong,
remote_iov.as_ptr() as *const libc::iovec, remote_iov.len() as libc::c_ulong, 0)
};
Errno::result(res).map(|r| r as usize)
}
/// Read data directly from another process's virtual memory
/// (see [`process_vm_readv`(2)]).
///
/// `local_iov` is a list of [`IoVec`]s containing the buffer to copy
/// data into, and `remote_iov` is a list of [`RemoteIoVec`]s identifying
/// where the source data is in the target process. On success,
/// returns the number of bytes written, which will always be a whole
/// number of `remote_iov` chunks.
///
/// This requires the same permissions as debugging the process using
/// [`ptrace`]: you must either be a privileged process (with
/// `CAP_SYS_PTRACE`), or you must be running as the same user as the
/// target process and the OS must have unprivileged debugging enabled.
///
/// This function is only available on Linux and Android(SDK23+).
///
/// [`process_vm_readv`(2)]: https://man7.org/linux/man-pages/man2/process_vm_readv.2.html
/// [`ptrace`]:../ptrace/index.html
/// [`IoVec`]: struct.IoVec.html
/// [`RemoteIoVec`]: struct.RemoteIoVec.html
#[cfg(all(any(target_os = "linux", target_os = "android"), not(target_env = "uclibc")))]
pub fn process_vm_readv(
pid: crate::unistd::Pid,
local_iov: &[IoVec<&mut [u8]>],
remote_iov: &[RemoteIoVec]) -> Result<usize>
{
let res = unsafe {
libc::process_vm_readv(pid.into(),
local_iov.as_ptr() as *const libc::iovec, local_iov.len() as libc::c_ulong,
remote_iov.as_ptr() as *const libc::iovec, remote_iov.len() as libc::c_ulong, 0)
};
Errno::result(res).map(|r| r as usize)
}
}
/// A vector of buffers.
///
/// Vectored I/O methods like [`writev`] and [`readv`] use this structure for
/// both reading and writing. Each `IoVec` specifies the base address and
/// length of an area in memory.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct IoVec<T>(pub(crate) libc::iovec, PhantomData<T>);
impl<T> IoVec<T> {
/// View the `IoVec` as a Rust slice.
#[inline]
pub fn as_slice(&self) -> &[u8] {
use std::slice;
unsafe {
slice::from_raw_parts(
self.0.iov_base as *const u8,
self.0.iov_len as usize)
}
}
}
impl<'a> IoVec<&'a [u8]> {
#[cfg(all(feature = "mount", target_os = "freebsd"))]
pub(crate) fn from_raw_parts(base: *mut c_void, len: usize) -> Self {
IoVec(libc::iovec {
iov_base: base,
iov_len: len
}, PhantomData)
}
/// Create an `IoVec` from a Rust slice.
pub fn from_slice(buf: &'a [u8]) -> IoVec<&'a [u8]> {
IoVec(libc::iovec { | }, PhantomData)
}
}
impl<'a> IoVec<&'a mut [u8]> {
/// Create an `IoVec` from a mutable Rust slice.
pub fn from_mut_slice(buf: &'a mut [u8]) -> IoVec<&'a mut [u8]> {
IoVec(libc::iovec {
iov_base: buf.as_ptr() as *mut c_void,
iov_len: buf.len() as size_t,
}, PhantomData)
}
}
// The only reason IoVec isn't automatically Send+Sync is because libc::iovec
// contains raw pointers.
unsafe impl<T> Send for IoVec<T> where T: Send {}
unsafe impl<T> Sync for IoVec<T> where T: Sync {} | iov_base: buf.as_ptr() as *mut c_void,
iov_len: buf.len() as size_t, | random_line_split |
uio.rs | //! Vectored I/O
use crate::Result;
use crate::errno::Errno;
use libc::{self, c_int, c_void, size_t, off_t};
use std::marker::PhantomData;
use std::os::unix::io::RawFd;
/// Low-level vectored write to a raw file descriptor
///
/// See also [writev(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/writev.html)
pub fn writev(fd: RawFd, iov: &[IoVec<&[u8]>]) -> Result<usize> {
let res = unsafe { libc::writev(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int) };
Errno::result(res).map(|r| r as usize)
}
/// Low-level vectored read from a raw file descriptor
///
/// See also [readv(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/readv.html)
pub fn readv(fd: RawFd, iov: &mut [IoVec<&mut [u8]>]) -> Result<usize> {
let res = unsafe { libc::readv(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int) };
Errno::result(res).map(|r| r as usize)
}
/// Write to `fd` at `offset` from buffers in `iov`.
///
/// Buffers in `iov` will be written in order until all buffers have been written
/// or an error occurs. The file offset is not changed.
///
/// See also: [`writev`](fn.writev.html) and [`pwrite`](fn.pwrite.html)
#[cfg(not(target_os = "redox"))]
#[cfg_attr(docsrs, doc(cfg(all())))]
pub fn | (fd: RawFd, iov: &[IoVec<&[u8]>],
offset: off_t) -> Result<usize> {
#[cfg(target_env = "uclibc")]
let offset = offset as libc::off64_t; // uclibc doesn't use off_t
let res = unsafe {
libc::pwritev(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int, offset)
};
Errno::result(res).map(|r| r as usize)
}
/// Read from `fd` at `offset` filling buffers in `iov`.
///
/// Buffers in `iov` will be filled in order until all buffers have been filled,
/// no more bytes are available, or an error occurs. The file offset is not
/// changed.
///
/// See also: [`readv`](fn.readv.html) and [`pread`](fn.pread.html)
#[cfg(not(target_os = "redox"))]
#[cfg_attr(docsrs, doc(cfg(all())))]
pub fn preadv(fd: RawFd, iov: &[IoVec<&mut [u8]>],
offset: off_t) -> Result<usize> {
#[cfg(target_env = "uclibc")]
let offset = offset as libc::off64_t; // uclibc doesn't use off_t
let res = unsafe {
libc::preadv(fd, iov.as_ptr() as *const libc::iovec, iov.len() as c_int, offset)
};
Errno::result(res).map(|r| r as usize)
}
/// Low-level write to a file, with specified offset.
///
/// See also [pwrite(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/pwrite.html)
// TODO: move to unistd
pub fn pwrite(fd: RawFd, buf: &[u8], offset: off_t) -> Result<usize> {
let res = unsafe {
libc::pwrite(fd, buf.as_ptr() as *const c_void, buf.len() as size_t,
offset)
};
Errno::result(res).map(|r| r as usize)
}
/// Low-level read from a file, with specified offset.
///
/// See also [pread(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/pread.html)
// TODO: move to unistd
pub fn pread(fd: RawFd, buf: &mut [u8], offset: off_t) -> Result<usize>{
let res = unsafe {
libc::pread(fd, buf.as_mut_ptr() as *mut c_void, buf.len() as size_t,
offset)
};
Errno::result(res).map(|r| r as usize)
}
/// A slice of memory in a remote process, starting at address `base`
/// and consisting of `len` bytes.
///
/// This is the same underlying C structure as [`IoVec`](struct.IoVec.html),
/// except that it refers to memory in some other process, and is
/// therefore not represented in Rust by an actual slice as `IoVec` is. It
/// is used with [`process_vm_readv`](fn.process_vm_readv.html)
/// and [`process_vm_writev`](fn.process_vm_writev.html).
#[cfg(any(target_os = "linux", target_os = "android"))]
#[cfg_attr(docsrs, doc(cfg(all())))]
#[repr(C)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct RemoteIoVec {
/// The starting address of this slice (`iov_base`).
pub base: usize,
/// The number of bytes in this slice (`iov_len`).
pub len: usize,
}
feature! {
#![feature = "process"]
/// Write data directly to another process's virtual memory
/// (see [`process_vm_writev`(2)]).
///
/// `local_iov` is a list of [`IoVec`]s containing the data to be written,
/// and `remote_iov` is a list of [`RemoteIoVec`]s identifying where the
/// data should be written in the target process. On success, returns the
/// number of bytes written, which will always be a whole
/// number of `remote_iov` chunks.
///
/// This requires the same permissions as debugging the process using
/// [ptrace]: you must either be a privileged process (with
/// `CAP_SYS_PTRACE`), or you must be running as the same user as the
/// target process and the OS must have unprivileged debugging enabled.
///
/// This function is only available on Linux and Android(SDK23+).
///
/// [`process_vm_writev`(2)]: https://man7.org/linux/man-pages/man2/process_vm_writev.2.html
/// [ptrace]:../ptrace/index.html
/// [`IoVec`]: struct.IoVec.html
/// [`RemoteIoVec`]: struct.RemoteIoVec.html
#[cfg(all(any(target_os = "linux", target_os = "android"), not(target_env = "uclibc")))]
pub fn process_vm_writev(
pid: crate::unistd::Pid,
local_iov: &[IoVec<&[u8]>],
remote_iov: &[RemoteIoVec]) -> Result<usize>
{
let res = unsafe {
libc::process_vm_writev(pid.into(),
local_iov.as_ptr() as *const libc::iovec, local_iov.len() as libc::c_ulong,
remote_iov.as_ptr() as *const libc::iovec, remote_iov.len() as libc::c_ulong, 0)
};
Errno::result(res).map(|r| r as usize)
}
/// Read data directly from another process's virtual memory
/// (see [`process_vm_readv`(2)]).
///
/// `local_iov` is a list of [`IoVec`]s containing the buffer to copy
/// data into, and `remote_iov` is a list of [`RemoteIoVec`]s identifying
/// where the source data is in the target process. On success,
/// returns the number of bytes written, which will always be a whole
/// number of `remote_iov` chunks.
///
/// This requires the same permissions as debugging the process using
/// [`ptrace`]: you must either be a privileged process (with
/// `CAP_SYS_PTRACE`), or you must be running as the same user as the
/// target process and the OS must have unprivileged debugging enabled.
///
/// This function is only available on Linux and Android(SDK23+).
///
/// [`process_vm_readv`(2)]: https://man7.org/linux/man-pages/man2/process_vm_readv.2.html
/// [`ptrace`]:../ptrace/index.html
/// [`IoVec`]: struct.IoVec.html
/// [`RemoteIoVec`]: struct.RemoteIoVec.html
#[cfg(all(any(target_os = "linux", target_os = "android"), not(target_env = "uclibc")))]
pub fn process_vm_readv(
pid: crate::unistd::Pid,
local_iov: &[IoVec<&mut [u8]>],
remote_iov: &[RemoteIoVec]) -> Result<usize>
{
let res = unsafe {
libc::process_vm_readv(pid.into(),
local_iov.as_ptr() as *const libc::iovec, local_iov.len() as libc::c_ulong,
remote_iov.as_ptr() as *const libc::iovec, remote_iov.len() as libc::c_ulong, 0)
};
Errno::result(res).map(|r| r as usize)
}
}
/// A vector of buffers.
///
/// Vectored I/O methods like [`writev`] and [`readv`] use this structure for
/// both reading and writing. Each `IoVec` specifies the base address and
/// length of an area in memory.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct IoVec<T>(pub(crate) libc::iovec, PhantomData<T>);
impl<T> IoVec<T> {
/// View the `IoVec` as a Rust slice.
#[inline]
pub fn as_slice(&self) -> &[u8] {
use std::slice;
unsafe {
slice::from_raw_parts(
self.0.iov_base as *const u8,
self.0.iov_len as usize)
}
}
}
impl<'a> IoVec<&'a [u8]> {
#[cfg(all(feature = "mount", target_os = "freebsd"))]
pub(crate) fn from_raw_parts(base: *mut c_void, len: usize) -> Self {
IoVec(libc::iovec {
iov_base: base,
iov_len: len
}, PhantomData)
}
/// Create an `IoVec` from a Rust slice.
pub fn from_slice(buf: &'a [u8]) -> IoVec<&'a [u8]> {
IoVec(libc::iovec {
iov_base: buf.as_ptr() as *mut c_void,
iov_len: buf.len() as size_t,
}, PhantomData)
}
}
impl<'a> IoVec<&'a mut [u8]> {
/// Create an `IoVec` from a mutable Rust slice.
pub fn from_mut_slice(buf: &'a mut [u8]) -> IoVec<&'a mut [u8]> {
IoVec(libc::iovec {
iov_base: buf.as_ptr() as *mut c_void,
iov_len: buf.len() as size_t,
}, PhantomData)
}
}
// The only reason IoVec isn't automatically Send+Sync is because libc::iovec
// contains raw pointers.
unsafe impl<T> Send for IoVec<T> where T: Send {}
unsafe impl<T> Sync for IoVec<T> where T: Sync {}
| pwritev | identifier_name |
lib.rs | // Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
// TODO: Falsely triggers for async/await:
// see https://github.com/rust-lang/rust-clippy/issues/5360
// clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
// We only use unsafe pointer dereferences in our no_mangle exposed API, but it is nicer to list
// just the one minor call as unsafe, than to mark the whole function as unsafe which may hide
// other unsafeness.
#![allow(clippy::not_unsafe_ptr_arg_deref)]
#![type_length_limit = "43757804"]
| mod core;
mod externs;
mod interning;
mod intrinsics;
mod nodes;
mod scheduler;
mod selectors;
mod session;
mod tasks;
mod types;
pub use crate::context::{Core, ExecutionStrategyOptions, RemotingOptions};
pub use crate::core::{Failure, Function, Key, Params, TypeId, Value};
pub use crate::intrinsics::Intrinsics;
pub use crate::scheduler::{ExecutionRequest, ExecutionTermination, Scheduler};
pub use crate::session::Session;
pub use crate::tasks::{Rule, Tasks};
pub use crate::types::Types; | mod context; | random_line_split |
pending.rs | /*
* Copyright (C) 2017 AltOS-Rust Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use interrupt::Hardware;
#[derive(Copy, Clone, Debug)]
pub struct ISPR(u32);
#[derive(Copy, Clone, Debug)]
pub struct ICPR(u32);
impl ISPR {
pub fn set_pending(&mut self, hardware: Hardware) {
let interrupt = hardware as u8;
self.0 |= 0b1 << interrupt;
}
pub fn | (&self, hardware: Hardware) -> bool {
let interrupt = hardware as u8;
self.0 & (0b1 << interrupt) != 0
}
}
impl ICPR {
pub fn clear_pending(&mut self, hardware: Hardware) {
let interrupt = hardware as u8;
self.0 |= 0b1 << interrupt;
}
}
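// A minimal usage sketch (added for illustration, not part of the original
// file): a typical flow checks the pending bit and clears it once the
// interrupt has been serviced. `Hardware::Flash` is borrowed from the tests.
#[allow(dead_code)]
fn example_service_flash(ispr: &ISPR, icpr: &mut ICPR) {
    if ispr.interrupt_is_pending(Hardware::Flash) {
        // ... service the flash interrupt here ...
        icpr.clear_pending(Hardware::Flash);
    }
}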
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ispr_set_pending() {
let mut ispr = ISPR(0);
ispr.set_pending(Hardware::Flash);
assert_eq!(ispr.0, 0b1 << 3);
}
#[test]
fn test_ispr_interrupt_is_pending() {
let ispr = ISPR(0b1 << 5);
assert!(ispr.interrupt_is_pending(Hardware::Exti01));
assert!(!ispr.interrupt_is_pending(Hardware::Usb));
}
#[test]
fn test_icpr_clear_pending() {
let mut icpr = ICPR(0);
icpr.clear_pending(Hardware::Flash);
assert_eq!(icpr.0, 0b1 << 3);
}
}
| interrupt_is_pending | identifier_name |